pylons: fixed code and test suite after removal of pylons.
marcink
r2358:d7106a21 default
@@ -1,63 +1,63 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""

RhodeCode, a web based repository management software
versioning implementation: http://www.python.org/dev/peps/pep-0386/
"""

import os
import sys
import platform

VERSION = tuple(open(os.path.join(
    os.path.dirname(__file__), 'VERSION')).read().split('.'))

BACKENDS = {
    'hg': 'Mercurial repository',
    'git': 'Git repository',
    'svn': 'Subversion repository',
}

CELERY_ENABLED = False
CELERY_EAGER = False

-# link to config for pylons
+# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

__version__ = ('.'.join((str(each) for each in VERSION[:3])))
__dbversion__ = 81  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
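
The module above only defines package-level constants and version metadata. As a brief illustration, a hedged sketch of how other code might read these values (assuming the `rhodecode` package is importable in the running environment):

.. code-block:: python

    # Illustrative sketch only; assumes the rhodecode package shown above
    # is importable in the current environment.
    import rhodecode

    # __version__ joins the first three components read from the VERSION file.
    print(rhodecode.__version__)

    # BACKENDS maps the supported scm aliases to human-readable names.
    for alias, label in sorted(rhodecode.BACKENDS.items()):
        print(alias, '->', label)

    # Platform flags derived from platform.system().
    print('windows:', rhodecode.is_windows, 'unix:', rhodecode.is_unix)
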
@@ -1,2066 +1,2067 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2011-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import time

import rhodecode
from rhodecode.api import (
    jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
from rhodecode.api.utils import (
    has_superadmin_permission, Optional, OAttr, get_repo_or_error,
    get_user_group_or_error, get_user_or_error, validate_repo_permissions,
    get_perm_or_error, parse_args, get_origin, build_commit_data,
    validate_set_owner_permissions)
from rhodecode.lib import audit_logger
from rhodecode.lib import repo_maintenance
from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi
from rhodecode.lib.utils2 import str2bool, time_to_datetime
from rhodecode.lib.ext_json import json
from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
    ChangesetComment)
from rhodecode.model.repo import RepoModel
from rhodecode.model.scm import ScmModel, RepoList
from rhodecode.model.settings import SettingsModel, VcsSettingsModel
from rhodecode.model import validation_schema
from rhodecode.model.validation_schema.schemas import repo_schema

log = logging.getLogger(__name__)


@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section of the output returns the user groups and users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

      {
        "error": null,
        "id": <repo_id>,
        "result": {
          "clone_uri": null,
          "created_on": "timestamp",
          "description": "repo description",
          "enable_downloads": false,
          "enable_locking": false,
          "enable_statistics": false,
          "followers": [
            {
              "active": true,
              "admin": false,
              "api_key": "****************************************",
              "api_keys": [
                "****************************************"
              ],
              "email": "user@example.com",
              "emails": [
                "user@example.com"
              ],
              "extern_name": "rhodecode",
              "extern_type": "rhodecode",
              "firstname": "username",
              "ip_addresses": [],
              "language": null,
              "last_login": "2015-09-16T17:16:35.854",
              "lastname": "surname",
              "user_id": <user_id>,
              "username": "name"
            }
          ],
          "fork_of": "parent-repo",
          "landing_rev": [
            "rev",
            "tip"
          ],
          "last_changeset": {
            "author": "User <user@example.com>",
            "branch": "default",
            "date": "timestamp",
            "message": "last commit message",
            "parents": [
              {
                "raw_id": "commit-id"
              }
            ],
            "raw_id": "commit-id",
            "revision": <revision number>,
            "short_id": "short id"
          },
          "lock_reason": null,
          "locked_by": null,
          "locked_date": null,
          "members": [
            {
              "name": "super-admin-name",
              "origin": "super-admin",
              "permission": "repository.admin",
              "type": "user"
            },
            {
              "name": "owner-name",
              "origin": "owner",
              "permission": "repository.admin",
              "type": "user"
            },
            {
              "name": "user-group-name",
              "origin": "permission",
              "permission": "repository.write",
              "type": "user_group"
            }
          ],
          "owner": "owner-name",
          "permissions": [
            {
              "name": "super-admin-name",
              "origin": "super-admin",
              "permission": "repository.admin",
              "type": "user"
            },
            {
              "name": "owner-name",
              "origin": "owner",
              "permission": "repository.admin",
              "type": "user"
            },
            {
              "name": "user-group-name",
              "origin": "permission",
              "permission": "repository.write",
              "type": "user_group"
            }
          ],
          "private": true,
          "repo_id": 676,
          "repo_name": "user-group/repo-name",
          "repo_type": "hg"
        }
      }
    """

    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    include_secrets = False
    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        # check if we have at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    permissions = []
    for _user in repo.permissions():
        user_data = {
            'name': _user.username,
            'permission': _user.permission,
            'origin': get_origin(_user),
            'type': "user",
        }
        permissions.append(user_data)

    for _user_group in repo.permission_user_groups():
        user_group_data = {
            'name': _user_group.users_group_name,
            'permission': _user_group.permission,
            'origin': get_origin(_user_group),
            'type': "user_group",
        }
        permissions.append(user_group_data)

    following_users = [
        user.user.get_api_data(include_secrets=include_secrets)
        for user in repo.followers]

    if not cache:
        repo.update_commit_cache()
    data = repo.get_api_data(include_secrets=include_secrets)
    data['members'] = permissions  # TODO: this should be deprecated soon
    data['permissions'] = permissions
    data['followers'] = following_users
    return data


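As a usage illustration for `get_repo`, a hedged sketch of the corresponding JSON-RPC call; the host, auth token, and repository name are placeholders, and the `_admin/api` endpoint and payload shape follow the usual RhodeCode API conventions:

.. code-block:: python

    # Placeholder host/token; assumes the `requests` library is available.
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 1,
        'auth_token': 'SECRET_AUTH_TOKEN',
        'method': 'get_repo',
        'args': {'repoid': 'user-group/repo-name', 'cache': True},
    }
    result = requests.post(api_url, json=payload).json()['result']
    print(result['repo_name'], result['repo_type'], result['owner'])
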
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        Filters the returned repositories to be members of the given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        If root is empty it will return just top-level repositories.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: [
        {
          "repo_id" : "<repo_id>",
          "repo_name" : "<reponame>",
          "repo_type" : "<repo_type>",
          "clone_uri" : "<clone_uri>",
          "private" : "<bool>",
          "created_on" : "<datetimecreated>",
          "description" : "<description>",
          "landing_rev": "<landing_rev>",
          "owner": "<repo_owner>",
          "fork_of": "<name_of_fork_parent>",
          "enable_downloads": "<bool>",
          "enable_locking": "<bool>",
          "enable_statistics": "<bool>",
        },
        ...
      ]
      error: null
    """

    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    if root:
        # verify parent existence, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))

        if traverse:
            repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = RepoModel().get_repos_for_root(root=parent)
    else:
        if traverse:
            repos = RepoModel().get_all()
        else:
            # return just top-level
            repos = RepoModel().get_repos_for_root(root=None)

    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]


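Similarly, a hedged sketch of calling `get_repos` for only the top-level repositories of a hypothetical root group `foo` (placeholder host and token as before):

.. code-block:: python

    # Placeholder host/token; `foo` is a hypothetical repository group.
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 2,
        'auth_token': 'SECRET_AUTH_TOKEN',
        'method': 'get_repos',
        'args': {'root': 'foo', 'traverse': False},
    }
    for repo in requests.post(api_url, json=payload).json()['result']:
        print(repo['repo_name'], repo['repo_type'], repo['private'])
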
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additional parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full'; 'full' gives diff
        details like the diff itself, the number of changed files, etc.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(e.message)
    _cs_json = cs.__json__()
    _cs_json['diff'] = build_commit_data(cs, changes_details)
    if changes_details == 'full':
        _cs_json['refs'] = cs._get_refs()
    return _cs_json


@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid options are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(e.message)
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        if cnt >= limit != -1:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret


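A hedged sketch of paging through history with `get_repo_changesets`, fetching at most 20 commits with extended details (placeholder host, token, and repository name):

.. code-block:: python

    # Placeholder host/token/repository; assumes `requests` is available.
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 3,
        'auth_token': 'SECRET_AUTH_TOKEN',
        'method': 'get_repo_changesets',
        'args': {'repoid': 'user-group/repo-name', 'start_rev': 'tip',
                 'limit': 20, 'details': 'extended'},
    }
    for commit in requests.post(api_url, json=payload).json()['result']:
        print(commit['short_id'], commit['message'].splitlines()[0])
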
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and/or content. The valid options are ``basic`` and
        ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content for files under this
        size, in bytes.
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: [
        {
          "name" : "<name>",
          "type" : "<type>",
          "binary": "<true|false>" (only in extended mode)
          "md5" : "<md5 of file content>" (only in extended mode)
        },
        ...
      ]
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(_extended_types)))
    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }
        return _map[ret_type]
    except KeyError:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )


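A hedged sketch of `get_repo_nodes`, listing only files under a hypothetical `docs` path with full details while skipping content of files larger than 64 kB (placeholder host, token, and repository name):

.. code-block:: python

    # Placeholder host/token/repository and a hypothetical `docs` path.
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 4,
        'auth_token': 'SECRET_AUTH_TOKEN',
        'method': 'get_repo_nodes',
        'args': {'repoid': 'user-group/repo-name', 'revision': 'tip',
                 'root_path': 'docs', 'ret_type': 'files',
                 'details': 'full', 'max_file_bytes': 64 * 1024},
    }
    for node in requests.post(api_url, json=payload).json()['result']:
        print(node['name'], node['type'])
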
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for a given repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      "result": {
        "bookmarks": {
          "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
        },
        "branches": {
          "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
        },
        "branches_closed": {},
        "tags": {
          "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
          "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
          "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17"
        }
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        vcs_instance = repo.scm_instance()
        refs = vcs_instance.refs()
        return refs
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )


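A hedged sketch of `get_repo_refs`, printing the branch heads from the returned references dictionary (placeholder host, token, and repository name):

.. code-block:: python

    # Placeholder host/token/repository; assumes `requests` is available.
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 5,
        'auth_token': 'SECRET_AUTH_TOKEN',
        'method': 'get_repo_refs',
        'args': {'repoid': 'user-group/repo-name'},
    }
    refs = requests.post(api_url, json=payload).json()['result']
    for branch, commit_id in sorted(refs['branches'].items()):
        print(branch, commit_id)
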
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        landing_rev=Optional('rev:tip'),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", the repository will be created
      inside a repository group or nested repository groups.

      For example "foo/bar/repo1" will create a |repo| called "repo1" inside
      the group "foo/bar". You must have permissions to access and write to
      the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg', 'git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param landing_rev: <rev_type>:<rev>
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Created new repository `<reponame>`",
        "success": true,
        "task": "<celery task id or None if done sync>"
      }
      error: null


    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        'failed to create repository `<repo_name>`'
      }

    """

    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    landing_commit_ref = Optional.extract(landing_rev)

    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner)
        from celery.result import BaseAsyncResult
        task_id = None
        if isinstance(task, BaseAsyncResult):
            task_id = task.task_id
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `%s`' % (schema_data['repo_name'],))


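A hedged sketch of `create_repo`, creating a private Mercurial repository "repo1" inside a pre-existing group "foo/bar" as described in the docstring above (placeholder host and token; the group name is hypothetical):

.. code-block:: python

    # Placeholder host/token; `foo/bar` is a hypothetical, pre-existing group.
    import requests

    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 6,
        'auth_token': 'SECRET_AUTH_TOKEN',
        'method': 'create_repo',
        'args': {'repo_name': 'foo/bar/repo1', 'repo_type': 'hg',
                 'description': 'demo repository', 'private': True,
                 'copy_permissions': True},
    }
    result = requests.post(api_url, json=payload).json()['result']
    print(result['msg'], 'task:', result['task'])
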
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label:
    :type label: Optional(str)
    :param description:
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    label = Optional.extract(label) or key
    description = Optional.extract(description)

    field = RepositoryField.get_by_key_name(key, repo)
    if field:
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(repo, key, field_label=label,
                                   field_desc=description)
        Session().commit()
        return {
            'msg': "Added new repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))


@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))


@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False), clone_uri=Optional(None),
        landing_rev=Optional('rev:tip'), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", the repository will be updated
      and placed inside the matching repository group or nested repository
      groups.

    For example repoid=repo-test repo_name="foo/bar/repo-test" will update
    the |repo| called "repo-test" and place it inside the group "foo/bar".
    You need permissions to access and write to the last repository group
    ("bar" in this example).

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``.
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
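
    Example output (illustrative; the full repository data returned by the
    call is abbreviated here as ``<repository object>``):

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "updated repo ID:<repo_id> <reponame>",
            "repository": <repository object>
        }
        error: null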
    """

    repo = get_repo_or_error(repoid)

    include_secrets = False
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
    else:
        include_secrets = True

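    # for every parameter that was left as Optional, fall back to the
    # repository's current value so the update does not overwrite it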
    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)

    old_values = repo.get_api_data()
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save the old value, users cannot change the repo type
            repo_type=repo.repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)


@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional('rev:tip'),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", the fork will be created inside
      a repository group or nested repository groups.

    For example "foo/bar/fork-repo" will create a fork called "fork-repo"
    inside the group "foo/bar". You have to have permissions to access and
    write to the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with at least read
    permissions on the forked |repo|, and fork permissions for the user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including its repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. The default is tip.

    Example input:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    landing_commit_ref = Optional.extract(landing_rev)
    private = Optional.extract(private)

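    # validate the fork parameters against the repository schema before
    # the fork is actually created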
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner)
        # no commit, it's done in RepoModel, or async via celery
        from celery.result import BaseAsyncResult
        task_id = None
        if isinstance(task, BaseAsyncResult):
            task_id = task.task_id
        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, schema_data['repo_name']))


@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set, it is possible to detach or delete
      forks of the deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = [f for f in repo.forks]
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        RepoModel().delete(repo, forks=forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )


#TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin or write
    rights to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'msg': Cache for repository `<repository name>` was invalidated,
            'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    delete = Optional.extract(delete_keys)
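    # delete=True removes the invalidated cache keys entirely instead of
    # only flagging them as invalid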
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
        return {
            'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )


#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin or write
    rights to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'repo': '<reponame>',
            'locked': <bool: lock state>,
            'locked_since': <int: lock timestamp>,
            'locked_by': <username of person who made the lock>,
            'lock_reason': <str: reason for locking>,
            'lock_state_changed': <bool: True if lock state has been changed in this request>,
            'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
            or
            'msg': 'Repo `<repository name>` not locked.'
            or
            'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure a regular user does not pass someone else's userid;
    # they are not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
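        # no target lock state was passed in, so only report the current one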
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            lock_user = get_user_or_error(userid)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )


@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" : null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(e.message)

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)

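    # a status change is optional; when given it has to be one of the
    # known changeset statuses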
    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        raise JSONRPCError('Bad status, must be one '
                           'of %s got %s' % (allowed_statuses, status,))

    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id
        )
        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a changeset associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        Session().commit()
        return {
            'msg': (
                'Commented on commit `%s` for repository `%s`' % (
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors and re-raise them, to prevent the
        # global catch below from silencing them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            'failed to set comment on repository `%s`' % (repo.repo_name,)
        )


@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:

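        # creates a new permission entry, or updates the existing one
        # if the user already has a permission on this repository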
        RepoModel().grant_user_permission(repo=repo, user=user, perm=perm)

        Session().commit()
        return {
            'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
                perm.permission_name, user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )


@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of the revoked user.
    :type userid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        RepoModel().revoke_user_permission(repo=repo, user=user)
        Session().commit()
        return {
            'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to revoke permissions on repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )


1626 @jsonrpc_method()
1627 @jsonrpc_method()
1627 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1628 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1628 """
1629 """
1629 Grant permission for a user group on the specified repository,
1630 Grant permission for a user group on the specified repository,
1630 or update existing permissions.
1631 or update existing permissions.
1631
1632
1632 This command can only be run using an |authtoken| with admin
1633 This command can only be run using an |authtoken| with admin
1633 permissions on the |repo|.
1634 permissions on the |repo|.
1634
1635
1635 :param apiuser: This is filled automatically from the |authtoken|.
1636 :param apiuser: This is filled automatically from the |authtoken|.
1636 :type apiuser: AuthUser
1637 :type apiuser: AuthUser
1637 :param repoid: Set the repository name or repository ID.
1638 :param repoid: Set the repository name or repository ID.
1638 :type repoid: str or int
1639 :type repoid: str or int
1639 :param usergroupid: Specify the ID of the user group.
1640 :param usergroupid: Specify the ID of the user group.
1640 :type usergroupid: str or int
1641 :type usergroupid: str or int
1641 :param perm: Set the user group permissions using the following
1642 :param perm: Set the user group permissions using the following
1642 format: (repository.(none|read|write|admin))
1643 format: (repository.(none|read|write|admin))
1643 :type perm: str
1644 :type perm: str
1644
1645
1645 Example output:
1646 Example output:
1646
1647
1647 .. code-block:: bash
1648 .. code-block:: bash
1648
1649
1649 id : <id_given_in_input>
1650 id : <id_given_in_input>
1650 result : {
1651 result : {
1651 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1652 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1652 "success": true
1653 "success": true
1653
1654
1654 }
1655 }
1655 error : null
1656 error : null
1656
1657
1657 Example error output:
1658 Example error output:
1658
1659
1659 .. code-block:: bash
1660 .. code-block:: bash
1660
1661
1661 id : <id_given_in_input>
1662 id : <id_given_in_input>
1662 result : null
1663 result : null
1663 error : {
1664 error : {
1664 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1665 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1665 }
1666 }
1666
1667
1667 """
1668 """
1668
1669
1669 repo = get_repo_or_error(repoid)
1670 repo = get_repo_or_error(repoid)
1670 perm = get_perm_or_error(perm)
1671 perm = get_perm_or_error(perm)
1671 if not has_superadmin_permission(apiuser):
1672 if not has_superadmin_permission(apiuser):
1672 _perms = ('repository.admin',)
1673 _perms = ('repository.admin',)
1673 validate_repo_permissions(apiuser, repoid, repo, _perms)
1674 validate_repo_permissions(apiuser, repoid, repo, _perms)
1674
1675
1675 user_group = get_user_group_or_error(usergroupid)
1676 user_group = get_user_group_or_error(usergroupid)
1676 if not has_superadmin_permission(apiuser):
1677 if not has_superadmin_permission(apiuser):
1677 # check if we have at least read permission for this user group !
1678 # check if we have at least read permission for this user group !
1678 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1679 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1679 if not HasUserGroupPermissionAnyApi(*_perms)(
1680 if not HasUserGroupPermissionAnyApi(*_perms)(
1680 user=apiuser, user_group_name=user_group.users_group_name):
1681 user=apiuser, user_group_name=user_group.users_group_name):
1681 raise JSONRPCError(
1682 raise JSONRPCError(
1682 'user group `%s` does not exist' % (usergroupid,))
1683 'user group `%s` does not exist' % (usergroupid,))
1683
1684
1684 try:
1685 try:
1685 RepoModel().grant_user_group_permission(
1686 RepoModel().grant_user_group_permission(
1686 repo=repo, group_name=user_group, perm=perm)
1687 repo=repo, group_name=user_group, perm=perm)
1687
1688
1688 Session().commit()
1689 Session().commit()
1689 return {
1690 return {
1690 'msg': 'Granted perm: `%s` for user group: `%s` in '
1691 'msg': 'Granted perm: `%s` for user group: `%s` in '
1691 'repo: `%s`' % (
1692 'repo: `%s`' % (
1692 perm.permission_name, user_group.users_group_name,
1693 perm.permission_name, user_group.users_group_name,
1693 repo.repo_name
1694 repo.repo_name
1694 ),
1695 ),
1695 'success': True
1696 'success': True
1696 }
1697 }
1697 except Exception:
1698 except Exception:
1698 log.exception(
1699 log.exception(
1699 "Exception occurred while trying change permission on repo")
1700 "Exception occurred while trying change permission on repo")
1700 raise JSONRPCError(
1701 raise JSONRPCError(
1701 'failed to edit permission for user group: `%s` in '
1702 'failed to edit permission for user group: `%s` in '
1702 'repo: `%s`' % (
1703 'repo: `%s`' % (
1703 usergroupid, repo.repo_name
1704 usergroupid, repo.repo_name
1704 )
1705 )
1705 )
1706 )
1706
1707
1707
1708
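A minimal usage sketch for the call above, assuming the standard JSON-RPC endpoint at ``<your-rhodecode>/_admin/api`` and the ``requests`` library; the token, repository and group names are illustrative placeholders, not values from this changeset:

.. code-block:: python

    import requests

    # illustrative values -- replace with a real auth token, repo and user group
    API_URL = 'https://rhodecode.example.com/_admin/api'
    AUTH_TOKEN = '<auth_token_with_repository.admin_rights>'

    payload = {
        'id': 1,
        'auth_token': AUTH_TOKEN,
        'method': 'grant_user_group_permission',
        'args': {
            'repoid': 'my-repo',          # repository name or numeric id
            'usergroupid': 'developers',  # user group name or numeric id
            'perm': 'repository.write',   # repository.(none|read|write|admin)
        },
    }
    response = requests.post(API_URL, json=payload).json()
    # on success `result.success` is true and `error` is null, as in the docstring
    print(response['result'] or response['error'])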
1708 @jsonrpc_method()
1709 @jsonrpc_method()
1709 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1710 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1710 """
1711 """
1711 Revoke the permissions of a user group on a given repository.
1712 Revoke the permissions of a user group on a given repository.
1712
1713
1713 This command can only be run using an |authtoken| with admin
1714 This command can only be run using an |authtoken| with admin
1714 permissions on the |repo|.
1715 permissions on the |repo|.
1715
1716
1716 :param apiuser: This is filled automatically from the |authtoken|.
1717 :param apiuser: This is filled automatically from the |authtoken|.
1717 :type apiuser: AuthUser
1718 :type apiuser: AuthUser
1718 :param repoid: Set the repository name or repository ID.
1719 :param repoid: Set the repository name or repository ID.
1719 :type repoid: str or int
1720 :type repoid: str or int
1720 :param usergroupid: Specify the user group ID.
1721 :param usergroupid: Specify the user group ID.
1721 :type usergroupid: str or int
1722 :type usergroupid: str or int
1722
1723
1723 Example output:
1724 Example output:
1724
1725
1725 .. code-block:: bash
1726 .. code-block:: bash
1726
1727
1727 id : <id_given_in_input>
1728 id : <id_given_in_input>
1728 result: {
1729 result: {
1729 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1730 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1730 "success": true
1731 "success": true
1731 }
1732 }
1732 error: null
1733 error: null
1733 """
1734 """
1734
1735
1735 repo = get_repo_or_error(repoid)
1736 repo = get_repo_or_error(repoid)
1736 if not has_superadmin_permission(apiuser):
1737 if not has_superadmin_permission(apiuser):
1737 _perms = ('repository.admin',)
1738 _perms = ('repository.admin',)
1738 validate_repo_permissions(apiuser, repoid, repo, _perms)
1739 validate_repo_permissions(apiuser, repoid, repo, _perms)
1739
1740
1740 user_group = get_user_group_or_error(usergroupid)
1741 user_group = get_user_group_or_error(usergroupid)
1741 if not has_superadmin_permission(apiuser):
1742 if not has_superadmin_permission(apiuser):
1742 # check if we have at least read permission for this user group !
1743 # check if we have at least read permission for this user group !
1743 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1744 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1744 if not HasUserGroupPermissionAnyApi(*_perms)(
1745 if not HasUserGroupPermissionAnyApi(*_perms)(
1745 user=apiuser, user_group_name=user_group.users_group_name):
1746 user=apiuser, user_group_name=user_group.users_group_name):
1746 raise JSONRPCError(
1747 raise JSONRPCError(
1747 'user group `%s` does not exist' % (usergroupid,))
1748 'user group `%s` does not exist' % (usergroupid,))
1748
1749
1749 try:
1750 try:
1750 RepoModel().revoke_user_group_permission(
1751 RepoModel().revoke_user_group_permission(
1751 repo=repo, group_name=user_group)
1752 repo=repo, group_name=user_group)
1752
1753
1753 Session().commit()
1754 Session().commit()
1754 return {
1755 return {
1755 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
1756 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
1756 user_group.users_group_name, repo.repo_name
1757 user_group.users_group_name, repo.repo_name
1757 ),
1758 ),
1758 'success': True
1759 'success': True
1759 }
1760 }
1760 except Exception:
1761 except Exception:
1761 log.exception("Exception occurred while trying to revoke "
1762 log.exception("Exception occurred while trying to revoke "
1762 "user group permission on repo")
1763 "user group permission on repo")
1763 raise JSONRPCError(
1764 raise JSONRPCError(
1764 'failed to edit permission for user group: `%s` in '
1765 'failed to edit permission for user group: `%s` in '
1765 'repo: `%s`' % (
1766 'repo: `%s`' % (
1766 user_group.users_group_name, repo.repo_name
1767 user_group.users_group_name, repo.repo_name
1767 )
1768 )
1768 )
1769 )
1769
1770
1770
1771
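The revoke call uses the same JSON-RPC envelope; only the method name and ``args`` change. A short sketch of the request body (names are placeholders):

.. code-block:: python

    import json

    # body to POST to <your-rhodecode>/_admin/api with Content-Type: application/json
    body = json.dumps({
        'id': 2,
        'auth_token': '<auth_token>',
        'method': 'revoke_user_group_permission',
        'args': {'repoid': 'my-repo', 'usergroupid': 'developers'},
    })
    print(body)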
1771 @jsonrpc_method()
1772 @jsonrpc_method()
1772 def pull(request, apiuser, repoid):
1773 def pull(request, apiuser, repoid):
1773 """
1774 """
1774 Triggers a pull on the given repository from a remote location. You
1775 Triggers a pull on the given repository from a remote location. You
1775 can use this to keep remote repositories up-to-date.
1776 can use this to keep remote repositories up-to-date.
1776
1777
1777 This command can only be run using an |authtoken| with admin
1778 This command can only be run using an |authtoken| with admin
1778 rights to the specified repository. For more information,
1779 rights to the specified repository. For more information,
1779 see :ref:`config-token-ref`.
1780 see :ref:`config-token-ref`.
1780
1781
1781 This command takes the following options:
1782 This command takes the following options:
1782
1783
1783 :param apiuser: This is filled automatically from the |authtoken|.
1784 :param apiuser: This is filled automatically from the |authtoken|.
1784 :type apiuser: AuthUser
1785 :type apiuser: AuthUser
1785 :param repoid: The repository name or repository ID.
1786 :param repoid: The repository name or repository ID.
1786 :type repoid: str or int
1787 :type repoid: str or int
1787
1788
1788 Example output:
1789 Example output:
1789
1790
1790 .. code-block:: bash
1791 .. code-block:: bash
1791
1792
1792 id : <id_given_in_input>
1793 id : <id_given_in_input>
1793 result : {
1794 result : {
1794 "msg": "Pulled from `<repository name>`"
1795 "msg": "Pulled from `<repository name>`"
1795 "repository": "<repository name>"
1796 "repository": "<repository name>"
1796 }
1797 }
1797 error : null
1798 error : null
1798
1799
1799 Example error output:
1800 Example error output:
1800
1801
1801 .. code-block:: bash
1802 .. code-block:: bash
1802
1803
1803 id : <id_given_in_input>
1804 id : <id_given_in_input>
1804 result : null
1805 result : null
1805 error : {
1806 error : {
1806 "Unable to pull changes from `<reponame>`"
1807 "Unable to pull changes from `<reponame>`"
1807 }
1808 }
1808
1809
1809 """
1810 """
1810
1811
1811 repo = get_repo_or_error(repoid)
1812 repo = get_repo_or_error(repoid)
1812 if not has_superadmin_permission(apiuser):
1813 if not has_superadmin_permission(apiuser):
1813 _perms = ('repository.admin',)
1814 _perms = ('repository.admin',)
1814 validate_repo_permissions(apiuser, repoid, repo, _perms)
1815 validate_repo_permissions(apiuser, repoid, repo, _perms)
1815
1816
1816 try:
1817 try:
1817 ScmModel().pull_changes(repo.repo_name, apiuser.username)
1818 ScmModel().pull_changes(repo.repo_name, apiuser.username)
1818 return {
1819 return {
1819 'msg': 'Pulled from `%s`' % repo.repo_name,
1820 'msg': 'Pulled from `%s`' % repo.repo_name,
1820 'repository': repo.repo_name
1821 'repository': repo.repo_name
1821 }
1822 }
1822 except Exception:
1823 except Exception:
1823 log.exception("Exception occurred while trying to "
1824 log.exception("Exception occurred while trying to "
1824 "pull changes from remote location")
1825 "pull changes from remote location")
1825 raise JSONRPCError(
1826 raise JSONRPCError(
1826 'Unable to pull changes from `%s`' % repo.repo_name
1827 'Unable to pull changes from `%s`' % repo.repo_name
1827 )
1828 )
1828
1829
1829
1830
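For scripted calls such as a scheduled ``pull`` of a mirror, a small helper keeps the envelope in one place. This is a sketch under the same assumptions (``/_admin/api`` endpoint, ``requests`` available, illustrative names):

.. code-block:: python

    import itertools
    import requests

    _ids = itertools.count(1)

    def api_call(url, token, method, **args):
        """POST a single JSON-RPC request and return the decoded response."""
        payload = {'id': next(_ids), 'auth_token': token,
                   'method': method, 'args': args}
        return requests.post(url, json=payload).json()

    # keep a mirrored repository up to date (names are illustrative)
    result = api_call('https://rhodecode.example.com/_admin/api',
                      '<auth_token>', 'pull', repoid='my-mirror')
    print(result['result'] or result['error'])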
1830 @jsonrpc_method()
1831 @jsonrpc_method()
1831 def strip(request, apiuser, repoid, revision, branch):
1832 def strip(request, apiuser, repoid, revision, branch):
1832 """
1833 """
1833 Strips the given revision from the specified repository.
1834 Strips the given revision from the specified repository.
1834
1835
1835 * This will remove the revision and all of its descendants.
1836 * This will remove the revision and all of its descendants.
1836
1837
1837 This command can only be run using an |authtoken| with admin rights to
1838 This command can only be run using an |authtoken| with admin rights to
1838 the specified repository.
1839 the specified repository.
1839
1840
1840 This command takes the following options:
1841 This command takes the following options:
1841
1842
1842 :param apiuser: This is filled automatically from the |authtoken|.
1843 :param apiuser: This is filled automatically from the |authtoken|.
1843 :type apiuser: AuthUser
1844 :type apiuser: AuthUser
1844 :param repoid: The repository name or repository ID.
1845 :param repoid: The repository name or repository ID.
1845 :type repoid: str or int
1846 :type repoid: str or int
1846 :param revision: The revision you wish to strip.
1847 :param revision: The revision you wish to strip.
1847 :type revision: str
1848 :type revision: str
1848 :param branch: The branch from which to strip the revision.
1849 :param branch: The branch from which to strip the revision.
1849 :type branch: str
1850 :type branch: str
1850
1851
1851 Example output:
1852 Example output:
1852
1853
1853 .. code-block:: bash
1854 .. code-block:: bash
1854
1855
1855 id : <id_given_in_input>
1856 id : <id_given_in_input>
1856 result : {
1857 result : {
1857 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
1858 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
1858 "repository": "<repository name>"
1859 "repository": "<repository name>"
1859 }
1860 }
1860 error : null
1861 error : null
1861
1862
1862 Example error output:
1863 Example error output:
1863
1864
1864 .. code-block:: bash
1865 .. code-block:: bash
1865
1866
1866 id : <id_given_in_input>
1867 id : <id_given_in_input>
1867 result : null
1868 result : null
1868 error : {
1869 error : {
1869 "Unable to strip commit <commit_hash> from repo `<repository name>`"
1870 "Unable to strip commit <commit_hash> from repo `<repository name>`"
1870 }
1871 }
1871
1872
1872 """
1873 """
1873
1874
1874 repo = get_repo_or_error(repoid)
1875 repo = get_repo_or_error(repoid)
1875 if not has_superadmin_permission(apiuser):
1876 if not has_superadmin_permission(apiuser):
1876 _perms = ('repository.admin',)
1877 _perms = ('repository.admin',)
1877 validate_repo_permissions(apiuser, repoid, repo, _perms)
1878 validate_repo_permissions(apiuser, repoid, repo, _perms)
1878
1879
1879 try:
1880 try:
1880 ScmModel().strip(repo, revision, branch)
1881 ScmModel().strip(repo, revision, branch)
1881 audit_logger.store_api(
1882 audit_logger.store_api(
1882 'repo.commit.strip', action_data={'commit_id': revision},
1883 'repo.commit.strip', action_data={'commit_id': revision},
1883 repo=repo,
1884 repo=repo,
1884 user=apiuser, commit=True)
1885 user=apiuser, commit=True)
1885
1886
1886 return {
1887 return {
1887 'msg': 'Stripped commit %s from repo `%s`' % (
1888 'msg': 'Stripped commit %s from repo `%s`' % (
1888 revision, repo.repo_name),
1889 revision, repo.repo_name),
1889 'repository': repo.repo_name
1890 'repository': repo.repo_name
1890 }
1891 }
1891 except Exception:
1892 except Exception:
1892 log.exception("Exception while trying to strip")
1893 log.exception("Exception while trying to strip")
1893 raise JSONRPCError(
1894 raise JSONRPCError(
1894 'Unable to strip commit %s from repo `%s`' % (
1895 'Unable to strip commit %s from repo `%s`' % (
1895 revision, repo.repo_name)
1896 revision, repo.repo_name)
1896 )
1897 )
1897
1898
1898
1899
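Since ``strip`` permanently removes the commit and all of its descendants, it is worth assembling and reviewing the request before sending it; a sketch of the body (values are illustrative):

.. code-block:: python

    import json

    strip_request = {
        'id': 3,
        'auth_token': '<auth_token>',
        'method': 'strip',
        'args': {
            'repoid': 'my-repo',
            'revision': 'abcdef123456',  # commit hash to strip
            'branch': 'default',         # branch that commit belongs to
        },
    }
    # POST to <your-rhodecode>/_admin/api -- there is no undo for a strip
    print(json.dumps(strip_request, indent=2))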
1899 @jsonrpc_method()
1900 @jsonrpc_method()
1900 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
1901 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
1901 """
1902 """
1902 Returns all settings for a repository. If key is given, it only returns the
1903 Returns all settings for a repository. If key is given, it only returns the
1903 setting identified by the key or null.
1904 setting identified by the key or null.
1904
1905
1905 :param apiuser: This is filled automatically from the |authtoken|.
1906 :param apiuser: This is filled automatically from the |authtoken|.
1906 :type apiuser: AuthUser
1907 :type apiuser: AuthUser
1907 :param repoid: The repository name or repository id.
1908 :param repoid: The repository name or repository id.
1908 :type repoid: str or int
1909 :type repoid: str or int
1909 :param key: Key of the setting to return.
1910 :param key: Key of the setting to return.
1910 :type key: Optional(str)
1911 :type key: Optional(str)
1911
1912
1912 Example output:
1913 Example output:
1913
1914
1914 .. code-block:: bash
1915 .. code-block:: bash
1915
1916
1916 {
1917 {
1917 "error": null,
1918 "error": null,
1918 "id": 237,
1919 "id": 237,
1919 "result": {
1920 "result": {
1920 "extensions_largefiles": true,
1921 "extensions_largefiles": true,
1921 "extensions_evolve": true,
1922 "extensions_evolve": true,
1922 "hooks_changegroup_push_logger": true,
1923 "hooks_changegroup_push_logger": true,
1923 "hooks_changegroup_repo_size": false,
1924 "hooks_changegroup_repo_size": false,
1924 "hooks_outgoing_pull_logger": true,
1925 "hooks_outgoing_pull_logger": true,
1925 "phases_publish": "True",
1926 "phases_publish": "True",
1926 "rhodecode_hg_use_rebase_for_merging": true,
1927 "rhodecode_hg_use_rebase_for_merging": true,
1927 "rhodecode_pr_merge_enabled": true,
1928 "rhodecode_pr_merge_enabled": true,
1928 "rhodecode_use_outdated_comments": true
1929 "rhodecode_use_outdated_comments": true
1929 }
1930 }
1930 }
1931 }
1931 """
1932 """
1932
1933
1933 # Restrict access to this api method to admins only.
1934 # Restrict access to this api method to admins only.
1934 if not has_superadmin_permission(apiuser):
1935 if not has_superadmin_permission(apiuser):
1935 raise JSONRPCForbidden()
1936 raise JSONRPCForbidden()
1936
1937
1937 try:
1938 try:
1938 repo = get_repo_or_error(repoid)
1939 repo = get_repo_or_error(repoid)
1939 settings_model = VcsSettingsModel(repo=repo)
1940 settings_model = VcsSettingsModel(repo=repo)
1940 settings = settings_model.get_global_settings()
1941 settings = settings_model.get_global_settings()
1941 settings.update(settings_model.get_repo_settings())
1942 settings.update(settings_model.get_repo_settings())
1942
1943
1943 # If only a single setting is requested fetch it from all settings.
1944 # If only a single setting is requested fetch it from all settings.
1944 key = Optional.extract(key)
1945 key = Optional.extract(key)
1945 if key is not None:
1946 if key is not None:
1946 settings = settings.get(key, None)
1947 settings = settings.get(key, None)
1947 except Exception:
1948 except Exception:
1948 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
1949 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
1949 log.exception(msg)
1950 log.exception(msg)
1950 raise JSONRPCError(msg)
1951 raise JSONRPCError(msg)
1951
1952
1952 return settings
1953 return settings
1953
1954
1954
1955
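Passing ``key`` narrows the result to a single setting. A sketch of reading one of the flags from the example output above (endpoint, token and repository name are placeholders):

.. code-block:: python

    import requests

    payload = {
        'id': 4,
        'auth_token': '<super_admin_auth_token>',
        'method': 'get_repo_settings',
        'args': {'repoid': 'my-repo', 'key': 'rhodecode_pr_merge_enabled'},
    }
    response = requests.post('https://rhodecode.example.com/_admin/api',
                             json=payload).json()
    # with `key` given, result is that single value, or null if it is not set
    print(response['result'])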
1955 @jsonrpc_method()
1956 @jsonrpc_method()
1956 def set_repo_settings(request, apiuser, repoid, settings):
1957 def set_repo_settings(request, apiuser, repoid, settings):
1957 """
1958 """
1958 Update repository settings. Returns true on success.
1959 Update repository settings. Returns true on success.
1959
1960
1960 :param apiuser: This is filled automatically from the |authtoken|.
1961 :param apiuser: This is filled automatically from the |authtoken|.
1961 :type apiuser: AuthUser
1962 :type apiuser: AuthUser
1962 :param repoid: The repository name or repository id.
1963 :param repoid: The repository name or repository id.
1963 :type repoid: str or int
1964 :type repoid: str or int
1964 :param settings: The new settings for the repository.
1965 :param settings: The new settings for the repository.
1965 :type settings: dict
1966 :type settings: dict
1966
1967
1967 Example output:
1968 Example output:
1968
1969
1969 .. code-block:: bash
1970 .. code-block:: bash
1970
1971
1971 {
1972 {
1972 "error": null,
1973 "error": null,
1973 "id": 237,
1974 "id": 237,
1974 "result": true
1975 "result": true
1975 }
1976 }
1976 """
1977 """
1977 # Restrict access to this api method to admins only.
1978 # Restrict access to this api method to admins only.
1978 if not has_superadmin_permission(apiuser):
1979 if not has_superadmin_permission(apiuser):
1979 raise JSONRPCForbidden()
1980 raise JSONRPCForbidden()
1980
1981
1981 if type(settings) is not dict:
1982 if type(settings) is not dict:
1982 raise JSONRPCError('Settings have to be a JSON Object.')
1983 raise JSONRPCError('Settings have to be a JSON Object.')
1983
1984
1984 try:
1985 try:
1985 settings_model = VcsSettingsModel(repo=repoid)
1986 settings_model = VcsSettingsModel(repo=repoid)
1986
1987
1987 # Merge global, repo and incoming settings.
1988 # Merge global, repo and incoming settings.
1988 new_settings = settings_model.get_global_settings()
1989 new_settings = settings_model.get_global_settings()
1989 new_settings.update(settings_model.get_repo_settings())
1990 new_settings.update(settings_model.get_repo_settings())
1990 new_settings.update(settings)
1991 new_settings.update(settings)
1991
1992
1992 # Update the settings.
1993 # Update the settings.
1993 inherit_global_settings = new_settings.get(
1994 inherit_global_settings = new_settings.get(
1994 'inherit_global_settings', False)
1995 'inherit_global_settings', False)
1995 settings_model.create_or_update_repo_settings(
1996 settings_model.create_or_update_repo_settings(
1996 new_settings, inherit_global_settings=inherit_global_settings)
1997 new_settings, inherit_global_settings=inherit_global_settings)
1997 Session().commit()
1998 Session().commit()
1998 except Exception:
1999 except Exception:
1999 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2000 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2000 log.exception(msg)
2001 log.exception(msg)
2001 raise JSONRPCError(msg)
2002 raise JSONRPCError(msg)
2002
2003
2003 # Indicate success.
2004 # Indicate success.
2004 return True
2005 return True
2005
2006
2006
2007
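Because the incoming ``settings`` are merged over the current global and repository values, it is enough to send only the keys that should change. A sketch with illustrative values (the keys come from the ``get_repo_settings`` example above):

.. code-block:: python

    import requests

    new_settings = {
        # only the keys to change; everything else keeps its current value
        'hooks_changegroup_repo_size': True,
        'phases_publish': 'False',
    }
    payload = {
        'id': 5,
        'auth_token': '<super_admin_auth_token>',
        'method': 'set_repo_settings',
        'args': {'repoid': 'my-repo', 'settings': new_settings},
    }
    response = requests.post('https://rhodecode.example.com/_admin/api',
                             json=payload).json()
    # the call returns plain true on success
    print(response['result'])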
2007 @jsonrpc_method()
2008 @jsonrpc_method()
2008 def maintenance(request, apiuser, repoid):
2009 def maintenance(request, apiuser, repoid):
2009 """
2010 """
2010 Triggers maintenance on the given repository.
2011 Triggers maintenance on the given repository.
2011
2012
2012 This command can only be run using an |authtoken| with admin
2013 This command can only be run using an |authtoken| with admin
2013 rights to the specified repository. For more information,
2014 rights to the specified repository. For more information,
2014 see :ref:`config-token-ref`.
2015 see :ref:`config-token-ref`.
2015
2016
2016 This command takes the following options:
2017 This command takes the following options:
2017
2018
2018 :param apiuser: This is filled automatically from the |authtoken|.
2019 :param apiuser: This is filled automatically from the |authtoken|.
2019 :type apiuser: AuthUser
2020 :type apiuser: AuthUser
2020 :param repoid: The repository name or repository ID.
2021 :param repoid: The repository name or repository ID.
2021 :type repoid: str or int
2022 :type repoid: str or int
2022
2023
2023 Example output:
2024 Example output:
2024
2025
2025 .. code-block:: bash
2026 .. code-block:: bash
2026
2027
2027 id : <id_given_in_input>
2028 id : <id_given_in_input>
2028 result : {
2029 result : {
2029 "msg": "executed maintenance command",
2030 "msg": "executed maintenance command",
2030 "executed_actions": [
2031 "executed_actions": [
2031 <action_message>, <action_message2>...
2032 <action_message>, <action_message2>...
2032 ],
2033 ],
2033 "repository": "<repository name>"
2034 "repository": "<repository name>"
2034 }
2035 }
2035 error : null
2036 error : null
2036
2037
2037 Example error output:
2038 Example error output:
2038
2039
2039 .. code-block:: bash
2040 .. code-block:: bash
2040
2041
2041 id : <id_given_in_input>
2042 id : <id_given_in_input>
2042 result : null
2043 result : null
2043 error : {
2044 error : {
2044 "Unable to execute maintenance on `<reponame>`"
2045 "Unable to execute maintenance on `<reponame>`"
2045 }
2046 }
2046
2047
2047 """
2048 """
2048
2049
2049 repo = get_repo_or_error(repoid)
2050 repo = get_repo_or_error(repoid)
2050 if not has_superadmin_permission(apiuser):
2051 if not has_superadmin_permission(apiuser):
2051 _perms = ('repository.admin',)
2052 _perms = ('repository.admin',)
2052 validate_repo_permissions(apiuser, repoid, repo, _perms)
2053 validate_repo_permissions(apiuser, repoid, repo, _perms)
2053
2054
2054 try:
2055 try:
2055 maintenance = repo_maintenance.RepoMaintenance()
2056 maintenance = repo_maintenance.RepoMaintenance()
2056 executed_actions = maintenance.execute(repo)
2057 executed_actions = maintenance.execute(repo)
2057
2058
2058 return {
2059 return {
2059 'msg': 'executed maintenance command',
2060 'msg': 'executed maintenance command',
2060 'executed_actions': executed_actions,
2061 'executed_actions': executed_actions,
2061 'repository': repo.repo_name
2062 'repository': repo.repo_name
2062 }
2063 }
2063 except Exception:
2064 except Exception:
2064 log.exception("Exception occurred while trying to run maintenance")
2065 log.exception("Exception occurred while trying to run maintenance")
2065 raise JSONRPCError(
2066 raise JSONRPCError(
2066 'Unable to execute maintenance on `%s`' % repo.repo_name)
2067 'Unable to execute maintenance on `%s`' % repo.repo_name)
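A sketch of triggering maintenance and listing which actions ran (placeholders as in the sketches above):

.. code-block:: python

    import requests

    payload = {
        'id': 6,
        'auth_token': '<auth_token>',
        'method': 'maintenance',
        'args': {'repoid': 'my-repo'},
    }
    response = requests.post('https://rhodecode.example.com/_admin/api',
                             json=payload).json()
    for action in (response['result'] or {}).get('executed_actions', []):
        print(action)   # one message per executed maintenance action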
@@ -1,201 +1,201 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests import assert_session_flash
23 from rhodecode.tests import assert_session_flash
24 from rhodecode.tests.utils import AssertResponse
24 from rhodecode.tests.utils import AssertResponse
25 from rhodecode.model.db import Session
25 from rhodecode.model.db import Session
26 from rhodecode.model.settings import SettingsModel
26 from rhodecode.model.settings import SettingsModel
27
27
28
28
29 def assert_auth_settings_updated(response):
29 def assert_auth_settings_updated(response):
30 assert response.status_int == 302, 'Expected response HTTP Found 302'
30 assert response.status_int == 302, 'Expected response HTTP Found 302'
31 assert_session_flash(response, 'Auth settings updated successfully')
31 assert_session_flash(response, 'Auth settings updated successfully')
32
32
33
33
34 @pytest.mark.usefixtures("autologin_user", "app")
34 @pytest.mark.usefixtures("autologin_user", "app")
35 class TestAuthSettingsController(object):
35 class TestAuthSettingsView(object):
36
36
37 def _enable_plugins(self, plugins_list, csrf_token, override=None,
37 def _enable_plugins(self, plugins_list, csrf_token, override=None,
38 verify_response=False):
38 verify_response=False):
39 test_url = '/_admin/auth'
39 test_url = '/_admin/auth'
40 params = {
40 params = {
41 'auth_plugins': plugins_list,
41 'auth_plugins': plugins_list,
42 'csrf_token': csrf_token,
42 'csrf_token': csrf_token,
43 }
43 }
44 if override:
44 if override:
45 params.update(override)
45 params.update(override)
46 _enabled_plugins = []
46 _enabled_plugins = []
47 for plugin in plugins_list.split(','):
47 for plugin in plugins_list.split(','):
48 plugin_name = plugin.partition('#')[-1]
48 plugin_name = plugin.partition('#')[-1]
49 enabled_plugin = '%s_enabled' % plugin_name
49 enabled_plugin = '%s_enabled' % plugin_name
50 cache_ttl = '%s_cache_ttl' % plugin_name
50 cache_ttl = '%s_cache_ttl' % plugin_name
51
51
52 # default params that are needed for each plugin,
52 # default params that are needed for each plugin,
53 # `enabled` and `cache_ttl`
53 # `enabled` and `cache_ttl`
54 params.update({
54 params.update({
55 enabled_plugin: True,
55 enabled_plugin: True,
56 cache_ttl: 0
56 cache_ttl: 0
57 })
57 })
58 _enabled_plugins.append(enabled_plugin)
58 _enabled_plugins.append(enabled_plugin)
59
59
60 # we need to clean any previously enabled plugins first, since they require
60 # we need to clean any previously enabled plugins first, since they require
61 # form params to be present
61 # form params to be present
62 db_plugin = SettingsModel().get_setting_by_name('auth_plugins')
62 db_plugin = SettingsModel().get_setting_by_name('auth_plugins')
63 db_plugin.app_settings_value = \
63 db_plugin.app_settings_value = \
64 'egg:rhodecode-enterprise-ce#rhodecode'
64 'egg:rhodecode-enterprise-ce#rhodecode'
65 Session().add(db_plugin)
65 Session().add(db_plugin)
66 Session().commit()
66 Session().commit()
67 for _plugin in _enabled_plugins:
67 for _plugin in _enabled_plugins:
68 db_plugin = SettingsModel().get_setting_by_name(_plugin)
68 db_plugin = SettingsModel().get_setting_by_name(_plugin)
69 if db_plugin:
69 if db_plugin:
70 Session().delete(db_plugin)
70 Session().delete(db_plugin)
71 Session().commit()
71 Session().commit()
72
72
73 response = self.app.post(url=test_url, params=params)
73 response = self.app.post(url=test_url, params=params)
74
74
75 if verify_response:
75 if verify_response:
76 assert_auth_settings_updated(response)
76 assert_auth_settings_updated(response)
77 return params
77 return params
78
78
79 def _post_ldap_settings(self, params, override=None, force=False):
79 def _post_ldap_settings(self, params, override=None, force=False):
80
80
81 params.update({
81 params.update({
82 'filter': 'user',
82 'filter': 'user',
83 'user_member_of': '',
83 'user_member_of': '',
84 'user_search_base': '',
84 'user_search_base': '',
85 'user_search_filter': 'test_filter',
85 'user_search_filter': 'test_filter',
86
86
87 'host': 'dc.example.com',
87 'host': 'dc.example.com',
88 'port': '999',
88 'port': '999',
89 'tls_kind': 'PLAIN',
89 'tls_kind': 'PLAIN',
90 'tls_reqcert': 'NEVER',
90 'tls_reqcert': 'NEVER',
91
91
92 'dn_user': 'test_user',
92 'dn_user': 'test_user',
93 'dn_pass': 'test_pass',
93 'dn_pass': 'test_pass',
94 'base_dn': 'test_base_dn',
94 'base_dn': 'test_base_dn',
95 'search_scope': 'BASE',
95 'search_scope': 'BASE',
96 'attr_login': 'test_attr_login',
96 'attr_login': 'test_attr_login',
97 'attr_firstname': 'ima',
97 'attr_firstname': 'ima',
98 'attr_lastname': 'tester',
98 'attr_lastname': 'tester',
99 'attr_email': 'test@example.com',
99 'attr_email': 'test@example.com',
100 'cache_ttl': '0',
100 'cache_ttl': '0',
101 })
101 })
102 if force:
102 if force:
103 params = {}
103 params = {}
104 params.update(override or {})
104 params.update(override or {})
105
105
106 test_url = '/_admin/auth/ldap/'
106 test_url = '/_admin/auth/ldap/'
107
107
108 response = self.app.post(url=test_url, params=params)
108 response = self.app.post(url=test_url, params=params)
109 return response
109 return response
110
110
111 def test_index(self):
111 def test_index(self):
112 response = self.app.get('/_admin/auth')
112 response = self.app.get('/_admin/auth')
113 response.mustcontain('Authentication Plugins')
113 response.mustcontain('Authentication Plugins')
114
114
115 @pytest.mark.parametrize("disable_plugin, needs_import", [
115 @pytest.mark.parametrize("disable_plugin, needs_import", [
116 ('egg:rhodecode-enterprise-ce#headers', None),
116 ('egg:rhodecode-enterprise-ce#headers', None),
117 ('egg:rhodecode-enterprise-ce#crowd', None),
117 ('egg:rhodecode-enterprise-ce#crowd', None),
118 ('egg:rhodecode-enterprise-ce#jasig_cas', None),
118 ('egg:rhodecode-enterprise-ce#jasig_cas', None),
119 ('egg:rhodecode-enterprise-ce#ldap', None),
119 ('egg:rhodecode-enterprise-ce#ldap', None),
120 ('egg:rhodecode-enterprise-ce#pam', "pam"),
120 ('egg:rhodecode-enterprise-ce#pam', "pam"),
121 ])
121 ])
122 def test_disable_plugin(self, csrf_token, disable_plugin, needs_import):
122 def test_disable_plugin(self, csrf_token, disable_plugin, needs_import):
123 # TODO: johbo: "pam" is currently not available on darwin,
123 # TODO: johbo: "pam" is currently not available on darwin,
124 # although the docs state that it should work on darwin.
124 # although the docs state that it should work on darwin.
125 if needs_import:
125 if needs_import:
126 pytest.importorskip(needs_import)
126 pytest.importorskip(needs_import)
127
127
128 self._enable_plugins(
128 self._enable_plugins(
129 'egg:rhodecode-enterprise-ce#rhodecode,' + disable_plugin,
129 'egg:rhodecode-enterprise-ce#rhodecode,' + disable_plugin,
130 csrf_token, verify_response=True)
130 csrf_token, verify_response=True)
131
131
132 self._enable_plugins(
132 self._enable_plugins(
133 'egg:rhodecode-enterprise-ce#rhodecode', csrf_token,
133 'egg:rhodecode-enterprise-ce#rhodecode', csrf_token,
134 verify_response=True)
134 verify_response=True)
135
135
136 def test_ldap_save_settings(self, csrf_token):
136 def test_ldap_save_settings(self, csrf_token):
137 params = self._enable_plugins(
137 params = self._enable_plugins(
138 'egg:rhodecode-enterprise-ce#rhodecode,'
138 'egg:rhodecode-enterprise-ce#rhodecode,'
139 'egg:rhodecode-enterprise-ce#ldap',
139 'egg:rhodecode-enterprise-ce#ldap',
140 csrf_token)
140 csrf_token)
141 response = self._post_ldap_settings(params)
141 response = self._post_ldap_settings(params)
142 assert_auth_settings_updated(response)
142 assert_auth_settings_updated(response)
143
143
144 new_settings = SettingsModel().get_auth_settings()
144 new_settings = SettingsModel().get_auth_settings()
145 assert new_settings['auth_ldap_host'] == u'dc.example.com', \
145 assert new_settings['auth_ldap_host'] == u'dc.example.com', \
146 'fail db write compare'
146 'fail db write compare'
147
147
148 def test_ldap_error_form_wrong_port_number(self, csrf_token):
148 def test_ldap_error_form_wrong_port_number(self, csrf_token):
149 params = self._enable_plugins(
149 params = self._enable_plugins(
150 'egg:rhodecode-enterprise-ce#rhodecode,'
150 'egg:rhodecode-enterprise-ce#rhodecode,'
151 'egg:rhodecode-enterprise-ce#ldap',
151 'egg:rhodecode-enterprise-ce#ldap',
152 csrf_token)
152 csrf_token)
153 invalid_port_value = 'invalid-port-number'
153 invalid_port_value = 'invalid-port-number'
154 response = self._post_ldap_settings(params, override={
154 response = self._post_ldap_settings(params, override={
155 'port': invalid_port_value,
155 'port': invalid_port_value,
156 })
156 })
157 assertr = AssertResponse(response)
157 assertr = AssertResponse(response)
158 assertr.element_contains(
158 assertr.element_contains(
159 '.form .field #port ~ .error-message',
159 '.form .field #port ~ .error-message',
160 invalid_port_value)
160 invalid_port_value)
161
161
162 def test_ldap_error_form(self, csrf_token):
162 def test_ldap_error_form(self, csrf_token):
163 params = self._enable_plugins(
163 params = self._enable_plugins(
164 'egg:rhodecode-enterprise-ce#rhodecode,'
164 'egg:rhodecode-enterprise-ce#rhodecode,'
165 'egg:rhodecode-enterprise-ce#ldap',
165 'egg:rhodecode-enterprise-ce#ldap',
166 csrf_token)
166 csrf_token)
167 response = self._post_ldap_settings(params, override={
167 response = self._post_ldap_settings(params, override={
168 'attr_login': '',
168 'attr_login': '',
169 })
169 })
170 response.mustcontain("""<span class="error-message">The LDAP Login"""
170 response.mustcontain("""<span class="error-message">The LDAP Login"""
171 """ attribute of the CN must be specified""")
171 """ attribute of the CN must be specified""")
172
172
173 def test_post_ldap_group_settings(self, csrf_token):
173 def test_post_ldap_group_settings(self, csrf_token):
174 params = self._enable_plugins(
174 params = self._enable_plugins(
175 'egg:rhodecode-enterprise-ce#rhodecode,'
175 'egg:rhodecode-enterprise-ce#rhodecode,'
176 'egg:rhodecode-enterprise-ce#ldap',
176 'egg:rhodecode-enterprise-ce#ldap',
177 csrf_token)
177 csrf_token)
178
178
179 response = self._post_ldap_settings(params, override={
179 response = self._post_ldap_settings(params, override={
180 'host': 'dc-legacy.example.com',
180 'host': 'dc-legacy.example.com',
181 'port': '999',
181 'port': '999',
182 'tls_kind': 'PLAIN',
182 'tls_kind': 'PLAIN',
183 'tls_reqcert': 'NEVER',
183 'tls_reqcert': 'NEVER',
184 'dn_user': 'test_user',
184 'dn_user': 'test_user',
185 'dn_pass': 'test_pass',
185 'dn_pass': 'test_pass',
186 'base_dn': 'test_base_dn',
186 'base_dn': 'test_base_dn',
187 'filter': 'test_filter',
187 'filter': 'test_filter',
188 'search_scope': 'BASE',
188 'search_scope': 'BASE',
189 'attr_login': 'test_attr_login',
189 'attr_login': 'test_attr_login',
190 'attr_firstname': 'ima',
190 'attr_firstname': 'ima',
191 'attr_lastname': 'tester',
191 'attr_lastname': 'tester',
192 'attr_email': 'test@example.com',
192 'attr_email': 'test@example.com',
193 'cache_ttl': '60',
193 'cache_ttl': '60',
194 'csrf_token': csrf_token,
194 'csrf_token': csrf_token,
195 }
195 }
196 )
196 )
197 assert_auth_settings_updated(response)
197 assert_auth_settings_updated(response)
198
198
199 new_settings = SettingsModel().get_auth_settings()
199 new_settings = SettingsModel().get_auth_settings()
200 assert new_settings['auth_ldap_host'] == u'dc-legacy.example.com', \
200 assert new_settings['auth_ldap_host'] == u'dc-legacy.example.com', \
201 'fail db write compare'
201 'fail db write compare'
@@ -1,781 +1,781 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22 from sqlalchemy.orm.exc import NoResultFound
22 from sqlalchemy.orm.exc import NoResultFound
23
23
24 from rhodecode.lib import auth
24 from rhodecode.lib import auth
25 from rhodecode.lib import helpers as h
25 from rhodecode.lib import helpers as h
26 from rhodecode.model.db import User, UserApiKeys, UserEmailMap, Repository
26 from rhodecode.model.db import User, UserApiKeys, UserEmailMap, Repository
27 from rhodecode.model.meta import Session
27 from rhodecode.model.meta import Session
28 from rhodecode.model.user import UserModel
28 from rhodecode.model.user import UserModel
29
29
30 from rhodecode.tests import (
30 from rhodecode.tests import (
31 TestController, TEST_USER_REGULAR_LOGIN, assert_session_flash)
31 TestController, TEST_USER_REGULAR_LOGIN, assert_session_flash)
32 from rhodecode.tests.fixture import Fixture
32 from rhodecode.tests.fixture import Fixture
33
33
34 fixture = Fixture()
34 fixture = Fixture()
35
35
36
36
37 def route_path(name, params=None, **kwargs):
37 def route_path(name, params=None, **kwargs):
38 import urllib
38 import urllib
39 from rhodecode.apps._base import ADMIN_PREFIX
39 from rhodecode.apps._base import ADMIN_PREFIX
40
40
41 base_url = {
41 base_url = {
42 'users':
42 'users':
43 ADMIN_PREFIX + '/users',
43 ADMIN_PREFIX + '/users',
44 'users_data':
44 'users_data':
45 ADMIN_PREFIX + '/users_data',
45 ADMIN_PREFIX + '/users_data',
46 'users_create':
46 'users_create':
47 ADMIN_PREFIX + '/users/create',
47 ADMIN_PREFIX + '/users/create',
48 'users_new':
48 'users_new':
49 ADMIN_PREFIX + '/users/new',
49 ADMIN_PREFIX + '/users/new',
50 'user_edit':
50 'user_edit':
51 ADMIN_PREFIX + '/users/{user_id}/edit',
51 ADMIN_PREFIX + '/users/{user_id}/edit',
52 'user_edit_advanced':
52 'user_edit_advanced':
53 ADMIN_PREFIX + '/users/{user_id}/edit/advanced',
53 ADMIN_PREFIX + '/users/{user_id}/edit/advanced',
54 'user_edit_global_perms':
54 'user_edit_global_perms':
55 ADMIN_PREFIX + '/users/{user_id}/edit/global_permissions',
55 ADMIN_PREFIX + '/users/{user_id}/edit/global_permissions',
56 'user_edit_global_perms_update':
56 'user_edit_global_perms_update':
57 ADMIN_PREFIX + '/users/{user_id}/edit/global_permissions/update',
57 ADMIN_PREFIX + '/users/{user_id}/edit/global_permissions/update',
58 'user_update':
58 'user_update':
59 ADMIN_PREFIX + '/users/{user_id}/update',
59 ADMIN_PREFIX + '/users/{user_id}/update',
60 'user_delete':
60 'user_delete':
61 ADMIN_PREFIX + '/users/{user_id}/delete',
61 ADMIN_PREFIX + '/users/{user_id}/delete',
62 'user_force_password_reset':
62 'user_force_password_reset':
63 ADMIN_PREFIX + '/users/{user_id}/password_reset',
63 ADMIN_PREFIX + '/users/{user_id}/password_reset',
64 'user_create_personal_repo_group':
64 'user_create_personal_repo_group':
65 ADMIN_PREFIX + '/users/{user_id}/create_repo_group',
65 ADMIN_PREFIX + '/users/{user_id}/create_repo_group',
66
66
67 'edit_user_auth_tokens':
67 'edit_user_auth_tokens':
68 ADMIN_PREFIX + '/users/{user_id}/edit/auth_tokens',
68 ADMIN_PREFIX + '/users/{user_id}/edit/auth_tokens',
69 'edit_user_auth_tokens_add':
69 'edit_user_auth_tokens_add':
70 ADMIN_PREFIX + '/users/{user_id}/edit/auth_tokens/new',
70 ADMIN_PREFIX + '/users/{user_id}/edit/auth_tokens/new',
71 'edit_user_auth_tokens_delete':
71 'edit_user_auth_tokens_delete':
72 ADMIN_PREFIX + '/users/{user_id}/edit/auth_tokens/delete',
72 ADMIN_PREFIX + '/users/{user_id}/edit/auth_tokens/delete',
73
73
74 'edit_user_emails':
74 'edit_user_emails':
75 ADMIN_PREFIX + '/users/{user_id}/edit/emails',
75 ADMIN_PREFIX + '/users/{user_id}/edit/emails',
76 'edit_user_emails_add':
76 'edit_user_emails_add':
77 ADMIN_PREFIX + '/users/{user_id}/edit/emails/new',
77 ADMIN_PREFIX + '/users/{user_id}/edit/emails/new',
78 'edit_user_emails_delete':
78 'edit_user_emails_delete':
79 ADMIN_PREFIX + '/users/{user_id}/edit/emails/delete',
79 ADMIN_PREFIX + '/users/{user_id}/edit/emails/delete',
80
80
81 'edit_user_ips':
81 'edit_user_ips':
82 ADMIN_PREFIX + '/users/{user_id}/edit/ips',
82 ADMIN_PREFIX + '/users/{user_id}/edit/ips',
83 'edit_user_ips_add':
83 'edit_user_ips_add':
84 ADMIN_PREFIX + '/users/{user_id}/edit/ips/new',
84 ADMIN_PREFIX + '/users/{user_id}/edit/ips/new',
85 'edit_user_ips_delete':
85 'edit_user_ips_delete':
86 ADMIN_PREFIX + '/users/{user_id}/edit/ips/delete',
86 ADMIN_PREFIX + '/users/{user_id}/edit/ips/delete',
87
87
88 'edit_user_perms_summary':
88 'edit_user_perms_summary':
89 ADMIN_PREFIX + '/users/{user_id}/edit/permissions_summary',
89 ADMIN_PREFIX + '/users/{user_id}/edit/permissions_summary',
90 'edit_user_perms_summary_json':
90 'edit_user_perms_summary_json':
91 ADMIN_PREFIX + '/users/{user_id}/edit/permissions_summary/json',
91 ADMIN_PREFIX + '/users/{user_id}/edit/permissions_summary/json',
92
92
93 'edit_user_audit_logs':
93 'edit_user_audit_logs':
94 ADMIN_PREFIX + '/users/{user_id}/edit/audit',
94 ADMIN_PREFIX + '/users/{user_id}/edit/audit',
95
95
96 }[name].format(**kwargs)
96 }[name].format(**kwargs)
97
97
98 if params:
98 if params:
99 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
99 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
100 return base_url
100 return base_url
101
101
102
102
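For orientation, ``route_path`` simply formats the admin URLs used throughout these tests. A few illustrative expansions, assuming ``ADMIN_PREFIX`` resolves to ``'/_admin'``:

.. code-block:: python

    # illustrative expansions of the helper above
    assert route_path('users') == '/_admin/users'
    assert route_path('user_edit', user_id=3) == '/_admin/users/3/edit'
    assert (route_path('users_data', params={'search[value]': 'joe'})
            == '/_admin/users_data?search%5Bvalue%5D=joe')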
103 class TestAdminUsersView(TestController):
103 class TestAdminUsersView(TestController):
104
104
105 def test_show_users(self):
105 def test_show_users(self):
106 self.log_user()
106 self.log_user()
107 self.app.get(route_path('users'))
107 self.app.get(route_path('users'))
108
108
109 def test_show_users_data(self, xhr_header):
109 def test_show_users_data(self, xhr_header):
110 self.log_user()
110 self.log_user()
111 response = self.app.get(route_path(
111 response = self.app.get(route_path(
112 'users_data'), extra_environ=xhr_header)
112 'users_data'), extra_environ=xhr_header)
113
113
114 all_users = User.query().filter(
114 all_users = User.query().filter(
115 User.username != User.DEFAULT_USER).count()
115 User.username != User.DEFAULT_USER).count()
116 assert response.json['recordsTotal'] == all_users
116 assert response.json['recordsTotal'] == all_users
117
117
118 def test_show_users_data_filtered(self, xhr_header):
118 def test_show_users_data_filtered(self, xhr_header):
119 self.log_user()
119 self.log_user()
120 response = self.app.get(route_path(
120 response = self.app.get(route_path(
121 'users_data', params={'search[value]': 'empty_search'}),
121 'users_data', params={'search[value]': 'empty_search'}),
122 extra_environ=xhr_header)
122 extra_environ=xhr_header)
123
123
124 all_users = User.query().filter(
124 all_users = User.query().filter(
125 User.username != User.DEFAULT_USER).count()
125 User.username != User.DEFAULT_USER).count()
126 assert response.json['recordsTotal'] == all_users
126 assert response.json['recordsTotal'] == all_users
127 assert response.json['recordsFiltered'] == 0
127 assert response.json['recordsFiltered'] == 0
128
128
129 def test_auth_tokens_default_user(self):
129 def test_auth_tokens_default_user(self):
130 self.log_user()
130 self.log_user()
131 user = User.get_default_user()
131 user = User.get_default_user()
132 response = self.app.get(
132 response = self.app.get(
133 route_path('edit_user_auth_tokens', user_id=user.user_id),
133 route_path('edit_user_auth_tokens', user_id=user.user_id),
134 status=302)
134 status=302)
135
135
136 def test_auth_tokens(self):
136 def test_auth_tokens(self):
137 self.log_user()
137 self.log_user()
138
138
139 user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
139 user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
140 response = self.app.get(
140 response = self.app.get(
141 route_path('edit_user_auth_tokens', user_id=user.user_id))
141 route_path('edit_user_auth_tokens', user_id=user.user_id))
142 for token in user.auth_tokens:
142 for token in user.auth_tokens:
143 response.mustcontain(token)
143 response.mustcontain(token)
144 response.mustcontain('never')
144 response.mustcontain('never')
145
145
146 @pytest.mark.parametrize("desc, lifetime", [
146 @pytest.mark.parametrize("desc, lifetime", [
147 ('forever', -1),
147 ('forever', -1),
148 ('5mins', 60*5),
148 ('5mins', 60*5),
149 ('30days', 60*60*24*30),
149 ('30days', 60*60*24*30),
150 ])
150 ])
151 def test_add_auth_token(self, desc, lifetime, user_util):
151 def test_add_auth_token(self, desc, lifetime, user_util):
152 self.log_user()
152 self.log_user()
153 user = user_util.create_user()
153 user = user_util.create_user()
154 user_id = user.user_id
154 user_id = user.user_id
155
155
156 response = self.app.post(
156 response = self.app.post(
157 route_path('edit_user_auth_tokens_add', user_id=user_id),
157 route_path('edit_user_auth_tokens_add', user_id=user_id),
158 {'description': desc, 'lifetime': lifetime,
158 {'description': desc, 'lifetime': lifetime,
159 'csrf_token': self.csrf_token})
159 'csrf_token': self.csrf_token})
160 assert_session_flash(response, 'Auth token successfully created')
160 assert_session_flash(response, 'Auth token successfully created')
161
161
162 response = response.follow()
162 response = response.follow()
163 user = User.get(user_id)
163 user = User.get(user_id)
164 for auth_token in user.auth_tokens:
164 for auth_token in user.auth_tokens:
165 response.mustcontain(auth_token)
165 response.mustcontain(auth_token)
166
166
167 def test_delete_auth_token(self, user_util):
167 def test_delete_auth_token(self, user_util):
168 self.log_user()
168 self.log_user()
169 user = user_util.create_user()
169 user = user_util.create_user()
170 user_id = user.user_id
170 user_id = user.user_id
171 keys = user.auth_tokens
171 keys = user.auth_tokens
172 assert 2 == len(keys)
172 assert 2 == len(keys)
173
173
174 response = self.app.post(
174 response = self.app.post(
175 route_path('edit_user_auth_tokens_add', user_id=user_id),
175 route_path('edit_user_auth_tokens_add', user_id=user_id),
176 {'description': 'desc', 'lifetime': -1,
176 {'description': 'desc', 'lifetime': -1,
177 'csrf_token': self.csrf_token})
177 'csrf_token': self.csrf_token})
178 assert_session_flash(response, 'Auth token successfully created')
178 assert_session_flash(response, 'Auth token successfully created')
179 response.follow()
179 response.follow()
180
180
181 # now delete our key
181 # now delete our key
182 keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
182 keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
183 assert 3 == len(keys)
183 assert 3 == len(keys)
184
184
185 response = self.app.post(
185 response = self.app.post(
186 route_path('edit_user_auth_tokens_delete', user_id=user_id),
186 route_path('edit_user_auth_tokens_delete', user_id=user_id),
187 {'del_auth_token': keys[0].user_api_key_id,
187 {'del_auth_token': keys[0].user_api_key_id,
188 'csrf_token': self.csrf_token})
188 'csrf_token': self.csrf_token})
189
189
190 assert_session_flash(response, 'Auth token successfully deleted')
190 assert_session_flash(response, 'Auth token successfully deleted')
191 keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
191 keys = UserApiKeys.query().filter(UserApiKeys.user_id == user_id).all()
192 assert 2 == len(keys)
192 assert 2 == len(keys)
193
193
194 def test_ips(self):
194 def test_ips(self):
195 self.log_user()
195 self.log_user()
196 user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
196 user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
197 response = self.app.get(route_path('edit_user_ips', user_id=user.user_id))
197 response = self.app.get(route_path('edit_user_ips', user_id=user.user_id))
198 response.mustcontain('All IP addresses are allowed')
198 response.mustcontain('All IP addresses are allowed')
199
199
200 @pytest.mark.parametrize("test_name, ip, ip_range, failure", [
200 @pytest.mark.parametrize("test_name, ip, ip_range, failure", [
201 ('127/24', '127.0.0.1/24', '127.0.0.0 - 127.0.0.255', False),
201 ('127/24', '127.0.0.1/24', '127.0.0.0 - 127.0.0.255', False),
202 ('10/32', '10.0.0.10/32', '10.0.0.10 - 10.0.0.10', False),
202 ('10/32', '10.0.0.10/32', '10.0.0.10 - 10.0.0.10', False),
203 ('0/16', '0.0.0.0/16', '0.0.0.0 - 0.0.255.255', False),
203 ('0/16', '0.0.0.0/16', '0.0.0.0 - 0.0.255.255', False),
204 ('0/8', '0.0.0.0/8', '0.0.0.0 - 0.255.255.255', False),
204 ('0/8', '0.0.0.0/8', '0.0.0.0 - 0.255.255.255', False),
205 ('127_bad_mask', '127.0.0.1/99', '127.0.0.1 - 127.0.0.1', True),
205 ('127_bad_mask', '127.0.0.1/99', '127.0.0.1 - 127.0.0.1', True),
206 ('127_bad_ip', 'foobar', 'foobar', True),
206 ('127_bad_ip', 'foobar', 'foobar', True),
207 ])
207 ])
208 def test_ips_add(self, user_util, test_name, ip, ip_range, failure):
208 def test_ips_add(self, user_util, test_name, ip, ip_range, failure):
209 self.log_user()
209 self.log_user()
210 user = user_util.create_user(username=test_name)
210 user = user_util.create_user(username=test_name)
211 user_id = user.user_id
211 user_id = user.user_id
212
212
213 response = self.app.post(
213 response = self.app.post(
214 route_path('edit_user_ips_add', user_id=user_id),
214 route_path('edit_user_ips_add', user_id=user_id),
215 params={'new_ip': ip, 'csrf_token': self.csrf_token})
215 params={'new_ip': ip, 'csrf_token': self.csrf_token})
216
216
217 if failure:
217 if failure:
218 assert_session_flash(
218 assert_session_flash(
219 response, 'Please enter a valid IPv4 or IpV6 address')
219 response, 'Please enter a valid IPv4 or IpV6 address')
220 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
220 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
221
221
222 response.mustcontain(no=[ip])
222 response.mustcontain(no=[ip])
223 response.mustcontain(no=[ip_range])
223 response.mustcontain(no=[ip_range])
224
224
225 else:
225 else:
226 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
226 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
227 response.mustcontain(ip)
227 response.mustcontain(ip)
228 response.mustcontain(ip_range)
228 response.mustcontain(ip_range)
229
229
230 def test_ips_delete(self, user_util):
230 def test_ips_delete(self, user_util):
231 self.log_user()
231 self.log_user()
232 user = user_util.create_user()
232 user = user_util.create_user()
233 user_id = user.user_id
233 user_id = user.user_id
234 ip = '127.0.0.1/32'
234 ip = '127.0.0.1/32'
235 ip_range = '127.0.0.1 - 127.0.0.1'
235 ip_range = '127.0.0.1 - 127.0.0.1'
236 new_ip = UserModel().add_extra_ip(user_id, ip)
236 new_ip = UserModel().add_extra_ip(user_id, ip)
237 Session().commit()
237 Session().commit()
238 new_ip_id = new_ip.ip_id
238 new_ip_id = new_ip.ip_id
239
239
240 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
240 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
241 response.mustcontain(ip)
241 response.mustcontain(ip)
242 response.mustcontain(ip_range)
242 response.mustcontain(ip_range)
243
243
244 self.app.post(
244 self.app.post(
245 route_path('edit_user_ips_delete', user_id=user_id),
245 route_path('edit_user_ips_delete', user_id=user_id),
246 params={'del_ip_id': new_ip_id, 'csrf_token': self.csrf_token})
246 params={'del_ip_id': new_ip_id, 'csrf_token': self.csrf_token})
247
247
248 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
248 response = self.app.get(route_path('edit_user_ips', user_id=user_id))
249 response.mustcontain('All IP addresses are allowed')
249 response.mustcontain('All IP addresses are allowed')
250 response.mustcontain(no=[ip])
250 response.mustcontain(no=[ip])
251 response.mustcontain(no=[ip_range])
251 response.mustcontain(no=[ip_range])
252
252
253 def test_emails(self):
253 def test_emails(self):
254 self.log_user()
254 self.log_user()
255 user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
255 user = User.get_by_username(TEST_USER_REGULAR_LOGIN)
256 response = self.app.get(
256 response = self.app.get(
257 route_path('edit_user_emails', user_id=user.user_id))
257 route_path('edit_user_emails', user_id=user.user_id))
258 response.mustcontain('No additional emails specified')
258 response.mustcontain('No additional emails specified')
259
259
260 def test_emails_add(self, user_util):
260 def test_emails_add(self, user_util):
261 self.log_user()
261 self.log_user()
262 user = user_util.create_user()
262 user = user_util.create_user()
263 user_id = user.user_id
263 user_id = user.user_id
264
264
265 self.app.post(
265 self.app.post(
266 route_path('edit_user_emails_add', user_id=user_id),
266 route_path('edit_user_emails_add', user_id=user_id),
267 params={'new_email': 'example@rhodecode.com',
267 params={'new_email': 'example@rhodecode.com',
268 'csrf_token': self.csrf_token})
268 'csrf_token': self.csrf_token})
269
269
270 response = self.app.get(
270 response = self.app.get(
271 route_path('edit_user_emails', user_id=user_id))
271 route_path('edit_user_emails', user_id=user_id))
272 response.mustcontain('example@rhodecode.com')
272 response.mustcontain('example@rhodecode.com')
273
273
274 def test_emails_add_existing_email(self, user_util, user_regular):
274 def test_emails_add_existing_email(self, user_util, user_regular):
275 existing_email = user_regular.email
275 existing_email = user_regular.email
276
276
277 self.log_user()
277 self.log_user()
278 user = user_util.create_user()
278 user = user_util.create_user()
279 user_id = user.user_id
279 user_id = user.user_id
280
280
281 response = self.app.post(
281 response = self.app.post(
282 route_path('edit_user_emails_add', user_id=user_id),
282 route_path('edit_user_emails_add', user_id=user_id),
283 params={'new_email': existing_email,
283 params={'new_email': existing_email,
284 'csrf_token': self.csrf_token})
284 'csrf_token': self.csrf_token})
285 assert_session_flash(
285 assert_session_flash(
286 response, 'This e-mail address is already taken')
286 response, 'This e-mail address is already taken')
287
287
288 response = self.app.get(
288 response = self.app.get(
289 route_path('edit_user_emails', user_id=user_id))
289 route_path('edit_user_emails', user_id=user_id))
290 response.mustcontain(no=[existing_email])
290 response.mustcontain(no=[existing_email])
291
291
292 def test_emails_delete(self, user_util):
292 def test_emails_delete(self, user_util):
293 self.log_user()
293 self.log_user()
294 user = user_util.create_user()
294 user = user_util.create_user()
295 user_id = user.user_id
295 user_id = user.user_id
296
296
297 self.app.post(
297 self.app.post(
298 route_path('edit_user_emails_add', user_id=user_id),
298 route_path('edit_user_emails_add', user_id=user_id),
299 params={'new_email': 'example@rhodecode.com',
299 params={'new_email': 'example@rhodecode.com',
300 'csrf_token': self.csrf_token})
300 'csrf_token': self.csrf_token})
301
301
302 response = self.app.get(
302 response = self.app.get(
303 route_path('edit_user_emails', user_id=user_id))
303 route_path('edit_user_emails', user_id=user_id))
304 response.mustcontain('example@rhodecode.com')
304 response.mustcontain('example@rhodecode.com')
305
305
306 user_email = UserEmailMap.query()\
306 user_email = UserEmailMap.query()\
307 .filter(UserEmailMap.email == 'example@rhodecode.com') \
307 .filter(UserEmailMap.email == 'example@rhodecode.com') \
308 .filter(UserEmailMap.user_id == user_id)\
308 .filter(UserEmailMap.user_id == user_id)\
309 .one()
309 .one()
310
310
311 del_email_id = user_email.email_id
311 del_email_id = user_email.email_id
312 self.app.post(
312 self.app.post(
313 route_path('edit_user_emails_delete', user_id=user_id),
313 route_path('edit_user_emails_delete', user_id=user_id),
314 params={'del_email_id': del_email_id,
314 params={'del_email_id': del_email_id,
315 'csrf_token': self.csrf_token})
315 'csrf_token': self.csrf_token})
316
316
317 response = self.app.get(
317 response = self.app.get(
318 route_path('edit_user_emails', user_id=user_id))
318 route_path('edit_user_emails', user_id=user_id))
319 response.mustcontain(no=['example@rhodecode.com'])
319 response.mustcontain(no=['example@rhodecode.com'])
320
320
321
321
322 def test_create(self, request, xhr_header):
322 def test_create(self, request, xhr_header):
323 self.log_user()
323 self.log_user()
324 username = 'newtestuser'
324 username = 'newtestuser'
325 password = 'test12'
325 password = 'test12'
326 password_confirmation = password
326 password_confirmation = password
327 name = 'name'
327 name = 'name'
328 lastname = 'lastname'
328 lastname = 'lastname'
329 email = 'mail@mail.com'
329 email = 'mail@mail.com'
330
330
331 self.app.get(route_path('users_new'))
331 self.app.get(route_path('users_new'))
332
332
333 response = self.app.post(route_path('users_create'), params={
333 response = self.app.post(route_path('users_create'), params={
334 'username': username,
334 'username': username,
335 'password': password,
335 'password': password,
336 'password_confirmation': password_confirmation,
336 'password_confirmation': password_confirmation,
337 'firstname': name,
337 'firstname': name,
338 'active': True,
338 'active': True,
339 'lastname': lastname,
339 'lastname': lastname,
340 'extern_name': 'rhodecode',
340 'extern_name': 'rhodecode',
341 'extern_type': 'rhodecode',
341 'extern_type': 'rhodecode',
342 'email': email,
342 'email': email,
343 'csrf_token': self.csrf_token,
343 'csrf_token': self.csrf_token,
344 })
344 })
345 user_link = h.link_to(
345 user_link = h.link_to(
346 username,
346 username,
347 route_path(
347 route_path(
348 'user_edit', user_id=User.get_by_username(username).user_id))
348 'user_edit', user_id=User.get_by_username(username).user_id))
349 assert_session_flash(response, 'Created user %s' % (user_link,))
349 assert_session_flash(response, 'Created user %s' % (user_link,))
350
350
351 @request.addfinalizer
351 @request.addfinalizer
352 def cleanup():
352 def cleanup():
353 fixture.destroy_user(username)
353 fixture.destroy_user(username)
354 Session().commit()
354 Session().commit()
355
355
356 new_user = User.query().filter(User.username == username).one()
356 new_user = User.query().filter(User.username == username).one()
357
357
358 assert new_user.username == username
358 assert new_user.username == username
359 assert auth.check_password(password, new_user.password)
359 assert auth.check_password(password, new_user.password)
360 assert new_user.name == name
360 assert new_user.name == name
361 assert new_user.lastname == lastname
361 assert new_user.lastname == lastname
362 assert new_user.email == email
362 assert new_user.email == email
363
363
364 response = self.app.get(route_path('users_data'),
364 response = self.app.get(route_path('users_data'),
365 extra_environ=xhr_header)
365 extra_environ=xhr_header)
366 response.mustcontain(username)
366 response.mustcontain(username)
367
367
368 def test_create_err(self):
368 def test_create_err(self):
369 self.log_user()
369 self.log_user()
370 username = 'new_user'
370 username = 'new_user'
371 password = ''
371 password = ''
372 name = 'name'
372 name = 'name'
373 lastname = 'lastname'
373 lastname = 'lastname'
374 email = 'errmail.com'
374 email = 'errmail.com'
375
375
376 self.app.get(route_path('users_new'))
376 self.app.get(route_path('users_new'))
377
377
378 response = self.app.post(route_path('users_create'), params={
378 response = self.app.post(route_path('users_create'), params={
379 'username': username,
379 'username': username,
380 'password': password,
380 'password': password,
381 'name': name,
381 'name': name,
382 'active': False,
382 'active': False,
383 'lastname': lastname,
383 'lastname': lastname,
384 'email': email,
384 'email': email,
385 'csrf_token': self.csrf_token,
385 'csrf_token': self.csrf_token,
386 })
386 })
387
387
388 msg = '???'
388 msg = u'Username "%(username)s" is forbidden'
389 msg = h.html_escape(msg % {'username': 'new_user'})
389 msg = h.html_escape(msg % {'username': 'new_user'})
390 response.mustcontain('<span class="error-message">%s</span>' % msg)
390 response.mustcontain('<span class="error-message">%s</span>' % msg)
391 response.mustcontain(
391 response.mustcontain(
392 '<span class="error-message">Please enter a value</span>')
392 '<span class="error-message">Please enter a value</span>')
393 response.mustcontain(
393 response.mustcontain(
394 '<span class="error-message">An email address must contain a'
394 '<span class="error-message">An email address must contain a'
395 ' single @</span>')
395 ' single @</span>')
396
396
397 def get_user():
397 def get_user():
398 Session().query(User).filter(User.username == username).one()
398 Session().query(User).filter(User.username == username).one()
399
399
400 with pytest.raises(NoResultFound):
400 with pytest.raises(NoResultFound):
401 get_user()
401 get_user()
402
402
403 def test_new(self):
403 def test_new(self):
404 self.log_user()
404 self.log_user()
405 self.app.get(route_path('users_new'))
405 self.app.get(route_path('users_new'))
406
406
407 @pytest.mark.parametrize("name, attrs", [
407 @pytest.mark.parametrize("name, attrs", [
408 ('firstname', {'firstname': 'new_username'}),
408 ('firstname', {'firstname': 'new_username'}),
409 ('lastname', {'lastname': 'new_username'}),
409 ('lastname', {'lastname': 'new_username'}),
410 ('admin', {'admin': True}),
410 ('admin', {'admin': True}),
411 ('admin', {'admin': False}),
411 ('admin', {'admin': False}),
412 ('extern_type', {'extern_type': 'ldap'}),
412 ('extern_type', {'extern_type': 'ldap'}),
413 ('extern_type', {'extern_type': None}),
413 ('extern_type', {'extern_type': None}),
414 ('extern_name', {'extern_name': 'test'}),
414 ('extern_name', {'extern_name': 'test'}),
415 ('extern_name', {'extern_name': None}),
415 ('extern_name', {'extern_name': None}),
416 ('active', {'active': False}),
416 ('active', {'active': False}),
417 ('active', {'active': True}),
417 ('active', {'active': True}),
418 ('email', {'email': 'some@email.com'}),
418 ('email', {'email': 'some@email.com'}),
419 ('language', {'language': 'de'}),
419 ('language', {'language': 'de'}),
420 ('language', {'language': 'en'}),
420 ('language', {'language': 'en'}),
421 # ('new_password', {'new_password': 'foobar123',
421 # ('new_password', {'new_password': 'foobar123',
422 # 'password_confirmation': 'foobar123'})
422 # 'password_confirmation': 'foobar123'})
423 ])
423 ])
424 def test_update(self, name, attrs, user_util):
424 def test_update(self, name, attrs, user_util):
425 self.log_user()
425 self.log_user()
426 usr = user_util.create_user(
426 usr = user_util.create_user(
427 password='qweqwe',
427 password='qweqwe',
428 email='testme@rhodecode.org',
428 email='testme@rhodecode.org',
429 extern_type='rhodecode',
429 extern_type='rhodecode',
430 extern_name='xxx',
430 extern_name='xxx',
431 )
431 )
432 user_id = usr.user_id
432 user_id = usr.user_id
433 Session().commit()
433 Session().commit()
434
434
435 params = usr.get_api_data()
435 params = usr.get_api_data()
436 cur_lang = params['language'] or 'en'
436 cur_lang = params['language'] or 'en'
437 params.update({
437 params.update({
438 'password_confirmation': '',
438 'password_confirmation': '',
439 'new_password': '',
439 'new_password': '',
440 'language': cur_lang,
440 'language': cur_lang,
441 'csrf_token': self.csrf_token,
441 'csrf_token': self.csrf_token,
442 })
442 })
443 params.update({'new_password': ''})
443 params.update({'new_password': ''})
444 params.update(attrs)
444 params.update(attrs)
445 if name == 'email':
445 if name == 'email':
446 params['emails'] = [attrs['email']]
446 params['emails'] = [attrs['email']]
447 elif name == 'extern_type':
447 elif name == 'extern_type':
448 # cannot update this via form, expected value is original one
448 # cannot update this via form, expected value is original one
449 params['extern_type'] = "rhodecode"
449 params['extern_type'] = "rhodecode"
450 elif name == 'extern_name':
450 elif name == 'extern_name':
451 # cannot update this via form, expected value is original one
451 # cannot update this via form, expected value is original one
452 params['extern_name'] = 'xxx'
452 params['extern_name'] = 'xxx'
453 # special case since this user is not
453 # special case since this user is not
454 # logged in yet his data is not filled
454 # logged in yet his data is not filled
455 # so we use creation data
455 # so we use creation data
456
456
457 response = self.app.post(
457 response = self.app.post(
458 route_path('user_update', user_id=usr.user_id), params)
458 route_path('user_update', user_id=usr.user_id), params)
459 assert response.status_int == 302
459 assert response.status_int == 302
460 assert_session_flash(response, 'User updated successfully')
460 assert_session_flash(response, 'User updated successfully')
461
461
462 updated_user = User.get(user_id)
462 updated_user = User.get(user_id)
463 updated_params = updated_user.get_api_data()
463 updated_params = updated_user.get_api_data()
464 updated_params.update({'password_confirmation': ''})
464 updated_params.update({'password_confirmation': ''})
465 updated_params.update({'new_password': ''})
465 updated_params.update({'new_password': ''})
466
466
467 del params['csrf_token']
467 del params['csrf_token']
468 assert params == updated_params
468 assert params == updated_params
469
469
470 def test_update_and_migrate_password(
470 def test_update_and_migrate_password(
471 self, autologin_user, real_crypto_backend, user_util):
471 self, autologin_user, real_crypto_backend, user_util):
472
472
473 user = user_util.create_user()
473 user = user_util.create_user()
474 temp_user = user.username
474 temp_user = user.username
475 user.password = auth._RhodeCodeCryptoSha256().hash_create(
475 user.password = auth._RhodeCodeCryptoSha256().hash_create(
476 b'test123')
476 b'test123')
477 Session().add(user)
477 Session().add(user)
478 Session().commit()
478 Session().commit()
479
479
480 params = user.get_api_data()
480 params = user.get_api_data()
481
481
482 params.update({
482 params.update({
483 'password_confirmation': 'qweqwe123',
483 'password_confirmation': 'qweqwe123',
484 'new_password': 'qweqwe123',
484 'new_password': 'qweqwe123',
485 'language': 'en',
485 'language': 'en',
486 'csrf_token': autologin_user.csrf_token,
486 'csrf_token': autologin_user.csrf_token,
487 })
487 })
488
488
489 response = self.app.post(
489 response = self.app.post(
490 route_path('user_update', user_id=user.user_id), params)
490 route_path('user_update', user_id=user.user_id), params)
491 assert response.status_int == 302
491 assert response.status_int == 302
492 assert_session_flash(response, 'User updated successfully')
492 assert_session_flash(response, 'User updated successfully')
493
493
494 # new password should be bcrypted, after log-in and transfer
494 # new password should be bcrypted, after log-in and transfer
495 user = User.get_by_username(temp_user)
495 user = User.get_by_username(temp_user)
496 assert user.password.startswith('$')
496 assert user.password.startswith('$')
497
497
498 updated_user = User.get_by_username(temp_user)
498 updated_user = User.get_by_username(temp_user)
499 updated_params = updated_user.get_api_data()
499 updated_params = updated_user.get_api_data()
500 updated_params.update({'password_confirmation': 'qweqwe123'})
500 updated_params.update({'password_confirmation': 'qweqwe123'})
501 updated_params.update({'new_password': 'qweqwe123'})
501 updated_params.update({'new_password': 'qweqwe123'})
502
502
503 del params['csrf_token']
503 del params['csrf_token']
504 assert params == updated_params
504 assert params == updated_params
505
505
506 def test_delete(self):
506 def test_delete(self):
507 self.log_user()
507 self.log_user()
508 username = 'newtestuserdeleteme'
508 username = 'newtestuserdeleteme'
509
509
510 fixture.create_user(name=username)
510 fixture.create_user(name=username)
511
511
512 new_user = Session().query(User)\
512 new_user = Session().query(User)\
513 .filter(User.username == username).one()
513 .filter(User.username == username).one()
514 response = self.app.post(
514 response = self.app.post(
515 route_path('user_delete', user_id=new_user.user_id),
515 route_path('user_delete', user_id=new_user.user_id),
516 params={'csrf_token': self.csrf_token})
516 params={'csrf_token': self.csrf_token})
517
517
518 assert_session_flash(response, 'Successfully deleted user')
518 assert_session_flash(response, 'Successfully deleted user')
519
519
520 def test_delete_owner_of_repository(self, request, user_util):
520 def test_delete_owner_of_repository(self, request, user_util):
521 self.log_user()
521 self.log_user()
522 obj_name = 'test_repo'
522 obj_name = 'test_repo'
523 usr = user_util.create_user()
523 usr = user_util.create_user()
524 username = usr.username
524 username = usr.username
525 fixture.create_repo(obj_name, cur_user=usr.username)
525 fixture.create_repo(obj_name, cur_user=usr.username)
526
526
527 new_user = Session().query(User)\
527 new_user = Session().query(User)\
528 .filter(User.username == username).one()
528 .filter(User.username == username).one()
529 response = self.app.post(
529 response = self.app.post(
530 route_path('user_delete', user_id=new_user.user_id),
530 route_path('user_delete', user_id=new_user.user_id),
531 params={'csrf_token': self.csrf_token})
531 params={'csrf_token': self.csrf_token})
532
532
533 msg = 'user "%s" still owns 1 repositories and cannot be removed. ' \
533 msg = 'user "%s" still owns 1 repositories and cannot be removed. ' \
534 'Switch owners or remove those repositories:%s' % (username,
534 'Switch owners or remove those repositories:%s' % (username,
535 obj_name)
535 obj_name)
536 assert_session_flash(response, msg)
536 assert_session_flash(response, msg)
537 fixture.destroy_repo(obj_name)
537 fixture.destroy_repo(obj_name)
538
538
539 def test_delete_owner_of_repository_detaching(self, request, user_util):
539 def test_delete_owner_of_repository_detaching(self, request, user_util):
540 self.log_user()
540 self.log_user()
541 obj_name = 'test_repo'
541 obj_name = 'test_repo'
542 usr = user_util.create_user(auto_cleanup=False)
542 usr = user_util.create_user(auto_cleanup=False)
543 username = usr.username
543 username = usr.username
544 fixture.create_repo(obj_name, cur_user=usr.username)
544 fixture.create_repo(obj_name, cur_user=usr.username)
545
545
546 new_user = Session().query(User)\
546 new_user = Session().query(User)\
547 .filter(User.username == username).one()
547 .filter(User.username == username).one()
548 response = self.app.post(
548 response = self.app.post(
549 route_path('user_delete', user_id=new_user.user_id),
549 route_path('user_delete', user_id=new_user.user_id),
550 params={'user_repos': 'detach', 'csrf_token': self.csrf_token})
550 params={'user_repos': 'detach', 'csrf_token': self.csrf_token})
551
551
552 msg = 'Detached 1 repositories'
552 msg = 'Detached 1 repositories'
553 assert_session_flash(response, msg)
553 assert_session_flash(response, msg)
554 fixture.destroy_repo(obj_name)
554 fixture.destroy_repo(obj_name)
555
555
556 def test_delete_owner_of_repository_deleting(self, request, user_util):
556 def test_delete_owner_of_repository_deleting(self, request, user_util):
557 self.log_user()
557 self.log_user()
558 obj_name = 'test_repo'
558 obj_name = 'test_repo'
559 usr = user_util.create_user(auto_cleanup=False)
559 usr = user_util.create_user(auto_cleanup=False)
560 username = usr.username
560 username = usr.username
561 fixture.create_repo(obj_name, cur_user=usr.username)
561 fixture.create_repo(obj_name, cur_user=usr.username)
562
562
563 new_user = Session().query(User)\
563 new_user = Session().query(User)\
564 .filter(User.username == username).one()
564 .filter(User.username == username).one()
565 response = self.app.post(
565 response = self.app.post(
566 route_path('user_delete', user_id=new_user.user_id),
566 route_path('user_delete', user_id=new_user.user_id),
567 params={'user_repos': 'delete', 'csrf_token': self.csrf_token})
567 params={'user_repos': 'delete', 'csrf_token': self.csrf_token})
568
568
569 msg = 'Deleted 1 repositories'
569 msg = 'Deleted 1 repositories'
570 assert_session_flash(response, msg)
570 assert_session_flash(response, msg)
571
571
572 def test_delete_owner_of_repository_group(self, request, user_util):
572 def test_delete_owner_of_repository_group(self, request, user_util):
573 self.log_user()
573 self.log_user()
574 obj_name = 'test_group'
574 obj_name = 'test_group'
575 usr = user_util.create_user()
575 usr = user_util.create_user()
576 username = usr.username
576 username = usr.username
577 fixture.create_repo_group(obj_name, cur_user=usr.username)
577 fixture.create_repo_group(obj_name, cur_user=usr.username)
578
578
579 new_user = Session().query(User)\
579 new_user = Session().query(User)\
580 .filter(User.username == username).one()
580 .filter(User.username == username).one()
581 response = self.app.post(
581 response = self.app.post(
582 route_path('user_delete', user_id=new_user.user_id),
582 route_path('user_delete', user_id=new_user.user_id),
583 params={'csrf_token': self.csrf_token})
583 params={'csrf_token': self.csrf_token})
584
584
585 msg = 'user "%s" still owns 1 repository groups and cannot be removed. ' \
585 msg = 'user "%s" still owns 1 repository groups and cannot be removed. ' \
586 'Switch owners or remove those repository groups:%s' % (username,
586 'Switch owners or remove those repository groups:%s' % (username,
587 obj_name)
587 obj_name)
588 assert_session_flash(response, msg)
588 assert_session_flash(response, msg)
589 fixture.destroy_repo_group(obj_name)
589 fixture.destroy_repo_group(obj_name)
590
590
591 def test_delete_owner_of_repository_group_detaching(self, request, user_util):
591 def test_delete_owner_of_repository_group_detaching(self, request, user_util):
592 self.log_user()
592 self.log_user()
593 obj_name = 'test_group'
593 obj_name = 'test_group'
594 usr = user_util.create_user(auto_cleanup=False)
594 usr = user_util.create_user(auto_cleanup=False)
595 username = usr.username
595 username = usr.username
596 fixture.create_repo_group(obj_name, cur_user=usr.username)
596 fixture.create_repo_group(obj_name, cur_user=usr.username)
597
597
598 new_user = Session().query(User)\
598 new_user = Session().query(User)\
599 .filter(User.username == username).one()
599 .filter(User.username == username).one()
600 response = self.app.post(
600 response = self.app.post(
601 route_path('user_delete', user_id=new_user.user_id),
601 route_path('user_delete', user_id=new_user.user_id),
602 params={'user_repo_groups': 'delete', 'csrf_token': self.csrf_token})
602 params={'user_repo_groups': 'delete', 'csrf_token': self.csrf_token})
603
603
604 msg = 'Deleted 1 repository groups'
604 msg = 'Deleted 1 repository groups'
605 assert_session_flash(response, msg)
605 assert_session_flash(response, msg)
606
606
607 def test_delete_owner_of_repository_group_deleting(self, request, user_util):
607 def test_delete_owner_of_repository_group_deleting(self, request, user_util):
608 self.log_user()
608 self.log_user()
609 obj_name = 'test_group'
609 obj_name = 'test_group'
610 usr = user_util.create_user(auto_cleanup=False)
610 usr = user_util.create_user(auto_cleanup=False)
611 username = usr.username
611 username = usr.username
612 fixture.create_repo_group(obj_name, cur_user=usr.username)
612 fixture.create_repo_group(obj_name, cur_user=usr.username)
613
613
614 new_user = Session().query(User)\
614 new_user = Session().query(User)\
615 .filter(User.username == username).one()
615 .filter(User.username == username).one()
616 response = self.app.post(
616 response = self.app.post(
617 route_path('user_delete', user_id=new_user.user_id),
617 route_path('user_delete', user_id=new_user.user_id),
618 params={'user_repo_groups': 'detach', 'csrf_token': self.csrf_token})
618 params={'user_repo_groups': 'detach', 'csrf_token': self.csrf_token})
619
619
620 msg = 'Detached 1 repository groups'
620 msg = 'Detached 1 repository groups'
621 assert_session_flash(response, msg)
621 assert_session_flash(response, msg)
622 fixture.destroy_repo_group(obj_name)
622 fixture.destroy_repo_group(obj_name)
623
623
624 def test_delete_owner_of_user_group(self, request, user_util):
624 def test_delete_owner_of_user_group(self, request, user_util):
625 self.log_user()
625 self.log_user()
626 obj_name = 'test_user_group'
626 obj_name = 'test_user_group'
627 usr = user_util.create_user()
627 usr = user_util.create_user()
628 username = usr.username
628 username = usr.username
629 fixture.create_user_group(obj_name, cur_user=usr.username)
629 fixture.create_user_group(obj_name, cur_user=usr.username)
630
630
631 new_user = Session().query(User)\
631 new_user = Session().query(User)\
632 .filter(User.username == username).one()
632 .filter(User.username == username).one()
633 response = self.app.post(
633 response = self.app.post(
634 route_path('user_delete', user_id=new_user.user_id),
634 route_path('user_delete', user_id=new_user.user_id),
635 params={'csrf_token': self.csrf_token})
635 params={'csrf_token': self.csrf_token})
636
636
637 msg = 'user "%s" still owns 1 user groups and cannot be removed. ' \
637 msg = 'user "%s" still owns 1 user groups and cannot be removed. ' \
638 'Switch owners or remove those user groups:%s' % (username,
638 'Switch owners or remove those user groups:%s' % (username,
639 obj_name)
639 obj_name)
640 assert_session_flash(response, msg)
640 assert_session_flash(response, msg)
641 fixture.destroy_user_group(obj_name)
641 fixture.destroy_user_group(obj_name)
642
642
643 def test_delete_owner_of_user_group_detaching(self, request, user_util):
643 def test_delete_owner_of_user_group_detaching(self, request, user_util):
644 self.log_user()
644 self.log_user()
645 obj_name = 'test_user_group'
645 obj_name = 'test_user_group'
646 usr = user_util.create_user(auto_cleanup=False)
646 usr = user_util.create_user(auto_cleanup=False)
647 username = usr.username
647 username = usr.username
648 fixture.create_user_group(obj_name, cur_user=usr.username)
648 fixture.create_user_group(obj_name, cur_user=usr.username)
649
649
650 new_user = Session().query(User)\
650 new_user = Session().query(User)\
651 .filter(User.username == username).one()
651 .filter(User.username == username).one()
652 try:
652 try:
653 response = self.app.post(
653 response = self.app.post(
654 route_path('user_delete', user_id=new_user.user_id),
654 route_path('user_delete', user_id=new_user.user_id),
655 params={'user_user_groups': 'detach',
655 params={'user_user_groups': 'detach',
656 'csrf_token': self.csrf_token})
656 'csrf_token': self.csrf_token})
657
657
658 msg = 'Detached 1 user groups'
658 msg = 'Detached 1 user groups'
659 assert_session_flash(response, msg)
659 assert_session_flash(response, msg)
660 finally:
660 finally:
661 fixture.destroy_user_group(obj_name)
661 fixture.destroy_user_group(obj_name)
662
662
663 def test_delete_owner_of_user_group_deleting(self, request, user_util):
663 def test_delete_owner_of_user_group_deleting(self, request, user_util):
664 self.log_user()
664 self.log_user()
665 obj_name = 'test_user_group'
665 obj_name = 'test_user_group'
666 usr = user_util.create_user(auto_cleanup=False)
666 usr = user_util.create_user(auto_cleanup=False)
667 username = usr.username
667 username = usr.username
668 fixture.create_user_group(obj_name, cur_user=usr.username)
668 fixture.create_user_group(obj_name, cur_user=usr.username)
669
669
670 new_user = Session().query(User)\
670 new_user = Session().query(User)\
671 .filter(User.username == username).one()
671 .filter(User.username == username).one()
672 response = self.app.post(
672 response = self.app.post(
673 route_path('user_delete', user_id=new_user.user_id),
673 route_path('user_delete', user_id=new_user.user_id),
674 params={'user_user_groups': 'delete', 'csrf_token': self.csrf_token})
674 params={'user_user_groups': 'delete', 'csrf_token': self.csrf_token})
675
675
676 msg = 'Deleted 1 user groups'
676 msg = 'Deleted 1 user groups'
677 assert_session_flash(response, msg)
677 assert_session_flash(response, msg)
678
678
679 def test_edit(self, user_util):
679 def test_edit(self, user_util):
680 self.log_user()
680 self.log_user()
681 user = user_util.create_user()
681 user = user_util.create_user()
682 self.app.get(route_path('user_edit', user_id=user.user_id))
682 self.app.get(route_path('user_edit', user_id=user.user_id))
683
683
684 def test_edit_default_user_redirect(self):
684 def test_edit_default_user_redirect(self):
685 self.log_user()
685 self.log_user()
686 user = User.get_default_user()
686 user = User.get_default_user()
687 self.app.get(route_path('user_edit', user_id=user.user_id), status=302)
687 self.app.get(route_path('user_edit', user_id=user.user_id), status=302)
688
688
689 @pytest.mark.parametrize(
689 @pytest.mark.parametrize(
690 'repo_create, repo_create_write, user_group_create, repo_group_create,'
690 'repo_create, repo_create_write, user_group_create, repo_group_create,'
691 'fork_create, inherit_default_permissions, expect_error,'
691 'fork_create, inherit_default_permissions, expect_error,'
692 'expect_form_error', [
692 'expect_form_error', [
693 ('hg.create.none', 'hg.create.write_on_repogroup.false',
693 ('hg.create.none', 'hg.create.write_on_repogroup.false',
694 'hg.usergroup.create.false', 'hg.repogroup.create.false',
694 'hg.usergroup.create.false', 'hg.repogroup.create.false',
695 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
695 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
696 ('hg.create.repository', 'hg.create.write_on_repogroup.false',
696 ('hg.create.repository', 'hg.create.write_on_repogroup.false',
697 'hg.usergroup.create.false', 'hg.repogroup.create.false',
697 'hg.usergroup.create.false', 'hg.repogroup.create.false',
698 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
698 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
699 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
699 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
700 'hg.usergroup.create.true', 'hg.repogroup.create.true',
700 'hg.usergroup.create.true', 'hg.repogroup.create.true',
701 'hg.fork.repository', 'hg.inherit_default_perms.false', False,
701 'hg.fork.repository', 'hg.inherit_default_perms.false', False,
702 False),
702 False),
703 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
703 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
704 'hg.usergroup.create.true', 'hg.repogroup.create.true',
704 'hg.usergroup.create.true', 'hg.repogroup.create.true',
705 'hg.fork.repository', 'hg.inherit_default_perms.false', False,
705 'hg.fork.repository', 'hg.inherit_default_perms.false', False,
706 True),
706 True),
707 ('', '', '', '', '', '', True, False),
707 ('', '', '', '', '', '', True, False),
708 ])
708 ])
709 def test_global_perms_on_user(
709 def test_global_perms_on_user(
710 self, repo_create, repo_create_write, user_group_create,
710 self, repo_create, repo_create_write, user_group_create,
711 repo_group_create, fork_create, expect_error, expect_form_error,
711 repo_group_create, fork_create, expect_error, expect_form_error,
712 inherit_default_permissions, user_util):
712 inherit_default_permissions, user_util):
713 self.log_user()
713 self.log_user()
714 user = user_util.create_user()
714 user = user_util.create_user()
715 uid = user.user_id
715 uid = user.user_id
716
716
717 # ENABLE REPO CREATE ON A GROUP
717 # ENABLE REPO CREATE ON A GROUP
718 perm_params = {
718 perm_params = {
719 'inherit_default_permissions': False,
719 'inherit_default_permissions': False,
720 'default_repo_create': repo_create,
720 'default_repo_create': repo_create,
721 'default_repo_create_on_write': repo_create_write,
721 'default_repo_create_on_write': repo_create_write,
722 'default_user_group_create': user_group_create,
722 'default_user_group_create': user_group_create,
723 'default_repo_group_create': repo_group_create,
723 'default_repo_group_create': repo_group_create,
724 'default_fork_create': fork_create,
724 'default_fork_create': fork_create,
725 'default_inherit_default_permissions': inherit_default_permissions,
725 'default_inherit_default_permissions': inherit_default_permissions,
726 'csrf_token': self.csrf_token,
726 'csrf_token': self.csrf_token,
727 }
727 }
728 response = self.app.post(
728 response = self.app.post(
729 route_path('user_edit_global_perms_update', user_id=uid),
729 route_path('user_edit_global_perms_update', user_id=uid),
730 params=perm_params)
730 params=perm_params)
731
731
732 if expect_form_error:
732 if expect_form_error:
733 assert response.status_int == 200
733 assert response.status_int == 200
734 response.mustcontain('Value must be one of')
734 response.mustcontain('Value must be one of')
735 else:
735 else:
736 if expect_error:
736 if expect_error:
737 msg = 'An error occurred during permissions saving'
737 msg = 'An error occurred during permissions saving'
738 else:
738 else:
739 msg = 'User global permissions updated successfully'
739 msg = 'User global permissions updated successfully'
740 ug = User.get(uid)
740 ug = User.get(uid)
741 del perm_params['inherit_default_permissions']
741 del perm_params['inherit_default_permissions']
742 del perm_params['csrf_token']
742 del perm_params['csrf_token']
743 assert perm_params == ug.get_default_perms()
743 assert perm_params == ug.get_default_perms()
744 assert_session_flash(response, msg)
744 assert_session_flash(response, msg)
745
745
746 def test_global_permissions_initial_values(self, user_util):
746 def test_global_permissions_initial_values(self, user_util):
747 self.log_user()
747 self.log_user()
748 user = user_util.create_user()
748 user = user_util.create_user()
749 uid = user.user_id
749 uid = user.user_id
750 response = self.app.get(
750 response = self.app.get(
751 route_path('user_edit_global_perms', user_id=uid))
751 route_path('user_edit_global_perms', user_id=uid))
752 default_user = User.get_default_user()
752 default_user = User.get_default_user()
753 default_permissions = default_user.get_default_perms()
753 default_permissions = default_user.get_default_perms()
754 assert_response = response.assert_response()
754 assert_response = response.assert_response()
755 expected_permissions = (
755 expected_permissions = (
756 'default_repo_create', 'default_repo_create_on_write',
756 'default_repo_create', 'default_repo_create_on_write',
757 'default_fork_create', 'default_repo_group_create',
757 'default_fork_create', 'default_repo_group_create',
758 'default_user_group_create', 'default_inherit_default_permissions')
758 'default_user_group_create', 'default_inherit_default_permissions')
759 for permission in expected_permissions:
759 for permission in expected_permissions:
760 css_selector = '[name={}][checked=checked]'.format(permission)
760 css_selector = '[name={}][checked=checked]'.format(permission)
761 element = assert_response.get_element(css_selector)
761 element = assert_response.get_element(css_selector)
762 assert element.value == default_permissions[permission]
762 assert element.value == default_permissions[permission]
763
763
764 def test_perms_summary_page(self):
764 def test_perms_summary_page(self):
765 user = self.log_user()
765 user = self.log_user()
766 response = self.app.get(
766 response = self.app.get(
767 route_path('edit_user_perms_summary', user_id=user['user_id']))
767 route_path('edit_user_perms_summary', user_id=user['user_id']))
768 for repo in Repository.query().all():
768 for repo in Repository.query().all():
769 response.mustcontain(repo.repo_name)
769 response.mustcontain(repo.repo_name)
770
770
771 def test_perms_summary_page_json(self):
771 def test_perms_summary_page_json(self):
772 user = self.log_user()
772 user = self.log_user()
773 response = self.app.get(
773 response = self.app.get(
774 route_path('edit_user_perms_summary_json', user_id=user['user_id']))
774 route_path('edit_user_perms_summary_json', user_id=user['user_id']))
775 for repo in Repository.query().all():
775 for repo in Repository.query().all():
776 response.mustcontain(repo.repo_name)
776 response.mustcontain(repo.repo_name)
777
777
778 def test_audit_log_page(self):
778 def test_audit_log_page(self):
779 user = self.log_user()
779 user = self.log_user()
780 self.app.get(
780 self.app.get(
781 route_path('edit_user_audit_logs', user_id=user['user_id']))
781 route_path('edit_user_audit_logs', user_id=user['user_id']))
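The tests in the section above all follow the same flow: log in as an admin, POST to an admin route with the session's CSRF token, then verify the outcome either through the rendered listing or a flash message. The sketch below is not part of the commit; it is a minimal illustration of that pattern, assuming the suite's `TestController` base class, `route_path`, `assert_session_flash` helper and `user_util` fixture shown in the diff — the class and method names in the sketch are invented for the example.

```python
# Illustrative sketch only (not part of the commit): the request/assert pattern
# used by the admin-user tests above. TestController, route_path,
# assert_session_flash and the user_util fixture are assumed to come from this
# test module; the class and method names are made up for the example.
class TestAdminUsersViewSketch(TestController):

    def test_add_email_sketch(self, user_util):
        self.log_user()                       # authenticate the admin session
        user = user_util.create_user()        # throwaway user from the fixture

        # Admin POST endpoints require the CSRF token of the logged-in session.
        self.app.post(
            route_path('edit_user_emails_add', user_id=user.user_id),
            params={'new_email': 'example@rhodecode.com',
                    'csrf_token': self.csrf_token})

        # A follow-up GET renders the listing; mustcontain() checks the body.
        response = self.app.get(
            route_path('edit_user_emails', user_id=user.user_id))
        response.mustcontain('example@rhodecode.com')
```

The failure paths above use the same pattern but check the flash message instead of the page body, e.g. `assert_session_flash(response, 'This e-mail address is already taken')`.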
@@ -1,486 +1,484 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22 import logging
22 import logging
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25 import datetime
25 import datetime
26 from pyramid.interfaces import IRoutesMapper
26 from pyramid.interfaces import IRoutesMapper
27
27
28 from pyramid.view import view_config
28 from pyramid.view import view_config
29 from pyramid.httpexceptions import HTTPFound
29 from pyramid.httpexceptions import HTTPFound
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31 from pyramid.response import Response
31 from pyramid.response import Response
32
32
33 from rhodecode.apps._base import BaseAppView, DataGridAppView
33 from rhodecode.apps._base import BaseAppView, DataGridAppView
34 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
34 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
35 from rhodecode.events import trigger
35 from rhodecode.events import trigger
36
36
37 from rhodecode.lib import helpers as h
37 from rhodecode.lib import helpers as h
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
40 from rhodecode.lib.utils2 import aslist, safe_unicode
40 from rhodecode.lib.utils2 import aslist, safe_unicode
41 from rhodecode.model.db import (
41 from rhodecode.model.db import (
42 or_, coalesce, User, UserIpMap, UserSshKeys)
42 or_, coalesce, User, UserIpMap, UserSshKeys)
43 from rhodecode.model.forms import (
43 from rhodecode.model.forms import (
44 ApplicationPermissionsForm, ObjectPermissionsForm, UserPermissionsForm)
44 ApplicationPermissionsForm, ObjectPermissionsForm, UserPermissionsForm)
45 from rhodecode.model.meta import Session
45 from rhodecode.model.meta import Session
46 from rhodecode.model.permission import PermissionModel
46 from rhodecode.model.permission import PermissionModel
47 from rhodecode.model.settings import SettingsModel
47 from rhodecode.model.settings import SettingsModel
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class AdminPermissionsView(BaseAppView, DataGridAppView):
53 class AdminPermissionsView(BaseAppView, DataGridAppView):
54 def load_default_context(self):
54 def load_default_context(self):
55 c = self._get_local_tmpl_context()
55 c = self._get_local_tmpl_context()
56
57
58 PermissionModel().set_global_permission_choices(
56 PermissionModel().set_global_permission_choices(
59 c, gettext_translator=self.request.translate)
57 c, gettext_translator=self.request.translate)
60 return c
58 return c
61
59
62 @LoginRequired()
60 @LoginRequired()
63 @HasPermissionAllDecorator('hg.admin')
61 @HasPermissionAllDecorator('hg.admin')
64 @view_config(
62 @view_config(
65 route_name='admin_permissions_application', request_method='GET',
63 route_name='admin_permissions_application', request_method='GET',
66 renderer='rhodecode:templates/admin/permissions/permissions.mako')
64 renderer='rhodecode:templates/admin/permissions/permissions.mako')
67 def permissions_application(self):
65 def permissions_application(self):
68 c = self.load_default_context()
66 c = self.load_default_context()
69 c.active = 'application'
67 c.active = 'application'
70
68
71 c.user = User.get_default_user(refresh=True)
69 c.user = User.get_default_user(refresh=True)
72
70
73 app_settings = SettingsModel().get_all_settings()
71 app_settings = SettingsModel().get_all_settings()
74 defaults = {
72 defaults = {
75 'anonymous': c.user.active,
73 'anonymous': c.user.active,
76 'default_register_message': app_settings.get(
74 'default_register_message': app_settings.get(
77 'rhodecode_register_message')
75 'rhodecode_register_message')
78 }
76 }
79 defaults.update(c.user.get_default_perms())
77 defaults.update(c.user.get_default_perms())
80
78
81 data = render('rhodecode:templates/admin/permissions/permissions.mako',
79 data = render('rhodecode:templates/admin/permissions/permissions.mako',
82 self._get_template_context(c), self.request)
80 self._get_template_context(c), self.request)
83 html = formencode.htmlfill.render(
81 html = formencode.htmlfill.render(
84 data,
82 data,
85 defaults=defaults,
83 defaults=defaults,
86 encoding="UTF-8",
84 encoding="UTF-8",
87 force_defaults=False
85 force_defaults=False
88 )
86 )
89 return Response(html)
87 return Response(html)
90
88
91 @LoginRequired()
89 @LoginRequired()
92 @HasPermissionAllDecorator('hg.admin')
90 @HasPermissionAllDecorator('hg.admin')
93 @CSRFRequired()
91 @CSRFRequired()
94 @view_config(
92 @view_config(
95 route_name='admin_permissions_application_update', request_method='POST',
93 route_name='admin_permissions_application_update', request_method='POST',
96 renderer='rhodecode:templates/admin/permissions/permissions.mako')
94 renderer='rhodecode:templates/admin/permissions/permissions.mako')
97 def permissions_application_update(self):
95 def permissions_application_update(self):
98 _ = self.request.translate
96 _ = self.request.translate
99 c = self.load_default_context()
97 c = self.load_default_context()
100 c.active = 'application'
98 c.active = 'application'
101
99
102 _form = ApplicationPermissionsForm(
100 _form = ApplicationPermissionsForm(
103 self.request.translate,
101 self.request.translate,
104 [x[0] for x in c.register_choices],
102 [x[0] for x in c.register_choices],
105 [x[0] for x in c.password_reset_choices],
103 [x[0] for x in c.password_reset_choices],
106 [x[0] for x in c.extern_activate_choices])()
104 [x[0] for x in c.extern_activate_choices])()
107
105
108 try:
106 try:
109 form_result = _form.to_python(dict(self.request.POST))
107 form_result = _form.to_python(dict(self.request.POST))
110 form_result.update({'perm_user_name': User.DEFAULT_USER})
108 form_result.update({'perm_user_name': User.DEFAULT_USER})
111 PermissionModel().update_application_permissions(form_result)
109 PermissionModel().update_application_permissions(form_result)
112
110
113 settings = [
111 settings = [
114 ('register_message', 'default_register_message'),
112 ('register_message', 'default_register_message'),
115 ]
113 ]
116 for setting, form_key in settings:
114 for setting, form_key in settings:
117 sett = SettingsModel().create_or_update_setting(
115 sett = SettingsModel().create_or_update_setting(
118 setting, form_result[form_key])
116 setting, form_result[form_key])
119 Session().add(sett)
117 Session().add(sett)
120
118
121 Session().commit()
119 Session().commit()
122 h.flash(_('Application permissions updated successfully'),
120 h.flash(_('Application permissions updated successfully'),
123 category='success')
121 category='success')
124
122
125 except formencode.Invalid as errors:
123 except formencode.Invalid as errors:
126 defaults = errors.value
124 defaults = errors.value
127
125
128 data = render(
126 data = render(
129 'rhodecode:templates/admin/permissions/permissions.mako',
127 'rhodecode:templates/admin/permissions/permissions.mako',
130 self._get_template_context(c), self.request)
128 self._get_template_context(c), self.request)
131 html = formencode.htmlfill.render(
129 html = formencode.htmlfill.render(
132 data,
130 data,
133 defaults=defaults,
131 defaults=defaults,
134 errors=errors.error_dict or {},
132 errors=errors.error_dict or {},
135 prefix_error=False,
133 prefix_error=False,
136 encoding="UTF-8",
134 encoding="UTF-8",
137 force_defaults=False
135 force_defaults=False
138 )
136 )
139 return Response(html)
137 return Response(html)
140
138
141 except Exception:
139 except Exception:
142 log.exception("Exception during update of permissions")
140 log.exception("Exception during update of permissions")
143 h.flash(_('Error occurred during update of permissions'),
141 h.flash(_('Error occurred during update of permissions'),
144 category='error')
142 category='error')
145
143
146 raise HTTPFound(h.route_path('admin_permissions_application'))
144 raise HTTPFound(h.route_path('admin_permissions_application'))
147
145
148 @LoginRequired()
146 @LoginRequired()
149 @HasPermissionAllDecorator('hg.admin')
147 @HasPermissionAllDecorator('hg.admin')
150 @view_config(
148 @view_config(
151 route_name='admin_permissions_object', request_method='GET',
149 route_name='admin_permissions_object', request_method='GET',
152 renderer='rhodecode:templates/admin/permissions/permissions.mako')
150 renderer='rhodecode:templates/admin/permissions/permissions.mako')
153 def permissions_objects(self):
151 def permissions_objects(self):
154 c = self.load_default_context()
152 c = self.load_default_context()
155 c.active = 'objects'
153 c.active = 'objects'
156
154
157 c.user = User.get_default_user(refresh=True)
155 c.user = User.get_default_user(refresh=True)
158 defaults = {}
156 defaults = {}
159 defaults.update(c.user.get_default_perms())
157 defaults.update(c.user.get_default_perms())
160
158
161 data = render(
159 data = render(
162 'rhodecode:templates/admin/permissions/permissions.mako',
160 'rhodecode:templates/admin/permissions/permissions.mako',
163 self._get_template_context(c), self.request)
161 self._get_template_context(c), self.request)
164 html = formencode.htmlfill.render(
162 html = formencode.htmlfill.render(
165 data,
163 data,
166 defaults=defaults,
164 defaults=defaults,
167 encoding="UTF-8",
165 encoding="UTF-8",
168 force_defaults=False
166 force_defaults=False
169 )
167 )
170 return Response(html)
168 return Response(html)
171
169
172 @LoginRequired()
170 @LoginRequired()
173 @HasPermissionAllDecorator('hg.admin')
171 @HasPermissionAllDecorator('hg.admin')
174 @CSRFRequired()
172 @CSRFRequired()
175 @view_config(
173 @view_config(
176 route_name='admin_permissions_object_update', request_method='POST',
174 route_name='admin_permissions_object_update', request_method='POST',
177 renderer='rhodecode:templates/admin/permissions/permissions.mako')
175 renderer='rhodecode:templates/admin/permissions/permissions.mako')
178 def permissions_objects_update(self):
176 def permissions_objects_update(self):
179 _ = self.request.translate
177 _ = self.request.translate
180 c = self.load_default_context()
178 c = self.load_default_context()
181 c.active = 'objects'
179 c.active = 'objects'
182
180
183 _form = ObjectPermissionsForm(
181 _form = ObjectPermissionsForm(
184 self.request.translate,
182 self.request.translate,
185 [x[0] for x in c.repo_perms_choices],
183 [x[0] for x in c.repo_perms_choices],
186 [x[0] for x in c.group_perms_choices],
184 [x[0] for x in c.group_perms_choices],
187 [x[0] for x in c.user_group_perms_choices])()
185 [x[0] for x in c.user_group_perms_choices])()
188
186
189 try:
187 try:
190 form_result = _form.to_python(dict(self.request.POST))
188 form_result = _form.to_python(dict(self.request.POST))
191 form_result.update({'perm_user_name': User.DEFAULT_USER})
189 form_result.update({'perm_user_name': User.DEFAULT_USER})
192 PermissionModel().update_object_permissions(form_result)
190 PermissionModel().update_object_permissions(form_result)
193
191
194 Session().commit()
192 Session().commit()
195 h.flash(_('Object permissions updated successfully'),
193 h.flash(_('Object permissions updated successfully'),
196 category='success')
194 category='success')
197
195
198 except formencode.Invalid as errors:
196 except formencode.Invalid as errors:
199 defaults = errors.value
197 defaults = errors.value
200
198
201 data = render(
199 data = render(
202 'rhodecode:templates/admin/permissions/permissions.mako',
200 'rhodecode:templates/admin/permissions/permissions.mako',
203 self._get_template_context(c), self.request)
201 self._get_template_context(c), self.request)
204 html = formencode.htmlfill.render(
202 html = formencode.htmlfill.render(
205 data,
203 data,
206 defaults=defaults,
204 defaults=defaults,
207 errors=errors.error_dict or {},
205 errors=errors.error_dict or {},
208 prefix_error=False,
206 prefix_error=False,
209 encoding="UTF-8",
207 encoding="UTF-8",
210 force_defaults=False
208 force_defaults=False
211 )
209 )
212 return Response(html)
210 return Response(html)
213 except Exception:
211 except Exception:
214 log.exception("Exception during update of permissions")
212 log.exception("Exception during update of permissions")
215 h.flash(_('Error occurred during update of permissions'),
213 h.flash(_('Error occurred during update of permissions'),
216 category='error')
214 category='error')
217
215
218 raise HTTPFound(h.route_path('admin_permissions_object'))
216 raise HTTPFound(h.route_path('admin_permissions_object'))
219
217
220 @LoginRequired()
218 @LoginRequired()
221 @HasPermissionAllDecorator('hg.admin')
219 @HasPermissionAllDecorator('hg.admin')
222 @view_config(
220 @view_config(
223 route_name='admin_permissions_global', request_method='GET',
221 route_name='admin_permissions_global', request_method='GET',
224 renderer='rhodecode:templates/admin/permissions/permissions.mako')
222 renderer='rhodecode:templates/admin/permissions/permissions.mako')
225 def permissions_global(self):
223 def permissions_global(self):
226 c = self.load_default_context()
224 c = self.load_default_context()
227 c.active = 'global'
225 c.active = 'global'
228
226
229 c.user = User.get_default_user(refresh=True)
227 c.user = User.get_default_user(refresh=True)
230 defaults = {}
228 defaults = {}
231 defaults.update(c.user.get_default_perms())
229 defaults.update(c.user.get_default_perms())
232
230
233 data = render(
231 data = render(
234 'rhodecode:templates/admin/permissions/permissions.mako',
232 'rhodecode:templates/admin/permissions/permissions.mako',
235 self._get_template_context(c), self.request)
233 self._get_template_context(c), self.request)
236 html = formencode.htmlfill.render(
234 html = formencode.htmlfill.render(
237 data,
235 data,
238 defaults=defaults,
236 defaults=defaults,
239 encoding="UTF-8",
237 encoding="UTF-8",
240 force_defaults=False
238 force_defaults=False
241 )
239 )
242 return Response(html)
240 return Response(html)
243
241
244 @LoginRequired()
242 @LoginRequired()
245 @HasPermissionAllDecorator('hg.admin')
243 @HasPermissionAllDecorator('hg.admin')
246 @CSRFRequired()
244 @CSRFRequired()
247 @view_config(
245 @view_config(
248 route_name='admin_permissions_global_update', request_method='POST',
246 route_name='admin_permissions_global_update', request_method='POST',
249 renderer='rhodecode:templates/admin/permissions/permissions.mako')
247 renderer='rhodecode:templates/admin/permissions/permissions.mako')
250 def permissions_global_update(self):
248 def permissions_global_update(self):
251 _ = self.request.translate
249 _ = self.request.translate
252 c = self.load_default_context()
250 c = self.load_default_context()
253 c.active = 'global'
251 c.active = 'global'
254
252
255 _form = UserPermissionsForm(
253 _form = UserPermissionsForm(
256 self.request.translate,
254 self.request.translate,
257 [x[0] for x in c.repo_create_choices],
255 [x[0] for x in c.repo_create_choices],
258 [x[0] for x in c.repo_create_on_write_choices],
256 [x[0] for x in c.repo_create_on_write_choices],
259 [x[0] for x in c.repo_group_create_choices],
257 [x[0] for x in c.repo_group_create_choices],
260 [x[0] for x in c.user_group_create_choices],
258 [x[0] for x in c.user_group_create_choices],
261 [x[0] for x in c.fork_choices],
259 [x[0] for x in c.fork_choices],
262 [x[0] for x in c.inherit_default_permission_choices])()
260 [x[0] for x in c.inherit_default_permission_choices])()
263
261
264 try:
262 try:
265 form_result = _form.to_python(dict(self.request.POST))
263 form_result = _form.to_python(dict(self.request.POST))
266 form_result.update({'perm_user_name': User.DEFAULT_USER})
264 form_result.update({'perm_user_name': User.DEFAULT_USER})
267 PermissionModel().update_user_permissions(form_result)
265 PermissionModel().update_user_permissions(form_result)
268
266
269 Session().commit()
267 Session().commit()
270 h.flash(_('Global permissions updated successfully'),
268 h.flash(_('Global permissions updated successfully'),
271 category='success')
269 category='success')
272
270
273 except formencode.Invalid as errors:
271 except formencode.Invalid as errors:
274 defaults = errors.value
272 defaults = errors.value
275
273
276 data = render(
274 data = render(
277 'rhodecode:templates/admin/permissions/permissions.mako',
275 'rhodecode:templates/admin/permissions/permissions.mako',
278 self._get_template_context(c), self.request)
276 self._get_template_context(c), self.request)
279 html = formencode.htmlfill.render(
277 html = formencode.htmlfill.render(
280 data,
278 data,
281 defaults=defaults,
279 defaults=defaults,
282 errors=errors.error_dict or {},
280 errors=errors.error_dict or {},
283 prefix_error=False,
281 prefix_error=False,
284 encoding="UTF-8",
282 encoding="UTF-8",
285 force_defaults=False
283 force_defaults=False
286 )
284 )
287 return Response(html)
285 return Response(html)
288 except Exception:
286 except Exception:
289 log.exception("Exception during update of permissions")
287 log.exception("Exception during update of permissions")
290 h.flash(_('Error occurred during update of permissions'),
288 h.flash(_('Error occurred during update of permissions'),
291 category='error')
289 category='error')
292
290
293 raise HTTPFound(h.route_path('admin_permissions_global'))
291 raise HTTPFound(h.route_path('admin_permissions_global'))
294
292
295 @LoginRequired()
293 @LoginRequired()
296 @HasPermissionAllDecorator('hg.admin')
294 @HasPermissionAllDecorator('hg.admin')
297 @view_config(
295 @view_config(
298 route_name='admin_permissions_ips', request_method='GET',
296 route_name='admin_permissions_ips', request_method='GET',
299 renderer='rhodecode:templates/admin/permissions/permissions.mako')
297 renderer='rhodecode:templates/admin/permissions/permissions.mako')
300 def permissions_ips(self):
298 def permissions_ips(self):
301 c = self.load_default_context()
299 c = self.load_default_context()
302 c.active = 'ips'
300 c.active = 'ips'
303
301
304 c.user = User.get_default_user(refresh=True)
302 c.user = User.get_default_user(refresh=True)
305 c.user_ip_map = (
303 c.user_ip_map = (
306 UserIpMap.query().filter(UserIpMap.user == c.user).all())
304 UserIpMap.query().filter(UserIpMap.user == c.user).all())
307
305
308 return self._get_template_context(c)
306 return self._get_template_context(c)
309
307
310 @LoginRequired()
308 @LoginRequired()
311 @HasPermissionAllDecorator('hg.admin')
309 @HasPermissionAllDecorator('hg.admin')
312 @view_config(
310 @view_config(
313 route_name='admin_permissions_overview', request_method='GET',
311 route_name='admin_permissions_overview', request_method='GET',
314 renderer='rhodecode:templates/admin/permissions/permissions.mako')
312 renderer='rhodecode:templates/admin/permissions/permissions.mako')
315 def permissions_overview(self):
313 def permissions_overview(self):
316 c = self.load_default_context()
314 c = self.load_default_context()
317 c.active = 'perms'
315 c.active = 'perms'
318
316
319 c.user = User.get_default_user(refresh=True)
317 c.user = User.get_default_user(refresh=True)
320 c.perm_user = c.user.AuthUser()
318 c.perm_user = c.user.AuthUser()
321 return self._get_template_context(c)
319 return self._get_template_context(c)
322
320
323 @LoginRequired()
321 @LoginRequired()
324 @HasPermissionAllDecorator('hg.admin')
322 @HasPermissionAllDecorator('hg.admin')
325 @view_config(
323 @view_config(
326 route_name='admin_permissions_auth_token_access', request_method='GET',
324 route_name='admin_permissions_auth_token_access', request_method='GET',
327 renderer='rhodecode:templates/admin/permissions/permissions.mako')
325 renderer='rhodecode:templates/admin/permissions/permissions.mako')
328 def auth_token_access(self):
326 def auth_token_access(self):
329 from rhodecode import CONFIG
327 from rhodecode import CONFIG
330
328
331 c = self.load_default_context()
329 c = self.load_default_context()
332 c.active = 'auth_token_access'
330 c.active = 'auth_token_access'
333
331
334 c.user = User.get_default_user(refresh=True)
332 c.user = User.get_default_user(refresh=True)
335 c.perm_user = c.user.AuthUser()
333 c.perm_user = c.user.AuthUser()
336
334
337 mapper = self.request.registry.queryUtility(IRoutesMapper)
335 mapper = self.request.registry.queryUtility(IRoutesMapper)
338 c.view_data = []
336 c.view_data = []
339
337
340 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
338 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
341 introspector = self.request.registry.introspector
339 introspector = self.request.registry.introspector
342
340
343 view_intr = {}
341 view_intr = {}
344 for view_data in introspector.get_category('views'):
342 for view_data in introspector.get_category('views'):
345 intr = view_data['introspectable']
343 intr = view_data['introspectable']
346
344
347 if 'route_name' in intr and intr['attr']:
345 if 'route_name' in intr and intr['attr']:
348 view_intr[intr['route_name']] = '{}:{}'.format(
346 view_intr[intr['route_name']] = '{}:{}'.format(
349 str(intr['derived_callable'].func_name), intr['attr']
347 str(intr['derived_callable'].func_name), intr['attr']
350 )
348 )
351
349
352 c.whitelist_key = 'api_access_controllers_whitelist'
350 c.whitelist_key = 'api_access_controllers_whitelist'
353 c.whitelist_file = CONFIG.get('__file__')
351 c.whitelist_file = CONFIG.get('__file__')
354 whitelist_views = aslist(
352 whitelist_views = aslist(
355 CONFIG.get(c.whitelist_key), sep=',')
353 CONFIG.get(c.whitelist_key), sep=',')
356
354
357 for route_info in mapper.get_routes():
355 for route_info in mapper.get_routes():
358 if not route_info.name.startswith('__'):
356 if not route_info.name.startswith('__'):
359 routepath = route_info.pattern
357 routepath = route_info.pattern
360
358
361 def replace(matchobj):
359 def replace(matchobj):
362 if matchobj.group(1):
360 if matchobj.group(1):
363 return "{%s}" % matchobj.group(1).split(':')[0]
361 return "{%s}" % matchobj.group(1).split(':')[0]
364 else:
362 else:
365 return "{%s}" % matchobj.group(2)
363 return "{%s}" % matchobj.group(2)
366
364
367 routepath = _argument_prog.sub(replace, routepath)
365 routepath = _argument_prog.sub(replace, routepath)
368
366
369 if not routepath.startswith('/'):
367 if not routepath.startswith('/'):
370 routepath = '/' + routepath
368 routepath = '/' + routepath
371
369
372 view_fqn = view_intr.get(route_info.name, 'NOT AVAILABLE')
370 view_fqn = view_intr.get(route_info.name, 'NOT AVAILABLE')
373 active = view_fqn in whitelist_views
371 active = view_fqn in whitelist_views
374 c.view_data.append((route_info.name, view_fqn, routepath, active))
372 c.view_data.append((route_info.name, view_fqn, routepath, active))
375
373
376 c.whitelist_views = whitelist_views
374 c.whitelist_views = whitelist_views
377 return self._get_template_context(c)
375 return self._get_template_context(c)
378
376
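
Reviewer note: the auth_token_access view above walks Pyramid's routes mapper and collapses every "{name:regex}" placeholder down to a plain "{name}" before comparing views against the whitelist. Below is a minimal, self-contained sketch of that normalization; the route names and patterns are made up for illustration, and this is not RhodeCode's code.

import re

from pyramid.config import Configurator
from pyramid.interfaces import IRoutesMapper

# hypothetical routes, used only for this illustration
config = Configurator()
config.add_route('repo_summary', '/{repo_name:.*?[^/]}/summary')
config.add_route('user_edit', '/users/{user_id}/edit')
config.commit()

mapper = config.registry.queryUtility(IRoutesMapper)
placeholder = re.compile(r'\{(.*?)\}|:\((.*)\)')

def normalize(match):
    # keep only the placeholder name, dropping any inline regex part
    name = match.group(1) or match.group(2)
    return '{%s}' % name.split(':')[0]

for route in mapper.get_routes():
    pattern = placeholder.sub(normalize, route.pattern)
    if not pattern.startswith('/'):
        pattern = '/' + pattern
    # e.g. repo_summary -> /{repo_name}/summary
    print(route.name, pattern)

A route whose resolved view name appears in the api_access_controllers_whitelist ini value would then be flagged as active in the grid.
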
379 def ssh_enabled(self):
377 def ssh_enabled(self):
380 return self.request.registry.settings.get(
378 return self.request.registry.settings.get(
381 'ssh.generate_authorized_keyfile')
379 'ssh.generate_authorized_keyfile')
382
380
383 @LoginRequired()
381 @LoginRequired()
384 @HasPermissionAllDecorator('hg.admin')
382 @HasPermissionAllDecorator('hg.admin')
385 @view_config(
383 @view_config(
386 route_name='admin_permissions_ssh_keys', request_method='GET',
384 route_name='admin_permissions_ssh_keys', request_method='GET',
387 renderer='rhodecode:templates/admin/permissions/permissions.mako')
385 renderer='rhodecode:templates/admin/permissions/permissions.mako')
388 def ssh_keys(self):
386 def ssh_keys(self):
389 c = self.load_default_context()
387 c = self.load_default_context()
390 c.active = 'ssh_keys'
388 c.active = 'ssh_keys'
391 c.ssh_enabled = self.ssh_enabled()
389 c.ssh_enabled = self.ssh_enabled()
392 return self._get_template_context(c)
390 return self._get_template_context(c)
393
391
394 @LoginRequired()
392 @LoginRequired()
395 @HasPermissionAllDecorator('hg.admin')
393 @HasPermissionAllDecorator('hg.admin')
396 @view_config(
394 @view_config(
397 route_name='admin_permissions_ssh_keys_data', request_method='GET',
395 route_name='admin_permissions_ssh_keys_data', request_method='GET',
398 renderer='json_ext', xhr=True)
396 renderer='json_ext', xhr=True)
399 def ssh_keys_data(self):
397 def ssh_keys_data(self):
400 _ = self.request.translate
398 _ = self.request.translate
401 self.load_default_context()
399 self.load_default_context()
402 column_map = {
400 column_map = {
403 'fingerprint': 'ssh_key_fingerprint',
401 'fingerprint': 'ssh_key_fingerprint',
404 'username': User.username
402 'username': User.username
405 }
403 }
406 draw, start, limit = self._extract_chunk(self.request)
404 draw, start, limit = self._extract_chunk(self.request)
407 search_q, order_by, order_dir = self._extract_ordering(
405 search_q, order_by, order_dir = self._extract_ordering(
408 self.request, column_map=column_map)
406 self.request, column_map=column_map)
409
407
410 ssh_keys_data_total_count = UserSshKeys.query()\
408 ssh_keys_data_total_count = UserSshKeys.query()\
411 .count()
409 .count()
412
410
413 # generate json data
411 # generate json data
414 base_q = UserSshKeys.query().join(UserSshKeys.user)
412 base_q = UserSshKeys.query().join(UserSshKeys.user)
415
413
416 if search_q:
414 if search_q:
417 like_expression = u'%{}%'.format(safe_unicode(search_q))
415 like_expression = u'%{}%'.format(safe_unicode(search_q))
418 base_q = base_q.filter(or_(
416 base_q = base_q.filter(or_(
419 User.username.ilike(like_expression),
417 User.username.ilike(like_expression),
420 UserSshKeys.ssh_key_fingerprint.ilike(like_expression),
418 UserSshKeys.ssh_key_fingerprint.ilike(like_expression),
421 ))
419 ))
422
420
423 users_data_total_filtered_count = base_q.count()
421 users_data_total_filtered_count = base_q.count()
424
422
425 sort_col = self._get_order_col(order_by, UserSshKeys)
423 sort_col = self._get_order_col(order_by, UserSshKeys)
426 if sort_col:
424 if sort_col:
427 if order_dir == 'asc':
425 if order_dir == 'asc':
428 # handle null values properly to order by NULL last
426 # handle null values properly to order by NULL last
429 if order_by in ['created_on']:
427 if order_by in ['created_on']:
430 sort_col = coalesce(sort_col, datetime.date.max)
428 sort_col = coalesce(sort_col, datetime.date.max)
431 sort_col = sort_col.asc()
429 sort_col = sort_col.asc()
432 else:
430 else:
433 # handle null values properly to order by NULL last
431 # handle null values properly to order by NULL last
434 if order_by in ['created_on']:
432 if order_by in ['created_on']:
435 sort_col = coalesce(sort_col, datetime.date.min)
433 sort_col = coalesce(sort_col, datetime.date.min)
436 sort_col = sort_col.desc()
434 sort_col = sort_col.desc()
437
435
438 base_q = base_q.order_by(sort_col)
436 base_q = base_q.order_by(sort_col)
439 base_q = base_q.offset(start).limit(limit)
437 base_q = base_q.offset(start).limit(limit)
440
438
441 ssh_keys = base_q.all()
439 ssh_keys = base_q.all()
442
440
443 ssh_keys_data = []
441 ssh_keys_data = []
444 for ssh_key in ssh_keys:
442 for ssh_key in ssh_keys:
445 ssh_keys_data.append({
443 ssh_keys_data.append({
446 "username": h.gravatar_with_user(self.request, ssh_key.user.username),
444 "username": h.gravatar_with_user(self.request, ssh_key.user.username),
447 "fingerprint": ssh_key.ssh_key_fingerprint,
445 "fingerprint": ssh_key.ssh_key_fingerprint,
448 "description": ssh_key.description,
446 "description": ssh_key.description,
449 "created_on": h.format_date(ssh_key.created_on),
447 "created_on": h.format_date(ssh_key.created_on),
450 "accessed_on": h.format_date(ssh_key.accessed_on),
448 "accessed_on": h.format_date(ssh_key.accessed_on),
451 "action": h.link_to(
449 "action": h.link_to(
452 _('Edit'), h.route_path('edit_user_ssh_keys',
450 _('Edit'), h.route_path('edit_user_ssh_keys',
453 user_id=ssh_key.user.user_id))
451 user_id=ssh_key.user.user_id))
454 })
452 })
455
453
456 data = ({
454 data = ({
457 'draw': draw,
455 'draw': draw,
458 'data': ssh_keys_data,
456 'data': ssh_keys_data,
459 'recordsTotal': ssh_keys_data_total_count,
457 'recordsTotal': ssh_keys_data_total_count,
460 'recordsFiltered': users_data_total_filtered_count,
458 'recordsFiltered': users_data_total_filtered_count,
461 })
459 })
462
460
463 return data
461 return data
464
462
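
Reviewer note: ssh_keys_data above follows the usual DataTables server-side pattern: count everything, apply an optional ILIKE search, make NULL dates sort last via coalesce(), then page with offset/limit. Here is a minimal, self-contained sketch of that query shape, using a throwaway SQLite model instead of RhodeCode's UserSshKeys; names and fields are assumptions.

import datetime

from sqlalchemy import Column, Date, Integer, String, create_engine, or_
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.sql import func

Base = declarative_base()

class SshKey(Base):                 # stand-in model, not RhodeCode's
    __tablename__ = 'ssh_keys'
    id = Column(Integer, primary_key=True)
    username = Column(String)
    fingerprint = Column(String)
    created_on = Column(Date, nullable=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

def ssh_keys_page(session, search_q, order_dir, start, limit):
    base_q = session.query(SshKey)
    if search_q:
        like = u'%{}%'.format(search_q)
        base_q = base_q.filter(or_(
            SshKey.username.ilike(like),
            SshKey.fingerprint.ilike(like)))
    filtered = base_q.count()

    # order NULL created_on values last in both directions
    if order_dir == 'asc':
        sort_col = func.coalesce(SshKey.created_on, datetime.date.max).asc()
    else:
        sort_col = func.coalesce(SshKey.created_on, datetime.date.min).desc()

    rows = base_q.order_by(sort_col).offset(start).limit(limit).all()
    return {'recordsFiltered': filtered, 'rows': rows}

with Session(engine) as session:
    print(ssh_keys_page(session, 'ab', 'asc', start=0, limit=20))
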
465 @LoginRequired()
463 @LoginRequired()
466 @HasPermissionAllDecorator('hg.admin')
464 @HasPermissionAllDecorator('hg.admin')
467 @CSRFRequired()
465 @CSRFRequired()
468 @view_config(
466 @view_config(
469 route_name='admin_permissions_ssh_keys_update', request_method='POST',
467 route_name='admin_permissions_ssh_keys_update', request_method='POST',
470 renderer='rhodecode:templates/admin/permissions/permissions.mako')
468 renderer='rhodecode:templates/admin/permissions/permissions.mako')
471 def ssh_keys_update(self):
469 def ssh_keys_update(self):
472 _ = self.request.translate
470 _ = self.request.translate
473 self.load_default_context()
471 self.load_default_context()
474
472
475 ssh_enabled = self.ssh_enabled()
473 ssh_enabled = self.ssh_enabled()
476 key_file = self.request.registry.settings.get(
474 key_file = self.request.registry.settings.get(
477 'ssh.authorized_keys_file_path')
475 'ssh.authorized_keys_file_path')
478 if ssh_enabled:
476 if ssh_enabled:
479 trigger(SshKeyFileChangeEvent(), self.request.registry)
477 trigger(SshKeyFileChangeEvent(), self.request.registry)
480 h.flash(_('Updated SSH keys file: {}').format(key_file),
478 h.flash(_('Updated SSH keys file: {}').format(key_file),
481 category='success')
479 category='success')
482 else:
480 else:
483 h.flash(_('SSH key support is disabled in .ini file'),
481 h.flash(_('SSH key support is disabled in .ini file'),
484 category='warning')
482 category='warning')
485
483
486 raise HTTPFound(h.route_path('admin_permissions_ssh_keys'))
484 raise HTTPFound(h.route_path('admin_permissions_ssh_keys'))
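
Reviewer note: ssh_keys_update only flashes a message itself; rewriting the authorized_keys file is delegated to whatever subscribes to SshKeyFileChangeEvent. A minimal sketch of that publish/subscribe idea using plain Pyramid machinery follows; the event class, subscriber, and file path here are all made up and are not RhodeCode's event system.

from pyramid.config import Configurator

class SshKeyFileChanged(object):
    # hypothetical event carrying the file that needs regenerating
    def __init__(self, key_file):
        self.key_file = key_file

def rewrite_authorized_keys(event):
    # a real subscriber would rebuild event.key_file on disk here
    print('regenerating %s' % event.key_file)

config = Configurator()
config.add_subscriber(rewrite_authorized_keys, SshKeyFileChanged)
config.commit()

# firing the event from a view (or anywhere with access to the registry)
config.registry.notify(SshKeyFileChanged('/home/user/.ssh/authorized_keys'))
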
@@ -1,183 +1,183 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import formencode
22 import formencode
23 import formencode.htmlfill
23 import formencode.htmlfill
24
24
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from pyramid.renderers import render
27 from pyramid.renderers import render
28 from pyramid.response import Response
28 from pyramid.response import Response
29
29
30 from rhodecode.apps._base import BaseAppView, DataGridAppView
30 from rhodecode.apps._base import BaseAppView, DataGridAppView
31
31
32 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, CSRFRequired, NotAnonymous,
34 LoginRequired, CSRFRequired, NotAnonymous,
35 HasPermissionAny, HasRepoGroupPermissionAny)
35 HasPermissionAny, HasRepoGroupPermissionAny)
36 from rhodecode.lib import helpers as h
36 from rhodecode.lib import helpers as h
37 from rhodecode.lib.utils import repo_name_slug
37 from rhodecode.lib.utils import repo_name_slug
38 from rhodecode.lib.utils2 import safe_int, safe_unicode
38 from rhodecode.lib.utils2 import safe_int, safe_unicode
39 from rhodecode.model.forms import RepoForm
39 from rhodecode.model.forms import RepoForm
40 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.repo import RepoModel
41 from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel
41 from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel
42 from rhodecode.model.settings import SettingsModel
42 from rhodecode.model.settings import SettingsModel
43 from rhodecode.model.db import Repository, RepoGroup
43 from rhodecode.model.db import Repository, RepoGroup
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 class AdminReposView(BaseAppView, DataGridAppView):
48 class AdminReposView(BaseAppView, DataGridAppView):
49
49
50 def load_default_context(self):
50 def load_default_context(self):
51 c = self._get_local_tmpl_context()
51 c = self._get_local_tmpl_context()
52
52
53 return c
53 return c
54
54
55 def _load_form_data(self, c):
55 def _load_form_data(self, c):
56 acl_groups = RepoGroupList(RepoGroup.query().all(),
56 acl_groups = RepoGroupList(RepoGroup.query().all(),
57 perm_set=['group.write', 'group.admin'])
57 perm_set=['group.write', 'group.admin'])
58 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
58 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
59 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
59 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
60 c.landing_revs_choices, c.landing_revs = \
60 c.landing_revs_choices, c.landing_revs = \
61 ScmModel().get_repo_landing_revs()
61 ScmModel().get_repo_landing_revs(self.request.translate)
62 c.personal_repo_group = self._rhodecode_user.personal_repo_group
62 c.personal_repo_group = self._rhodecode_user.personal_repo_group
63
63
64 @LoginRequired()
64 @LoginRequired()
65 @NotAnonymous()
65 @NotAnonymous()
66 # perms check inside
66 # perms check inside
67 @view_config(
67 @view_config(
68 route_name='repos', request_method='GET',
68 route_name='repos', request_method='GET',
69 renderer='rhodecode:templates/admin/repos/repos.mako')
69 renderer='rhodecode:templates/admin/repos/repos.mako')
70 def repository_list(self):
70 def repository_list(self):
71 c = self.load_default_context()
71 c = self.load_default_context()
72
72
73 repo_list = Repository.get_all_repos()
73 repo_list = Repository.get_all_repos()
74 c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
74 c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
75 repos_data = RepoModel().get_repos_as_dict(
75 repos_data = RepoModel().get_repos_as_dict(
76 repo_list=c.repo_list, admin=True, super_user_actions=True)
76 repo_list=c.repo_list, admin=True, super_user_actions=True)
77 # json used to render the grid
77 # json used to render the grid
78 c.data = json.dumps(repos_data)
78 c.data = json.dumps(repos_data)
79
79
80 return self._get_template_context(c)
80 return self._get_template_context(c)
81
81
82 @LoginRequired()
82 @LoginRequired()
83 @NotAnonymous()
83 @NotAnonymous()
84 # perms check inside
84 # perms check inside
85 @view_config(
85 @view_config(
86 route_name='repo_new', request_method='GET',
86 route_name='repo_new', request_method='GET',
87 renderer='rhodecode:templates/admin/repos/repo_add.mako')
87 renderer='rhodecode:templates/admin/repos/repo_add.mako')
88 def repository_new(self):
88 def repository_new(self):
89 c = self.load_default_context()
89 c = self.load_default_context()
90
90
91 new_repo = self.request.GET.get('repo', '')
91 new_repo = self.request.GET.get('repo', '')
92 parent_group = safe_int(self.request.GET.get('parent_group'))
92 parent_group = safe_int(self.request.GET.get('parent_group'))
93 _gr = RepoGroup.get(parent_group)
93 _gr = RepoGroup.get(parent_group)
94
94
95 if not HasPermissionAny('hg.admin', 'hg.create.repository')():
95 if not HasPermissionAny('hg.admin', 'hg.create.repository')():
96 # you're neither a super admin nor do you have global create permissions,
96 # you're neither a super admin nor do you have global create permissions,
97 # but maybe you have at least write permission to a parent group?
97 # but maybe you have at least write permission to a parent group?
98
98
99 gr_name = _gr.group_name if _gr else None
99 gr_name = _gr.group_name if _gr else None
100 # check if 'create repository with write permission on repo group' is set to true
100 # check if 'create repository with write permission on repo group' is set to true
101 create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
101 create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
102 group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
102 group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
103 group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
103 group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
104 if not (group_admin or (group_write and create_on_write)):
104 if not (group_admin or (group_write and create_on_write)):
105 raise HTTPForbidden()
105 raise HTTPForbidden()
106
106
107 self._load_form_data(c)
107 self._load_form_data(c)
108 c.new_repo = repo_name_slug(new_repo)
108 c.new_repo = repo_name_slug(new_repo)
109
109
110 # apply the defaults from defaults page
110 # apply the defaults from defaults page
111 defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
111 defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
112 # set checkbox to autochecked
112 # set checkbox to autochecked
113 defaults['repo_copy_permissions'] = True
113 defaults['repo_copy_permissions'] = True
114
114
115 parent_group_choice = '-1'
115 parent_group_choice = '-1'
116 if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group:
116 if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group:
117 parent_group_choice = self._rhodecode_user.personal_repo_group
117 parent_group_choice = self._rhodecode_user.personal_repo_group
118
118
119 if parent_group and _gr:
119 if parent_group and _gr:
120 if parent_group in [x[0] for x in c.repo_groups]:
120 if parent_group in [x[0] for x in c.repo_groups]:
121 parent_group_choice = safe_unicode(parent_group)
121 parent_group_choice = safe_unicode(parent_group)
122
122
123 defaults.update({'repo_group': parent_group_choice})
123 defaults.update({'repo_group': parent_group_choice})
124
124
125 data = render('rhodecode:templates/admin/repos/repo_add.mako',
125 data = render('rhodecode:templates/admin/repos/repo_add.mako',
126 self._get_template_context(c), self.request)
126 self._get_template_context(c), self.request)
127 html = formencode.htmlfill.render(
127 html = formencode.htmlfill.render(
128 data,
128 data,
129 defaults=defaults,
129 defaults=defaults,
130 encoding="UTF-8",
130 encoding="UTF-8",
131 force_defaults=False
131 force_defaults=False
132 )
132 )
133 return Response(html)
133 return Response(html)
134
134
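
Reviewer note: repository_new (and the POST handler below it) renders the Mako template first and then lets formencode.htmlfill inject defaults, or the submitted values plus error messages, back into the form markup. Here is a minimal, self-contained sketch of that round trip using a toy schema and inline HTML instead of RhodeCode's RepoForm and template.

import formencode
from formencode import htmlfill, validators

FORM_HTML = u'''
<form method="post">
  <input type="text" name="repo_name" />
  <input type="submit" value="create" />
</form>
'''

class ToyRepoForm(formencode.Schema):   # hypothetical, far smaller than RepoForm
    allow_extra_fields = True
    filter_extra_fields = True
    repo_name = validators.UnicodeString(not_empty=True)

def render_form(post_data):
    try:
        values = ToyRepoForm().to_python(post_data)
        defaults, errors = values, {}
    except formencode.Invalid as exc:
        defaults, errors = exc.value, exc.error_dict or {}
    return htmlfill.render(
        FORM_HTML, defaults=defaults, errors=errors,
        prefix_error=False, encoding='UTF-8', force_defaults=False)

print(render_form({'repo_name': ''}))      # error text is inserted at the field
print(render_form({'repo_name': 'docs'}))  # value is pre-filled on re-render
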
135 @LoginRequired()
135 @LoginRequired()
136 @NotAnonymous()
136 @NotAnonymous()
137 @CSRFRequired()
137 @CSRFRequired()
138 # perms check inside
138 # perms check inside
139 @view_config(
139 @view_config(
140 route_name='repo_create', request_method='POST',
140 route_name='repo_create', request_method='POST',
141 renderer='rhodecode:templates/admin/repos/repos.mako')
141 renderer='rhodecode:templates/admin/repos/repos.mako')
142 def repository_create(self):
142 def repository_create(self):
143 c = self.load_default_context()
143 c = self.load_default_context()
144
144
145 form_result = {}
145 form_result = {}
146 task_id = None
146 task_id = None
147 self._load_form_data(c)
147 self._load_form_data(c)
148
148
149 try:
149 try:
150 # the CanWriteToGroup validator checks permissions of this POST
150 # the CanWriteToGroup validator checks permissions of this POST
151 form = RepoForm(
151 form = RepoForm(
152 self.request.translate, repo_groups=c.repo_groups_choices,
152 self.request.translate, repo_groups=c.repo_groups_choices,
153 landing_revs=c.landing_revs_choices)()
153 landing_revs=c.landing_revs_choices)()
154 form_result = form.to_python(dict(self.request.POST))
154 form_result = form.to_python(dict(self.request.POST))
155
155
156 # creation is sometimes done asynchronously via celery; db transaction
156 # creation is sometimes done asynchronously via celery; db transaction
157 # management is handled there.
157 # management is handled there.
158 task = RepoModel().create(form_result, self._rhodecode_user.user_id)
158 task = RepoModel().create(form_result, self._rhodecode_user.user_id)
159 from celery.result import BaseAsyncResult
159 from celery.result import BaseAsyncResult
160 if isinstance(task, BaseAsyncResult):
160 if isinstance(task, BaseAsyncResult):
161 task_id = task.task_id
161 task_id = task.task_id
162 except formencode.Invalid as errors:
162 except formencode.Invalid as errors:
163 data = render('rhodecode:templates/admin/repos/repo_add.mako',
163 data = render('rhodecode:templates/admin/repos/repo_add.mako',
164 self._get_template_context(c), self.request)
164 self._get_template_context(c), self.request)
165 html = formencode.htmlfill.render(
165 html = formencode.htmlfill.render(
166 data,
166 data,
167 defaults=errors.value,
167 defaults=errors.value,
168 errors=errors.error_dict or {},
168 errors=errors.error_dict or {},
169 prefix_error=False,
169 prefix_error=False,
170 encoding="UTF-8",
170 encoding="UTF-8",
171 force_defaults=False
171 force_defaults=False
172 )
172 )
173 return Response(html)
173 return Response(html)
174
174
175 except Exception as e:
175 except Exception as e:
176 msg = self._log_creation_exception(e, form_result.get('repo_name'))
176 msg = self._log_creation_exception(e, form_result.get('repo_name'))
177 h.flash(msg, category='error')
177 h.flash(msg, category='error')
178 raise HTTPFound(h.route_path('home'))
178 raise HTTPFound(h.route_path('home'))
179
179
180 raise HTTPFound(
180 raise HTTPFound(
181 h.route_path('repo_creating',
181 h.route_path('repo_creating',
182 repo_name=form_result['repo_name_full'],
182 repo_name=form_result['repo_name_full'],
183 _query=dict(task_id=task_id)))
183 _query=dict(task_id=task_id)))
@@ -1,762 +1,762 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23 import collections
23 import collections
24
24
25 import datetime
25 import datetime
26 import formencode
26 import formencode
27 import formencode.htmlfill
27 import formencode.htmlfill
28
28
29 import rhodecode
29 import rhodecode
30 from pyramid.view import view_config
30 from pyramid.view import view_config
31 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
31 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
32 from pyramid.renderers import render
32 from pyramid.renderers import render
33 from pyramid.response import Response
33 from pyramid.response import Response
34
34
35 from rhodecode.apps._base import BaseAppView
35 from rhodecode.apps._base import BaseAppView
36 from rhodecode.apps.admin.navigation import navigation_list
36 from rhodecode.apps.admin.navigation import navigation_list
37 from rhodecode.apps.svn_support.config_keys import generate_config
37 from rhodecode.apps.svn_support.config_keys import generate_config
38 from rhodecode.lib import helpers as h
38 from rhodecode.lib import helpers as h
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
40 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
41 from rhodecode.lib.celerylib import tasks, run_task
41 from rhodecode.lib.celerylib import tasks, run_task
42 from rhodecode.lib.utils import repo2db_mapper
42 from rhodecode.lib.utils import repo2db_mapper
43 from rhodecode.lib.utils2 import str2bool, safe_unicode, AttributeDict
43 from rhodecode.lib.utils2 import str2bool, safe_unicode, AttributeDict
44 from rhodecode.lib.index import searcher_from_config
44 from rhodecode.lib.index import searcher_from_config
45
45
46 from rhodecode.model.db import RhodeCodeUi, Repository
46 from rhodecode.model.db import RhodeCodeUi, Repository
47 from rhodecode.model.forms import (ApplicationSettingsForm,
47 from rhodecode.model.forms import (ApplicationSettingsForm,
48 ApplicationUiSettingsForm, ApplicationVisualisationForm,
48 ApplicationUiSettingsForm, ApplicationVisualisationForm,
49 LabsSettingsForm, IssueTrackerPatternsForm)
49 LabsSettingsForm, IssueTrackerPatternsForm)
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51
51
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.notification import EmailNotificationModel
53 from rhodecode.model.notification import EmailNotificationModel
54 from rhodecode.model.meta import Session
54 from rhodecode.model.meta import Session
55 from rhodecode.model.settings import (
55 from rhodecode.model.settings import (
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 SettingsModel)
57 SettingsModel)
58
58
59
59
60 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
61
61
62
62
63 class AdminSettingsView(BaseAppView):
63 class AdminSettingsView(BaseAppView):
64
64
65 def load_default_context(self):
65 def load_default_context(self):
66 c = self._get_local_tmpl_context()
66 c = self._get_local_tmpl_context()
67 c.labs_active = str2bool(
67 c.labs_active = str2bool(
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 c.navlist = navigation_list(self.request)
69 c.navlist = navigation_list(self.request)
70
70
71 return c
71 return c
72
72
73 @classmethod
73 @classmethod
74 def _get_ui_settings(cls):
74 def _get_ui_settings(cls):
75 ret = RhodeCodeUi.query().all()
75 ret = RhodeCodeUi.query().all()
76
76
77 if not ret:
77 if not ret:
78 raise Exception('Could not get application ui settings!')
78 raise Exception('Could not get application ui settings!')
79 settings = {}
79 settings = {}
80 for each in ret:
80 for each in ret:
81 k = each.ui_key
81 k = each.ui_key
82 v = each.ui_value
82 v = each.ui_value
83 if k == '/':
83 if k == '/':
84 k = 'root_path'
84 k = 'root_path'
85
85
86 if k in ['push_ssl', 'publish', 'enabled']:
86 if k in ['push_ssl', 'publish', 'enabled']:
87 v = str2bool(v)
87 v = str2bool(v)
88
88
89 if k.find('.') != -1:
89 if k.find('.') != -1:
90 k = k.replace('.', '_')
90 k = k.replace('.', '_')
91
91
92 if each.ui_section in ['hooks', 'extensions']:
92 if each.ui_section in ['hooks', 'extensions']:
93 v = each.ui_active
93 v = each.ui_active
94
94
95 settings[each.ui_section + '_' + k] = v
95 settings[each.ui_section + '_' + k] = v
96 return settings
96 return settings
97
97
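
Reviewer note: _get_ui_settings above flattens the RhodeCodeUi rows into a single dict whose keys are '<section>_<key>'. The sketch below reproduces just that transformation on hypothetical (section, key, value, active) tuples, independent of the database models; str2bool is a simplified stand-in for rhodecode.lib.utils2.str2bool.

def str2bool(value):
    # simplified stand-in for the real helper
    return str(value).strip().lower() in ('true', 'yes', 'on', '1')

def flatten_ui_rows(rows):
    settings = {}
    for section, key, value, active in rows:
        if key == '/':
            key = 'root_path'
        if key in ('push_ssl', 'publish', 'enabled'):
            value = str2bool(value)
        key = key.replace('.', '_')
        if section in ('hooks', 'extensions'):
            value = active      # only the on/off flag matters for these
        settings['%s_%s' % (section, key)] = value
    return settings

rows = [
    ('paths', '/', '/srv/repos', True),
    ('web', 'push_ssl', 'false', True),
    ('hooks', 'changegroup.repo_size', 'python:hooks', True),
]
print(flatten_ui_rows(rows))
# -> {'paths_root_path': '/srv/repos', 'web_push_ssl': False,
#     'hooks_changegroup_repo_size': True}
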
98 @classmethod
98 @classmethod
99 def _form_defaults(cls):
99 def _form_defaults(cls):
100 defaults = SettingsModel().get_all_settings()
100 defaults = SettingsModel().get_all_settings()
101 defaults.update(cls._get_ui_settings())
101 defaults.update(cls._get_ui_settings())
102
102
103 defaults.update({
103 defaults.update({
104 'new_svn_branch': '',
104 'new_svn_branch': '',
105 'new_svn_tag': '',
105 'new_svn_tag': '',
106 })
106 })
107 return defaults
107 return defaults
108
108
109 @LoginRequired()
109 @LoginRequired()
110 @HasPermissionAllDecorator('hg.admin')
110 @HasPermissionAllDecorator('hg.admin')
111 @view_config(
111 @view_config(
112 route_name='admin_settings_vcs', request_method='GET',
112 route_name='admin_settings_vcs', request_method='GET',
113 renderer='rhodecode:templates/admin/settings/settings.mako')
113 renderer='rhodecode:templates/admin/settings/settings.mako')
114 def settings_vcs(self):
114 def settings_vcs(self):
115 c = self.load_default_context()
115 c = self.load_default_context()
116 c.active = 'vcs'
116 c.active = 'vcs'
117 model = VcsSettingsModel()
117 model = VcsSettingsModel()
118 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
118 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
119 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
119 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
120
120
121 settings = self.request.registry.settings
121 settings = self.request.registry.settings
122 c.svn_proxy_generate_config = settings[generate_config]
122 c.svn_proxy_generate_config = settings[generate_config]
123
123
124 defaults = self._form_defaults()
124 defaults = self._form_defaults()
125
125
126 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
126 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
127
127
128 data = render('rhodecode:templates/admin/settings/settings.mako',
128 data = render('rhodecode:templates/admin/settings/settings.mako',
129 self._get_template_context(c), self.request)
129 self._get_template_context(c), self.request)
130 html = formencode.htmlfill.render(
130 html = formencode.htmlfill.render(
131 data,
131 data,
132 defaults=defaults,
132 defaults=defaults,
133 encoding="UTF-8",
133 encoding="UTF-8",
134 force_defaults=False
134 force_defaults=False
135 )
135 )
136 return Response(html)
136 return Response(html)
137
137
138 @LoginRequired()
138 @LoginRequired()
139 @HasPermissionAllDecorator('hg.admin')
139 @HasPermissionAllDecorator('hg.admin')
140 @CSRFRequired()
140 @CSRFRequired()
141 @view_config(
141 @view_config(
142 route_name='admin_settings_vcs_update', request_method='POST',
142 route_name='admin_settings_vcs_update', request_method='POST',
143 renderer='rhodecode:templates/admin/settings/settings.mako')
143 renderer='rhodecode:templates/admin/settings/settings.mako')
144 def settings_vcs_update(self):
144 def settings_vcs_update(self):
145 _ = self.request.translate
145 _ = self.request.translate
146 c = self.load_default_context()
146 c = self.load_default_context()
147 c.active = 'vcs'
147 c.active = 'vcs'
148
148
149 model = VcsSettingsModel()
149 model = VcsSettingsModel()
150 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
150 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
151 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
151 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
152
152
153 settings = self.request.registry.settings
153 settings = self.request.registry.settings
154 c.svn_proxy_generate_config = settings[generate_config]
154 c.svn_proxy_generate_config = settings[generate_config]
155
155
156 application_form = ApplicationUiSettingsForm(self.request.translate)()
156 application_form = ApplicationUiSettingsForm(self.request.translate)()
157
157
158 try:
158 try:
159 form_result = application_form.to_python(dict(self.request.POST))
159 form_result = application_form.to_python(dict(self.request.POST))
160 except formencode.Invalid as errors:
160 except formencode.Invalid as errors:
161 h.flash(
161 h.flash(
162 _("Some form inputs contain invalid data."),
162 _("Some form inputs contain invalid data."),
163 category='error')
163 category='error')
164 data = render('rhodecode:templates/admin/settings/settings.mako',
164 data = render('rhodecode:templates/admin/settings/settings.mako',
165 self._get_template_context(c), self.request)
165 self._get_template_context(c), self.request)
166 html = formencode.htmlfill.render(
166 html = formencode.htmlfill.render(
167 data,
167 data,
168 defaults=errors.value,
168 defaults=errors.value,
169 errors=errors.error_dict or {},
169 errors=errors.error_dict or {},
170 prefix_error=False,
170 prefix_error=False,
171 encoding="UTF-8",
171 encoding="UTF-8",
172 force_defaults=False
172 force_defaults=False
173 )
173 )
174 return Response(html)
174 return Response(html)
175
175
176 try:
176 try:
177 if c.visual.allow_repo_location_change:
177 if c.visual.allow_repo_location_change:
178 model.update_global_path_setting(
178 model.update_global_path_setting(
179 form_result['paths_root_path'])
179 form_result['paths_root_path'])
180
180
181 model.update_global_ssl_setting(form_result['web_push_ssl'])
181 model.update_global_ssl_setting(form_result['web_push_ssl'])
182 model.update_global_hook_settings(form_result)
182 model.update_global_hook_settings(form_result)
183
183
184 model.create_or_update_global_svn_settings(form_result)
184 model.create_or_update_global_svn_settings(form_result)
185 model.create_or_update_global_hg_settings(form_result)
185 model.create_or_update_global_hg_settings(form_result)
186 model.create_or_update_global_git_settings(form_result)
186 model.create_or_update_global_git_settings(form_result)
187 model.create_or_update_global_pr_settings(form_result)
187 model.create_or_update_global_pr_settings(form_result)
188 except Exception:
188 except Exception:
189 log.exception("Exception while updating settings")
189 log.exception("Exception while updating settings")
190 h.flash(_('Error occurred during updating '
190 h.flash(_('Error occurred during updating '
191 'application settings'), category='error')
191 'application settings'), category='error')
192 else:
192 else:
193 Session().commit()
193 Session().commit()
194 h.flash(_('Updated VCS settings'), category='success')
194 h.flash(_('Updated VCS settings'), category='success')
195 raise HTTPFound(h.route_path('admin_settings_vcs'))
195 raise HTTPFound(h.route_path('admin_settings_vcs'))
196
196
197 data = render('rhodecode:templates/admin/settings/settings.mako',
197 data = render('rhodecode:templates/admin/settings/settings.mako',
198 self._get_template_context(c), self.request)
198 self._get_template_context(c), self.request)
199 html = formencode.htmlfill.render(
199 html = formencode.htmlfill.render(
200 data,
200 data,
201 defaults=self._form_defaults(),
201 defaults=self._form_defaults(),
202 encoding="UTF-8",
202 encoding="UTF-8",
203 force_defaults=False
203 force_defaults=False
204 )
204 )
205 return Response(html)
205 return Response(html)
206
206
207 @LoginRequired()
207 @LoginRequired()
208 @HasPermissionAllDecorator('hg.admin')
208 @HasPermissionAllDecorator('hg.admin')
209 @CSRFRequired()
209 @CSRFRequired()
210 @view_config(
210 @view_config(
211 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
211 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
212 renderer='json_ext', xhr=True)
212 renderer='json_ext', xhr=True)
213 def settings_vcs_delete_svn_pattern(self):
213 def settings_vcs_delete_svn_pattern(self):
214 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
214 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
215 model = VcsSettingsModel()
215 model = VcsSettingsModel()
216 try:
216 try:
217 model.delete_global_svn_pattern(delete_pattern_id)
217 model.delete_global_svn_pattern(delete_pattern_id)
218 except SettingNotFound:
218 except SettingNotFound:
219 log.exception(
219 log.exception(
220 'Failed to delete svn_pattern with id %s', delete_pattern_id)
220 'Failed to delete svn_pattern with id %s', delete_pattern_id)
221 raise HTTPNotFound()
221 raise HTTPNotFound()
222
222
223 Session().commit()
223 Session().commit()
224 return True
224 return True
225
225
226 @LoginRequired()
226 @LoginRequired()
227 @HasPermissionAllDecorator('hg.admin')
227 @HasPermissionAllDecorator('hg.admin')
228 @view_config(
228 @view_config(
229 route_name='admin_settings_mapping', request_method='GET',
229 route_name='admin_settings_mapping', request_method='GET',
230 renderer='rhodecode:templates/admin/settings/settings.mako')
230 renderer='rhodecode:templates/admin/settings/settings.mako')
231 def settings_mapping(self):
231 def settings_mapping(self):
232 c = self.load_default_context()
232 c = self.load_default_context()
233 c.active = 'mapping'
233 c.active = 'mapping'
234
234
235 data = render('rhodecode:templates/admin/settings/settings.mako',
235 data = render('rhodecode:templates/admin/settings/settings.mako',
236 self._get_template_context(c), self.request)
236 self._get_template_context(c), self.request)
237 html = formencode.htmlfill.render(
237 html = formencode.htmlfill.render(
238 data,
238 data,
239 defaults=self._form_defaults(),
239 defaults=self._form_defaults(),
240 encoding="UTF-8",
240 encoding="UTF-8",
241 force_defaults=False
241 force_defaults=False
242 )
242 )
243 return Response(html)
243 return Response(html)
244
244
245 @LoginRequired()
245 @LoginRequired()
246 @HasPermissionAllDecorator('hg.admin')
246 @HasPermissionAllDecorator('hg.admin')
247 @CSRFRequired()
247 @CSRFRequired()
248 @view_config(
248 @view_config(
249 route_name='admin_settings_mapping_update', request_method='POST',
249 route_name='admin_settings_mapping_update', request_method='POST',
250 renderer='rhodecode:templates/admin/settings/settings.mako')
250 renderer='rhodecode:templates/admin/settings/settings.mako')
251 def settings_mapping_update(self):
251 def settings_mapping_update(self):
252 _ = self.request.translate
252 _ = self.request.translate
253 c = self.load_default_context()
253 c = self.load_default_context()
254 c.active = 'mapping'
254 c.active = 'mapping'
255 rm_obsolete = self.request.POST.get('destroy', False)
255 rm_obsolete = self.request.POST.get('destroy', False)
256 invalidate_cache = self.request.POST.get('invalidate', False)
256 invalidate_cache = self.request.POST.get('invalidate', False)
257 log.debug(
257 log.debug(
258 'rescanning repo location with destroy obsolete=%s', rm_obsolete)
258 'rescanning repo location with destroy obsolete=%s', rm_obsolete)
259
259
260 if invalidate_cache:
260 if invalidate_cache:
261 log.debug('invalidating all repositories cache')
261 log.debug('invalidating all repositories cache')
262 for repo in Repository.get_all():
262 for repo in Repository.get_all():
263 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
263 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
264
264
265 filesystem_repos = ScmModel().repo_scan()
265 filesystem_repos = ScmModel().repo_scan()
266 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete)
266 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete)
267 _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-'
267 _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-'
268 h.flash(_('Repositories successfully '
268 h.flash(_('Repositories successfully '
269 'rescanned added: %s ; removed: %s') %
269 'rescanned added: %s ; removed: %s') %
270 (_repr(added), _repr(removed)),
270 (_repr(added), _repr(removed)),
271 category='success')
271 category='success')
272 raise HTTPFound(h.route_path('admin_settings_mapping'))
272 raise HTTPFound(h.route_path('admin_settings_mapping'))
273
273
274 @LoginRequired()
274 @LoginRequired()
275 @HasPermissionAllDecorator('hg.admin')
275 @HasPermissionAllDecorator('hg.admin')
276 @view_config(
276 @view_config(
277 route_name='admin_settings', request_method='GET',
277 route_name='admin_settings', request_method='GET',
278 renderer='rhodecode:templates/admin/settings/settings.mako')
278 renderer='rhodecode:templates/admin/settings/settings.mako')
279 @view_config(
279 @view_config(
280 route_name='admin_settings_global', request_method='GET',
280 route_name='admin_settings_global', request_method='GET',
281 renderer='rhodecode:templates/admin/settings/settings.mako')
281 renderer='rhodecode:templates/admin/settings/settings.mako')
282 def settings_global(self):
282 def settings_global(self):
283 c = self.load_default_context()
283 c = self.load_default_context()
284 c.active = 'global'
284 c.active = 'global'
285 c.personal_repo_group_default_pattern = RepoGroupModel()\
285 c.personal_repo_group_default_pattern = RepoGroupModel()\
286 .get_personal_group_name_pattern()
286 .get_personal_group_name_pattern()
287
287
288 data = render('rhodecode:templates/admin/settings/settings.mako',
288 data = render('rhodecode:templates/admin/settings/settings.mako',
289 self._get_template_context(c), self.request)
289 self._get_template_context(c), self.request)
290 html = formencode.htmlfill.render(
290 html = formencode.htmlfill.render(
291 data,
291 data,
292 defaults=self._form_defaults(),
292 defaults=self._form_defaults(),
293 encoding="UTF-8",
293 encoding="UTF-8",
294 force_defaults=False
294 force_defaults=False
295 )
295 )
296 return Response(html)
296 return Response(html)
297
297
298 @LoginRequired()
298 @LoginRequired()
299 @HasPermissionAllDecorator('hg.admin')
299 @HasPermissionAllDecorator('hg.admin')
300 @CSRFRequired()
300 @CSRFRequired()
301 @view_config(
301 @view_config(
302 route_name='admin_settings_update', request_method='POST',
302 route_name='admin_settings_update', request_method='POST',
303 renderer='rhodecode:templates/admin/settings/settings.mako')
303 renderer='rhodecode:templates/admin/settings/settings.mako')
304 @view_config(
304 @view_config(
305 route_name='admin_settings_global_update', request_method='POST',
305 route_name='admin_settings_global_update', request_method='POST',
306 renderer='rhodecode:templates/admin/settings/settings.mako')
306 renderer='rhodecode:templates/admin/settings/settings.mako')
307 def settings_global_update(self):
307 def settings_global_update(self):
308 _ = self.request.translate
308 _ = self.request.translate
309 c = self.load_default_context()
309 c = self.load_default_context()
310 c.active = 'global'
310 c.active = 'global'
311 c.personal_repo_group_default_pattern = RepoGroupModel()\
311 c.personal_repo_group_default_pattern = RepoGroupModel()\
312 .get_personal_group_name_pattern()
312 .get_personal_group_name_pattern()
313 application_form = ApplicationSettingsForm(self.request.translate)()
313 application_form = ApplicationSettingsForm(self.request.translate)()
314 try:
314 try:
315 form_result = application_form.to_python(dict(self.request.POST))
315 form_result = application_form.to_python(dict(self.request.POST))
316 except formencode.Invalid as errors:
316 except formencode.Invalid as errors:
317 data = render('rhodecode:templates/admin/settings/settings.mako',
317 data = render('rhodecode:templates/admin/settings/settings.mako',
318 self._get_template_context(c), self.request)
318 self._get_template_context(c), self.request)
319 html = formencode.htmlfill.render(
319 html = formencode.htmlfill.render(
320 data,
320 data,
321 defaults=errors.value,
321 defaults=errors.value,
322 errors=errors.error_dict or {},
322 errors=errors.error_dict or {},
323 prefix_error=False,
323 prefix_error=False,
324 encoding="UTF-8",
324 encoding="UTF-8",
325 force_defaults=False
325 force_defaults=False
326 )
326 )
327 return Response(html)
327 return Response(html)
328
328
329 settings = [
329 settings = [
330 ('title', 'rhodecode_title', 'unicode'),
330 ('title', 'rhodecode_title', 'unicode'),
331 ('realm', 'rhodecode_realm', 'unicode'),
331 ('realm', 'rhodecode_realm', 'unicode'),
332 ('pre_code', 'rhodecode_pre_code', 'unicode'),
332 ('pre_code', 'rhodecode_pre_code', 'unicode'),
333 ('post_code', 'rhodecode_post_code', 'unicode'),
333 ('post_code', 'rhodecode_post_code', 'unicode'),
334 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
334 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
335 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
335 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
336 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
336 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
337 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
337 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
338 ]
338 ]
339 try:
339 try:
340 for setting, form_key, type_ in settings:
340 for setting, form_key, type_ in settings:
341 sett = SettingsModel().create_or_update_setting(
341 sett = SettingsModel().create_or_update_setting(
342 setting, form_result[form_key], type_)
342 setting, form_result[form_key], type_)
343 Session().add(sett)
343 Session().add(sett)
344
344
345 Session().commit()
345 Session().commit()
346 SettingsModel().invalidate_settings_cache()
346 SettingsModel().invalidate_settings_cache()
347 h.flash(_('Updated application settings'), category='success')
347 h.flash(_('Updated application settings'), category='success')
348 except Exception:
348 except Exception:
349 log.exception("Exception while updating application settings")
349 log.exception("Exception while updating application settings")
350 h.flash(
350 h.flash(
351 _('Error occurred during updating application settings'),
351 _('Error occurred during updating application settings'),
352 category='error')
352 category='error')
353
353
354 raise HTTPFound(h.route_path('admin_settings_global'))
354 raise HTTPFound(h.route_path('admin_settings_global'))
355
355
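
Reviewer note: settings_global_update (and settings_visual_update below) both walk a list of (name, form_key, type) tuples and store each validated value under its declared type. A dependency-free sketch of that loop with the type coercion made explicit; the coercion rules and storage dict here are illustrative only, not what SettingsModel.create_or_update_setting does internally.

def coerce(value, type_):
    # illustrative coercion; the real model handles this itself
    if type_ == 'bool':
        return str(value).strip().lower() in ('true', 'yes', 'on', '1')
    if type_ == 'int':
        return int(value)
    return value            # 'unicode' and anything else is stored as-is

SETTINGS = [
    ('title', 'rhodecode_title', 'unicode'),
    ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
    ('show_version', 'rhodecode_show_version', 'bool'),
]

def persist(form_result, store):
    for name, form_key, type_ in SETTINGS:
        store[name] = coerce(form_result[form_key], type_)
    return store

print(persist(
    {'rhodecode_title': 'Dev instance',
     'rhodecode_dashboard_items': '100',
     'rhodecode_show_version': 'true'},
    store={}))
# -> {'title': 'Dev instance', 'dashboard_items': 100, 'show_version': True}
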
356 @LoginRequired()
356 @LoginRequired()
357 @HasPermissionAllDecorator('hg.admin')
357 @HasPermissionAllDecorator('hg.admin')
358 @view_config(
358 @view_config(
359 route_name='admin_settings_visual', request_method='GET',
359 route_name='admin_settings_visual', request_method='GET',
360 renderer='rhodecode:templates/admin/settings/settings.mako')
360 renderer='rhodecode:templates/admin/settings/settings.mako')
361 def settings_visual(self):
361 def settings_visual(self):
362 c = self.load_default_context()
362 c = self.load_default_context()
363 c.active = 'visual'
363 c.active = 'visual'
364
364
365 data = render('rhodecode:templates/admin/settings/settings.mako',
365 data = render('rhodecode:templates/admin/settings/settings.mako',
366 self._get_template_context(c), self.request)
366 self._get_template_context(c), self.request)
367 html = formencode.htmlfill.render(
367 html = formencode.htmlfill.render(
368 data,
368 data,
369 defaults=self._form_defaults(),
369 defaults=self._form_defaults(),
370 encoding="UTF-8",
370 encoding="UTF-8",
371 force_defaults=False
371 force_defaults=False
372 )
372 )
373 return Response(html)
373 return Response(html)
374
374
375 @LoginRequired()
375 @LoginRequired()
376 @HasPermissionAllDecorator('hg.admin')
376 @HasPermissionAllDecorator('hg.admin')
377 @CSRFRequired()
377 @CSRFRequired()
378 @view_config(
378 @view_config(
379 route_name='admin_settings_visual_update', request_method='POST',
379 route_name='admin_settings_visual_update', request_method='POST',
380 renderer='rhodecode:templates/admin/settings/settings.mako')
380 renderer='rhodecode:templates/admin/settings/settings.mako')
381 def settings_visual_update(self):
381 def settings_visual_update(self):
382 _ = self.request.translate
382 _ = self.request.translate
383 c = self.load_default_context()
383 c = self.load_default_context()
384 c.active = 'visual'
384 c.active = 'visual'
385 application_form = ApplicationVisualisationForm(self.request.translate)()
385 application_form = ApplicationVisualisationForm(self.request.translate)()
386 try:
386 try:
387 form_result = application_form.to_python(dict(self.request.POST))
387 form_result = application_form.to_python(dict(self.request.POST))
388 except formencode.Invalid as errors:
388 except formencode.Invalid as errors:
389 data = render('rhodecode:templates/admin/settings/settings.mako',
389 data = render('rhodecode:templates/admin/settings/settings.mako',
390 self._get_template_context(c), self.request)
390 self._get_template_context(c), self.request)
391 html = formencode.htmlfill.render(
391 html = formencode.htmlfill.render(
392 data,
392 data,
393 defaults=errors.value,
393 defaults=errors.value,
394 errors=errors.error_dict or {},
394 errors=errors.error_dict or {},
395 prefix_error=False,
395 prefix_error=False,
396 encoding="UTF-8",
396 encoding="UTF-8",
397 force_defaults=False
397 force_defaults=False
398 )
398 )
399 return Response(html)
399 return Response(html)
400
400
401 try:
401 try:
402 settings = [
402 settings = [
403 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
403 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
404 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
404 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
405 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
405 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
406 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
406 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
407 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
407 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
408 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
408 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
409 ('show_version', 'rhodecode_show_version', 'bool'),
409 ('show_version', 'rhodecode_show_version', 'bool'),
410 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
410 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
411 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
411 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
412 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
412 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
413 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
413 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
414 ('support_url', 'rhodecode_support_url', 'unicode'),
414 ('support_url', 'rhodecode_support_url', 'unicode'),
415 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
415 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
416 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
416 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
417 ]
417 ]
418 for setting, form_key, type_ in settings:
418 for setting, form_key, type_ in settings:
419 sett = SettingsModel().create_or_update_setting(
419 sett = SettingsModel().create_or_update_setting(
420 setting, form_result[form_key], type_)
420 setting, form_result[form_key], type_)
421 Session().add(sett)
421 Session().add(sett)
422
422
423 Session().commit()
423 Session().commit()
424 SettingsModel().invalidate_settings_cache()
424 SettingsModel().invalidate_settings_cache()
425 h.flash(_('Updated visualisation settings'), category='success')
425 h.flash(_('Updated visualisation settings'), category='success')
426 except Exception:
426 except Exception:
427 log.exception("Exception updating visualization settings")
427 log.exception("Exception updating visualization settings")
428 h.flash(_('Error occurred during updating '
428 h.flash(_('Error occurred during updating '
429 'visualisation settings'),
429 'visualisation settings'),
430 category='error')
430 category='error')
431
431
432 raise HTTPFound(h.route_path('admin_settings_visual'))
432 raise HTTPFound(h.route_path('admin_settings_visual'))
433
433
434 @LoginRequired()
434 @LoginRequired()
435 @HasPermissionAllDecorator('hg.admin')
435 @HasPermissionAllDecorator('hg.admin')
436 @view_config(
436 @view_config(
437 route_name='admin_settings_issuetracker', request_method='GET',
437 route_name='admin_settings_issuetracker', request_method='GET',
438 renderer='rhodecode:templates/admin/settings/settings.mako')
438 renderer='rhodecode:templates/admin/settings/settings.mako')
439 def settings_issuetracker(self):
439 def settings_issuetracker(self):
440 c = self.load_default_context()
440 c = self.load_default_context()
441 c.active = 'issuetracker'
441 c.active = 'issuetracker'
442 defaults = SettingsModel().get_all_settings()
442 defaults = SettingsModel().get_all_settings()
443
443
444 entry_key = 'rhodecode_issuetracker_pat_'
444 entry_key = 'rhodecode_issuetracker_pat_'
445
445
446 c.issuetracker_entries = {}
446 c.issuetracker_entries = {}
447 for k, v in defaults.items():
447 for k, v in defaults.items():
448 if k.startswith(entry_key):
448 if k.startswith(entry_key):
449 uid = k[len(entry_key):]
449 uid = k[len(entry_key):]
450 c.issuetracker_entries[uid] = None
450 c.issuetracker_entries[uid] = None
451
451
452 for uid in c.issuetracker_entries:
452 for uid in c.issuetracker_entries:
453 c.issuetracker_entries[uid] = AttributeDict({
453 c.issuetracker_entries[uid] = AttributeDict({
454 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
454 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
455 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
455 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
456 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
456 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
457 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
457 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
458 })
458 })
459
459
460 return self._get_template_context(c)
460 return self._get_template_context(c)
461
461
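
Reviewer note: settings_issuetracker above discovers entry uids from the 'rhodecode_issuetracker_pat_<uid>' keys and then pulls the matching pat/url/pref/desc values for each uid. A small standalone sketch of that regrouping on hypothetical settings data (the example pattern and URL are invented, not RhodeCode defaults):

PREFIX = 'rhodecode_issuetracker_pat_'
FIELDS = ('pat', 'url', 'pref', 'desc')

def group_issuetracker_entries(settings):
    entries = {}
    for key in settings:
        if key.startswith(PREFIX):
            uid = key[len(PREFIX):]
            entries[uid] = dict(
                (field,
                 settings.get('rhodecode_issuetracker_%s_%s' % (field, uid)))
                for field in FIELDS)
    return entries

settings = {
    'rhodecode_issuetracker_pat_default': r'#(\d+)',
    'rhodecode_issuetracker_url_default': 'https://issues.example.com/',
    'rhodecode_issuetracker_pref_default': '#',
    'rhodecode_issuetracker_desc_default': 'Example tracker',
}
print(group_issuetracker_entries(settings))
# -> {'default': {'pat': '#(\\d+)', 'url': 'https://issues.example.com/',
#                 'pref': '#', 'desc': 'Example tracker'}}
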
462 @LoginRequired()
462 @LoginRequired()
463 @HasPermissionAllDecorator('hg.admin')
463 @HasPermissionAllDecorator('hg.admin')
464 @CSRFRequired()
464 @CSRFRequired()
465 @view_config(
465 @view_config(
466 route_name='admin_settings_issuetracker_test', request_method='POST',
466 route_name='admin_settings_issuetracker_test', request_method='POST',
467 renderer='string', xhr=True)
467 renderer='string', xhr=True)
468 def settings_issuetracker_test(self):
468 def settings_issuetracker_test(self):
469 return h.urlify_commit_message(
469 return h.urlify_commit_message(
470 self.request.POST.get('test_text', ''),
470 self.request.POST.get('test_text', ''),
471 'repo_group/test_repo1')
471 'repo_group/test_repo1')
472
472
473 @LoginRequired()
473 @LoginRequired()
474 @HasPermissionAllDecorator('hg.admin')
474 @HasPermissionAllDecorator('hg.admin')
475 @CSRFRequired()
475 @CSRFRequired()
476 @view_config(
476 @view_config(
477 route_name='admin_settings_issuetracker_update', request_method='POST',
477 route_name='admin_settings_issuetracker_update', request_method='POST',
478 renderer='rhodecode:templates/admin/settings/settings.mako')
478 renderer='rhodecode:templates/admin/settings/settings.mako')
479 def settings_issuetracker_update(self):
479 def settings_issuetracker_update(self):
480 _ = self.request.translate
480 _ = self.request.translate
481 self.load_default_context()
481 self.load_default_context()
482 settings_model = IssueTrackerSettingsModel()
482 settings_model = IssueTrackerSettingsModel()
483
483
484 try:
484 try:
485 form = IssueTrackerPatternsForm(self.request.translate)().to_python(self.request.POST)
485 form = IssueTrackerPatternsForm(self.request.translate)().to_python(self.request.POST)
486 except formencode.Invalid as errors:
486 except formencode.Invalid as errors:
487 log.exception('Failed to add new pattern')
487 log.exception('Failed to add new pattern')
488 error = errors
488 error = errors
489 h.flash(_('Invalid issue tracker pattern: {}').format(error),
489 h.flash(_('Invalid issue tracker pattern: {}').format(error),
490 category='error')
490 category='error')
491 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
491 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
492
492
493 if form:
493 if form:
494 for uid in form.get('delete_patterns', []):
494 for uid in form.get('delete_patterns', []):
495 settings_model.delete_entries(uid)
495 settings_model.delete_entries(uid)
496
496
497 for pattern in form.get('patterns', []):
497 for pattern in form.get('patterns', []):
498 for setting, value, type_ in pattern:
498 for setting, value, type_ in pattern:
499 sett = settings_model.create_or_update_setting(
499 sett = settings_model.create_or_update_setting(
500 setting, value, type_)
500 setting, value, type_)
501 Session().add(sett)
501 Session().add(sett)
502
502
503 Session().commit()
503 Session().commit()
504
504
505 SettingsModel().invalidate_settings_cache()
505 SettingsModel().invalidate_settings_cache()
506 h.flash(_('Updated issue tracker entries'), category='success')
506 h.flash(_('Updated issue tracker entries'), category='success')
507 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
507 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
508
508
509 @LoginRequired()
509 @LoginRequired()
510 @HasPermissionAllDecorator('hg.admin')
510 @HasPermissionAllDecorator('hg.admin')
511 @CSRFRequired()
511 @CSRFRequired()
512 @view_config(
512 @view_config(
513 route_name='admin_settings_issuetracker_delete', request_method='POST',
513 route_name='admin_settings_issuetracker_delete', request_method='POST',
514 renderer='rhodecode:templates/admin/settings/settings.mako')
514 renderer='rhodecode:templates/admin/settings/settings.mako')
515 def settings_issuetracker_delete(self):
515 def settings_issuetracker_delete(self):
516 _ = self.request.translate
516 _ = self.request.translate
517 self.load_default_context()
517 self.load_default_context()
518 uid = self.request.POST.get('uid')
518 uid = self.request.POST.get('uid')
519 try:
519 try:
520 IssueTrackerSettingsModel().delete_entries(uid)
520 IssueTrackerSettingsModel().delete_entries(uid)
521 except Exception:
521 except Exception:
522 log.exception('Failed to delete issue tracker setting %s', uid)
522 log.exception('Failed to delete issue tracker setting %s', uid)
523 raise HTTPNotFound()
523 raise HTTPNotFound()
524 h.flash(_('Removed issue tracker entry'), category='success')
524 h.flash(_('Removed issue tracker entry'), category='success')
525 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
525 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
526
526
527 @LoginRequired()
527 @LoginRequired()
528 @HasPermissionAllDecorator('hg.admin')
528 @HasPermissionAllDecorator('hg.admin')
529 @view_config(
529 @view_config(
530 route_name='admin_settings_email', request_method='GET',
530 route_name='admin_settings_email', request_method='GET',
531 renderer='rhodecode:templates/admin/settings/settings.mako')
531 renderer='rhodecode:templates/admin/settings/settings.mako')
532 def settings_email(self):
532 def settings_email(self):
533 c = self.load_default_context()
533 c = self.load_default_context()
534 c.active = 'email'
534 c.active = 'email'
535 c.rhodecode_ini = rhodecode.CONFIG
535 c.rhodecode_ini = rhodecode.CONFIG
536
536
537 data = render('rhodecode:templates/admin/settings/settings.mako',
537 data = render('rhodecode:templates/admin/settings/settings.mako',
538 self._get_template_context(c), self.request)
538 self._get_template_context(c), self.request)
539 html = formencode.htmlfill.render(
539 html = formencode.htmlfill.render(
540 data,
540 data,
541 defaults=self._form_defaults(),
541 defaults=self._form_defaults(),
542 encoding="UTF-8",
542 encoding="UTF-8",
543 force_defaults=False
543 force_defaults=False
544 )
544 )
545 return Response(html)
545 return Response(html)
546
546
547 @LoginRequired()
547 @LoginRequired()
548 @HasPermissionAllDecorator('hg.admin')
548 @HasPermissionAllDecorator('hg.admin')
549 @CSRFRequired()
549 @CSRFRequired()
550 @view_config(
550 @view_config(
551 route_name='admin_settings_email_update', request_method='POST',
551 route_name='admin_settings_email_update', request_method='POST',
552 renderer='rhodecode:templates/admin/settings/settings.mako')
552 renderer='rhodecode:templates/admin/settings/settings.mako')
553 def settings_email_update(self):
553 def settings_email_update(self):
554 _ = self.request.translate
554 _ = self.request.translate
555 c = self.load_default_context()
555 c = self.load_default_context()
556 c.active = 'email'
556 c.active = 'email'
557
557
558 test_email = self.request.POST.get('test_email')
558 test_email = self.request.POST.get('test_email')
559
559
560 if not test_email:
560 if not test_email:
561 h.flash(_('Please enter email address'), category='error')
561 h.flash(_('Please enter email address'), category='error')
562 raise HTTPFound(h.route_path('admin_settings_email'))
562 raise HTTPFound(h.route_path('admin_settings_email'))
563
563
564 email_kwargs = {
564 email_kwargs = {
565 'date': datetime.datetime.now(),
565 'date': datetime.datetime.now(),
566 'user': c.rhodecode_user,
566 'user': c.rhodecode_user,
567 'rhodecode_version': c.rhodecode_version
567 'rhodecode_version': c.rhodecode_version
568 }
568 }
569
569
570 (subject, headers, email_body,
570 (subject, headers, email_body,
571 email_body_plaintext) = EmailNotificationModel().render_email(
571 email_body_plaintext) = EmailNotificationModel().render_email(
572 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
572 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
573
573
574 recipients = [test_email] if test_email else None
574 recipients = [test_email] if test_email else None
575
575
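# dispatch the test email; run_task hands the job to Celery when it is
# enabled and falls back to executing it inline otherwise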
576 run_task(tasks.send_email, recipients, subject,
576 run_task(tasks.send_email, recipients, subject,
577 email_body_plaintext, email_body)
577 email_body_plaintext, email_body)
578
578
579 h.flash(_('Send email task created'), category='success')
579 h.flash(_('Send email task created'), category='success')
580 raise HTTPFound(h.route_path('admin_settings_email'))
580 raise HTTPFound(h.route_path('admin_settings_email'))
581
581
582 @LoginRequired()
582 @LoginRequired()
583 @HasPermissionAllDecorator('hg.admin')
583 @HasPermissionAllDecorator('hg.admin')
584 @view_config(
584 @view_config(
585 route_name='admin_settings_hooks', request_method='GET',
585 route_name='admin_settings_hooks', request_method='GET',
586 renderer='rhodecode:templates/admin/settings/settings.mako')
586 renderer='rhodecode:templates/admin/settings/settings.mako')
587 def settings_hooks(self):
587 def settings_hooks(self):
588 c = self.load_default_context()
588 c = self.load_default_context()
589 c.active = 'hooks'
589 c.active = 'hooks'
590
590
591 model = SettingsModel()
591 model = SettingsModel()
592 c.hooks = model.get_builtin_hooks()
592 c.hooks = model.get_builtin_hooks()
593 c.custom_hooks = model.get_custom_hooks()
593 c.custom_hooks = model.get_custom_hooks()
594
594
595 data = render('rhodecode:templates/admin/settings/settings.mako',
595 data = render('rhodecode:templates/admin/settings/settings.mako',
596 self._get_template_context(c), self.request)
596 self._get_template_context(c), self.request)
597 html = formencode.htmlfill.render(
597 html = formencode.htmlfill.render(
598 data,
598 data,
599 defaults=self._form_defaults(),
599 defaults=self._form_defaults(),
600 encoding="UTF-8",
600 encoding="UTF-8",
601 force_defaults=False
601 force_defaults=False
602 )
602 )
603 return Response(html)
603 return Response(html)
604
604
605 @LoginRequired()
605 @LoginRequired()
606 @HasPermissionAllDecorator('hg.admin')
606 @HasPermissionAllDecorator('hg.admin')
607 @CSRFRequired()
607 @CSRFRequired()
608 @view_config(
608 @view_config(
609 route_name='admin_settings_hooks_update', request_method='POST',
609 route_name='admin_settings_hooks_update', request_method='POST',
610 renderer='rhodecode:templates/admin/settings/settings.mako')
610 renderer='rhodecode:templates/admin/settings/settings.mako')
611 @view_config(
611 @view_config(
612 route_name='admin_settings_hooks_delete', request_method='POST',
612 route_name='admin_settings_hooks_delete', request_method='POST',
613 renderer='rhodecode:templates/admin/settings/settings.mako')
613 renderer='rhodecode:templates/admin/settings/settings.mako')
614 def settings_hooks_update(self):
614 def settings_hooks_update(self):
615 _ = self.request.translate
615 _ = self.request.translate
616 c = self.load_default_context()
616 c = self.load_default_context()
617 c.active = 'hooks'
617 c.active = 'hooks'
618 if c.visual.allow_custom_hooks_settings:
618 if c.visual.allow_custom_hooks_settings:
619 ui_key = self.request.POST.get('new_hook_ui_key')
619 ui_key = self.request.POST.get('new_hook_ui_key')
620 ui_value = self.request.POST.get('new_hook_ui_value')
620 ui_value = self.request.POST.get('new_hook_ui_value')
621
621
622 hook_id = self.request.POST.get('hook_id')
622 hook_id = self.request.POST.get('hook_id')
623 new_hook = False
623 new_hook = False
624
624
625 model = SettingsModel()
625 model = SettingsModel()
626 try:
626 try:
627 if ui_value and ui_key:
627 if ui_value and ui_key:
628 model.create_or_update_hook(ui_key, ui_value)
628 model.create_or_update_hook(ui_key, ui_value)
629 h.flash(_('Added new hook'), category='success')
629 h.flash(_('Added new hook'), category='success')
630 new_hook = True
630 new_hook = True
631 elif hook_id:
631 elif hook_id:
632 RhodeCodeUi.delete(hook_id)
632 RhodeCodeUi.delete(hook_id)
633 Session().commit()
633 Session().commit()
634
634
635 # check for edits
635 # check for edits
636 update = False
636 update = False
637 _d = self.request.POST.dict_of_lists()
637 _d = self.request.POST.dict_of_lists()
638 for k, v in zip(_d.get('hook_ui_key', []),
638 for k, v in zip(_d.get('hook_ui_key', []),
639 _d.get('hook_ui_value_new', [])):
639 _d.get('hook_ui_value_new', [])):
640 model.create_or_update_hook(k, v)
640 model.create_or_update_hook(k, v)
641 update = True
641 update = True
642
642
643 if update and not new_hook:
643 if update and not new_hook:
644 h.flash(_('Updated hooks'), category='success')
644 h.flash(_('Updated hooks'), category='success')
645 Session().commit()
645 Session().commit()
646 except Exception:
646 except Exception:
647 log.exception("Exception during hook creation")
647 log.exception("Exception during hook creation")
648 h.flash(_('Error occurred during hook creation'),
648 h.flash(_('Error occurred during hook creation'),
649 category='error')
649 category='error')
650
650
651 raise HTTPFound(h.route_path('admin_settings_hooks'))
651 raise HTTPFound(h.route_path('admin_settings_hooks'))
652
652
653 @LoginRequired()
653 @LoginRequired()
654 @HasPermissionAllDecorator('hg.admin')
654 @HasPermissionAllDecorator('hg.admin')
655 @view_config(
655 @view_config(
656 route_name='admin_settings_search', request_method='GET',
656 route_name='admin_settings_search', request_method='GET',
657 renderer='rhodecode:templates/admin/settings/settings.mako')
657 renderer='rhodecode:templates/admin/settings/settings.mako')
658 def settings_search(self):
658 def settings_search(self):
659 c = self.load_default_context()
659 c = self.load_default_context()
660 c.active = 'search'
660 c.active = 'search'
661
661
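# build the search backend configured in the application settings
# (e.g. Whoosh) and query it for index statistics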
662 searcher = searcher_from_config(self.request.registry.settings)
662 searcher = searcher_from_config(self.request.registry.settings)
663 c.statistics = searcher.statistics()
663 c.statistics = searcher.statistics(self.request.translate)
664
664
665 return self._get_template_context(c)
665 return self._get_template_context(c)
666
666
667 @LoginRequired()
667 @LoginRequired()
668 @HasPermissionAllDecorator('hg.admin')
668 @HasPermissionAllDecorator('hg.admin')
669 @view_config(
669 @view_config(
670 route_name='admin_settings_labs', request_method='GET',
670 route_name='admin_settings_labs', request_method='GET',
671 renderer='rhodecode:templates/admin/settings/settings.mako')
671 renderer='rhodecode:templates/admin/settings/settings.mako')
672 def settings_labs(self):
672 def settings_labs(self):
673 c = self.load_default_context()
673 c = self.load_default_context()
674 if not c.labs_active:
674 if not c.labs_active:
675 raise HTTPFound(h.route_path('admin_settings'))
675 raise HTTPFound(h.route_path('admin_settings'))
676
676
677 c.active = 'labs'
677 c.active = 'labs'
678 c.lab_settings = _LAB_SETTINGS
678 c.lab_settings = _LAB_SETTINGS
679
679
680 data = render('rhodecode:templates/admin/settings/settings.mako',
680 data = render('rhodecode:templates/admin/settings/settings.mako',
681 self._get_template_context(c), self.request)
681 self._get_template_context(c), self.request)
682 html = formencode.htmlfill.render(
682 html = formencode.htmlfill.render(
683 data,
683 data,
684 defaults=self._form_defaults(),
684 defaults=self._form_defaults(),
685 encoding="UTF-8",
685 encoding="UTF-8",
686 force_defaults=False
686 force_defaults=False
687 )
687 )
688 return Response(html)
688 return Response(html)
689
689
690 @LoginRequired()
690 @LoginRequired()
691 @HasPermissionAllDecorator('hg.admin')
691 @HasPermissionAllDecorator('hg.admin')
692 @CSRFRequired()
692 @CSRFRequired()
693 @view_config(
693 @view_config(
694 route_name='admin_settings_labs_update', request_method='POST',
694 route_name='admin_settings_labs_update', request_method='POST',
695 renderer='rhodecode:templates/admin/settings/settings.mako')
695 renderer='rhodecode:templates/admin/settings/settings.mako')
696 def settings_labs_update(self):
696 def settings_labs_update(self):
697 _ = self.request.translate
697 _ = self.request.translate
698 c = self.load_default_context()
698 c = self.load_default_context()
699 c.active = 'labs'
699 c.active = 'labs'
700
700
701 application_form = LabsSettingsForm(self.request.translate)()
701 application_form = LabsSettingsForm(self.request.translate)()
702 try:
702 try:
703 form_result = application_form.to_python(dict(self.request.POST))
703 form_result = application_form.to_python(dict(self.request.POST))
704 except formencode.Invalid as errors:
704 except formencode.Invalid as errors:
705 h.flash(
705 h.flash(
706 _('Some form inputs contain invalid data.'),
706 _('Some form inputs contain invalid data.'),
707 category='error')
707 category='error')
708 data = render('rhodecode:templates/admin/settings/settings.mako',
708 data = render('rhodecode:templates/admin/settings/settings.mako',
709 self._get_template_context(c), self.request)
709 self._get_template_context(c), self.request)
710 html = formencode.htmlfill.render(
710 html = formencode.htmlfill.render(
711 data,
711 data,
712 defaults=errors.value,
712 defaults=errors.value,
713 errors=errors.error_dict or {},
713 errors=errors.error_dict or {},
714 prefix_error=False,
714 prefix_error=False,
715 encoding="UTF-8",
715 encoding="UTF-8",
716 force_defaults=False
716 force_defaults=False
717 )
717 )
718 return Response(html)
718 return Response(html)
719
719
720 try:
720 try:
721 session = Session()
721 session = Session()
722 for setting in _LAB_SETTINGS:
722 for setting in _LAB_SETTINGS:
723 setting_name = setting.key[len('rhodecode_'):]
723 setting_name = setting.key[len('rhodecode_'):]
724 sett = SettingsModel().create_or_update_setting(
724 sett = SettingsModel().create_or_update_setting(
725 setting_name, form_result[setting.key], setting.type)
725 setting_name, form_result[setting.key], setting.type)
726 session.add(sett)
726 session.add(sett)
727
727
728 except Exception:
728 except Exception:
729 log.exception('Exception while updating lab settings')
729 log.exception('Exception while updating lab settings')
730 h.flash(_('Error occurred during updating labs settings'),
730 h.flash(_('Error occurred during updating labs settings'),
731 category='error')
731 category='error')
732 else:
732 else:
733 Session().commit()
733 Session().commit()
734 SettingsModel().invalidate_settings_cache()
734 SettingsModel().invalidate_settings_cache()
735 h.flash(_('Updated Labs settings'), category='success')
735 h.flash(_('Updated Labs settings'), category='success')
736 raise HTTPFound(h.route_path('admin_settings_labs'))
736 raise HTTPFound(h.route_path('admin_settings_labs'))
737
737
738 data = render('rhodecode:templates/admin/settings/settings.mako',
738 data = render('rhodecode:templates/admin/settings/settings.mako',
739 self._get_template_context(c), self.request)
739 self._get_template_context(c), self.request)
740 html = formencode.htmlfill.render(
740 html = formencode.htmlfill.render(
741 data,
741 data,
742 defaults=self._form_defaults(),
742 defaults=self._form_defaults(),
743 encoding="UTF-8",
743 encoding="UTF-8",
744 force_defaults=False
744 force_defaults=False
745 )
745 )
746 return Response(html)
746 return Response(html)
747
747
748
748
749 # :param key: name of the setting including the 'rhodecode_' prefix
749 # :param key: name of the setting including the 'rhodecode_' prefix
750 # :param type: the RhodeCodeSetting type to use.
750 # :param type: the RhodeCodeSetting type to use.
751 # :param group: the i18ned group in which we should display this setting
751 # :param group: the i18ned group in which we should display this setting
752 # :param label: the i18ned label we should display for this setting
752 # :param label: the i18ned label we should display for this setting
753 # :param help: the i18ned help we should display for this setting
753 # :param help: the i18ned help we should display for this setting
754 LabSetting = collections.namedtuple(
754 LabSetting = collections.namedtuple(
755 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
755 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
756
756
757
757
758 # This list has to be kept in sync with the form
758 # This list has to be kept in sync with the form
759 # rhodecode.model.forms.LabsSettingsForm.
759 # rhodecode.model.forms.LabsSettingsForm.
760 _LAB_SETTINGS = [
760 _LAB_SETTINGS = [
761
761
762 ]
762 ]
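# An illustrative sketch, not part of this changeset: a hypothetical
# _LAB_SETTINGS entry built from the LabSetting fields documented above.
# The key name and all display strings below are invented for the example;
# a real entry also needs a matching field in
# rhodecode.model.forms.LabsSettingsForm.
#
# _LAB_SETTINGS = [
#     LabSetting(
#         key='rhodecode_example_lab_flag',
#         type='bool',
#         group='Example group',
#         label='Enable the example lab feature',
#         help='Purely illustrative setting; it does not exist in RhodeCode.'),
# ]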
@@ -1,1190 +1,1189 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25
25
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.renderers import render
28 from pyramid.renderers import render
29 from pyramid.response import Response
29 from pyramid.response import Response
30
30
31 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
31 from rhodecode.apps._base import BaseAppView, DataGridAppView, UserAppView
32 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
32 from rhodecode.apps.ssh_support import SshKeyFileChangeEvent
33 from rhodecode.authentication.plugins import auth_rhodecode
33 from rhodecode.authentication.plugins import auth_rhodecode
34 from rhodecode.events import trigger
34 from rhodecode.events import trigger
35
35
36 from rhodecode.lib import audit_logger
36 from rhodecode.lib import audit_logger
37 from rhodecode.lib.exceptions import (
37 from rhodecode.lib.exceptions import (
38 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
38 UserCreationError, UserOwnsReposException, UserOwnsRepoGroupsException,
39 UserOwnsUserGroupsException, DefaultUserException)
39 UserOwnsUserGroupsException, DefaultUserException)
40 from rhodecode.lib.ext_json import json
40 from rhodecode.lib.ext_json import json
41 from rhodecode.lib.auth import (
41 from rhodecode.lib.auth import (
42 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
42 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
43 from rhodecode.lib import helpers as h
43 from rhodecode.lib import helpers as h
44 from rhodecode.lib.utils2 import safe_int, safe_unicode, AttributeDict
44 from rhodecode.lib.utils2 import safe_int, safe_unicode, AttributeDict
45 from rhodecode.model.auth_token import AuthTokenModel
45 from rhodecode.model.auth_token import AuthTokenModel
46 from rhodecode.model.forms import (
46 from rhodecode.model.forms import (
47 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
47 UserForm, UserIndividualPermissionsForm, UserPermissionsForm,
48 UserExtraEmailForm, UserExtraIpForm)
48 UserExtraEmailForm, UserExtraIpForm)
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.ssh_key import SshKeyModel
51 from rhodecode.model.ssh_key import SshKeyModel
52 from rhodecode.model.user import UserModel
52 from rhodecode.model.user import UserModel
53 from rhodecode.model.user_group import UserGroupModel
53 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.db import (
54 from rhodecode.model.db import (
55 or_, coalesce,IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
55 or_, coalesce, IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
55 or_, coalesce, IntegrityError, User, UserGroup, UserIpMap, UserEmailMap,
56 UserApiKeys, UserSshKeys, RepoGroup)
57 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
62 class AdminUsersView(BaseAppView, DataGridAppView):
62 class AdminUsersView(BaseAppView, DataGridAppView):
63
63
64 def load_default_context(self):
64 def load_default_context(self):
65 c = self._get_local_tmpl_context()
65 c = self._get_local_tmpl_context()
66 return c
66 return c
67
67
68 @LoginRequired()
68 @LoginRequired()
69 @HasPermissionAllDecorator('hg.admin')
69 @HasPermissionAllDecorator('hg.admin')
70 @view_config(
70 @view_config(
71 route_name='users', request_method='GET',
71 route_name='users', request_method='GET',
72 renderer='rhodecode:templates/admin/users/users.mako')
72 renderer='rhodecode:templates/admin/users/users.mako')
73 def users_list(self):
73 def users_list(self):
74 c = self.load_default_context()
74 c = self.load_default_context()
75 return self._get_template_context(c)
75 return self._get_template_context(c)
76
76
77 @LoginRequired()
77 @LoginRequired()
78 @HasPermissionAllDecorator('hg.admin')
78 @HasPermissionAllDecorator('hg.admin')
79 @view_config(
79 @view_config(
80 # renderer defined below
80 # renderer defined below
81 route_name='users_data', request_method='GET',
81 route_name='users_data', request_method='GET',
82 renderer='json_ext', xhr=True)
82 renderer='json_ext', xhr=True)
83 def users_list_data(self):
83 def users_list_data(self):
84 self.load_default_context()
84 self.load_default_context()
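# map datagrid column names to the User model attributes used for ordering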
85 column_map = {
85 column_map = {
86 'first_name': 'name',
86 'first_name': 'name',
87 'last_name': 'lastname',
87 'last_name': 'lastname',
88 }
88 }
89 draw, start, limit = self._extract_chunk(self.request)
89 draw, start, limit = self._extract_chunk(self.request)
90 search_q, order_by, order_dir = self._extract_ordering(
90 search_q, order_by, order_dir = self._extract_ordering(
91 self.request, column_map=column_map)
91 self.request, column_map=column_map)
92
92
93 _render = self.request.get_partial_renderer(
93 _render = self.request.get_partial_renderer(
94 'rhodecode:templates/data_table/_dt_elements.mako')
94 'rhodecode:templates/data_table/_dt_elements.mako')
95
95
96 def user_actions(user_id, username):
96 def user_actions(user_id, username):
97 return _render("user_actions", user_id, username)
97 return _render("user_actions", user_id, username)
98
98
99 users_data_total_count = User.query()\
99 users_data_total_count = User.query()\
100 .filter(User.username != User.DEFAULT_USER) \
100 .filter(User.username != User.DEFAULT_USER) \
101 .count()
101 .count()
102
102
103 # generate the json data for the users grid
103 # generate the json data for the users grid
104 base_q = User.query().filter(User.username != User.DEFAULT_USER)
104 base_q = User.query().filter(User.username != User.DEFAULT_USER)
105
105
106 if search_q:
106 if search_q:
107 like_expression = u'%{}%'.format(safe_unicode(search_q))
107 like_expression = u'%{}%'.format(safe_unicode(search_q))
108 base_q = base_q.filter(or_(
108 base_q = base_q.filter(or_(
109 User.username.ilike(like_expression),
109 User.username.ilike(like_expression),
110 User._email.ilike(like_expression),
110 User._email.ilike(like_expression),
111 User.name.ilike(like_expression),
111 User.name.ilike(like_expression),
112 User.lastname.ilike(like_expression),
112 User.lastname.ilike(like_expression),
113 ))
113 ))
114
114
115 users_data_total_filtered_count = base_q.count()
115 users_data_total_filtered_count = base_q.count()
116
116
117 sort_col = getattr(User, order_by, None)
117 sort_col = getattr(User, order_by, None)
118 if sort_col:
118 if sort_col:
119 if order_dir == 'asc':
119 if order_dir == 'asc':
120 # handle null values properly to order by NULL last
120 # handle null values properly to order by NULL last
121 if order_by in ['last_activity']:
121 if order_by in ['last_activity']:
122 sort_col = coalesce(sort_col, datetime.date.max)
122 sort_col = coalesce(sort_col, datetime.date.max)
123 sort_col = sort_col.asc()
123 sort_col = sort_col.asc()
124 else:
124 else:
125 # handle null values properly to order by NULL last
125 # handle null values properly to order by NULL last
126 if order_by in ['last_activity']:
126 if order_by in ['last_activity']:
127 sort_col = coalesce(sort_col, datetime.date.min)
127 sort_col = coalesce(sort_col, datetime.date.min)
128 sort_col = sort_col.desc()
128 sort_col = sort_col.desc()
129
129
130 base_q = base_q.order_by(sort_col)
130 base_q = base_q.order_by(sort_col)
131 base_q = base_q.offset(start).limit(limit)
131 base_q = base_q.offset(start).limit(limit)
132
132
133 users_list = base_q.all()
133 users_list = base_q.all()
134
134
135 users_data = []
135 users_data = []
136 for user in users_list:
136 for user in users_list:
137 users_data.append({
137 users_data.append({
138 "username": h.gravatar_with_user(self.request, user.username),
138 "username": h.gravatar_with_user(self.request, user.username),
139 "email": user.email,
139 "email": user.email,
140 "first_name": user.first_name,
140 "first_name": user.first_name,
141 "last_name": user.last_name,
141 "last_name": user.last_name,
142 "last_login": h.format_date(user.last_login),
142 "last_login": h.format_date(user.last_login),
143 "last_activity": h.format_date(user.last_activity),
143 "last_activity": h.format_date(user.last_activity),
144 "active": h.bool2icon(user.active),
144 "active": h.bool2icon(user.active),
145 "active_raw": user.active,
145 "active_raw": user.active,
146 "admin": h.bool2icon(user.admin),
146 "admin": h.bool2icon(user.admin),
147 "extern_type": user.extern_type,
147 "extern_type": user.extern_type,
148 "extern_name": user.extern_name,
148 "extern_name": user.extern_name,
149 "action": user_actions(user.user_id, user.username),
149 "action": user_actions(user.user_id, user.username),
150 })
150 })
151
151
152 data = ({
152 data = ({
153 'draw': draw,
153 'draw': draw,
154 'data': users_data,
154 'data': users_data,
155 'recordsTotal': users_data_total_count,
155 'recordsTotal': users_data_total_count,
156 'recordsFiltered': users_data_total_filtered_count,
156 'recordsFiltered': users_data_total_filtered_count,
157 })
157 })
158
158
159 return data
159 return data
160
160
161 def _set_personal_repo_group_template_vars(self, c_obj):
161 def _set_personal_repo_group_template_vars(self, c_obj):
162 DummyUser = AttributeDict({
162 DummyUser = AttributeDict({
163 'username': '${username}',
163 'username': '${username}',
164 'user_id': '${user_id}',
164 'user_id': '${user_id}',
165 })
165 })
166 c_obj.default_create_repo_group = RepoGroupModel() \
166 c_obj.default_create_repo_group = RepoGroupModel() \
167 .get_default_create_personal_repo_group()
167 .get_default_create_personal_repo_group()
168 c_obj.personal_repo_group_name = RepoGroupModel() \
168 c_obj.personal_repo_group_name = RepoGroupModel() \
169 .get_personal_group_name(DummyUser)
169 .get_personal_group_name(DummyUser)
170
170
171 @LoginRequired()
171 @LoginRequired()
172 @HasPermissionAllDecorator('hg.admin')
172 @HasPermissionAllDecorator('hg.admin')
173 @view_config(
173 @view_config(
174 route_name='users_new', request_method='GET',
174 route_name='users_new', request_method='GET',
175 renderer='rhodecode:templates/admin/users/user_add.mako')
175 renderer='rhodecode:templates/admin/users/user_add.mako')
176 def users_new(self):
176 def users_new(self):
177 _ = self.request.translate
177 _ = self.request.translate
178 c = self.load_default_context()
178 c = self.load_default_context()
179 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
179 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
180 self._set_personal_repo_group_template_vars(c)
180 self._set_personal_repo_group_template_vars(c)
181 return self._get_template_context(c)
181 return self._get_template_context(c)
182
182
183 @LoginRequired()
183 @LoginRequired()
184 @HasPermissionAllDecorator('hg.admin')
184 @HasPermissionAllDecorator('hg.admin')
185 @CSRFRequired()
185 @CSRFRequired()
186 @view_config(
186 @view_config(
187 route_name='users_create', request_method='POST',
187 route_name='users_create', request_method='POST',
188 renderer='rhodecode:templates/admin/users/user_add.mako')
188 renderer='rhodecode:templates/admin/users/user_add.mako')
189 def users_create(self):
189 def users_create(self):
190 _ = self.request.translate
190 _ = self.request.translate
191 c = self.load_default_context()
191 c = self.load_default_context()
192 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
192 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
193 user_model = UserModel()
193 user_model = UserModel()
194 user_form = UserForm(self.request.translate)()
194 user_form = UserForm(self.request.translate)()
195 try:
195 try:
196 form_result = user_form.to_python(dict(self.request.POST))
196 form_result = user_form.to_python(dict(self.request.POST))
197 user = user_model.create(form_result)
197 user = user_model.create(form_result)
198 Session().flush()
198 Session().flush()
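# flush so the new user gets its id before the audit data and edit link are built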
199 creation_data = user.get_api_data()
199 creation_data = user.get_api_data()
200 username = form_result['username']
200 username = form_result['username']
201
201
202 audit_logger.store_web(
202 audit_logger.store_web(
203 'user.create', action_data={'data': creation_data},
203 'user.create', action_data={'data': creation_data},
204 user=c.rhodecode_user)
204 user=c.rhodecode_user)
205
205
206 user_link = h.link_to(
206 user_link = h.link_to(
207 h.escape(username),
207 h.escape(username),
208 h.route_path('user_edit', user_id=user.user_id))
208 h.route_path('user_edit', user_id=user.user_id))
209 h.flash(h.literal(_('Created user %(user_link)s')
209 h.flash(h.literal(_('Created user %(user_link)s')
210 % {'user_link': user_link}), category='success')
210 % {'user_link': user_link}), category='success')
211 Session().commit()
211 Session().commit()
212 except formencode.Invalid as errors:
212 except formencode.Invalid as errors:
213 self._set_personal_repo_group_template_vars(c)
213 self._set_personal_repo_group_template_vars(c)
214 data = render(
214 data = render(
215 'rhodecode:templates/admin/users/user_add.mako',
215 'rhodecode:templates/admin/users/user_add.mako',
216 self._get_template_context(c), self.request)
216 self._get_template_context(c), self.request)
217 html = formencode.htmlfill.render(
217 html = formencode.htmlfill.render(
218 data,
218 data,
219 defaults=errors.value,
219 defaults=errors.value,
220 errors=errors.error_dict or {},
220 errors=errors.error_dict or {},
221 prefix_error=False,
221 prefix_error=False,
222 encoding="UTF-8",
222 encoding="UTF-8",
223 force_defaults=False
223 force_defaults=False
224 )
224 )
225 return Response(html)
225 return Response(html)
226 except UserCreationError as e:
226 except UserCreationError as e:
227 h.flash(e, 'error')
227 h.flash(e, 'error')
228 except Exception:
228 except Exception:
229 log.exception("Exception creation of user")
229 log.exception("Exception creation of user")
230 h.flash(_('Error occurred during creation of user %s')
230 h.flash(_('Error occurred during creation of user %s')
231 % self.request.POST.get('username'), category='error')
231 % self.request.POST.get('username'), category='error')
232 raise HTTPFound(h.route_path('users'))
232 raise HTTPFound(h.route_path('users'))
233
233
234
234
235 class UsersView(UserAppView):
235 class UsersView(UserAppView):
236 ALLOW_SCOPED_TOKENS = False
236 ALLOW_SCOPED_TOKENS = False
237 """
237 """
238 This view has alternative version inside EE, if modified please take a look
238 This view has an alternative version inside EE; if modified, please take a look
238 This view has an alternative version inside EE; if modified, please take a look
239 in there as well.
240 """
240 """
241
241
242 def load_default_context(self):
242 def load_default_context(self):
243 c = self._get_local_tmpl_context()
243 c = self._get_local_tmpl_context()
244 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
244 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
245 c.allowed_languages = [
245 c.allowed_languages = [
246 ('en', 'English (en)'),
246 ('en', 'English (en)'),
247 ('de', 'German (de)'),
247 ('de', 'German (de)'),
248 ('fr', 'French (fr)'),
248 ('fr', 'French (fr)'),
249 ('it', 'Italian (it)'),
249 ('it', 'Italian (it)'),
250 ('ja', 'Japanese (ja)'),
250 ('ja', 'Japanese (ja)'),
251 ('pl', 'Polish (pl)'),
251 ('pl', 'Polish (pl)'),
252 ('pt', 'Portuguese (pt)'),
252 ('pt', 'Portuguese (pt)'),
253 ('ru', 'Russian (ru)'),
253 ('ru', 'Russian (ru)'),
254 ('zh', 'Chinese (zh)'),
254 ('zh', 'Chinese (zh)'),
255 ]
255 ]
256 req = self.request
256 req = self.request
257
257
258 c.available_permissions = req.registry.settings['available_permissions']
258 c.available_permissions = req.registry.settings['available_permissions']
259 PermissionModel().set_global_permission_choices(
259 PermissionModel().set_global_permission_choices(
260 c, gettext_translator=req.translate)
260 c, gettext_translator=req.translate)
261
261
262
263 return c
262 return c
264
263
265 @LoginRequired()
264 @LoginRequired()
266 @HasPermissionAllDecorator('hg.admin')
265 @HasPermissionAllDecorator('hg.admin')
267 @CSRFRequired()
266 @CSRFRequired()
268 @view_config(
267 @view_config(
269 route_name='user_update', request_method='POST',
268 route_name='user_update', request_method='POST',
270 renderer='rhodecode:templates/admin/users/user_edit.mako')
269 renderer='rhodecode:templates/admin/users/user_edit.mako')
271 def user_update(self):
270 def user_update(self):
272 _ = self.request.translate
271 _ = self.request.translate
273 c = self.load_default_context()
272 c = self.load_default_context()
274
273
275 user_id = self.db_user_id
274 user_id = self.db_user_id
276 c.user = self.db_user
275 c.user = self.db_user
277
276
278 c.active = 'profile'
277 c.active = 'profile'
279 c.extern_type = c.user.extern_type
278 c.extern_type = c.user.extern_type
280 c.extern_name = c.user.extern_name
279 c.extern_name = c.user.extern_name
281 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
280 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
282 available_languages = [x[0] for x in c.allowed_languages]
281 available_languages = [x[0] for x in c.allowed_languages]
283 _form = UserForm(self.request.translate, edit=True,
282 _form = UserForm(self.request.translate, edit=True,
284 available_languages=available_languages,
283 available_languages=available_languages,
285 old_data={'user_id': user_id,
284 old_data={'user_id': user_id,
286 'email': c.user.email})()
285 'email': c.user.email})()
287 form_result = {}
286 form_result = {}
288 old_values = c.user.get_api_data()
287 old_values = c.user.get_api_data()
289 try:
288 try:
290 form_result = _form.to_python(dict(self.request.POST))
289 form_result = _form.to_python(dict(self.request.POST))
291 skip_attrs = ['extern_type', 'extern_name']
290 skip_attrs = ['extern_type', 'extern_name']
292 # TODO: plugin should define if username can be updated
291 # TODO: plugin should define if username can be updated
293 if c.extern_type != "rhodecode":
292 if c.extern_type != "rhodecode":
294 # forbid updating username for external accounts
293 # forbid updating username for external accounts
295 skip_attrs.append('username')
294 skip_attrs.append('username')
296
295
297 UserModel().update_user(
296 UserModel().update_user(
298 user_id, skip_attrs=skip_attrs, **form_result)
297 user_id, skip_attrs=skip_attrs, **form_result)
299
298
300 audit_logger.store_web(
299 audit_logger.store_web(
301 'user.edit', action_data={'old_data': old_values},
300 'user.edit', action_data={'old_data': old_values},
302 user=c.rhodecode_user)
301 user=c.rhodecode_user)
303
302
304 Session().commit()
303 Session().commit()
305 h.flash(_('User updated successfully'), category='success')
304 h.flash(_('User updated successfully'), category='success')
306 except formencode.Invalid as errors:
305 except formencode.Invalid as errors:
307 data = render(
306 data = render(
308 'rhodecode:templates/admin/users/user_edit.mako',
307 'rhodecode:templates/admin/users/user_edit.mako',
309 self._get_template_context(c), self.request)
308 self._get_template_context(c), self.request)
310 html = formencode.htmlfill.render(
309 html = formencode.htmlfill.render(
311 data,
310 data,
312 defaults=errors.value,
311 defaults=errors.value,
313 errors=errors.error_dict or {},
312 errors=errors.error_dict or {},
314 prefix_error=False,
313 prefix_error=False,
315 encoding="UTF-8",
314 encoding="UTF-8",
316 force_defaults=False
315 force_defaults=False
317 )
316 )
318 return Response(html)
317 return Response(html)
319 except UserCreationError as e:
318 except UserCreationError as e:
320 h.flash(e, 'error')
319 h.flash(e, 'error')
321 except Exception:
320 except Exception:
322 log.exception("Exception updating user")
321 log.exception("Exception updating user")
323 h.flash(_('Error occurred during update of user %s')
322 h.flash(_('Error occurred during update of user %s')
324 % form_result.get('username'), category='error')
323 % form_result.get('username'), category='error')
325 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
324 raise HTTPFound(h.route_path('user_edit', user_id=user_id))
326
325
327 @LoginRequired()
326 @LoginRequired()
328 @HasPermissionAllDecorator('hg.admin')
327 @HasPermissionAllDecorator('hg.admin')
329 @CSRFRequired()
328 @CSRFRequired()
330 @view_config(
329 @view_config(
331 route_name='user_delete', request_method='POST',
330 route_name='user_delete', request_method='POST',
332 renderer='rhodecode:templates/admin/users/user_edit.mako')
331 renderer='rhodecode:templates/admin/users/user_edit.mako')
333 def user_delete(self):
332 def user_delete(self):
334 _ = self.request.translate
333 _ = self.request.translate
335 c = self.load_default_context()
334 c = self.load_default_context()
336 c.user = self.db_user
335 c.user = self.db_user
337
336
338 _repos = c.user.repositories
337 _repos = c.user.repositories
339 _repo_groups = c.user.repository_groups
338 _repo_groups = c.user.repository_groups
340 _user_groups = c.user.user_groups
339 _user_groups = c.user.user_groups
341
340
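# how objects owned by the deleted user are handled, based on the submitted
# form: 'detach' keeps them, 'delete' removes them together with the user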
342 handle_repos = None
341 handle_repos = None
343 handle_repo_groups = None
342 handle_repo_groups = None
344 handle_user_groups = None
343 handle_user_groups = None
345 # default no-op callables for the flash message of each handled object type
344 # default no-op callables for the flash message of each handled object type
346 set_handle_flash_repos = lambda: None
345 set_handle_flash_repos = lambda: None
347 set_handle_flash_repo_groups = lambda: None
346 set_handle_flash_repo_groups = lambda: None
348 set_handle_flash_user_groups = lambda: None
347 set_handle_flash_user_groups = lambda: None
349
348
350 if _repos and self.request.POST.get('user_repos'):
349 if _repos and self.request.POST.get('user_repos'):
351 do = self.request.POST['user_repos']
350 do = self.request.POST['user_repos']
352 if do == 'detach':
351 if do == 'detach':
353 handle_repos = 'detach'
352 handle_repos = 'detach'
354 set_handle_flash_repos = lambda: h.flash(
353 set_handle_flash_repos = lambda: h.flash(
355 _('Detached %s repositories') % len(_repos),
354 _('Detached %s repositories') % len(_repos),
356 category='success')
355 category='success')
357 elif do == 'delete':
356 elif do == 'delete':
358 handle_repos = 'delete'
357 handle_repos = 'delete'
359 set_handle_flash_repos = lambda: h.flash(
358 set_handle_flash_repos = lambda: h.flash(
360 _('Deleted %s repositories') % len(_repos),
359 _('Deleted %s repositories') % len(_repos),
361 category='success')
360 category='success')
362
361
363 if _repo_groups and self.request.POST.get('user_repo_groups'):
362 if _repo_groups and self.request.POST.get('user_repo_groups'):
364 do = self.request.POST['user_repo_groups']
363 do = self.request.POST['user_repo_groups']
365 if do == 'detach':
364 if do == 'detach':
366 handle_repo_groups = 'detach'
365 handle_repo_groups = 'detach'
367 set_handle_flash_repo_groups = lambda: h.flash(
366 set_handle_flash_repo_groups = lambda: h.flash(
368 _('Detached %s repository groups') % len(_repo_groups),
367 _('Detached %s repository groups') % len(_repo_groups),
369 category='success')
368 category='success')
370 elif do == 'delete':
369 elif do == 'delete':
371 handle_repo_groups = 'delete'
370 handle_repo_groups = 'delete'
372 set_handle_flash_repo_groups = lambda: h.flash(
371 set_handle_flash_repo_groups = lambda: h.flash(
373 _('Deleted %s repository groups') % len(_repo_groups),
372 _('Deleted %s repository groups') % len(_repo_groups),
374 category='success')
373 category='success')
375
374
376 if _user_groups and self.request.POST.get('user_user_groups'):
375 if _user_groups and self.request.POST.get('user_user_groups'):
377 do = self.request.POST['user_user_groups']
376 do = self.request.POST['user_user_groups']
378 if do == 'detach':
377 if do == 'detach':
379 handle_user_groups = 'detach'
378 handle_user_groups = 'detach'
380 set_handle_flash_user_groups = lambda: h.flash(
379 set_handle_flash_user_groups = lambda: h.flash(
381 _('Detached %s user groups') % len(_user_groups),
380 _('Detached %s user groups') % len(_user_groups),
382 category='success')
381 category='success')
383 elif do == 'delete':
382 elif do == 'delete':
384 handle_user_groups = 'delete'
383 handle_user_groups = 'delete'
385 set_handle_flash_user_groups = lambda: h.flash(
384 set_handle_flash_user_groups = lambda: h.flash(
386 _('Deleted %s user groups') % len(_user_groups),
385 _('Deleted %s user groups') % len(_user_groups),
387 category='success')
386 category='success')
388
387
389 old_values = c.user.get_api_data()
388 old_values = c.user.get_api_data()
390 try:
389 try:
391 UserModel().delete(c.user, handle_repos=handle_repos,
390 UserModel().delete(c.user, handle_repos=handle_repos,
392 handle_repo_groups=handle_repo_groups,
391 handle_repo_groups=handle_repo_groups,
393 handle_user_groups=handle_user_groups)
392 handle_user_groups=handle_user_groups)
394
393
395 audit_logger.store_web(
394 audit_logger.store_web(
396 'user.delete', action_data={'old_data': old_values},
395 'user.delete', action_data={'old_data': old_values},
397 user=c.rhodecode_user)
396 user=c.rhodecode_user)
398
397
399 Session().commit()
398 Session().commit()
400 set_handle_flash_repos()
399 set_handle_flash_repos()
401 set_handle_flash_repo_groups()
400 set_handle_flash_repo_groups()
402 set_handle_flash_user_groups()
401 set_handle_flash_user_groups()
403 h.flash(_('Successfully deleted user'), category='success')
402 h.flash(_('Successfully deleted user'), category='success')
404 except (UserOwnsReposException, UserOwnsRepoGroupsException,
403 except (UserOwnsReposException, UserOwnsRepoGroupsException,
405 UserOwnsUserGroupsException, DefaultUserException) as e:
404 UserOwnsUserGroupsException, DefaultUserException) as e:
406 h.flash(e, category='warning')
405 h.flash(e, category='warning')
407 except Exception:
406 except Exception:
408 log.exception("Exception during deletion of user")
407 log.exception("Exception during deletion of user")
409 h.flash(_('An error occurred during deletion of user'),
408 h.flash(_('An error occurred during deletion of user'),
410 category='error')
409 category='error')
411 raise HTTPFound(h.route_path('users'))
410 raise HTTPFound(h.route_path('users'))
412
411
413 @LoginRequired()
412 @LoginRequired()
414 @HasPermissionAllDecorator('hg.admin')
413 @HasPermissionAllDecorator('hg.admin')
415 @view_config(
414 @view_config(
416 route_name='user_edit', request_method='GET',
415 route_name='user_edit', request_method='GET',
417 renderer='rhodecode:templates/admin/users/user_edit.mako')
416 renderer='rhodecode:templates/admin/users/user_edit.mako')
418 def user_edit(self):
417 def user_edit(self):
419 _ = self.request.translate
418 _ = self.request.translate
420 c = self.load_default_context()
419 c = self.load_default_context()
421 c.user = self.db_user
420 c.user = self.db_user
422
421
423 c.active = 'profile'
422 c.active = 'profile'
424 c.extern_type = c.user.extern_type
423 c.extern_type = c.user.extern_type
425 c.extern_name = c.user.extern_name
424 c.extern_name = c.user.extern_name
426 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
425 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
427
426
428 defaults = c.user.get_dict()
427 defaults = c.user.get_dict()
429 defaults.update({'language': c.user.user_data.get('language')})
428 defaults.update({'language': c.user.user_data.get('language')})
430
429
431 data = render(
430 data = render(
432 'rhodecode:templates/admin/users/user_edit.mako',
431 'rhodecode:templates/admin/users/user_edit.mako',
433 self._get_template_context(c), self.request)
432 self._get_template_context(c), self.request)
434 html = formencode.htmlfill.render(
433 html = formencode.htmlfill.render(
435 data,
434 data,
436 defaults=defaults,
435 defaults=defaults,
437 encoding="UTF-8",
436 encoding="UTF-8",
438 force_defaults=False
437 force_defaults=False
439 )
438 )
440 return Response(html)
439 return Response(html)
441
440
442 @LoginRequired()
441 @LoginRequired()
443 @HasPermissionAllDecorator('hg.admin')
442 @HasPermissionAllDecorator('hg.admin')
444 @view_config(
443 @view_config(
445 route_name='user_edit_advanced', request_method='GET',
444 route_name='user_edit_advanced', request_method='GET',
446 renderer='rhodecode:templates/admin/users/user_edit.mako')
445 renderer='rhodecode:templates/admin/users/user_edit.mako')
447 def user_edit_advanced(self):
446 def user_edit_advanced(self):
448 _ = self.request.translate
447 _ = self.request.translate
449 c = self.load_default_context()
448 c = self.load_default_context()
450
449
451 user_id = self.db_user_id
450 user_id = self.db_user_id
452 c.user = self.db_user
451 c.user = self.db_user
453
452
454 c.active = 'advanced'
453 c.active = 'advanced'
455 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
454 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
456 c.personal_repo_group_name = RepoGroupModel()\
455 c.personal_repo_group_name = RepoGroupModel()\
457 .get_personal_group_name(c.user)
456 .get_personal_group_name(c.user)
458
457
459 c.user_to_review_rules = sorted(
458 c.user_to_review_rules = sorted(
460 (x.user for x in c.user.user_review_rules),
459 (x.user for x in c.user.user_review_rules),
461 key=lambda u: u.username.lower())
460 key=lambda u: u.username.lower())
462
461
463 c.first_admin = User.get_first_super_admin()
462 c.first_admin = User.get_first_super_admin()
464 defaults = c.user.get_dict()
463 defaults = c.user.get_dict()
465
464
466 # Interim workaround if the user participated in any pull requests as a
465 # Interim workaround if the user participated in any pull requests as a
467 # reviewer.
466 # reviewer.
468 has_review = len(c.user.reviewer_pull_requests)
467 has_review = len(c.user.reviewer_pull_requests)
469 c.can_delete_user = not has_review
468 c.can_delete_user = not has_review
470 c.can_delete_user_message = ''
469 c.can_delete_user_message = ''
471 inactive_link = h.link_to(
470 inactive_link = h.link_to(
472 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
471 'inactive', h.route_path('user_edit', user_id=user_id, _anchor='active'))
473 if has_review == 1:
472 if has_review == 1:
474 c.can_delete_user_message = h.literal(_(
473 c.can_delete_user_message = h.literal(_(
475 'The user participates as reviewer in {} pull request and '
474 'The user participates as reviewer in {} pull request and '
476 'cannot be deleted. \nYou can set the user to '
475 'cannot be deleted. \nYou can set the user to '
477 '"{}" instead of deleting it.').format(
476 '"{}" instead of deleting it.').format(
478 has_review, inactive_link))
477 has_review, inactive_link))
479 elif has_review:
478 elif has_review:
480 c.can_delete_user_message = h.literal(_(
479 c.can_delete_user_message = h.literal(_(
481 'The user participates as reviewer in {} pull requests and '
480 'The user participates as reviewer in {} pull requests and '
482 'cannot be deleted. \nYou can set the user to '
481 'cannot be deleted. \nYou can set the user to '
483 '"{}" instead of deleting it.').format(
482 '"{}" instead of deleting it.').format(
484 has_review, inactive_link))
483 has_review, inactive_link))
485
484
486 data = render(
485 data = render(
487 'rhodecode:templates/admin/users/user_edit.mako',
486 'rhodecode:templates/admin/users/user_edit.mako',
488 self._get_template_context(c), self.request)
487 self._get_template_context(c), self.request)
489 html = formencode.htmlfill.render(
488 html = formencode.htmlfill.render(
490 data,
489 data,
491 defaults=defaults,
490 defaults=defaults,
492 encoding="UTF-8",
491 encoding="UTF-8",
493 force_defaults=False
492 force_defaults=False
494 )
493 )
495 return Response(html)
494 return Response(html)
496
495
497 @LoginRequired()
496 @LoginRequired()
498 @HasPermissionAllDecorator('hg.admin')
497 @HasPermissionAllDecorator('hg.admin')
499 @view_config(
498 @view_config(
500 route_name='user_edit_global_perms', request_method='GET',
499 route_name='user_edit_global_perms', request_method='GET',
501 renderer='rhodecode:templates/admin/users/user_edit.mako')
500 renderer='rhodecode:templates/admin/users/user_edit.mako')
502 def user_edit_global_perms(self):
501 def user_edit_global_perms(self):
503 _ = self.request.translate
502 _ = self.request.translate
504 c = self.load_default_context()
503 c = self.load_default_context()
505 c.user = self.db_user
504 c.user = self.db_user
506
505
507 c.active = 'global_perms'
506 c.active = 'global_perms'
508
507
509 c.default_user = User.get_default_user()
508 c.default_user = User.get_default_user()
510 defaults = c.user.get_dict()
509 defaults = c.user.get_dict()
511 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
510 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
512 defaults.update(c.default_user.get_default_perms())
511 defaults.update(c.default_user.get_default_perms())
513 defaults.update(c.user.get_default_perms())
512 defaults.update(c.user.get_default_perms())
514
513
515 data = render(
514 data = render(
516 'rhodecode:templates/admin/users/user_edit.mako',
515 'rhodecode:templates/admin/users/user_edit.mako',
517 self._get_template_context(c), self.request)
516 self._get_template_context(c), self.request)
518 html = formencode.htmlfill.render(
517 html = formencode.htmlfill.render(
519 data,
518 data,
520 defaults=defaults,
519 defaults=defaults,
521 encoding="UTF-8",
520 encoding="UTF-8",
522 force_defaults=False
521 force_defaults=False
523 )
522 )
524 return Response(html)
523 return Response(html)
525
524
526 @LoginRequired()
525 @LoginRequired()
527 @HasPermissionAllDecorator('hg.admin')
526 @HasPermissionAllDecorator('hg.admin')
528 @CSRFRequired()
527 @CSRFRequired()
529 @view_config(
528 @view_config(
530 route_name='user_edit_global_perms_update', request_method='POST',
529 route_name='user_edit_global_perms_update', request_method='POST',
531 renderer='rhodecode:templates/admin/users/user_edit.mako')
530 renderer='rhodecode:templates/admin/users/user_edit.mako')
532 def user_edit_global_perms_update(self):
531 def user_edit_global_perms_update(self):
533 _ = self.request.translate
532 _ = self.request.translate
534 c = self.load_default_context()
533 c = self.load_default_context()
535
534
536 user_id = self.db_user_id
535 user_id = self.db_user_id
537 c.user = self.db_user
536 c.user = self.db_user
538
537
539 c.active = 'global_perms'
538 c.active = 'global_perms'
540 try:
539 try:
541 # first stage that verifies the checkbox
540 # first stage that verifies the checkbox
542 _form = UserIndividualPermissionsForm(self.request.translate)
541 _form = UserIndividualPermissionsForm(self.request.translate)
543 form_result = _form.to_python(dict(self.request.POST))
542 form_result = _form.to_python(dict(self.request.POST))
544 inherit_perms = form_result['inherit_default_permissions']
543 inherit_perms = form_result['inherit_default_permissions']
545 c.user.inherit_default_permissions = inherit_perms
544 c.user.inherit_default_permissions = inherit_perms
546 Session().add(c.user)
545 Session().add(c.user)
547
546
548 if not inherit_perms:
547 if not inherit_perms:
549 # only update the individual ones if we uncheck the flag
548 # only update the individual ones if we uncheck the flag
550 _form = UserPermissionsForm(
549 _form = UserPermissionsForm(
551 self.request.translate,
550 self.request.translate,
552 [x[0] for x in c.repo_create_choices],
551 [x[0] for x in c.repo_create_choices],
553 [x[0] for x in c.repo_create_on_write_choices],
552 [x[0] for x in c.repo_create_on_write_choices],
554 [x[0] for x in c.repo_group_create_choices],
553 [x[0] for x in c.repo_group_create_choices],
555 [x[0] for x in c.user_group_create_choices],
554 [x[0] for x in c.user_group_create_choices],
556 [x[0] for x in c.fork_choices],
555 [x[0] for x in c.fork_choices],
557 [x[0] for x in c.inherit_default_permission_choices])()
556 [x[0] for x in c.inherit_default_permission_choices])()
558
557
559 form_result = _form.to_python(dict(self.request.POST))
558 form_result = _form.to_python(dict(self.request.POST))
560 form_result.update({'perm_user_id': c.user.user_id})
559 form_result.update({'perm_user_id': c.user.user_id})
561
560
562 PermissionModel().update_user_permissions(form_result)
561 PermissionModel().update_user_permissions(form_result)
563
562
564 # TODO(marcink): implement global permissions
563 # TODO(marcink): implement global permissions
565 # audit_log.store_web('user.edit.permissions')
564 # audit_log.store_web('user.edit.permissions')
566
565
567 Session().commit()
566 Session().commit()
568 h.flash(_('User global permissions updated successfully'),
567 h.flash(_('User global permissions updated successfully'),
569 category='success')
568 category='success')
570
569
571 except formencode.Invalid as errors:
570 except formencode.Invalid as errors:
572 data = render(
571 data = render(
573 'rhodecode:templates/admin/users/user_edit.mako',
572 'rhodecode:templates/admin/users/user_edit.mako',
574 self._get_template_context(c), self.request)
573 self._get_template_context(c), self.request)
575 html = formencode.htmlfill.render(
574 html = formencode.htmlfill.render(
576 data,
575 data,
577 defaults=errors.value,
576 defaults=errors.value,
578 errors=errors.error_dict or {},
577 errors=errors.error_dict or {},
579 prefix_error=False,
578 prefix_error=False,
580 encoding="UTF-8",
579 encoding="UTF-8",
581 force_defaults=False
580 force_defaults=False
582 )
581 )
583 return Response(html)
582 return Response(html)
584 except Exception:
583 except Exception:
585 log.exception("Exception during permissions saving")
584 log.exception("Exception during permissions saving")
586 h.flash(_('An error occurred during permissions saving'),
585 h.flash(_('An error occurred during permissions saving'),
587 category='error')
586 category='error')
588 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
587 raise HTTPFound(h.route_path('user_edit_global_perms', user_id=user_id))
589
588
590 @LoginRequired()
589 @LoginRequired()
591 @HasPermissionAllDecorator('hg.admin')
590 @HasPermissionAllDecorator('hg.admin')
592 @CSRFRequired()
591 @CSRFRequired()
593 @view_config(
592 @view_config(
594 route_name='user_force_password_reset', request_method='POST',
593 route_name='user_force_password_reset', request_method='POST',
595 renderer='rhodecode:templates/admin/users/user_edit.mako')
594 renderer='rhodecode:templates/admin/users/user_edit.mako')
596 def user_force_password_reset(self):
595 def user_force_password_reset(self):
597 """
596 """
598 toggle the force password change flag for this user
597 toggle the force password change flag for this user
599 """
598 """
600 _ = self.request.translate
599 _ = self.request.translate
601 c = self.load_default_context()
600 c = self.load_default_context()
602
601
603 user_id = self.db_user_id
602 user_id = self.db_user_id
604 c.user = self.db_user
603 c.user = self.db_user
605
604
606 try:
605 try:
607 old_value = c.user.user_data.get('force_password_change')
606 old_value = c.user.user_data.get('force_password_change')
608 c.user.update_userdata(force_password_change=not old_value)
607 c.user.update_userdata(force_password_change=not old_value)
609
608
610 if old_value:
609 if old_value:
611 msg = _('Force password change disabled for user')
610 msg = _('Force password change disabled for user')
612 audit_logger.store_web(
611 audit_logger.store_web(
613 'user.edit.password_reset.disabled',
612 'user.edit.password_reset.disabled',
614 user=c.rhodecode_user)
613 user=c.rhodecode_user)
615 else:
614 else:
616 msg = _('Force password change enabled for user')
615 msg = _('Force password change enabled for user')
617 audit_logger.store_web(
616 audit_logger.store_web(
618 'user.edit.password_reset.enabled',
617 'user.edit.password_reset.enabled',
619 user=c.rhodecode_user)
618 user=c.rhodecode_user)
620
619
621 Session().commit()
620 Session().commit()
622 h.flash(msg, category='success')
621 h.flash(msg, category='success')
623 except Exception:
622 except Exception:
624 log.exception("Exception during password reset for user")
623 log.exception("Exception during password reset for user")
625 h.flash(_('An error occurred during password reset for user'),
624 h.flash(_('An error occurred during password reset for user'),
626 category='error')
625 category='error')
627
626
628 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
627 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
629
628
630 @LoginRequired()
629 @LoginRequired()
631 @HasPermissionAllDecorator('hg.admin')
630 @HasPermissionAllDecorator('hg.admin')
632 @CSRFRequired()
631 @CSRFRequired()
633 @view_config(
632 @view_config(
634 route_name='user_create_personal_repo_group', request_method='POST',
633 route_name='user_create_personal_repo_group', request_method='POST',
635 renderer='rhodecode:templates/admin/users/user_edit.mako')
634 renderer='rhodecode:templates/admin/users/user_edit.mako')
636 def user_create_personal_repo_group(self):
635 def user_create_personal_repo_group(self):
637 """
636 """
638 Create personal repository group for this user
637 Create personal repository group for this user
639 """
638 """
640 from rhodecode.model.repo_group import RepoGroupModel
639 from rhodecode.model.repo_group import RepoGroupModel
641
640
642 _ = self.request.translate
641 _ = self.request.translate
643 c = self.load_default_context()
642 c = self.load_default_context()
644
643
645 user_id = self.db_user_id
644 user_id = self.db_user_id
646 c.user = self.db_user
645 c.user = self.db_user
647
646
648 personal_repo_group = RepoGroup.get_user_personal_repo_group(
647 personal_repo_group = RepoGroup.get_user_personal_repo_group(
649 c.user.user_id)
648 c.user.user_id)
650 if personal_repo_group:
649 if personal_repo_group:
651 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
650 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
652
651
653 personal_repo_group_name = RepoGroupModel().get_personal_group_name(
652 personal_repo_group_name = RepoGroupModel().get_personal_group_name(
654 c.user)
653 c.user)
655 named_personal_group = RepoGroup.get_by_group_name(
654 named_personal_group = RepoGroup.get_by_group_name(
656 personal_repo_group_name)
655 personal_repo_group_name)
657 try:
656 try:
658
657
659 if named_personal_group and named_personal_group.user_id == c.user.user_id:
658 if named_personal_group and named_personal_group.user_id == c.user.user_id:
660 # migrate the same-named group and mark it as personal
659 # migrate the same-named group and mark it as personal
661 named_personal_group.personal = True
660 named_personal_group.personal = True
662 Session().add(named_personal_group)
661 Session().add(named_personal_group)
663 Session().commit()
662 Session().commit()
664 msg = _('Linked repository group `%s` as personal') % (
663 msg = _('Linked repository group `%s` as personal') % (
665 personal_repo_group_name,)
664 personal_repo_group_name,)
666 h.flash(msg, category='success')
665 h.flash(msg, category='success')
667 elif not named_personal_group:
666 elif not named_personal_group:
668 RepoGroupModel().create_personal_repo_group(c.user)
667 RepoGroupModel().create_personal_repo_group(c.user)
669
668
670 msg = _('Created repository group `%s`') % (
669 msg = _('Created repository group `%s`') % (
671 personal_repo_group_name,)
670 personal_repo_group_name,)
672 h.flash(msg, category='success')
671 h.flash(msg, category='success')
673 else:
672 else:
674 msg = _('Repository group `%s` is already taken') % (
673 msg = _('Repository group `%s` is already taken') % (
675 personal_repo_group_name,)
674 personal_repo_group_name,)
676 h.flash(msg, category='warning')
675 h.flash(msg, category='warning')
677 except Exception:
676 except Exception:
678 log.exception("Exception during repository group creation")
677 log.exception("Exception during repository group creation")
679 msg = _(
678 msg = _(
680 'An error occurred during repository group creation for user')
679 'An error occurred during repository group creation for user')
681 h.flash(msg, category='error')
680 h.flash(msg, category='error')
682 Session().rollback()
681 Session().rollback()
683
682
684 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
683 raise HTTPFound(h.route_path('user_edit_advanced', user_id=user_id))
685
684
686 @LoginRequired()
685 @LoginRequired()
687 @HasPermissionAllDecorator('hg.admin')
686 @HasPermissionAllDecorator('hg.admin')
688 @view_config(
687 @view_config(
689 route_name='edit_user_auth_tokens', request_method='GET',
688 route_name='edit_user_auth_tokens', request_method='GET',
690 renderer='rhodecode:templates/admin/users/user_edit.mako')
689 renderer='rhodecode:templates/admin/users/user_edit.mako')
691 def auth_tokens(self):
690 def auth_tokens(self):
692 _ = self.request.translate
691 _ = self.request.translate
693 c = self.load_default_context()
692 c = self.load_default_context()
694 c.user = self.db_user
693 c.user = self.db_user
695
694
696 c.active = 'auth_tokens'
695 c.active = 'auth_tokens'
697
696
698 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
697 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
699 c.role_values = [
698 c.role_values = [
700 (x, AuthTokenModel.cls._get_role_name(x))
699 (x, AuthTokenModel.cls._get_role_name(x))
701 for x in AuthTokenModel.cls.ROLES]
700 for x in AuthTokenModel.cls.ROLES]
702 c.role_options = [(c.role_values, _("Role"))]
701 c.role_options = [(c.role_values, _("Role"))]
703 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
702 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
704 c.user.user_id, show_expired=True)
703 c.user.user_id, show_expired=True)
705 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
704 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
706 return self._get_template_context(c)
705 return self._get_template_context(c)
707
706
708 def maybe_attach_token_scope(self, token):
707 def maybe_attach_token_scope(self, token):
709 # implemented in EE edition
708 # implemented in EE edition
710 pass
709 pass
711
710
712 @LoginRequired()
711 @LoginRequired()
713 @HasPermissionAllDecorator('hg.admin')
712 @HasPermissionAllDecorator('hg.admin')
714 @CSRFRequired()
713 @CSRFRequired()
715 @view_config(
714 @view_config(
716 route_name='edit_user_auth_tokens_add', request_method='POST')
715 route_name='edit_user_auth_tokens_add', request_method='POST')
717 def auth_tokens_add(self):
716 def auth_tokens_add(self):
718 _ = self.request.translate
717 _ = self.request.translate
719 c = self.load_default_context()
718 c = self.load_default_context()
720
719
721 user_id = self.db_user_id
720 user_id = self.db_user_id
722 c.user = self.db_user
721 c.user = self.db_user
723
722
724 user_data = c.user.get_api_data()
723 user_data = c.user.get_api_data()
725 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
724 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
726 description = self.request.POST.get('description')
725 description = self.request.POST.get('description')
727 role = self.request.POST.get('role')
726 role = self.request.POST.get('role')
728
727
729 token = AuthTokenModel().create(
728 token = AuthTokenModel().create(
730 c.user.user_id, description, lifetime, role)
729 c.user.user_id, description, lifetime, role)
731 token_data = token.get_api_data()
730 token_data = token.get_api_data()
732
731
733 self.maybe_attach_token_scope(token)
732 self.maybe_attach_token_scope(token)
734 audit_logger.store_web(
733 audit_logger.store_web(
735 'user.edit.token.add', action_data={
734 'user.edit.token.add', action_data={
736 'data': {'token': token_data, 'user': user_data}},
735 'data': {'token': token_data, 'user': user_data}},
737 user=self._rhodecode_user, )
736 user=self._rhodecode_user, )
738 Session().commit()
737 Session().commit()
739
738
740 h.flash(_("Auth token successfully created"), category='success')
739 h.flash(_("Auth token successfully created"), category='success')
741 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
740 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
742
741
743 @LoginRequired()
742 @LoginRequired()
744 @HasPermissionAllDecorator('hg.admin')
743 @HasPermissionAllDecorator('hg.admin')
745 @CSRFRequired()
744 @CSRFRequired()
746 @view_config(
745 @view_config(
747 route_name='edit_user_auth_tokens_delete', request_method='POST')
746 route_name='edit_user_auth_tokens_delete', request_method='POST')
748 def auth_tokens_delete(self):
747 def auth_tokens_delete(self):
749 _ = self.request.translate
748 _ = self.request.translate
750 c = self.load_default_context()
749 c = self.load_default_context()
751
750
752 user_id = self.db_user_id
751 user_id = self.db_user_id
753 c.user = self.db_user
752 c.user = self.db_user
754
753
755 user_data = c.user.get_api_data()
754 user_data = c.user.get_api_data()
756
755
757 del_auth_token = self.request.POST.get('del_auth_token')
756 del_auth_token = self.request.POST.get('del_auth_token')
758
757
759 if del_auth_token:
758 if del_auth_token:
760 token = UserApiKeys.get_or_404(del_auth_token)
759 token = UserApiKeys.get_or_404(del_auth_token)
761 token_data = token.get_api_data()
760 token_data = token.get_api_data()
762
761
763 AuthTokenModel().delete(del_auth_token, c.user.user_id)
762 AuthTokenModel().delete(del_auth_token, c.user.user_id)
764 audit_logger.store_web(
763 audit_logger.store_web(
765 'user.edit.token.delete', action_data={
764 'user.edit.token.delete', action_data={
766 'data': {'token': token_data, 'user': user_data}},
765 'data': {'token': token_data, 'user': user_data}},
767 user=self._rhodecode_user,)
766 user=self._rhodecode_user,)
768 Session().commit()
767 Session().commit()
769 h.flash(_("Auth token successfully deleted"), category='success')
768 h.flash(_("Auth token successfully deleted"), category='success')
770
769
771 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
770 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
772
771
773 @LoginRequired()
772 @LoginRequired()
774 @HasPermissionAllDecorator('hg.admin')
773 @HasPermissionAllDecorator('hg.admin')
775 @view_config(
774 @view_config(
776 route_name='edit_user_ssh_keys', request_method='GET',
775 route_name='edit_user_ssh_keys', request_method='GET',
777 renderer='rhodecode:templates/admin/users/user_edit.mako')
776 renderer='rhodecode:templates/admin/users/user_edit.mako')
778 def ssh_keys(self):
777 def ssh_keys(self):
779 _ = self.request.translate
778 _ = self.request.translate
780 c = self.load_default_context()
779 c = self.load_default_context()
781 c.user = self.db_user
780 c.user = self.db_user
782
781
783 c.active = 'ssh_keys'
782 c.active = 'ssh_keys'
784 c.default_key = self.request.GET.get('default_key')
783 c.default_key = self.request.GET.get('default_key')
785 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
784 c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
786 return self._get_template_context(c)
785 return self._get_template_context(c)
787
786
788 @LoginRequired()
787 @LoginRequired()
789 @HasPermissionAllDecorator('hg.admin')
788 @HasPermissionAllDecorator('hg.admin')
790 @view_config(
789 @view_config(
791 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
790 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
792 renderer='rhodecode:templates/admin/users/user_edit.mako')
791 renderer='rhodecode:templates/admin/users/user_edit.mako')
793 def ssh_keys_generate_keypair(self):
792 def ssh_keys_generate_keypair(self):
794 _ = self.request.translate
793 _ = self.request.translate
795 c = self.load_default_context()
794 c = self.load_default_context()
796
795
797 c.user = self.db_user
796 c.user = self.db_user
798
797
799 c.active = 'ssh_keys_generate'
798 c.active = 'ssh_keys_generate'
800 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
799 comment = 'RhodeCode-SSH {}'.format(c.user.email or '')
801 c.private, c.public = SshKeyModel().generate_keypair(comment=comment)
800 c.private, c.public = SshKeyModel().generate_keypair(comment=comment)
802
801
803 return self._get_template_context(c)
802 return self._get_template_context(c)
804
803
805 @LoginRequired()
804 @LoginRequired()
806 @HasPermissionAllDecorator('hg.admin')
805 @HasPermissionAllDecorator('hg.admin')
807 @CSRFRequired()
806 @CSRFRequired()
808 @view_config(
807 @view_config(
809 route_name='edit_user_ssh_keys_add', request_method='POST')
808 route_name='edit_user_ssh_keys_add', request_method='POST')
810 def ssh_keys_add(self):
809 def ssh_keys_add(self):
811 _ = self.request.translate
810 _ = self.request.translate
812 c = self.load_default_context()
811 c = self.load_default_context()
813
812
814 user_id = self.db_user_id
813 user_id = self.db_user_id
815 c.user = self.db_user
814 c.user = self.db_user
816
815
817 user_data = c.user.get_api_data()
816 user_data = c.user.get_api_data()
818 key_data = self.request.POST.get('key_data')
817 key_data = self.request.POST.get('key_data')
819 description = self.request.POST.get('description')
818 description = self.request.POST.get('description')
820
819
821 try:
820 try:
822 if not key_data:
821 if not key_data:
823 raise ValueError('Please add a valid public key')
822 raise ValueError('Please add a valid public key')
824
823
825 key = SshKeyModel().parse_key(key_data.strip())
824 key = SshKeyModel().parse_key(key_data.strip())
826 fingerprint = key.hash_md5()
825 fingerprint = key.hash_md5()
827
826
828 ssh_key = SshKeyModel().create(
827 ssh_key = SshKeyModel().create(
829 c.user.user_id, fingerprint, key_data, description)
828 c.user.user_id, fingerprint, key_data, description)
830 ssh_key_data = ssh_key.get_api_data()
829 ssh_key_data = ssh_key.get_api_data()
831
830
832 audit_logger.store_web(
831 audit_logger.store_web(
833 'user.edit.ssh_key.add', action_data={
832 'user.edit.ssh_key.add', action_data={
834 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
833 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
835 user=self._rhodecode_user, )
834 user=self._rhodecode_user, )
836 Session().commit()
835 Session().commit()
837
836
838 # Trigger an event on change of keys.
837 # Trigger an event on change of keys.
839 trigger(SshKeyFileChangeEvent(), self.request.registry)
838 trigger(SshKeyFileChangeEvent(), self.request.registry)
840
839
841 h.flash(_("Ssh Key successfully created"), category='success')
840 h.flash(_("Ssh Key successfully created"), category='success')
842
841
843 except IntegrityError:
842 except IntegrityError:
844 log.exception("Exception during ssh key saving")
843 log.exception("Exception during ssh key saving")
845 h.flash(_('An error occurred during ssh key saving: {}').format(
844 h.flash(_('An error occurred during ssh key saving: {}').format(
846 'Such key already exists, please use a different one'),
845 'Such key already exists, please use a different one'),
847 category='error')
846 category='error')
848 except Exception as e:
847 except Exception as e:
849 log.exception("Exception during ssh key saving")
848 log.exception("Exception during ssh key saving")
850 h.flash(_('An error occurred during ssh key saving: {}').format(e),
849 h.flash(_('An error occurred during ssh key saving: {}').format(e),
851 category='error')
850 category='error')
852
851
853 return HTTPFound(
852 return HTTPFound(
854 h.route_path('edit_user_ssh_keys', user_id=user_id))
853 h.route_path('edit_user_ssh_keys', user_id=user_id))
855
854
856 @LoginRequired()
855 @LoginRequired()
857 @HasPermissionAllDecorator('hg.admin')
856 @HasPermissionAllDecorator('hg.admin')
858 @CSRFRequired()
857 @CSRFRequired()
859 @view_config(
858 @view_config(
860 route_name='edit_user_ssh_keys_delete', request_method='POST')
859 route_name='edit_user_ssh_keys_delete', request_method='POST')
861 def ssh_keys_delete(self):
860 def ssh_keys_delete(self):
862 _ = self.request.translate
861 _ = self.request.translate
863 c = self.load_default_context()
862 c = self.load_default_context()
864
863
865 user_id = self.db_user_id
864 user_id = self.db_user_id
866 c.user = self.db_user
865 c.user = self.db_user
867
866
868 user_data = c.user.get_api_data()
867 user_data = c.user.get_api_data()
869
868
870 del_ssh_key = self.request.POST.get('del_ssh_key')
869 del_ssh_key = self.request.POST.get('del_ssh_key')
871
870
872 if del_ssh_key:
871 if del_ssh_key:
873 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
872 ssh_key = UserSshKeys.get_or_404(del_ssh_key)
874 ssh_key_data = ssh_key.get_api_data()
873 ssh_key_data = ssh_key.get_api_data()
875
874
876 SshKeyModel().delete(del_ssh_key, c.user.user_id)
875 SshKeyModel().delete(del_ssh_key, c.user.user_id)
877 audit_logger.store_web(
876 audit_logger.store_web(
878 'user.edit.ssh_key.delete', action_data={
877 'user.edit.ssh_key.delete', action_data={
879 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
878 'data': {'ssh_key': ssh_key_data, 'user': user_data}},
880 user=self._rhodecode_user,)
879 user=self._rhodecode_user,)
881 Session().commit()
880 Session().commit()
882 # Trigger an event on change of keys.
881 # Trigger an event on change of keys.
883 trigger(SshKeyFileChangeEvent(), self.request.registry)
882 trigger(SshKeyFileChangeEvent(), self.request.registry)
884 h.flash(_("Ssh key successfully deleted"), category='success')
883 h.flash(_("Ssh key successfully deleted"), category='success')
885
884
886 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
885 return HTTPFound(h.route_path('edit_user_ssh_keys', user_id=user_id))
887
886
888 @LoginRequired()
887 @LoginRequired()
889 @HasPermissionAllDecorator('hg.admin')
888 @HasPermissionAllDecorator('hg.admin')
890 @view_config(
889 @view_config(
891 route_name='edit_user_emails', request_method='GET',
890 route_name='edit_user_emails', request_method='GET',
892 renderer='rhodecode:templates/admin/users/user_edit.mako')
891 renderer='rhodecode:templates/admin/users/user_edit.mako')
893 def emails(self):
892 def emails(self):
894 _ = self.request.translate
893 _ = self.request.translate
895 c = self.load_default_context()
894 c = self.load_default_context()
896 c.user = self.db_user
895 c.user = self.db_user
897
896
898 c.active = 'emails'
897 c.active = 'emails'
899 c.user_email_map = UserEmailMap.query() \
898 c.user_email_map = UserEmailMap.query() \
900 .filter(UserEmailMap.user == c.user).all()
899 .filter(UserEmailMap.user == c.user).all()
901
900
902 return self._get_template_context(c)
901 return self._get_template_context(c)
903
902
904 @LoginRequired()
903 @LoginRequired()
905 @HasPermissionAllDecorator('hg.admin')
904 @HasPermissionAllDecorator('hg.admin')
906 @CSRFRequired()
905 @CSRFRequired()
907 @view_config(
906 @view_config(
908 route_name='edit_user_emails_add', request_method='POST')
907 route_name='edit_user_emails_add', request_method='POST')
909 def emails_add(self):
908 def emails_add(self):
910 _ = self.request.translate
909 _ = self.request.translate
911 c = self.load_default_context()
910 c = self.load_default_context()
912
911
913 user_id = self.db_user_id
912 user_id = self.db_user_id
914 c.user = self.db_user
913 c.user = self.db_user
915
914
916 email = self.request.POST.get('new_email')
915 email = self.request.POST.get('new_email')
917 user_data = c.user.get_api_data()
916 user_data = c.user.get_api_data()
918 try:
917 try:
919
918
920 form = UserExtraEmailForm(self.request.translate)()
919 form = UserExtraEmailForm(self.request.translate)()
921 data = form.to_python({'email': email})
920 data = form.to_python({'email': email})
922 email = data['email']
921 email = data['email']
923
922
924 UserModel().add_extra_email(c.user.user_id, email)
923 UserModel().add_extra_email(c.user.user_id, email)
925 audit_logger.store_web(
924 audit_logger.store_web(
926 'user.edit.email.add',
925 'user.edit.email.add',
927 action_data={'email': email, 'user': user_data},
926 action_data={'email': email, 'user': user_data},
928 user=self._rhodecode_user)
927 user=self._rhodecode_user)
929 Session().commit()
928 Session().commit()
930 h.flash(_("Added new email address `%s` for user account") % email,
929 h.flash(_("Added new email address `%s` for user account") % email,
931 category='success')
930 category='success')
932 except formencode.Invalid as error:
931 except formencode.Invalid as error:
933 h.flash(h.escape(error.error_dict['email']), category='error')
932 h.flash(h.escape(error.error_dict['email']), category='error')
934 except IntegrityError:
933 except IntegrityError:
935 log.warning("Email %s already exists", email)
934 log.warning("Email %s already exists", email)
936 h.flash(_('Email `{}` is already registered for another user.').format(email),
935 h.flash(_('Email `{}` is already registered for another user.').format(email),
937 category='error')
936 category='error')
938 except Exception:
937 except Exception:
939 log.exception("Exception during email saving")
938 log.exception("Exception during email saving")
940 h.flash(_('An error occurred during email saving'),
939 h.flash(_('An error occurred during email saving'),
941 category='error')
940 category='error')
942 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
941 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
943
942
944 @LoginRequired()
943 @LoginRequired()
945 @HasPermissionAllDecorator('hg.admin')
944 @HasPermissionAllDecorator('hg.admin')
946 @CSRFRequired()
945 @CSRFRequired()
947 @view_config(
946 @view_config(
948 route_name='edit_user_emails_delete', request_method='POST')
947 route_name='edit_user_emails_delete', request_method='POST')
949 def emails_delete(self):
948 def emails_delete(self):
950 _ = self.request.translate
949 _ = self.request.translate
951 c = self.load_default_context()
950 c = self.load_default_context()
952
951
953 user_id = self.db_user_id
952 user_id = self.db_user_id
954 c.user = self.db_user
953 c.user = self.db_user
955
954
956 email_id = self.request.POST.get('del_email_id')
955 email_id = self.request.POST.get('del_email_id')
957 user_model = UserModel()
956 user_model = UserModel()
958
957
959 email = UserEmailMap.query().get(email_id).email
958 email = UserEmailMap.query().get(email_id).email
960 user_data = c.user.get_api_data()
959 user_data = c.user.get_api_data()
961 user_model.delete_extra_email(c.user.user_id, email_id)
960 user_model.delete_extra_email(c.user.user_id, email_id)
962 audit_logger.store_web(
961 audit_logger.store_web(
963 'user.edit.email.delete',
962 'user.edit.email.delete',
964 action_data={'email': email, 'user': user_data},
963 action_data={'email': email, 'user': user_data},
965 user=self._rhodecode_user)
964 user=self._rhodecode_user)
966 Session().commit()
965 Session().commit()
967 h.flash(_("Removed email address from user account"),
966 h.flash(_("Removed email address from user account"),
968 category='success')
967 category='success')
969 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
968 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
970
969
971 @LoginRequired()
970 @LoginRequired()
972 @HasPermissionAllDecorator('hg.admin')
971 @HasPermissionAllDecorator('hg.admin')
973 @view_config(
972 @view_config(
974 route_name='edit_user_ips', request_method='GET',
973 route_name='edit_user_ips', request_method='GET',
975 renderer='rhodecode:templates/admin/users/user_edit.mako')
974 renderer='rhodecode:templates/admin/users/user_edit.mako')
976 def ips(self):
975 def ips(self):
977 _ = self.request.translate
976 _ = self.request.translate
978 c = self.load_default_context()
977 c = self.load_default_context()
979 c.user = self.db_user
978 c.user = self.db_user
980
979
981 c.active = 'ips'
980 c.active = 'ips'
982 c.user_ip_map = UserIpMap.query() \
981 c.user_ip_map = UserIpMap.query() \
983 .filter(UserIpMap.user == c.user).all()
982 .filter(UserIpMap.user == c.user).all()
984
983
985 c.inherit_default_ips = c.user.inherit_default_permissions
984 c.inherit_default_ips = c.user.inherit_default_permissions
986 c.default_user_ip_map = UserIpMap.query() \
985 c.default_user_ip_map = UserIpMap.query() \
987 .filter(UserIpMap.user == User.get_default_user()).all()
986 .filter(UserIpMap.user == User.get_default_user()).all()
988
987
989 return self._get_template_context(c)
988 return self._get_template_context(c)
990
989
991 @LoginRequired()
990 @LoginRequired()
992 @HasPermissionAllDecorator('hg.admin')
991 @HasPermissionAllDecorator('hg.admin')
993 @CSRFRequired()
992 @CSRFRequired()
994 @view_config(
993 @view_config(
995 route_name='edit_user_ips_add', request_method='POST')
994 route_name='edit_user_ips_add', request_method='POST')
996 # NOTE(marcink): this view is allowed for default users, as we can
995 # NOTE(marcink): this view is allowed for default users, as we can
997 # edit their IP whitelist
996 # edit their IP whitelist
998 def ips_add(self):
997 def ips_add(self):
999 _ = self.request.translate
998 _ = self.request.translate
1000 c = self.load_default_context()
999 c = self.load_default_context()
1001
1000
1002 user_id = self.db_user_id
1001 user_id = self.db_user_id
1003 c.user = self.db_user
1002 c.user = self.db_user
1004
1003
1005 user_model = UserModel()
1004 user_model = UserModel()
1006 desc = self.request.POST.get('description')
1005 desc = self.request.POST.get('description')
1007 try:
1006 try:
1008 ip_list = user_model.parse_ip_range(
1007 ip_list = user_model.parse_ip_range(
1009 self.request.POST.get('new_ip'))
1008 self.request.POST.get('new_ip'))
1010 except Exception as e:
1009 except Exception as e:
1011 ip_list = []
1010 ip_list = []
1012 log.exception("Exception during ip saving")
1011 log.exception("Exception during ip saving")
1013 h.flash(_('An error occurred during ip saving: %s') % (e,),
1012 h.flash(_('An error occurred during ip saving: %s') % (e,),
1014 category='error')
1013 category='error')
1015 added = []
1014 added = []
1016 user_data = c.user.get_api_data()
1015 user_data = c.user.get_api_data()
1017 for ip in ip_list:
1016 for ip in ip_list:
1018 try:
1017 try:
1019 form = UserExtraIpForm(self.request.translate)()
1018 form = UserExtraIpForm(self.request.translate)()
1020 data = form.to_python({'ip': ip})
1019 data = form.to_python({'ip': ip})
1021 ip = data['ip']
1020 ip = data['ip']
1022
1021
1023 user_model.add_extra_ip(c.user.user_id, ip, desc)
1022 user_model.add_extra_ip(c.user.user_id, ip, desc)
1024 audit_logger.store_web(
1023 audit_logger.store_web(
1025 'user.edit.ip.add',
1024 'user.edit.ip.add',
1026 action_data={'ip': ip, 'user': user_data},
1025 action_data={'ip': ip, 'user': user_data},
1027 user=self._rhodecode_user)
1026 user=self._rhodecode_user)
1028 Session().commit()
1027 Session().commit()
1029 added.append(ip)
1028 added.append(ip)
1030 except formencode.Invalid as error:
1029 except formencode.Invalid as error:
1031 msg = error.error_dict['ip']
1030 msg = error.error_dict['ip']
1032 h.flash(msg, category='error')
1031 h.flash(msg, category='error')
1033 except Exception:
1032 except Exception:
1034 log.exception("Exception during ip saving")
1033 log.exception("Exception during ip saving")
1035 h.flash(_('An error occurred during ip saving'),
1034 h.flash(_('An error occurred during ip saving'),
1036 category='error')
1035 category='error')
1037 if added:
1036 if added:
1038 h.flash(
1037 h.flash(
1039 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1038 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
1040 category='success')
1039 category='success')
1041 if 'default_user' in self.request.POST:
1040 if 'default_user' in self.request.POST:
1042 # when editing the global IP list we do it for the 'DEFAULT' user
1041 # when editing the global IP list we do it for the 'DEFAULT' user
1043 raise HTTPFound(h.route_path('admin_permissions_ips'))
1042 raise HTTPFound(h.route_path('admin_permissions_ips'))
1044 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1043 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1045
1044
1046 @LoginRequired()
1045 @LoginRequired()
1047 @HasPermissionAllDecorator('hg.admin')
1046 @HasPermissionAllDecorator('hg.admin')
1048 @CSRFRequired()
1047 @CSRFRequired()
1049 @view_config(
1048 @view_config(
1050 route_name='edit_user_ips_delete', request_method='POST')
1049 route_name='edit_user_ips_delete', request_method='POST')
1051 # NOTE(marcink): this view is allowed for default users, as we can
1050 # NOTE(marcink): this view is allowed for default users, as we can
1052 # edit their IP whitelist
1051 # edit their IP whitelist
1053 def ips_delete(self):
1052 def ips_delete(self):
1054 _ = self.request.translate
1053 _ = self.request.translate
1055 c = self.load_default_context()
1054 c = self.load_default_context()
1056
1055
1057 user_id = self.db_user_id
1056 user_id = self.db_user_id
1058 c.user = self.db_user
1057 c.user = self.db_user
1059
1058
1060 ip_id = self.request.POST.get('del_ip_id')
1059 ip_id = self.request.POST.get('del_ip_id')
1061 user_model = UserModel()
1060 user_model = UserModel()
1062 user_data = c.user.get_api_data()
1061 user_data = c.user.get_api_data()
1063 ip = UserIpMap.query().get(ip_id).ip_addr
1062 ip = UserIpMap.query().get(ip_id).ip_addr
1064 user_model.delete_extra_ip(c.user.user_id, ip_id)
1063 user_model.delete_extra_ip(c.user.user_id, ip_id)
1065 audit_logger.store_web(
1064 audit_logger.store_web(
1066 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1065 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
1067 user=self._rhodecode_user)
1066 user=self._rhodecode_user)
1068 Session().commit()
1067 Session().commit()
1069 h.flash(_("Removed ip address from user whitelist"), category='success')
1068 h.flash(_("Removed ip address from user whitelist"), category='success')
1070
1069
1071 if 'default_user' in self.request.POST:
1070 if 'default_user' in self.request.POST:
1072 # when editing the global IP list we do it for the 'DEFAULT' user
1071 # when editing the global IP list we do it for the 'DEFAULT' user
1073 raise HTTPFound(h.route_path('admin_permissions_ips'))
1072 raise HTTPFound(h.route_path('admin_permissions_ips'))
1074 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1073 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
1075
1074
1076 @LoginRequired()
1075 @LoginRequired()
1077 @HasPermissionAllDecorator('hg.admin')
1076 @HasPermissionAllDecorator('hg.admin')
1078 @view_config(
1077 @view_config(
1079 route_name='edit_user_groups_management', request_method='GET',
1078 route_name='edit_user_groups_management', request_method='GET',
1080 renderer='rhodecode:templates/admin/users/user_edit.mako')
1079 renderer='rhodecode:templates/admin/users/user_edit.mako')
1081 def groups_management(self):
1080 def groups_management(self):
1082 c = self.load_default_context()
1081 c = self.load_default_context()
1083 c.user = self.db_user
1082 c.user = self.db_user
1084 c.data = c.user.group_member
1083 c.data = c.user.group_member
1085
1084
1086 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1085 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
1087 for group in c.user.group_member]
1086 for group in c.user.group_member]
1088 c.groups = json.dumps(groups)
1087 c.groups = json.dumps(groups)
1089 c.active = 'groups'
1088 c.active = 'groups'
1090
1089
1091 return self._get_template_context(c)
1090 return self._get_template_context(c)
1092
1091
1093 @LoginRequired()
1092 @LoginRequired()
1094 @HasPermissionAllDecorator('hg.admin')
1093 @HasPermissionAllDecorator('hg.admin')
1095 @CSRFRequired()
1094 @CSRFRequired()
1096 @view_config(
1095 @view_config(
1097 route_name='edit_user_groups_management_updates', request_method='POST')
1096 route_name='edit_user_groups_management_updates', request_method='POST')
1098 def groups_management_updates(self):
1097 def groups_management_updates(self):
1099 _ = self.request.translate
1098 _ = self.request.translate
1100 c = self.load_default_context()
1099 c = self.load_default_context()
1101
1100
1102 user_id = self.db_user_id
1101 user_id = self.db_user_id
1103 c.user = self.db_user
1102 c.user = self.db_user
1104
1103
1105 user_groups = set(self.request.POST.getall('users_group_id'))
1104 user_groups = set(self.request.POST.getall('users_group_id'))
1106 user_groups_objects = []
1105 user_groups_objects = []
1107
1106
1108 for ugid in user_groups:
1107 for ugid in user_groups:
1109 user_groups_objects.append(
1108 user_groups_objects.append(
1110 UserGroupModel().get_group(safe_int(ugid)))
1109 UserGroupModel().get_group(safe_int(ugid)))
1111 user_group_model = UserGroupModel()
1110 user_group_model = UserGroupModel()
1112 added_to_groups, removed_from_groups = \
1111 added_to_groups, removed_from_groups = \
1113 user_group_model.change_groups(c.user, user_groups_objects)
1112 user_group_model.change_groups(c.user, user_groups_objects)
1114
1113
1115 user_data = c.user.get_api_data()
1114 user_data = c.user.get_api_data()
1116 for user_group_id in added_to_groups:
1115 for user_group_id in added_to_groups:
1117 user_group = UserGroup.get(user_group_id)
1116 user_group = UserGroup.get(user_group_id)
1118 old_values = user_group.get_api_data()
1117 old_values = user_group.get_api_data()
1119 audit_logger.store_web(
1118 audit_logger.store_web(
1120 'user_group.edit.member.add',
1119 'user_group.edit.member.add',
1121 action_data={'user': user_data, 'old_data': old_values},
1120 action_data={'user': user_data, 'old_data': old_values},
1122 user=self._rhodecode_user)
1121 user=self._rhodecode_user)
1123
1122
1124 for user_group_id in removed_from_groups:
1123 for user_group_id in removed_from_groups:
1125 user_group = UserGroup.get(user_group_id)
1124 user_group = UserGroup.get(user_group_id)
1126 old_values = user_group.get_api_data()
1125 old_values = user_group.get_api_data()
1127 audit_logger.store_web(
1126 audit_logger.store_web(
1128 'user_group.edit.member.delete',
1127 'user_group.edit.member.delete',
1129 action_data={'user': user_data, 'old_data': old_values},
1128 action_data={'user': user_data, 'old_data': old_values},
1130 user=self._rhodecode_user)
1129 user=self._rhodecode_user)
1131
1130
1132 Session().commit()
1131 Session().commit()
1133 c.active = 'user_groups_management'
1132 c.active = 'user_groups_management'
1134 h.flash(_("Groups successfully changed"), category='success')
1133 h.flash(_("Groups successfully changed"), category='success')
1135
1134
1136 return HTTPFound(h.route_path(
1135 return HTTPFound(h.route_path(
1137 'edit_user_groups_management', user_id=user_id))
1136 'edit_user_groups_management', user_id=user_id))
1138
1137
1139 @LoginRequired()
1138 @LoginRequired()
1140 @HasPermissionAllDecorator('hg.admin')
1139 @HasPermissionAllDecorator('hg.admin')
1141 @view_config(
1140 @view_config(
1142 route_name='edit_user_audit_logs', request_method='GET',
1141 route_name='edit_user_audit_logs', request_method='GET',
1143 renderer='rhodecode:templates/admin/users/user_edit.mako')
1142 renderer='rhodecode:templates/admin/users/user_edit.mako')
1144 def user_audit_logs(self):
1143 def user_audit_logs(self):
1145 _ = self.request.translate
1144 _ = self.request.translate
1146 c = self.load_default_context()
1145 c = self.load_default_context()
1147 c.user = self.db_user
1146 c.user = self.db_user
1148
1147
1149 c.active = 'audit'
1148 c.active = 'audit'
1150
1149
1151 p = safe_int(self.request.GET.get('page', 1), 1)
1150 p = safe_int(self.request.GET.get('page', 1), 1)
1152
1151
1153 filter_term = self.request.GET.get('filter')
1152 filter_term = self.request.GET.get('filter')
1154 user_log = UserModel().get_user_log(c.user, filter_term)
1153 user_log = UserModel().get_user_log(c.user, filter_term)
1155
1154
1156 def url_generator(**kw):
1155 def url_generator(**kw):
1157 if filter_term:
1156 if filter_term:
1158 kw['filter'] = filter_term
1157 kw['filter'] = filter_term
1159 return self.request.current_route_path(_query=kw)
1158 return self.request.current_route_path(_query=kw)
1160
1159
1161 c.audit_logs = h.Page(
1160 c.audit_logs = h.Page(
1162 user_log, page=p, items_per_page=10, url=url_generator)
1161 user_log, page=p, items_per_page=10, url=url_generator)
1163 c.filter_term = filter_term
1162 c.filter_term = filter_term
1164 return self._get_template_context(c)
1163 return self._get_template_context(c)
1165
1164
1166 @LoginRequired()
1165 @LoginRequired()
1167 @HasPermissionAllDecorator('hg.admin')
1166 @HasPermissionAllDecorator('hg.admin')
1168 @view_config(
1167 @view_config(
1169 route_name='edit_user_perms_summary', request_method='GET',
1168 route_name='edit_user_perms_summary', request_method='GET',
1170 renderer='rhodecode:templates/admin/users/user_edit.mako')
1169 renderer='rhodecode:templates/admin/users/user_edit.mako')
1171 def user_perms_summary(self):
1170 def user_perms_summary(self):
1172 _ = self.request.translate
1171 _ = self.request.translate
1173 c = self.load_default_context()
1172 c = self.load_default_context()
1174 c.user = self.db_user
1173 c.user = self.db_user
1175
1174
1176 c.active = 'perms_summary'
1175 c.active = 'perms_summary'
1177 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1176 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1178
1177
1179 return self._get_template_context(c)
1178 return self._get_template_context(c)
1180
1179
1181 @LoginRequired()
1180 @LoginRequired()
1182 @HasPermissionAllDecorator('hg.admin')
1181 @HasPermissionAllDecorator('hg.admin')
1183 @view_config(
1182 @view_config(
1184 route_name='edit_user_perms_summary_json', request_method='GET',
1183 route_name='edit_user_perms_summary_json', request_method='GET',
1185 renderer='json_ext')
1184 renderer='json_ext')
1186 def user_perms_summary_json(self):
1185 def user_perms_summary_json(self):
1187 self.load_default_context()
1186 self.load_default_context()
1188 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1187 perm_user = self.db_user.AuthUser(ip_addr=self.request.remote_addr)
1189
1188
1190 return perm_user.permissions
1189 return perm_user.permissions
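The updated tests in the diff below drop the manual assert response.status == '302 Found' checks in favour of WebTest's own assertion: the expected code is passed as status=302 to app.post(), and the redirect is then resolved with response.follow(). A minimal, self-contained sketch of that pattern, assuming only the webtest package; the toy WSGI app and its routes are hypothetical stand-ins for the RhodeCode login view, not RhodeCode code:

from webtest import TestApp


def toy_app(environ, start_response):
    # Redirect POST /login to /home; serve every other request as plain text.
    if environ['PATH_INFO'] == '/login' and environ['REQUEST_METHOD'] == 'POST':
        start_response('302 Found', [('Location', '/home')])
        return [b'']
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'welcome home']


app = TestApp(toy_app)
# status=302 makes WebTest assert the redirect status for us,
response = app.post('/login', {'username': 'demo'}, status=302)
# and follow() issues a GET to the Location target of that redirect.
landing = response.follow()
landing.mustcontain('welcome home')

Asserting the status inside app.post() fails immediately with the offending response attached, which makes redirect regressions easier to diagnose than a separate assert on response.status after the fact.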
@@ -1,553 +1,559 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import urlparse
21 import urlparse
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.tests import (
26 from rhodecode.tests import (
27 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
27 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
28 no_newline_id_generator)
28 no_newline_id_generator)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixture import Fixture
30 from rhodecode.lib.auth import check_password
30 from rhodecode.lib.auth import check_password
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.model.auth_token import AuthTokenModel
32 from rhodecode.model.auth_token import AuthTokenModel
33 from rhodecode.model.db import User, Notification, UserApiKeys
33 from rhodecode.model.db import User, Notification, UserApiKeys
34 from rhodecode.model.meta import Session
34 from rhodecode.model.meta import Session
35
35
36 fixture = Fixture()
36 fixture = Fixture()
37
37
38 whitelist_view = ['RepoCommitsView:repo_commit_raw']
38 whitelist_view = ['RepoCommitsView:repo_commit_raw']
39
39
40
40
41 def route_path(name, params=None, **kwargs):
41 def route_path(name, params=None, **kwargs):
42 import urllib
42 import urllib
43 from rhodecode.apps._base import ADMIN_PREFIX
43 from rhodecode.apps._base import ADMIN_PREFIX
44
44
45 base_url = {
45 base_url = {
46 'login': ADMIN_PREFIX + '/login',
46 'login': ADMIN_PREFIX + '/login',
47 'logout': ADMIN_PREFIX + '/logout',
47 'logout': ADMIN_PREFIX + '/logout',
48 'register': ADMIN_PREFIX + '/register',
48 'register': ADMIN_PREFIX + '/register',
49 'reset_password':
49 'reset_password':
50 ADMIN_PREFIX + '/password_reset',
50 ADMIN_PREFIX + '/password_reset',
51 'reset_password_confirmation':
51 'reset_password_confirmation':
52 ADMIN_PREFIX + '/password_reset_confirmation',
52 ADMIN_PREFIX + '/password_reset_confirmation',
53
53
54 'admin_permissions_application':
54 'admin_permissions_application':
55 ADMIN_PREFIX + '/permissions/application',
55 ADMIN_PREFIX + '/permissions/application',
56 'admin_permissions_application_update':
56 'admin_permissions_application_update':
57 ADMIN_PREFIX + '/permissions/application/update',
57 ADMIN_PREFIX + '/permissions/application/update',
58
58
59 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'
59 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'
60
60
61 }[name].format(**kwargs)
61 }[name].format(**kwargs)
62
62
63 if params:
63 if params:
64 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
64 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
65 return base_url
65 return base_url
66
66
67
67
68 @pytest.mark.usefixtures('app')
68 @pytest.mark.usefixtures('app')
69 class TestLoginController(object):
69 class TestLoginController(object):
70 destroy_users = set()
70 destroy_users = set()
71
71
72 @classmethod
72 @classmethod
73 def teardown_class(cls):
73 def teardown_class(cls):
74 fixture.destroy_users(cls.destroy_users)
74 fixture.destroy_users(cls.destroy_users)
75
75
76 def teardown_method(self, method):
76 def teardown_method(self, method):
77 for n in Notification.query().all():
77 for n in Notification.query().all():
78 Session().delete(n)
78 Session().delete(n)
79
79
80 Session().commit()
80 Session().commit()
81 assert Notification.query().all() == []
81 assert Notification.query().all() == []
82
82
83 def test_index(self):
83 def test_index(self):
84 response = self.app.get(route_path('login'))
84 response = self.app.get(route_path('login'))
85 assert response.status == '200 OK'
85 assert response.status == '200 OK'
86 # Test response...
86 # Test response...
87
87
88 def test_login_admin_ok(self):
88 def test_login_admin_ok(self):
89 response = self.app.post(route_path('login'),
89 response = self.app.post(route_path('login'),
90 {'username': 'test_admin',
90 {'username': 'test_admin',
91 'password': 'test12'})
91 'password': 'test12'}, status=302)
92 assert response.status == '302 Found'
92 response = response.follow()
93 session = response.get_session_from_response()
93 session = response.get_session_from_response()
94 username = session['rhodecode_user'].get('username')
94 username = session['rhodecode_user'].get('username')
95 assert username == 'test_admin'
95 assert username == 'test_admin'
96 response = response.follow()
97 response.mustcontain('/%s' % HG_REPO)
96 response.mustcontain('/%s' % HG_REPO)
98
97
99 def test_login_regular_ok(self):
98 def test_login_regular_ok(self):
100 response = self.app.post(route_path('login'),
99 response = self.app.post(route_path('login'),
101 {'username': 'test_regular',
100 {'username': 'test_regular',
102 'password': 'test12'})
101 'password': 'test12'}, status=302)
103
102
104 assert response.status == '302 Found'
103 response = response.follow()
105 session = response.get_session_from_response()
104 session = response.get_session_from_response()
106 username = session['rhodecode_user'].get('username')
105 username = session['rhodecode_user'].get('username')
107 assert username == 'test_regular'
106 assert username == 'test_regular'
108 response = response.follow()
107
109 response.mustcontain('/%s' % HG_REPO)
108 response.mustcontain('/%s' % HG_REPO)
110
109
111 def test_login_ok_came_from(self):
110 def test_login_ok_came_from(self):
112 test_came_from = '/_admin/users?branch=stable'
111 test_came_from = '/_admin/users?branch=stable'
113 _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
112 _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
114 response = self.app.post(
113 response = self.app.post(
115 _url, {'username': 'test_admin', 'password': 'test12'})
114 _url, {'username': 'test_admin', 'password': 'test12'}, status=302)
116 assert response.status == '302 Found'
115
117 assert 'branch=stable' in response.location
116 assert 'branch=stable' in response.location
118 response = response.follow()
117 response = response.follow()
119
118
120 assert response.status == '200 OK'
119 assert response.status == '200 OK'
121 response.mustcontain('Users administration')
120 response.mustcontain('Users administration')
122
121
123 def test_redirect_to_login_with_get_args(self):
122 def test_redirect_to_login_with_get_args(self):
124 with fixture.anon_access(False):
123 with fixture.anon_access(False):
125 kwargs = {'branch': 'stable'}
124 kwargs = {'branch': 'stable'}
126 response = self.app.get(
125 response = self.app.get(
127 h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs))
126 h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs),
128 assert response.status == '302 Found'
127 status=302)
129
128
130 response_query = urlparse.parse_qsl(response.location)
129 response_query = urlparse.parse_qsl(response.location)
131 assert 'branch=stable' in response_query[0][1]
130 assert 'branch=stable' in response_query[0][1]
132
131
133 def test_login_form_with_get_args(self):
132 def test_login_form_with_get_args(self):
134 _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
133 _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
135 response = self.app.get(_url)
134 response = self.app.get(_url)
136 assert 'branch%3Dstable' in response.form.action
135 assert 'branch%3Dstable' in response.form.action
137
136
138 @pytest.mark.parametrize("url_came_from", [
137 @pytest.mark.parametrize("url_came_from", [
139 'data:text/html,<script>window.alert("xss")</script>',
138 'data:text/html,<script>window.alert("xss")</script>',
140 'mailto:test@rhodecode.org',
139 'mailto:test@rhodecode.org',
141 'file:///etc/passwd',
140 'file:///etc/passwd',
142 'ftp://some.ftp.server',
141 'ftp://some.ftp.server',
143 'http://other.domain',
142 'http://other.domain',
144 '/\r\nX-Forwarded-Host: http://example.org',
143 '/\r\nX-Forwarded-Host: http://example.org',
145 ], ids=no_newline_id_generator)
144 ], ids=no_newline_id_generator)
146 def test_login_bad_came_froms(self, url_came_from):
145 def test_login_bad_came_froms(self, url_came_from):
147 _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
146 _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
148 response = self.app.post(
147 response = self.app.post(
149 _url,
148 _url,
150 {'username': 'test_admin', 'password': 'test12'})
149 {'username': 'test_admin', 'password': 'test12'})
151 assert response.status == '302 Found'
150 assert response.status == '302 Found'
152 response = response.follow()
151 response = response.follow()
153 assert response.status == '200 OK'
152 assert response.status == '200 OK'
154 assert response.request.path == '/'
153 assert response.request.path == '/'
155
154
156 def test_login_short_password(self):
155 def test_login_short_password(self):
157 response = self.app.post(route_path('login'),
156 response = self.app.post(route_path('login'),
158 {'username': 'test_admin',
157 {'username': 'test_admin',
159 'password': 'as'})
158 'password': 'as'})
160 assert response.status == '200 OK'
159 assert response.status == '200 OK'
161
160
162 response.mustcontain('Enter 3 characters or more')
161 response.mustcontain('Enter 3 characters or more')
163
162
164 def test_login_wrong_non_ascii_password(self, user_regular):
163 def test_login_wrong_non_ascii_password(self, user_regular):
165 response = self.app.post(
164 response = self.app.post(
166 route_path('login'),
165 route_path('login'),
167 {'username': user_regular.username,
166 {'username': user_regular.username,
168 'password': u'invalid-non-asci\xe4'.encode('utf8')})
167 'password': u'invalid-non-asci\xe4'.encode('utf8')})
169
168
170 response.mustcontain('invalid user name')
169 response.mustcontain('invalid user name')
171 response.mustcontain('invalid password')
170 response.mustcontain('invalid password')
172
171
173 def test_login_with_non_ascii_password(self, user_util):
172 def test_login_with_non_ascii_password(self, user_util):
174 password = u'valid-non-ascii\xe4'
173 password = u'valid-non-ascii\xe4'
175 user = user_util.create_user(password=password)
174 user = user_util.create_user(password=password)
176 response = self.app.post(
175 response = self.app.post(
177 route_path('login'),
176 route_path('login'),
178 {'username': user.username,
177 {'username': user.username,
179 'password': password.encode('utf-8')})
178 'password': password.encode('utf-8')})
180 assert response.status_code == 302
179 assert response.status_code == 302
181
180
182 def test_login_wrong_username_password(self):
181 def test_login_wrong_username_password(self):
183 response = self.app.post(route_path('login'),
182 response = self.app.post(route_path('login'),
184 {'username': 'error',
183 {'username': 'error',
185 'password': 'test12'})
184 'password': 'test12'})
186
185
187 response.mustcontain('invalid user name')
186 response.mustcontain('invalid user name')
188 response.mustcontain('invalid password')
187 response.mustcontain('invalid password')
189
188
190 def test_login_admin_ok_password_migration(self, real_crypto_backend):
189 def test_login_admin_ok_password_migration(self, real_crypto_backend):
191 from rhodecode.lib import auth
190 from rhodecode.lib import auth
192
191
193 # create new user, with sha256 password
192 # create new user, with sha256 password
194 temp_user = 'test_admin_sha256'
193 temp_user = 'test_admin_sha256'
195 user = fixture.create_user(temp_user)
194 user = fixture.create_user(temp_user)
196 user.password = auth._RhodeCodeCryptoSha256().hash_create(
195 user.password = auth._RhodeCodeCryptoSha256().hash_create(
197 b'test123')
196 b'test123')
198 Session().add(user)
197 Session().add(user)
199 Session().commit()
198 Session().commit()
200 self.destroy_users.add(temp_user)
199 self.destroy_users.add(temp_user)
201 response = self.app.post(route_path('login'),
200 response = self.app.post(route_path('login'),
202 {'username': temp_user,
201 {'username': temp_user,
203 'password': 'test123'})
202 'password': 'test123'}, status=302)
204
203
205 assert response.status == '302 Found'
204 response = response.follow()
206 session = response.get_session_from_response()
205 session = response.get_session_from_response()
207 username = session['rhodecode_user'].get('username')
206 username = session['rhodecode_user'].get('username')
208 assert username == temp_user
207 assert username == temp_user
209 response = response.follow()
210 response.mustcontain('/%s' % HG_REPO)
208 response.mustcontain('/%s' % HG_REPO)
211
209
212 # after log-in the password should have been migrated to a bcrypt hash
210 # after log-in the password should have been migrated to a bcrypt hash
213 user = User.get_by_username(temp_user)
211 user = User.get_by_username(temp_user)
214 assert user.password.startswith('$')
212 assert user.password.startswith('$')
215
213
216 # REGISTRATIONS
214 # REGISTRATIONS
217 def test_register(self):
215 def test_register(self):
218 response = self.app.get(route_path('register'))
216 response = self.app.get(route_path('register'))
219 response.mustcontain('Create an Account')
217 response.mustcontain('Create an Account')
220
218
221 def test_register_err_same_username(self):
219 def test_register_err_same_username(self):
222 uname = 'test_admin'
220 uname = 'test_admin'
223 response = self.app.post(
221 response = self.app.post(
224 route_path('register'),
222 route_path('register'),
225 {
223 {
226 'username': uname,
224 'username': uname,
227 'password': 'test12',
225 'password': 'test12',
228 'password_confirmation': 'test12',
226 'password_confirmation': 'test12',
229 'email': 'goodmail@domain.com',
227 'email': 'goodmail@domain.com',
230 'firstname': 'test',
228 'firstname': 'test',
231 'lastname': 'test'
229 'lastname': 'test'
232 }
230 }
233 )
231 )
234
232
235 assertr = response.assert_response()
233 assertr = response.assert_response()
236 msg = '???'
234 msg = 'Username "%(username)s" already exists'
237 msg = msg % {'username': uname}
235 msg = msg % {'username': uname}
238 assertr.element_contains('#username+.error-message', msg)
236 assertr.element_contains('#username+.error-message', msg)
239
237
240 def test_register_err_same_email(self):
238 def test_register_err_same_email(self):
241 response = self.app.post(
239 response = self.app.post(
242 route_path('register'),
240 route_path('register'),
243 {
241 {
244 'username': 'test_admin_0',
242 'username': 'test_admin_0',
245 'password': 'test12',
243 'password': 'test12',
246 'password_confirmation': 'test12',
244 'password_confirmation': 'test12',
247 'email': 'test_admin@mail.com',
245 'email': 'test_admin@mail.com',
248 'firstname': 'test',
246 'firstname': 'test',
249 'lastname': 'test'
247 'lastname': 'test'
250 }
248 }
251 )
249 )
252
250
253 assertr = response.assert_response()
251 assertr = response.assert_response()
254 msg = '???'
252 msg = u'This e-mail address is already taken'
255 assertr.element_contains('#email+.error-message', msg)
253 assertr.element_contains('#email+.error-message', msg)
256
254
257 def test_register_err_same_email_case_sensitive(self):
255 def test_register_err_same_email_case_sensitive(self):
258 response = self.app.post(
256 response = self.app.post(
259 route_path('register'),
257 route_path('register'),
260 {
258 {
261 'username': 'test_admin_1',
259 'username': 'test_admin_1',
262 'password': 'test12',
260 'password': 'test12',
263 'password_confirmation': 'test12',
261 'password_confirmation': 'test12',
264 'email': 'TesT_Admin@mail.COM',
262 'email': 'TesT_Admin@mail.COM',
265 'firstname': 'test',
263 'firstname': 'test',
266 'lastname': 'test'
264 'lastname': 'test'
267 }
265 }
268 )
266 )
269 assertr = response.assert_response()
267 assertr = response.assert_response()
270 msg = '???'
268 msg = u'This e-mail address is already taken'
271 assertr.element_contains('#email+.error-message', msg)
269 assertr.element_contains('#email+.error-message', msg)
272
270
273 def test_register_err_wrong_data(self):
271 def test_register_err_wrong_data(self):
274 response = self.app.post(
272 response = self.app.post(
275 route_path('register'),
273 route_path('register'),
276 {
274 {
277 'username': 'xs',
275 'username': 'xs',
278 'password': 'test',
276 'password': 'test',
279 'password_confirmation': 'test',
277 'password_confirmation': 'test',
280 'email': 'goodmailm',
278 'email': 'goodmailm',
281 'firstname': 'test',
279 'firstname': 'test',
282 'lastname': 'test'
280 'lastname': 'test'
283 }
281 }
284 )
282 )
285 assert response.status == '200 OK'
283 assert response.status == '200 OK'
286 response.mustcontain('An email address must contain a single @')
284 response.mustcontain('An email address must contain a single @')
287 response.mustcontain('Enter a value 6 characters long or more')
285 response.mustcontain('Enter a value 6 characters long or more')
288
286
289 def test_register_err_username(self):
287 def test_register_err_username(self):
290 response = self.app.post(
288 response = self.app.post(
291 route_path('register'),
289 route_path('register'),
292 {
290 {
293 'username': 'error user',
291 'username': 'error user',
294 'password': 'test12',
292 'password': 'test12',
295 'password_confirmation': 'test12',
293 'password_confirmation': 'test12',
296 'email': 'goodmailm',
294 'email': 'goodmailm',
297 'firstname': 'test',
295 'firstname': 'test',
298 'lastname': 'test'
296 'lastname': 'test'
299 }
297 }
300 )
298 )
301
299
302 response.mustcontain('An email address must contain a single @')
300 response.mustcontain('An email address must contain a single @')
303 response.mustcontain(
301 response.mustcontain(
304 'Username may only contain '
302 'Username may only contain '
305 'alphanumeric characters underscores, '
303 'alphanumeric characters underscores, '
306 'periods or dashes and must begin with '
304 'periods or dashes and must begin with '
307 'alphanumeric character')
305 'alphanumeric character')
308
306
309 def test_register_err_case_sensitive(self):
307 def test_register_err_case_sensitive(self):
310 usr = 'Test_Admin'
308 usr = 'Test_Admin'
311 response = self.app.post(
309 response = self.app.post(
312 route_path('register'),
310 route_path('register'),
313 {
311 {
314 'username': usr,
312 'username': usr,
315 'password': 'test12',
313 'password': 'test12',
316 'password_confirmation': 'test12',
314 'password_confirmation': 'test12',
317 'email': 'goodmailm',
315 'email': 'goodmailm',
318 'firstname': 'test',
316 'firstname': 'test',
319 'lastname': 'test'
317 'lastname': 'test'
320 }
318 }
321 )
319 )
322
320
323 assertr = response.assert_response()
321 assertr = response.assert_response()
324 msg = '???'
322 msg = u'Username "%(username)s" already exists'
325 msg = msg % {'username': usr}
323 msg = msg % {'username': usr}
326 assertr.element_contains('#username+.error-message', msg)
324 assertr.element_contains('#username+.error-message', msg)
327
325
328 def test_register_special_chars(self):
326 def test_register_special_chars(self):
329 response = self.app.post(
327 response = self.app.post(
330 route_path('register'),
328 route_path('register'),
331 {
329 {
332 'username': 'xxxaxn',
330 'username': 'xxxaxn',
333 'password': 'ąćźżąśśśś',
331 'password': 'ąćźżąśśśś',
334 'password_confirmation': 'ąćźżąśśśś',
332 'password_confirmation': 'ąćźżąśśśś',
335 'email': 'goodmailm@test.plx',
333 'email': 'goodmailm@test.plx',
336 'firstname': 'test',
334 'firstname': 'test',
337 'lastname': 'test'
335 'lastname': 'test'
338 }
336 }
339 )
337 )
340
338
341 msg = '???'
339 msg = u'Invalid characters (non-ascii) in password'
342 response.mustcontain(msg)
340 response.mustcontain(msg)
343
341
344 def test_register_password_mismatch(self):
342 def test_register_password_mismatch(self):
345 response = self.app.post(
343 response = self.app.post(
346 route_path('register'),
344 route_path('register'),
347 {
345 {
348 'username': 'xs',
346 'username': 'xs',
349 'password': '123qwe',
347 'password': '123qwe',
350 'password_confirmation': 'qwe123',
348 'password_confirmation': 'qwe123',
351 'email': 'goodmailm@test.plxa',
349 'email': 'goodmailm@test.plxa',
352 'firstname': 'test',
350 'firstname': 'test',
353 'lastname': 'test'
351 'lastname': 'test'
354 }
352 }
355 )
353 )
356 msg = '???'
354 msg = u'Passwords do not match'
357 response.mustcontain(msg)
355 response.mustcontain(msg)
358
356
359 def test_register_ok(self):
357 def test_register_ok(self):
360 username = 'test_regular4'
358 username = 'test_regular4'
361 password = 'qweqwe'
359 password = 'qweqwe'
362 email = 'marcin@test.com'
360 email = 'marcin@test.com'
363 name = 'testname'
361 name = 'testname'
364 lastname = 'testlastname'
362 lastname = 'testlastname'
365
363
364 # this initializes a session
365 response = self.app.get(route_path('register'))
366 response.mustcontain('Create an Account')
367
368
366 response = self.app.post(
369 response = self.app.post(
367 route_path('register'),
370 route_path('register'),
368 {
371 {
369 'username': username,
372 'username': username,
370 'password': password,
373 'password': password,
371 'password_confirmation': password,
374 'password_confirmation': password,
372 'email': email,
375 'email': email,
373 'firstname': name,
376 'firstname': name,
374 'lastname': lastname,
377 'lastname': lastname,
375 'admin': True
378 'admin': True
376 }
379 },
377 ) # This should be overriden
380 status=302
378 assert response.status == '302 Found'
381 ) # This should be overridden
382
379 assert_session_flash(
383 assert_session_flash(
380 response, 'You have successfully registered with RhodeCode')
384 response, 'You have successfully registered with RhodeCode')
381
385
382 ret = Session().query(User).filter(
386 ret = Session().query(User).filter(
383 User.username == 'test_regular4').one()
387 User.username == 'test_regular4').one()
384 assert ret.username == username
388 assert ret.username == username
385 assert check_password(password, ret.password)
389 assert check_password(password, ret.password)
386 assert ret.email == email
390 assert ret.email == email
387 assert ret.name == name
391 assert ret.name == name
388 assert ret.lastname == lastname
392 assert ret.lastname == lastname
389 assert ret.auth_tokens is not None
393 assert ret.auth_tokens is not None
390 assert not ret.admin
394 assert not ret.admin
391
395
392 def test_forgot_password_wrong_mail(self):
396 def test_forgot_password_wrong_mail(self):
393 bad_email = 'marcin@wrongmail.org'
397 bad_email = 'marcin@wrongmail.org'
398 # this initializes a session
399 self.app.get(route_path('reset_password'))
400
394 response = self.app.post(
401 response = self.app.post(
395 route_path('reset_password'), {'email': bad_email, }
402 route_path('reset_password'), {'email': bad_email, }
396 )
403 )
397 assert_session_flash(response,
404 assert_session_flash(response,
398 'If such email exists, a password reset link was sent to it.')
405 'If such email exists, a password reset link was sent to it.')
399
406
400 def test_forgot_password(self, user_util):
407 def test_forgot_password(self, user_util):
401 response = self.app.get(route_path('reset_password'))
408 # this initializes a session
402 assert response.status == '200 OK'
409 self.app.get(route_path('reset_password'))
403
410
404 user = user_util.create_user()
411 user = user_util.create_user()
405 user_id = user.user_id
412 user_id = user.user_id
406 email = user.email
413 email = user.email
407
414
408 response = self.app.post(route_path('reset_password'), {'email': email, })
415 response = self.app.post(route_path('reset_password'), {'email': email, })
409
416
410 assert_session_flash(response,
417 assert_session_flash(response,
411 'If such email exists, a password reset link was sent to it.')
418 'If such email exists, a password reset link was sent to it.')
412
419
413 # BAD KEY
420 # BAD KEY
414 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
421 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
415 response = self.app.get(confirm_url)
422 response = self.app.get(confirm_url, status=302)
416 assert response.status == '302 Found'
417 assert response.location.endswith(route_path('reset_password'))
423 assert response.location.endswith(route_path('reset_password'))
418 assert_session_flash(response, 'Given reset token is invalid')
424 assert_session_flash(response, 'Given reset token is invalid')
419
425
420 response.follow() # cleanup flash
426 response.follow() # cleanup flash
421
427
422 # GOOD KEY
428 # GOOD KEY
423 key = UserApiKeys.query()\
429 key = UserApiKeys.query()\
424 .filter(UserApiKeys.user_id == user_id)\
430 .filter(UserApiKeys.user_id == user_id)\
425 .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
431 .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
426 .first()
432 .first()
427
433
428 assert key
434 assert key
429
435
430 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
436 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
431 response = self.app.get(confirm_url)
437 response = self.app.get(confirm_url)
432 assert response.status == '302 Found'
438 assert response.status == '302 Found'
433 assert response.location.endswith(route_path('login'))
439 assert response.location.endswith(route_path('login'))
434
440
435 assert_session_flash(
441 assert_session_flash(
436 response,
442 response,
437 'Your password reset was successful, '
443 'Your password reset was successful, '
438 'a new password has been sent to your email')
444 'a new password has been sent to your email')
439
445
440 response.follow()
446 response.follow()
441
447
442 def _get_api_whitelist(self, values=None):
448 def _get_api_whitelist(self, values=None):
443 config = {'api_access_controllers_whitelist': values or []}
449 config = {'api_access_controllers_whitelist': values or []}
444 return config
450 return config
445
451
446 @pytest.mark.parametrize("test_name, auth_token", [
452 @pytest.mark.parametrize("test_name, auth_token", [
447 ('none', None),
453 ('none', None),
448 ('empty_string', ''),
454 ('empty_string', ''),
449 ('fake_number', '123456'),
455 ('fake_number', '123456'),
450 ('proper_auth_token', None)
456 ('proper_auth_token', None)
451 ])
457 ])
452 def test_access_not_whitelisted_page_via_auth_token(
458 def test_access_not_whitelisted_page_via_auth_token(
453 self, test_name, auth_token, user_admin):
459 self, test_name, auth_token, user_admin):
454
460
455 whitelist = self._get_api_whitelist([])
461 whitelist = self._get_api_whitelist([])
456 with mock.patch.dict('rhodecode.CONFIG', whitelist):
462 with mock.patch.dict('rhodecode.CONFIG', whitelist):
457 assert [] == whitelist['api_access_controllers_whitelist']
463 assert [] == whitelist['api_access_controllers_whitelist']
458 if test_name == 'proper_auth_token':
464 if test_name == 'proper_auth_token':
459 # use builtin if api_key is None
465 # use builtin if api_key is None
460 auth_token = user_admin.api_key
466 auth_token = user_admin.api_key
461
467
462 with fixture.anon_access(False):
468 with fixture.anon_access(False):
463 self.app.get(
469 self.app.get(
464 route_path('repo_commit_raw',
470 route_path('repo_commit_raw',
465 repo_name=HG_REPO, commit_id='tip',
471 repo_name=HG_REPO, commit_id='tip',
466 params=dict(api_key=auth_token)),
472 params=dict(api_key=auth_token)),
467 status=302)
473 status=302)
468
474
469 @pytest.mark.parametrize("test_name, auth_token, code", [
475 @pytest.mark.parametrize("test_name, auth_token, code", [
470 ('none', None, 302),
476 ('none', None, 302),
471 ('empty_string', '', 302),
477 ('empty_string', '', 302),
472 ('fake_number', '123456', 302),
478 ('fake_number', '123456', 302),
473 ('proper_auth_token', None, 200)
479 ('proper_auth_token', None, 200)
474 ])
480 ])
475 def test_access_whitelisted_page_via_auth_token(
481 def test_access_whitelisted_page_via_auth_token(
476 self, test_name, auth_token, code, user_admin):
482 self, test_name, auth_token, code, user_admin):
477
483
478 whitelist = self._get_api_whitelist(whitelist_view)
484 whitelist = self._get_api_whitelist(whitelist_view)
479
485
480 with mock.patch.dict('rhodecode.CONFIG', whitelist):
486 with mock.patch.dict('rhodecode.CONFIG', whitelist):
481 assert whitelist_view == whitelist['api_access_controllers_whitelist']
487 assert whitelist_view == whitelist['api_access_controllers_whitelist']
482
488
483 if test_name == 'proper_auth_token':
489 if test_name == 'proper_auth_token':
484 auth_token = user_admin.api_key
490 auth_token = user_admin.api_key
485 assert auth_token
491 assert auth_token
486
492
487 with fixture.anon_access(False):
493 with fixture.anon_access(False):
488 self.app.get(
494 self.app.get(
489 route_path('repo_commit_raw',
495 route_path('repo_commit_raw',
490 repo_name=HG_REPO, commit_id='tip',
496 repo_name=HG_REPO, commit_id='tip',
491 params=dict(api_key=auth_token)),
497 params=dict(api_key=auth_token)),
492 status=code)
498 status=code)
493
499
494 @pytest.mark.parametrize("test_name, auth_token, code", [
500 @pytest.mark.parametrize("test_name, auth_token, code", [
495 ('proper_auth_token', None, 200),
501 ('proper_auth_token', None, 200),
496 ('wrong_auth_token', '123456', 302),
502 ('wrong_auth_token', '123456', 302),
497 ])
503 ])
498 def test_access_whitelisted_page_via_auth_token_bound_to_token(
504 def test_access_whitelisted_page_via_auth_token_bound_to_token(
499 self, test_name, auth_token, code, user_admin):
505 self, test_name, auth_token, code, user_admin):
500
506
501 expected_token = auth_token
507 expected_token = auth_token
502 if test_name == 'proper_auth_token':
508 if test_name == 'proper_auth_token':
503 auth_token = user_admin.api_key
509 auth_token = user_admin.api_key
504 expected_token = auth_token
510 expected_token = auth_token
505 assert auth_token
511 assert auth_token
506
512
507 whitelist = self._get_api_whitelist([
513 whitelist = self._get_api_whitelist([
508 'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)])
514 'RepoCommitsView:repo_commit_raw@{}'.format(expected_token)])
509
515
510 with mock.patch.dict('rhodecode.CONFIG', whitelist):
516 with mock.patch.dict('rhodecode.CONFIG', whitelist):
511
517
512 with fixture.anon_access(False):
518 with fixture.anon_access(False):
513 self.app.get(
519 self.app.get(
514 route_path('repo_commit_raw',
520 route_path('repo_commit_raw',
515 repo_name=HG_REPO, commit_id='tip',
521 repo_name=HG_REPO, commit_id='tip',
516 params=dict(api_key=auth_token)),
522 params=dict(api_key=auth_token)),
517 status=code)
523 status=code)
518
524
519 def test_access_page_via_extra_auth_token(self):
525 def test_access_page_via_extra_auth_token(self):
520 whitelist = self._get_api_whitelist(whitelist_view)
526 whitelist = self._get_api_whitelist(whitelist_view)
521 with mock.patch.dict('rhodecode.CONFIG', whitelist):
527 with mock.patch.dict('rhodecode.CONFIG', whitelist):
522 assert whitelist_view == \
528 assert whitelist_view == \
523 whitelist['api_access_controllers_whitelist']
529 whitelist['api_access_controllers_whitelist']
524
530
525 new_auth_token = AuthTokenModel().create(
531 new_auth_token = AuthTokenModel().create(
526 TEST_USER_ADMIN_LOGIN, 'test')
532 TEST_USER_ADMIN_LOGIN, 'test')
527 Session().commit()
533 Session().commit()
528 with fixture.anon_access(False):
534 with fixture.anon_access(False):
529 self.app.get(
535 self.app.get(
530 route_path('repo_commit_raw',
536 route_path('repo_commit_raw',
531 repo_name=HG_REPO, commit_id='tip',
537 repo_name=HG_REPO, commit_id='tip',
532 params=dict(api_key=new_auth_token.api_key)),
538 params=dict(api_key=new_auth_token.api_key)),
533 status=200)
539 status=200)
534
540
535 def test_access_page_via_expired_auth_token(self):
541 def test_access_page_via_expired_auth_token(self):
536 whitelist = self._get_api_whitelist(whitelist_view)
542 whitelist = self._get_api_whitelist(whitelist_view)
537 with mock.patch.dict('rhodecode.CONFIG', whitelist):
543 with mock.patch.dict('rhodecode.CONFIG', whitelist):
538 assert whitelist_view == \
544 assert whitelist_view == \
539 whitelist['api_access_controllers_whitelist']
545 whitelist['api_access_controllers_whitelist']
540
546
541 new_auth_token = AuthTokenModel().create(
547 new_auth_token = AuthTokenModel().create(
542 TEST_USER_ADMIN_LOGIN, 'test')
548 TEST_USER_ADMIN_LOGIN, 'test')
543 Session().commit()
549 Session().commit()
544 # patch the api key and make it expired
550 # patch the api key and make it expired
545 new_auth_token.expires = 0
551 new_auth_token.expires = 0
546 Session().add(new_auth_token)
552 Session().add(new_auth_token)
547 Session().commit()
553 Session().commit()
548 with fixture.anon_access(False):
554 with fixture.anon_access(False):
549 self.app.get(
555 self.app.get(
550 route_path('repo_commit_raw',
556 route_path('repo_commit_raw',
551 repo_name=HG_REPO, commit_id='tip',
557 repo_name=HG_REPO, commit_id='tip',
552 params=dict(api_key=new_auth_token.api_key)),
558 params=dict(api_key=new_auth_token.api_key)),
553 status=302)
559 status=302)
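
A note on the test changes above: they move the redirect checks into WebTest itself by passing status=302 to app.get()/app.post() and then walking the redirect with response.follow(), instead of asserting response.status afterwards. The sketch below only illustrates that WebTest idiom and is not code from this changeset; toy_app and its routes are invented, and it assumes the webtest package is installed.

from webtest import TestApp


def toy_app(environ, start_response):
    # Invented WSGI app: '/login' answers with a redirect, anything else with text.
    if environ['PATH_INFO'] == '/login':
        start_response('302 Found', [('Location', '/'),
                                     ('Content-Type', 'text/plain')])
        return [b'']
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'home']


app = TestApp(toy_app)

# Passing status=302 makes WebTest fail the request itself if the response
# is not a redirect, which replaces a separate assert on response.status.
response = app.get('/login', status=302)
assert response.follow().body == b'home'

Folding the status check into the request keeps each test a step shorter and, on a mismatch, WebTest reports the unexpected status together with the response body.
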
@@ -1,133 +1,133 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.apps._base import ADMIN_PREFIX
25 from rhodecode.apps._base import ADMIN_PREFIX
26 from rhodecode.apps.login.views import LoginView, CaptchaData
26 from rhodecode.apps.login.views import LoginView, CaptchaData
27 from rhodecode.model.settings import SettingsModel
27 from rhodecode.model.settings import SettingsModel
28 from rhodecode.lib.utils2 import AttributeDict
28 from rhodecode.lib.utils2 import AttributeDict
29 from rhodecode.tests.utils import AssertResponse
29 from rhodecode.tests.utils import AssertResponse
30
30
31
31
32 class RhodeCodeSetting(object):
32 class RhodeCodeSetting(object):
33 def __init__(self, name, value):
33 def __init__(self, name, value):
34 self.name = name
34 self.name = name
35 self.value = value
35 self.value = value
36
36
37 def __enter__(self):
37 def __enter__(self):
38 from rhodecode.model.settings import SettingsModel
38 from rhodecode.model.settings import SettingsModel
39 model = SettingsModel()
39 model = SettingsModel()
40 self.old_setting = model.get_setting_by_name(self.name)
40 self.old_setting = model.get_setting_by_name(self.name)
41 model.create_or_update_setting(name=self.name, val=self.value)
41 model.create_or_update_setting(name=self.name, val=self.value)
42 return self
42 return self
43
43
44 def __exit__(self, exc_type, exc_val, exc_tb):
44 def __exit__(self, exc_type, exc_val, exc_tb):
45 model = SettingsModel()
45 model = SettingsModel()
46 if self.old_setting:
46 if self.old_setting:
47 model.create_or_update_setting(
47 model.create_or_update_setting(
48 name=self.name, val=self.old_setting.app_settings_value)
48 name=self.name, val=self.old_setting.app_settings_value)
49 else:
49 else:
50 model.create_or_update_setting(name=self.name)
50 model.create_or_update_setting(name=self.name)
51
51
52
52
53 class TestRegisterCaptcha(object):
53 class TestRegisterCaptcha(object):
54
54
55 @pytest.mark.parametrize('private_key, public_key, expected', [
55 @pytest.mark.parametrize('private_key, public_key, expected', [
56 ('', '', CaptchaData(False, '', '')),
56 ('', '', CaptchaData(False, '', '')),
57 ('', 'pubkey', CaptchaData(False, '', 'pubkey')),
57 ('', 'pubkey', CaptchaData(False, '', 'pubkey')),
58 ('privkey', '', CaptchaData(True, 'privkey', '')),
58 ('privkey', '', CaptchaData(True, 'privkey', '')),
59 ('privkey', 'pubkey', CaptchaData(True, 'privkey', 'pubkey')),
59 ('privkey', 'pubkey', CaptchaData(True, 'privkey', 'pubkey')),
60 ])
60 ])
61 def test_get_captcha_data(self, private_key, public_key, expected, db,
61 def test_get_captcha_data(self, private_key, public_key, expected,
62 request_stub, user_util):
62 request_stub, user_util):
63 request_stub.user = user_util.create_user().AuthUser()
63 request_stub.user = user_util.create_user().AuthUser()
64 request_stub.matched_route = AttributeDict({'name': 'login'})
64 request_stub.matched_route = AttributeDict({'name': 'login'})
65 login_view = LoginView(mock.Mock(), request_stub)
65 login_view = LoginView(mock.Mock(), request_stub)
66
66
67 with RhodeCodeSetting('captcha_private_key', private_key):
67 with RhodeCodeSetting('captcha_private_key', private_key):
68 with RhodeCodeSetting('captcha_public_key', public_key):
68 with RhodeCodeSetting('captcha_public_key', public_key):
69 captcha = login_view._get_captcha_data()
69 captcha = login_view._get_captcha_data()
70 assert captcha == expected
70 assert captcha == expected
71
71
72 @pytest.mark.parametrize('active', [False, True])
72 @pytest.mark.parametrize('active', [False, True])
73 @mock.patch.object(LoginView, '_get_captcha_data')
73 @mock.patch.object(LoginView, '_get_captcha_data')
74 def test_private_key_does_not_leak_to_html(
74 def test_private_key_does_not_leak_to_html(
75 self, m_get_captcha_data, active, app):
75 self, m_get_captcha_data, active, app):
76 captcha = CaptchaData(
76 captcha = CaptchaData(
77 active=active, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
77 active=active, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
78 m_get_captcha_data.return_value = captcha
78 m_get_captcha_data.return_value = captcha
79
79
80 response = app.get(ADMIN_PREFIX + '/register')
80 response = app.get(ADMIN_PREFIX + '/register')
81 assert 'PRIVATE_KEY' not in response
81 assert 'PRIVATE_KEY' not in response
82
82
83 @pytest.mark.parametrize('active', [False, True])
83 @pytest.mark.parametrize('active', [False, True])
84 @mock.patch.object(LoginView, '_get_captcha_data')
84 @mock.patch.object(LoginView, '_get_captcha_data')
85 def test_register_view_renders_captcha(
85 def test_register_view_renders_captcha(
86 self, m_get_captcha_data, active, app):
86 self, m_get_captcha_data, active, app):
87 captcha = CaptchaData(
87 captcha = CaptchaData(
88 active=active, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
88 active=active, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
89 m_get_captcha_data.return_value = captcha
89 m_get_captcha_data.return_value = captcha
90
90
91 response = app.get(ADMIN_PREFIX + '/register')
91 response = app.get(ADMIN_PREFIX + '/register')
92
92
93 assertr = AssertResponse(response)
93 assertr = AssertResponse(response)
94 if active:
94 if active:
95 assertr.one_element_exists('#recaptcha_field')
95 assertr.one_element_exists('#recaptcha_field')
96 else:
96 else:
97 assertr.no_element_exists('#recaptcha_field')
97 assertr.no_element_exists('#recaptcha_field')
98
98
99 @pytest.mark.parametrize('valid', [False, True])
99 @pytest.mark.parametrize('valid', [False, True])
100 @mock.patch('rhodecode.apps.login.views.submit')
100 @mock.patch('rhodecode.apps.login.views.submit')
101 @mock.patch.object(LoginView, '_get_captcha_data')
101 @mock.patch.object(LoginView, '_get_captcha_data')
102 def test_register_with_active_captcha(
102 def test_register_with_active_captcha(
103 self, m_get_captcha_data, m_submit, valid, app, csrf_token):
103 self, m_get_captcha_data, m_submit, valid, app, csrf_token):
104 captcha = CaptchaData(
104 captcha = CaptchaData(
105 active=True, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
105 active=True, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
106 m_get_captcha_data.return_value = captcha
106 m_get_captcha_data.return_value = captcha
107 m_response = mock.Mock()
107 m_response = mock.Mock()
108 m_response.is_valid = valid
108 m_response.is_valid = valid
109 m_submit.return_value = m_response
109 m_submit.return_value = m_response
110
110
111 params = {
111 params = {
112 'csrf_token': csrf_token,
112 'csrf_token': csrf_token,
113 'email': 'pytest@example.com',
113 'email': 'pytest@example.com',
114 'firstname': 'pytest-firstname',
114 'firstname': 'pytest-firstname',
115 'lastname': 'pytest-lastname',
115 'lastname': 'pytest-lastname',
116 'password': 'secret',
116 'password': 'secret',
117 'password_confirmation': 'secret',
117 'password_confirmation': 'secret',
118 'username': 'pytest',
118 'username': 'pytest',
119 }
119 }
120 response = app.post(ADMIN_PREFIX + '/register', params=params)
120 response = app.post(ADMIN_PREFIX + '/register', params=params)
121
121
122 if valid:
122 if valid:
123 # If we provided a valid captcha input we expect a successful
123 # If we provided a valid captcha input we expect a successful
124 # registration and redirect to the login page.
124 # registration and redirect to the login page.
125 assert response.status_int == 302
125 assert response.status_int == 302
126 assert 'location' in response.headers
126 assert 'location' in response.headers
127 assert ADMIN_PREFIX + '/login' in response.headers['location']
127 assert ADMIN_PREFIX + '/login' in response.headers['location']
128 else:
128 else:
129 # If captcha input is invalid we expect to stay on the registration
129 # If captcha input is invalid we expect to stay on the registration
130 # page with an error message displayed.
130 # page with an error message displayed.
131 assertr = AssertResponse(response)
131 assertr = AssertResponse(response)
132 assert response.status_int == 200
132 assert response.status_int == 200
133 assertr.one_element_exists('#recaptcha_field ~ span.error-message')
133 assertr.one_element_exists('#recaptcha_field ~ span.error-message')
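
The captcha tests above rely on mock.patch.object to swap out LoginView._get_captcha_data so no settings lookup happens, and on the CaptchaData namedtuple to carry the keys. The stand-alone sketch below shows the same patching pattern; FakeView and render_context are invented stand-ins rather than RhodeCode classes, and only CaptchaData mirrors the namedtuple defined in the view module.

import collections

import mock

# Mirrors the CaptchaData namedtuple used by the login view.
CaptchaData = collections.namedtuple(
    'CaptchaData', 'active, private_key, public_key')


class FakeView(object):
    # Invented stand-in for a view that would normally read the settings DB.
    def _get_captcha_data(self):
        raise RuntimeError('would hit the settings database')

    def render_context(self):
        captcha = self._get_captcha_data()
        # Only the public key may reach the template context.
        return {'captcha_active': captcha.active,
                'captcha_public_key': captcha.public_key}


with mock.patch.object(FakeView, '_get_captcha_data') as m_get_captcha_data:
    m_get_captcha_data.return_value = CaptchaData(
        active=True, private_key='PRIVATE_KEY', public_key='PUBLIC_KEY')
    context = FakeView().render_context()

assert context == {'captcha_active': True, 'captcha_public_key': 'PUBLIC_KEY'}
assert 'PRIVATE_KEY' not in context.values()

Patching at the class level means every instance created inside the with block sees the fake, which is the same reason the tests above decorate whole test methods with @mock.patch.object(LoginView, '_get_captcha_data').
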
@@ -1,426 +1,428 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import collections
22 import collections
23 import datetime
23 import datetime
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import logging
26 import logging
27 import urlparse
27 import urlparse
28
28
29 from pyramid.httpexceptions import HTTPFound
29 from pyramid.httpexceptions import HTTPFound
30 from pyramid.view import view_config
30 from pyramid.view import view_config
31 from recaptcha.client.captcha import submit
31 from recaptcha.client.captcha import submit
32
32
33 from rhodecode.apps._base import BaseAppView
33 from rhodecode.apps._base import BaseAppView
34 from rhodecode.authentication.base import authenticate, HTTP_TYPE
34 from rhodecode.authentication.base import authenticate, HTTP_TYPE
35 from rhodecode.events import UserRegistered
35 from rhodecode.events import UserRegistered, trigger
36 from rhodecode.lib import helpers as h
36 from rhodecode.lib import helpers as h
37 from rhodecode.lib import audit_logger
37 from rhodecode.lib import audit_logger
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 AuthUser, HasPermissionAnyDecorator, CSRFRequired)
39 AuthUser, HasPermissionAnyDecorator, CSRFRequired)
40 from rhodecode.lib.base import get_ip_addr
40 from rhodecode.lib.base import get_ip_addr
41 from rhodecode.lib.exceptions import UserCreationError
41 from rhodecode.lib.exceptions import UserCreationError
42 from rhodecode.lib.utils2 import safe_str
42 from rhodecode.lib.utils2 import safe_str
43 from rhodecode.model.db import User, UserApiKeys
43 from rhodecode.model.db import User, UserApiKeys
44 from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm
44 from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm
45 from rhodecode.model.meta import Session
45 from rhodecode.model.meta import Session
46 from rhodecode.model.auth_token import AuthTokenModel
46 from rhodecode.model.auth_token import AuthTokenModel
47 from rhodecode.model.settings import SettingsModel
47 from rhodecode.model.settings import SettingsModel
48 from rhodecode.model.user import UserModel
48 from rhodecode.model.user import UserModel
49 from rhodecode.translation import _
49 from rhodecode.translation import _
50
50
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54 CaptchaData = collections.namedtuple(
54 CaptchaData = collections.namedtuple(
55 'CaptchaData', 'active, private_key, public_key')
55 'CaptchaData', 'active, private_key, public_key')
56
56
57
57
58 def _store_user_in_session(session, username, remember=False):
58 def _store_user_in_session(session, username, remember=False):
59 user = User.get_by_username(username, case_insensitive=True)
59 user = User.get_by_username(username, case_insensitive=True)
60 auth_user = AuthUser(user.user_id)
60 auth_user = AuthUser(user.user_id)
61 auth_user.set_authenticated()
61 auth_user.set_authenticated()
62 cs = auth_user.get_cookie_store()
62 cs = auth_user.get_cookie_store()
63 session['rhodecode_user'] = cs
63 session['rhodecode_user'] = cs
64 user.update_lastlogin()
64 user.update_lastlogin()
65 Session().commit()
65 Session().commit()
66
66
67 # If they want to be remembered, update the cookie
67 # If they want to be remembered, update the cookie
68 if remember:
68 if remember:
69 _year = (datetime.datetime.now() +
69 _year = (datetime.datetime.now() +
70 datetime.timedelta(seconds=60 * 60 * 24 * 365))
70 datetime.timedelta(seconds=60 * 60 * 24 * 365))
71 session._set_cookie_expires(_year)
71 session._set_cookie_expires(_year)
72
72
73 session.save()
73 session.save()
74
74
75 safe_cs = cs.copy()
75 safe_cs = cs.copy()
76 safe_cs['password'] = '****'
76 safe_cs['password'] = '****'
77 log.info('user %s is now authenticated and stored in '
77 log.info('user %s is now authenticated and stored in '
78 'session, session attrs %s', username, safe_cs)
78 'session, session attrs %s', username, safe_cs)
79
79
80 # dumps session attrs back to cookie
80 # dumps session attrs back to cookie
81 session._update_cookie_out()
81 session._update_cookie_out()
82 # we set new cookie
82 # we set new cookie
83 headers = None
83 headers = None
84 if session.request['set_cookie']:
84 if session.request['set_cookie']:
85 # send set-cookie headers back to response to update cookie
85 # send set-cookie headers back to response to update cookie
86 headers = [('Set-Cookie', session.request['cookie_out'])]
86 headers = [('Set-Cookie', session.request['cookie_out'])]
87 return headers
87 return headers
88
88
89
89
90 def get_came_from(request):
90 def get_came_from(request):
91 came_from = safe_str(request.GET.get('came_from', ''))
91 came_from = safe_str(request.GET.get('came_from', ''))
92 parsed = urlparse.urlparse(came_from)
92 parsed = urlparse.urlparse(came_from)
93 allowed_schemes = ['http', 'https']
93 allowed_schemes = ['http', 'https']
94 default_came_from = h.route_path('home')
94 default_came_from = h.route_path('home')
95 if parsed.scheme and parsed.scheme not in allowed_schemes:
95 if parsed.scheme and parsed.scheme not in allowed_schemes:
96 log.error('Suspicious URL scheme detected %s for url %s' %
96 log.error('Suspicious URL scheme detected %s for url %s' %
97 (parsed.scheme, parsed))
97 (parsed.scheme, parsed))
98 came_from = default_came_from
98 came_from = default_came_from
99 elif parsed.netloc and request.host != parsed.netloc:
99 elif parsed.netloc and request.host != parsed.netloc:
100 log.error('Suspicious NETLOC detected %s for url %s server url '
100 log.error('Suspicious NETLOC detected %s for url %s server url '
101 'is: %s' % (parsed.netloc, parsed, request.host))
101 'is: %s' % (parsed.netloc, parsed, request.host))
102 came_from = default_came_from
102 came_from = default_came_from
103 elif any(bad_str in parsed.path for bad_str in ('\r', '\n')):
103 elif any(bad_str in parsed.path for bad_str in ('\r', '\n')):
104 log.error('Header injection detected `%s` for url %s server url ' %
104 log.error('Header injection detected `%s` for url %s server url ' %
105 (parsed.path, parsed))
105 (parsed.path, parsed))
106 came_from = default_came_from
106 came_from = default_came_from
107
107
108 return came_from or default_came_from
108 return came_from or default_came_from
109
109
110
110
111 class LoginView(BaseAppView):
111 class LoginView(BaseAppView):
112
112
113 def load_default_context(self):
113 def load_default_context(self):
114 c = self._get_local_tmpl_context()
114 c = self._get_local_tmpl_context()
115 c.came_from = get_came_from(self.request)
115 c.came_from = get_came_from(self.request)
116
116
117 return c
117 return c
118
118
119 def _get_captcha_data(self):
119 def _get_captcha_data(self):
120 settings = SettingsModel().get_all_settings()
120 settings = SettingsModel().get_all_settings()
121 private_key = settings.get('rhodecode_captcha_private_key')
121 private_key = settings.get('rhodecode_captcha_private_key')
122 public_key = settings.get('rhodecode_captcha_public_key')
122 public_key = settings.get('rhodecode_captcha_public_key')
123 active = bool(private_key)
123 active = bool(private_key)
124 return CaptchaData(
124 return CaptchaData(
125 active=active, private_key=private_key, public_key=public_key)
125 active=active, private_key=private_key, public_key=public_key)
126
126
127 @view_config(
127 @view_config(
128 route_name='login', request_method='GET',
128 route_name='login', request_method='GET',
129 renderer='rhodecode:templates/login.mako')
129 renderer='rhodecode:templates/login.mako')
130 def login(self):
130 def login(self):
131 c = self.load_default_context()
131 c = self.load_default_context()
132 auth_user = self._rhodecode_user
132 auth_user = self._rhodecode_user
133
133
134 # redirect if already logged in
134 # redirect if already logged in
135 if (auth_user.is_authenticated and
135 if (auth_user.is_authenticated and
136 not auth_user.is_default and auth_user.ip_allowed):
136 not auth_user.is_default and auth_user.ip_allowed):
137 raise HTTPFound(c.came_from)
137 raise HTTPFound(c.came_from)
138
138
139 # check if we use headers plugin, and try to login using it.
139 # check if we use headers plugin, and try to login using it.
140 try:
140 try:
141 log.debug('Running PRE-AUTH for headers based authentication')
141 log.debug('Running PRE-AUTH for headers based authentication')
142 auth_info = authenticate(
142 auth_info = authenticate(
143 '', '', self.request.environ, HTTP_TYPE, skip_missing=True)
143 '', '', self.request.environ, HTTP_TYPE, skip_missing=True)
144 if auth_info:
144 if auth_info:
145 headers = _store_user_in_session(
145 headers = _store_user_in_session(
146 self.session, auth_info.get('username'))
146 self.session, auth_info.get('username'))
147 raise HTTPFound(c.came_from, headers=headers)
147 raise HTTPFound(c.came_from, headers=headers)
148 except UserCreationError as e:
148 except UserCreationError as e:
149 log.error(e)
149 log.error(e)
150 self.session.flash(e, queue='error')
150 h.flash(e, category='error')
151
151
152 return self._get_template_context(c)
152 return self._get_template_context(c)
153
153
154 @view_config(
154 @view_config(
155 route_name='login', request_method='POST',
155 route_name='login', request_method='POST',
156 renderer='rhodecode:templates/login.mako')
156 renderer='rhodecode:templates/login.mako')
157 def login_post(self):
157 def login_post(self):
158 c = self.load_default_context()
158 c = self.load_default_context()
159
159
160 login_form = LoginForm(self.request.translate)()
160 login_form = LoginForm(self.request.translate)()
161
161
162 try:
162 try:
163 self.session.invalidate()
163 self.session.invalidate()
164 form_result = login_form.to_python(self.request.POST)
164 form_result = login_form.to_python(self.request.POST)
165 # form checks for username/password, now we're authenticated
165 # form checks for username/password, now we're authenticated
166 headers = _store_user_in_session(
166 headers = _store_user_in_session(
167 self.session,
167 self.session,
168 username=form_result['username'],
168 username=form_result['username'],
169 remember=form_result['remember'])
169 remember=form_result['remember'])
170 log.debug('Redirecting to "%s" after login.', c.came_from)
170 log.debug('Redirecting to "%s" after login.', c.came_from)
171
171
172 audit_user = audit_logger.UserWrap(
172 audit_user = audit_logger.UserWrap(
173 username=self.request.POST.get('username'),
173 username=self.request.POST.get('username'),
174 ip_addr=self.request.remote_addr)
174 ip_addr=self.request.remote_addr)
175 action_data = {'user_agent': self.request.user_agent}
175 action_data = {'user_agent': self.request.user_agent}
176 audit_logger.store_web(
176 audit_logger.store_web(
177 'user.login.success', action_data=action_data,
177 'user.login.success', action_data=action_data,
178 user=audit_user, commit=True)
178 user=audit_user, commit=True)
179
179
180 raise HTTPFound(c.came_from, headers=headers)
180 raise HTTPFound(c.came_from, headers=headers)
181 except formencode.Invalid as errors:
181 except formencode.Invalid as errors:
182 defaults = errors.value
182 defaults = errors.value
183 # remove password from filling in form again
183 # remove password from filling in form again
184 defaults.pop('password', None)
184 defaults.pop('password', None)
185 render_ctx = {
185 render_ctx = {
186 'errors': errors.error_dict,
186 'errors': errors.error_dict,
187 'defaults': defaults,
187 'defaults': defaults,
188 }
188 }
189
189
190 audit_user = audit_logger.UserWrap(
190 audit_user = audit_logger.UserWrap(
191 username=self.request.POST.get('username'),
191 username=self.request.POST.get('username'),
192 ip_addr=self.request.remote_addr)
192 ip_addr=self.request.remote_addr)
193 action_data = {'user_agent': self.request.user_agent}
193 action_data = {'user_agent': self.request.user_agent}
194 audit_logger.store_web(
194 audit_logger.store_web(
195 'user.login.failure', action_data=action_data,
195 'user.login.failure', action_data=action_data,
196 user=audit_user, commit=True)
196 user=audit_user, commit=True)
197 return self._get_template_context(c, **render_ctx)
197 return self._get_template_context(c, **render_ctx)
198
198
199 except UserCreationError as e:
199 except UserCreationError as e:
200 # headers auth or other auth functions that create users on
200 # headers auth or other auth functions that create users on
201 # the fly can throw this exception signaling that there's an issue
201 # the fly can throw this exception signaling that there's an issue
202 # with user creation, explanation should be provided in
202 # with user creation, explanation should be provided in
203 # Exception itself
203 # Exception itself
204 self.session.flash(e, queue='error')
204 h.flash(e, category='error')
205 return self._get_template_context(c)
205 return self._get_template_context(c)
206
206
207 @CSRFRequired()
207 @CSRFRequired()
208 @view_config(route_name='logout', request_method='POST')
208 @view_config(route_name='logout', request_method='POST')
209 def logout(self):
209 def logout(self):
210 auth_user = self._rhodecode_user
210 auth_user = self._rhodecode_user
211 log.info('Deleting session for user: `%s`', auth_user)
211 log.info('Deleting session for user: `%s`', auth_user)
212
212
213 action_data = {'user_agent': self.request.user_agent}
213 action_data = {'user_agent': self.request.user_agent}
214 audit_logger.store_web(
214 audit_logger.store_web(
215 'user.logout', action_data=action_data,
215 'user.logout', action_data=action_data,
216 user=auth_user, commit=True)
216 user=auth_user, commit=True)
217 self.session.delete()
217 self.session.delete()
218 return HTTPFound(h.route_path('home'))
218 return HTTPFound(h.route_path('home'))
219
219
220 @HasPermissionAnyDecorator(
220 @HasPermissionAnyDecorator(
221 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
221 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
222 @view_config(
222 @view_config(
223 route_name='register', request_method='GET',
223 route_name='register', request_method='GET',
224 renderer='rhodecode:templates/register.mako',)
224 renderer='rhodecode:templates/register.mako',)
225 def register(self, defaults=None, errors=None):
225 def register(self, defaults=None, errors=None):
226 c = self.load_default_context()
226 c = self.load_default_context()
227 defaults = defaults or {}
227 defaults = defaults or {}
228 errors = errors or {}
228 errors = errors or {}
229
229
230 settings = SettingsModel().get_all_settings()
230 settings = SettingsModel().get_all_settings()
231 register_message = settings.get('rhodecode_register_message') or ''
231 register_message = settings.get('rhodecode_register_message') or ''
232 captcha = self._get_captcha_data()
232 captcha = self._get_captcha_data()
233 auto_active = 'hg.register.auto_activate' in User.get_default_user()\
233 auto_active = 'hg.register.auto_activate' in User.get_default_user()\
234 .AuthUser().permissions['global']
234 .AuthUser().permissions['global']
235
235
236 render_ctx = self._get_template_context(c)
236 render_ctx = self._get_template_context(c)
237 render_ctx.update({
237 render_ctx.update({
238 'defaults': defaults,
238 'defaults': defaults,
239 'errors': errors,
239 'errors': errors,
240 'auto_active': auto_active,
240 'auto_active': auto_active,
241 'captcha_active': captcha.active,
241 'captcha_active': captcha.active,
242 'captcha_public_key': captcha.public_key,
242 'captcha_public_key': captcha.public_key,
243 'register_message': register_message,
243 'register_message': register_message,
244 })
244 })
245 return render_ctx
245 return render_ctx
246
246
247 @HasPermissionAnyDecorator(
247 @HasPermissionAnyDecorator(
248 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
248 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
249 @view_config(
249 @view_config(
250 route_name='register', request_method='POST',
250 route_name='register', request_method='POST',
251 renderer='rhodecode:templates/register.mako')
251 renderer='rhodecode:templates/register.mako')
252 def register_post(self):
252 def register_post(self):
253 self.load_default_context()
253 captcha = self._get_captcha_data()
254 captcha = self._get_captcha_data()
254 auto_active = 'hg.register.auto_activate' in User.get_default_user()\
255 auto_active = 'hg.register.auto_activate' in User.get_default_user()\
255 .AuthUser().permissions['global']
256 .AuthUser().permissions['global']
256
257
257 register_form = RegisterForm(self.request.translate)()
258 register_form = RegisterForm(self.request.translate)()
258 try:
259 try:
259
260
260 form_result = register_form.to_python(self.request.POST)
261 form_result = register_form.to_python(self.request.POST)
261 form_result['active'] = auto_active
262 form_result['active'] = auto_active
262
263
263 if captcha.active:
264 if captcha.active:
264 response = submit(
265 response = submit(
265 self.request.POST.get('recaptcha_challenge_field'),
266 self.request.POST.get('recaptcha_challenge_field'),
266 self.request.POST.get('recaptcha_response_field'),
267 self.request.POST.get('recaptcha_response_field'),
267 private_key=captcha.private_key,
268 private_key=captcha.private_key,
268 remoteip=get_ip_addr(self.request.environ))
269 remoteip=get_ip_addr(self.request.environ))
269 if not response.is_valid:
270 if not response.is_valid:
270 _value = form_result
271 _value = form_result
271 _msg = _('Bad captcha')
272 _msg = _('Bad captcha')
272 error_dict = {'recaptcha_field': _msg}
273 error_dict = {'recaptcha_field': _msg}
273 raise formencode.Invalid(_msg, _value, None,
274 raise formencode.Invalid(_msg, _value, None,
274 error_dict=error_dict)
275 error_dict=error_dict)
275
276
276 new_user = UserModel().create_registration(form_result)
277 new_user = UserModel().create_registration(form_result)
277 event = UserRegistered(user=new_user, session=self.session)
278 event = UserRegistered(user=new_user, session=self.session)
278 self.request.registry.notify(event)
279 trigger(event)
279 self.session.flash(
280 h.flash(
280 _('You have successfully registered with RhodeCode'),
281 _('You have successfully registered with RhodeCode'),
281 queue='success')
282 category='success')
282 Session().commit()
283 Session().commit()
283
284
284 redirect_ro = self.request.route_path('login')
285 redirect_ro = self.request.route_path('login')
285 raise HTTPFound(redirect_ro)
286 raise HTTPFound(redirect_ro)
286
287
287 except formencode.Invalid as errors:
288 except formencode.Invalid as errors:
288 errors.value.pop('password', None)
289 errors.value.pop('password', None)
289 errors.value.pop('password_confirmation', None)
290 errors.value.pop('password_confirmation', None)
290 return self.register(
291 return self.register(
291 defaults=errors.value, errors=errors.error_dict)
292 defaults=errors.value, errors=errors.error_dict)
292
293
293 except UserCreationError as e:
294 except UserCreationError as e:
294 # container auth or other auth functions that create users on
295 # container auth or other auth functions that create users on
295 # the fly can throw this exception signaling that there's an issue
296 # the fly can throw this exception signaling that there's an issue
296 # with user creation, explanation should be provided in
297 # with user creation, explanation should be provided in
297 # Exception itself
298 # Exception itself
298 self.session.flash(e, queue='error')
299 h.flash(e, category='error')
299 return self.register()
300 return self.register()
300
301
301 @view_config(
302 @view_config(
302 route_name='reset_password', request_method=('GET', 'POST'),
303 route_name='reset_password', request_method=('GET', 'POST'),
303 renderer='rhodecode:templates/password_reset.mako')
304 renderer='rhodecode:templates/password_reset.mako')
304 def password_reset(self):
305 def password_reset(self):
306 c = self.load_default_context()
305 captcha = self._get_captcha_data()
307 captcha = self._get_captcha_data()
306
308
307 render_ctx = {
309 template_context = {
308 'captcha_active': captcha.active,
310 'captcha_active': captcha.active,
309 'captcha_public_key': captcha.public_key,
311 'captcha_public_key': captcha.public_key,
310 'defaults': {},
312 'defaults': {},
311 'errors': {},
313 'errors': {},
312 }
314 }
313
315
314 # always send implicit message to prevent discovery of
316 # always send implicit message to prevent discovery of
315 # matching emails
317 # matching emails
316 msg = _('If such email exists, a password reset link was sent to it.')
318 msg = _('If such email exists, a password reset link was sent to it.')
317
319
318 if self.request.POST:
320 if self.request.POST:
319 if h.HasPermissionAny('hg.password_reset.disabled')():
321 if h.HasPermissionAny('hg.password_reset.disabled')():
320 _email = self.request.POST.get('email', '')
322 _email = self.request.POST.get('email', '')
321 log.error('Failed attempt to reset password for `%s`.', _email)
323 log.error('Failed attempt to reset password for `%s`.', _email)
322 self.session.flash(_('Password reset has been disabled.'),
324 h.flash(_('Password reset has been disabled.'),
323 queue='error')
325 category='error')
324 return HTTPFound(self.request.route_path('reset_password'))
326 return HTTPFound(self.request.route_path('reset_password'))
325
327
326 password_reset_form = PasswordResetForm(self.request.translate)()
328 password_reset_form = PasswordResetForm(self.request.translate)()
327 try:
329 try:
328 form_result = password_reset_form.to_python(
330 form_result = password_reset_form.to_python(
329 self.request.POST)
331 self.request.POST)
330 user_email = form_result['email']
332 user_email = form_result['email']
331
333
332 if captcha.active:
334 if captcha.active:
333 response = submit(
335 response = submit(
334 self.request.POST.get('recaptcha_challenge_field'),
336 self.request.POST.get('recaptcha_challenge_field'),
335 self.request.POST.get('recaptcha_response_field'),
337 self.request.POST.get('recaptcha_response_field'),
336 private_key=captcha.private_key,
338 private_key=captcha.private_key,
337 remoteip=get_ip_addr(self.request.environ))
339 remoteip=get_ip_addr(self.request.environ))
338 if not response.is_valid:
340 if not response.is_valid:
339 _value = form_result
341 _value = form_result
340 _msg = _('Bad captcha')
342 _msg = _('Bad captcha')
341 error_dict = {'recaptcha_field': _msg}
343 error_dict = {'recaptcha_field': _msg}
342 raise formencode.Invalid(
344 raise formencode.Invalid(
343 _msg, _value, None, error_dict=error_dict)
345 _msg, _value, None, error_dict=error_dict)
344
346
345 # Generate reset URL and send mail.
347 # Generate reset URL and send mail.
346 user = User.get_by_email(user_email)
348 user = User.get_by_email(user_email)
347
349
348 # generate password reset token that expires in 10 minutes
350 # generate password reset token that expires in 10 minutes
349 desc = 'Generated token for password reset from {}'.format(
351 desc = 'Generated token for password reset from {}'.format(
350 datetime.datetime.now().isoformat())
352 datetime.datetime.now().isoformat())
351 reset_token = AuthTokenModel().create(
353 reset_token = AuthTokenModel().create(
352 user, lifetime=10,
354 user, lifetime=10,
353 description=desc,
355 description=desc,
354 role=UserApiKeys.ROLE_PASSWORD_RESET)
356 role=UserApiKeys.ROLE_PASSWORD_RESET)
355 Session().commit()
357 Session().commit()
356
358
357 log.debug('Successfully created password recovery token')
359 log.debug('Successfully created password recovery token')
358 password_reset_url = self.request.route_url(
360 password_reset_url = self.request.route_url(
359 'reset_password_confirmation',
361 'reset_password_confirmation',
360 _query={'key': reset_token.api_key})
362 _query={'key': reset_token.api_key})
361 UserModel().reset_password_link(
363 UserModel().reset_password_link(
362 form_result, password_reset_url)
364 form_result, password_reset_url)
363 # Display success message and redirect.
365 # Display success message and redirect.
364 self.session.flash(msg, queue='success')
366 h.flash(msg, category='success')
365
367
366 action_data = {'email': user_email,
368 action_data = {'email': user_email,
367 'user_agent': self.request.user_agent}
369 'user_agent': self.request.user_agent}
368 audit_logger.store_web(
370 audit_logger.store_web(
369 'user.password.reset_request', action_data=action_data,
371 'user.password.reset_request', action_data=action_data,
370 user=self._rhodecode_user, commit=True)
372 user=self._rhodecode_user, commit=True)
371 return HTTPFound(self.request.route_path('reset_password'))
373 return HTTPFound(self.request.route_path('reset_password'))
372
374
373 except formencode.Invalid as errors:
375 except formencode.Invalid as errors:
374 render_ctx.update({
376 template_context.update({
375 'defaults': errors.value,
377 'defaults': errors.value,
376 'errors': errors.error_dict,
378 'errors': errors.error_dict,
377 })
379 })
378 if not self.request.POST.get('email'):
380 if not self.request.POST.get('email'):
379 # case of empty email, we want to report that
381 # case of empty email, we want to report that
380 return render_ctx
382 return self._get_template_context(c, **template_context)
381
383
382 if 'recaptcha_field' in errors.error_dict:
384 if 'recaptcha_field' in errors.error_dict:
383 # case of failed captcha
385 # case of failed captcha
384 return render_ctx
386 return self._get_template_context(c, **template_context)
385
387
386 log.debug('faking response on invalid password reset')
388 log.debug('faking response on invalid password reset')
387 # make this take 2s, to prevent brute forcing.
389 # make this take 2s, to prevent brute forcing.
388 time.sleep(2)
390 time.sleep(2)
389 self.session.flash(msg, queue='success')
391 h.flash(msg, category='success')
390 return HTTPFound(self.request.route_path('reset_password'))
392 return HTTPFound(self.request.route_path('reset_password'))
391
393
392 return render_ctx
394 return self._get_template_context(c, **template_context)
393
395
394 @view_config(route_name='reset_password_confirmation',
396 @view_config(route_name='reset_password_confirmation',
395 request_method='GET')
397 request_method='GET')
396 def password_reset_confirmation(self):
398 def password_reset_confirmation(self):
397
399 self.load_default_context()
398 if self.request.GET and self.request.GET.get('key'):
400 if self.request.GET and self.request.GET.get('key'):
399 # make this take 2s, to prevent brute forcing.
401 # make this take 2s, to prevent brute forcing.
400 time.sleep(2)
402 time.sleep(2)
401
403
402 token = AuthTokenModel().get_auth_token(
404 token = AuthTokenModel().get_auth_token(
403 self.request.GET.get('key'))
405 self.request.GET.get('key'))
404
406
405 # verify token is the correct role
407 # verify token is the correct role
406 if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET:
408 if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET:
407 log.debug('Got token with role:%s expected is %s',
409 log.debug('Got token with role:%s expected is %s',
408 getattr(token, 'role', 'EMPTY_TOKEN'),
410 getattr(token, 'role', 'EMPTY_TOKEN'),
409 UserApiKeys.ROLE_PASSWORD_RESET)
411 UserApiKeys.ROLE_PASSWORD_RESET)
410 self.session.flash(
412 h.flash(
411 _('Given reset token is invalid'), queue='error')
413 _('Given reset token is invalid'), category='error')
412 return HTTPFound(self.request.route_path('reset_password'))
414 return HTTPFound(self.request.route_path('reset_password'))
413
415
414 try:
416 try:
415 owner = token.user
417 owner = token.user
416 data = {'email': owner.email, 'token': token.api_key}
418 data = {'email': owner.email, 'token': token.api_key}
417 UserModel().reset_password(data)
419 UserModel().reset_password(data)
418 self.session.flash(
420 h.flash(
419 _('Your password reset was successful, '
421 _('Your password reset was successful, '
420 'a new password has been sent to your email'),
422 'a new password has been sent to your email'),
421 queue='success')
423 category='success')
422 except Exception as e:
424 except Exception as e:
423 log.error(e)
425 log.error(e)
424 return HTTPFound(self.request.route_path('reset_password'))
426 return HTTPFound(self.request.route_path('reset_password'))
425
427
426 return HTTPFound(self.request.route_path('login'))
428 return HTTPFound(self.request.route_path('login'))
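The hunk above swaps the pylons-era `self.session.flash(msg, queue=...)` calls for the `h.flash(msg, category=...)` helper and routes template data through `self._get_template_context(c, **template_context)`. A minimal sketch of what such a flash helper can look like on top of Pyramid's session API follows; the function bodies and the `get_current_request()` lookup are assumptions for illustration, only the `flash(message, category=...)` call shape comes from the diff itself.

    # Sketch only, not the actual rhodecode.lib.helpers implementation.
    from pyramid.threadlocal import get_current_request

    def flash(message, category='notice'):
        """Queue a flash message on the current request's session.

        Pyramid's ISession.flash(msg, queue='') stores messages per queue;
        here the category doubles as the queue name.
        """
        request = get_current_request()
        request.session.flash(message, queue=category)

    def pop_flash(category='notice'):
        """Return and clear the queued messages for one category."""
        request = get_current_request()
        return request.session.pop_flash(queue=category)
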
@@ -1,203 +1,203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 # -*- coding: utf-8 -*-
20 # -*- coding: utf-8 -*-
21
21
22 # Copyright (C) 2016-2017 RhodeCode GmbH
22 # Copyright (C) 2016-2017 RhodeCode GmbH
23 #
23 #
24 # This program is free software: you can redistribute it and/or modify
24 # This program is free software: you can redistribute it and/or modify
25 # it under the terms of the GNU Affero General Public License, version 3
25 # it under the terms of the GNU Affero General Public License, version 3
26 # (only), as published by the Free Software Foundation.
26 # (only), as published by the Free Software Foundation.
27 #
27 #
28 # This program is distributed in the hope that it will be useful,
28 # This program is distributed in the hope that it will be useful,
29 # but WITHOUT ANY WARRANTY; without even the implied warranty of
29 # but WITHOUT ANY WARRANTY; without even the implied warranty of
30 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
30 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
31 # GNU General Public License for more details.
31 # GNU General Public License for more details.
32 #
32 #
33 # You should have received a copy of the GNU Affero General Public License
33 # You should have received a copy of the GNU Affero General Public License
34 # along with this program. If not, see <http://www.gnu.org/licenses/>.
34 # along with this program. If not, see <http://www.gnu.org/licenses/>.
35 #
35 #
36 # This program is dual-licensed. If you wish to learn more about the
36 # This program is dual-licensed. If you wish to learn more about the
37 # RhodeCode Enterprise Edition, including its added features, Support services,
37 # RhodeCode Enterprise Edition, including its added features, Support services,
38 # and proprietary license terms, please see https://rhodecode.com/licenses/
38 # and proprietary license terms, please see https://rhodecode.com/licenses/
39
39
40 import pytest
40 import pytest
41
41
42 from rhodecode.model.db import User
42 from rhodecode.model.db import User
43 from rhodecode.tests import TestController, assert_session_flash
43 from rhodecode.tests import TestController, assert_session_flash
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45
45
46
46
47 def route_path(name, params=None, **kwargs):
47 def route_path(name, params=None, **kwargs):
48 import urllib
48 import urllib
49 from rhodecode.apps._base import ADMIN_PREFIX
49 from rhodecode.apps._base import ADMIN_PREFIX
50
50
51 base_url = {
51 base_url = {
52 'my_account_edit': ADMIN_PREFIX + '/my_account/edit',
52 'my_account_edit': ADMIN_PREFIX + '/my_account/edit',
53 'my_account_update': ADMIN_PREFIX + '/my_account/update',
53 'my_account_update': ADMIN_PREFIX + '/my_account/update',
54 'my_account_pullrequests': ADMIN_PREFIX + '/my_account/pull_requests',
54 'my_account_pullrequests': ADMIN_PREFIX + '/my_account/pull_requests',
55 'my_account_pullrequests_data': ADMIN_PREFIX + '/my_account/pull_requests/data',
55 'my_account_pullrequests_data': ADMIN_PREFIX + '/my_account/pull_requests/data',
56 }[name].format(**kwargs)
56 }[name].format(**kwargs)
57
57
58 if params:
58 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
60 return base_url
61
61
62
62
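The `route_path` helper above resolves a test route name to its URL under ADMIN_PREFIX and appends url-encoded query parameters. A usage sketch is shown here; the concrete ADMIN_PREFIX value is not part of this hunk, so the expanded paths are written relative to it and are illustrative only.

    # Hypothetical usage inside a test method.
    url = route_path('my_account_edit')
    # -> ADMIN_PREFIX + '/my_account/edit'

    url = route_path('my_account_pullrequests_data', params={'closed': 1})
    # -> ADMIN_PREFIX + '/my_account/pull_requests/data?closed=1'
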
63 class TestMyAccountEdit(TestController):
63 class TestMyAccountEdit(TestController):
64
64
65 def test_my_account_edit(self):
65 def test_my_account_edit(self):
66 self.log_user()
66 self.log_user()
67 response = self.app.get(route_path('my_account_edit'))
67 response = self.app.get(route_path('my_account_edit'))
68
68
69 response.mustcontain('value="test_admin')
69 response.mustcontain('value="test_admin')
70
70
71 @pytest.mark.backends("git", "hg")
71 @pytest.mark.backends("git", "hg")
72 def test_my_account_my_pullrequests(self, pr_util):
72 def test_my_account_my_pullrequests(self, pr_util):
73 self.log_user()
73 self.log_user()
74 response = self.app.get(route_path('my_account_pullrequests'))
74 response = self.app.get(route_path('my_account_pullrequests'))
75 response.mustcontain('There are currently no open pull '
75 response.mustcontain('There are currently no open pull '
76 'requests requiring your participation.')
76 'requests requiring your participation.')
77
77
78 @pytest.mark.backends("git", "hg")
78 @pytest.mark.backends("git", "hg")
79 def test_my_account_my_pullrequests_data(self, pr_util, xhr_header):
79 def test_my_account_my_pullrequests_data(self, pr_util, xhr_header):
80 self.log_user()
80 self.log_user()
81 response = self.app.get(route_path('my_account_pullrequests_data'),
81 response = self.app.get(route_path('my_account_pullrequests_data'),
82 extra_environ=xhr_header)
82 extra_environ=xhr_header)
83 assert response.json == {
83 assert response.json == {
84 u'data': [], u'draw': None,
84 u'data': [], u'draw': None,
85 u'recordsFiltered': 0, u'recordsTotal': 0}
85 u'recordsFiltered': 0, u'recordsTotal': 0}
86
86
87 pr = pr_util.create_pull_request(title='TestMyAccountPR')
87 pr = pr_util.create_pull_request(title='TestMyAccountPR')
88 expected = {
88 expected = {
89 'author_raw': 'RhodeCode Admin',
89 'author_raw': 'RhodeCode Admin',
90 'name_raw': pr.pull_request_id
90 'name_raw': pr.pull_request_id
91 }
91 }
92 response = self.app.get(route_path('my_account_pullrequests_data'),
92 response = self.app.get(route_path('my_account_pullrequests_data'),
93 extra_environ=xhr_header)
93 extra_environ=xhr_header)
94 assert response.json['recordsTotal'] == 1
94 assert response.json['recordsTotal'] == 1
95 assert response.json['data'][0]['author_raw'] == expected['author_raw']
95 assert response.json['data'][0]['author_raw'] == expected['author_raw']
96
96
97 assert response.json['data'][0]['author_raw'] == expected['author_raw']
97 assert response.json['data'][0]['author_raw'] == expected['author_raw']
98 assert response.json['data'][0]['name_raw'] == expected['name_raw']
98 assert response.json['data'][0]['name_raw'] == expected['name_raw']
99
99
100 @pytest.mark.parametrize(
100 @pytest.mark.parametrize(
101 "name, attrs", [
101 "name, attrs", [
102 ('firstname', {'firstname': 'new_username'}),
102 ('firstname', {'firstname': 'new_username'}),
103 ('lastname', {'lastname': 'new_username'}),
103 ('lastname', {'lastname': 'new_username'}),
104 ('admin', {'admin': True}),
104 ('admin', {'admin': True}),
105 ('admin', {'admin': False}),
105 ('admin', {'admin': False}),
106 ('extern_type', {'extern_type': 'ldap'}),
106 ('extern_type', {'extern_type': 'ldap'}),
107 ('extern_type', {'extern_type': None}),
107 ('extern_type', {'extern_type': None}),
108 # ('extern_name', {'extern_name': 'test'}),
108 # ('extern_name', {'extern_name': 'test'}),
109 # ('extern_name', {'extern_name': None}),
109 # ('extern_name', {'extern_name': None}),
110 ('active', {'active': False}),
110 ('active', {'active': False}),
111 ('active', {'active': True}),
111 ('active', {'active': True}),
112 ('email', {'email': 'some@email.com'}),
112 ('email', {'email': 'some@email.com'}),
113 ])
113 ])
114 def test_my_account_update(self, name, attrs, user_util):
114 def test_my_account_update(self, name, attrs, user_util):
115 usr = user_util.create_user(password='qweqwe')
115 usr = user_util.create_user(password='qweqwe')
116 params = usr.get_api_data() # current user data
116 params = usr.get_api_data() # current user data
117 user_id = usr.user_id
117 user_id = usr.user_id
118 self.log_user(
118 self.log_user(
119 username=usr.username, password='qweqwe')
119 username=usr.username, password='qweqwe')
120
120
121 params.update({'password_confirmation': ''})
121 params.update({'password_confirmation': ''})
122 params.update({'new_password': ''})
122 params.update({'new_password': ''})
123 params.update({'extern_type': 'rhodecode'})
123 params.update({'extern_type': 'rhodecode'})
124 params.update({'extern_name': 'rhodecode'})
124 params.update({'extern_name': 'rhodecode'})
125 params.update({'csrf_token': self.csrf_token})
125 params.update({'csrf_token': self.csrf_token})
126
126
127 params.update(attrs)
127 params.update(attrs)
128 # my account page cannot set language param yet, only for admins
128 # my account page cannot set language param yet, only for admins
129 del params['language']
129 del params['language']
130 response = self.app.post(route_path('my_account_update'), params)
130 response = self.app.post(route_path('my_account_update'), params)
131
131
132 assert_session_flash(
132 assert_session_flash(
133 response, 'Your account was updated successfully')
133 response, 'Your account was updated successfully')
134
134
135 del params['csrf_token']
135 del params['csrf_token']
136
136
137 updated_user = User.get(user_id)
137 updated_user = User.get(user_id)
138 updated_params = updated_user.get_api_data()
138 updated_params = updated_user.get_api_data()
139 updated_params.update({'password_confirmation': ''})
139 updated_params.update({'password_confirmation': ''})
140 updated_params.update({'new_password': ''})
140 updated_params.update({'new_password': ''})
141
141
142 params['last_login'] = updated_params['last_login']
142 params['last_login'] = updated_params['last_login']
143 params['last_activity'] = updated_params['last_activity']
143 params['last_activity'] = updated_params['last_activity']
144 # my account page cannot set language param yet, only for admins
144 # my account page cannot set language param yet, only for admins
145 # but we get this info from API anyway
145 # but we get this info from API anyway
146 params['language'] = updated_params['language']
146 params['language'] = updated_params['language']
147
147
148 if name == 'email':
148 if name == 'email':
149 params['emails'] = [attrs['email']]
149 params['emails'] = [attrs['email']]
150 if name == 'extern_type':
150 if name == 'extern_type':
151 # cannot update this via the form; the expected value is the original one
151 # cannot update this via the form; the expected value is the original one
152 params['extern_type'] = "rhodecode"
152 params['extern_type'] = "rhodecode"
153 if name == 'extern_name':
153 if name == 'extern_name':
154 # cannot update this via the form; the expected value is the original one
154 # cannot update this via the form; the expected value is the original one
155 params['extern_name'] = str(user_id)
155 params['extern_name'] = str(user_id)
156 if name == 'active':
156 if name == 'active':
157 # my account cannot deactivate the account
157 # my account cannot deactivate the account
158 params['active'] = True
158 params['active'] = True
159 if name == 'admin':
159 if name == 'admin':
160 # my account cannot make you an admin!
160 # my account cannot make you an admin!
161 params['admin'] = False
161 params['admin'] = False
162
162
163 assert params == updated_params
163 assert params == updated_params
164
164
165 def test_my_account_update_err_email_exists(self):
165 def test_my_account_update_err_email_exists(self):
166 self.log_user()
166 self.log_user()
167
167
168 new_email = 'test_regular@mail.com' # already existing email
168 new_email = 'test_regular@mail.com' # already existing email
169 params = {
169 params = {
170 'username': 'test_admin',
170 'username': 'test_admin',
171 'new_password': 'test12',
171 'new_password': 'test12',
172 'password_confirmation': 'test122',
172 'password_confirmation': 'test122',
173 'firstname': 'NewName',
173 'firstname': 'NewName',
174 'lastname': 'NewLastname',
174 'lastname': 'NewLastname',
175 'email': new_email,
175 'email': new_email,
176 'csrf_token': self.csrf_token,
176 'csrf_token': self.csrf_token,
177 }
177 }
178
178
179 response = self.app.post(route_path('my_account_update'),
179 response = self.app.post(route_path('my_account_update'),
180 params=params)
180 params=params)
181
181
182 response.mustcontain('This e-mail address is already taken')
182 response.mustcontain('This e-mail address is already taken')
183
183
184 def test_my_account_update_bad_email_address(self):
184 def test_my_account_update_bad_email_address(self):
185 self.log_user('test_regular2', 'test12')
185 self.log_user('test_regular2', 'test12')
186
186
187 new_email = 'newmail.pl'
187 new_email = 'newmail.pl'
188 params = {
188 params = {
189 'username': 'test_admin',
189 'username': 'test_admin',
190 'new_password': 'test12',
190 'new_password': 'test12',
191 'password_confirmation': 'test122',
191 'password_confirmation': 'test122',
192 'firstname': 'NewName',
192 'firstname': 'NewName',
193 'lastname': 'NewLastname',
193 'lastname': 'NewLastname',
194 'email': new_email,
194 'email': new_email,
195 'csrf_token': self.csrf_token,
195 'csrf_token': self.csrf_token,
196 }
196 }
197 response = self.app.post(route_path('my_account_update'),
197 response = self.app.post(route_path('my_account_update'),
198 params=params)
198 params=params)
199
199
200 response.mustcontain('An email address must contain a single @')
200 response.mustcontain('An email address must contain a single @')
201 msg = '???'
201 msg = u'Username "%(username)s" already exists'
202 msg = h.html_escape(msg % {'username': 'test_admin'})
202 msg = h.html_escape(msg % {'username': 'test_admin'})
203 response.mustcontain(u"%s" % msg)
203 response.mustcontain(u"%s" % msg)
@@ -1,584 +1,585 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.renderers import render
28 from pyramid.renderers import render
29 from pyramid.response import Response
29 from pyramid.response import Response
30
30
31 from rhodecode.apps._base import BaseAppView, DataGridAppView
31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 from rhodecode import forms
32 from rhodecode import forms
33 from rhodecode.lib import helpers as h
33 from rhodecode.lib import helpers as h
34 from rhodecode.lib import audit_logger
34 from rhodecode.lib import audit_logger
35 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired
36 from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired
37 from rhodecode.lib.channelstream import (
37 from rhodecode.lib.channelstream import (
38 channelstream_request, ChannelstreamException)
38 channelstream_request, ChannelstreamException)
39 from rhodecode.lib.utils2 import safe_int, md5, str2bool
39 from rhodecode.lib.utils2 import safe_int, md5, str2bool
40 from rhodecode.model.auth_token import AuthTokenModel
40 from rhodecode.model.auth_token import AuthTokenModel
41 from rhodecode.model.comment import CommentsModel
41 from rhodecode.model.comment import CommentsModel
42 from rhodecode.model.db import (
42 from rhodecode.model.db import (
43 Repository, UserEmailMap, UserApiKeys, UserFollowing, joinedload,
43 Repository, UserEmailMap, UserApiKeys, UserFollowing, joinedload,
44 PullRequest)
44 PullRequest)
45 from rhodecode.model.forms import UserForm, UserExtraEmailForm
45 from rhodecode.model.forms import UserForm, UserExtraEmailForm
46 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
47 from rhodecode.model.pull_request import PullRequestModel
47 from rhodecode.model.pull_request import PullRequestModel
48 from rhodecode.model.scm import RepoList
48 from rhodecode.model.scm import RepoList
49 from rhodecode.model.user import UserModel
49 from rhodecode.model.user import UserModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.validation_schema.schemas import user_schema
51 from rhodecode.model.validation_schema.schemas import user_schema
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 class MyAccountView(BaseAppView, DataGridAppView):
56 class MyAccountView(BaseAppView, DataGridAppView):
57 ALLOW_SCOPED_TOKENS = False
57 ALLOW_SCOPED_TOKENS = False
58 """
58 """
59 This view has an alternative version inside EE; if modified, please take
59 This view has an alternative version inside EE; if modified, please take
60 a look in there as well.
60 a look in there as well.
61 """
61 """
62
62
63 def load_default_context(self):
63 def load_default_context(self):
64 c = self._get_local_tmpl_context()
64 c = self._get_local_tmpl_context()
65 c.user = c.auth_user.get_instance()
65 c.user = c.auth_user.get_instance()
66 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
66 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
67
67
68 return c
68 return c
69
69
70 @LoginRequired()
70 @LoginRequired()
71 @NotAnonymous()
71 @NotAnonymous()
72 @view_config(
72 @view_config(
73 route_name='my_account_profile', request_method='GET',
73 route_name='my_account_profile', request_method='GET',
74 renderer='rhodecode:templates/admin/my_account/my_account.mako')
74 renderer='rhodecode:templates/admin/my_account/my_account.mako')
75 def my_account_profile(self):
75 def my_account_profile(self):
76 c = self.load_default_context()
76 c = self.load_default_context()
77 c.active = 'profile'
77 c.active = 'profile'
78 return self._get_template_context(c)
78 return self._get_template_context(c)
79
79
80 @LoginRequired()
80 @LoginRequired()
81 @NotAnonymous()
81 @NotAnonymous()
82 @view_config(
82 @view_config(
83 route_name='my_account_password', request_method='GET',
83 route_name='my_account_password', request_method='GET',
84 renderer='rhodecode:templates/admin/my_account/my_account.mako')
84 renderer='rhodecode:templates/admin/my_account/my_account.mako')
85 def my_account_password(self):
85 def my_account_password(self):
86 c = self.load_default_context()
86 c = self.load_default_context()
87 c.active = 'password'
87 c.active = 'password'
88 c.extern_type = c.user.extern_type
88 c.extern_type = c.user.extern_type
89
89
90 schema = user_schema.ChangePasswordSchema().bind(
90 schema = user_schema.ChangePasswordSchema().bind(
91 username=c.user.username)
91 username=c.user.username)
92
92
93 form = forms.Form(
93 form = forms.Form(
94 schema,
94 schema,
95 action=h.route_path('my_account_password_update'),
95 action=h.route_path('my_account_password_update'),
96 buttons=(forms.buttons.save, forms.buttons.reset))
96 buttons=(forms.buttons.save, forms.buttons.reset))
97
97
98 c.form = form
98 c.form = form
99 return self._get_template_context(c)
99 return self._get_template_context(c)
100
100
101 @LoginRequired()
101 @LoginRequired()
102 @NotAnonymous()
102 @NotAnonymous()
103 @CSRFRequired()
103 @CSRFRequired()
104 @view_config(
104 @view_config(
105 route_name='my_account_password_update', request_method='POST',
105 route_name='my_account_password_update', request_method='POST',
106 renderer='rhodecode:templates/admin/my_account/my_account.mako')
106 renderer='rhodecode:templates/admin/my_account/my_account.mako')
107 def my_account_password_update(self):
107 def my_account_password_update(self):
108 _ = self.request.translate
108 _ = self.request.translate
109 c = self.load_default_context()
109 c = self.load_default_context()
110 c.active = 'password'
110 c.active = 'password'
111 c.extern_type = c.user.extern_type
111 c.extern_type = c.user.extern_type
112
112
113 schema = user_schema.ChangePasswordSchema().bind(
113 schema = user_schema.ChangePasswordSchema().bind(
114 username=c.user.username)
114 username=c.user.username)
115
115
116 form = forms.Form(
116 form = forms.Form(
117 schema, buttons=(forms.buttons.save, forms.buttons.reset))
117 schema, buttons=(forms.buttons.save, forms.buttons.reset))
118
118
119 if c.extern_type != 'rhodecode':
119 if c.extern_type != 'rhodecode':
120 raise HTTPFound(self.request.route_path('my_account_password'))
120 raise HTTPFound(self.request.route_path('my_account_password'))
121
121
122 controls = self.request.POST.items()
122 controls = self.request.POST.items()
123 try:
123 try:
124 valid_data = form.validate(controls)
124 valid_data = form.validate(controls)
125 UserModel().update_user(c.user.user_id, **valid_data)
125 UserModel().update_user(c.user.user_id, **valid_data)
126 c.user.update_userdata(force_password_change=False)
126 c.user.update_userdata(force_password_change=False)
127 Session().commit()
127 Session().commit()
128 except forms.ValidationFailure as e:
128 except forms.ValidationFailure as e:
129 c.form = e
129 c.form = e
130 return self._get_template_context(c)
130 return self._get_template_context(c)
131
131
132 except Exception:
132 except Exception:
133 log.exception("Exception updating password")
133 log.exception("Exception updating password")
134 h.flash(_('Error occurred during update of user password'),
134 h.flash(_('Error occurred during update of user password'),
135 category='error')
135 category='error')
136 else:
136 else:
137 instance = c.auth_user.get_instance()
137 instance = c.auth_user.get_instance()
138 self.session.setdefault('rhodecode_user', {}).update(
138 self.session.setdefault('rhodecode_user', {}).update(
139 {'password': md5(instance.password)})
139 {'password': md5(instance.password)})
140 self.session.save()
140 self.session.save()
141 h.flash(_("Successfully updated password"), category='success')
141 h.flash(_("Successfully updated password"), category='success')
142
142
143 raise HTTPFound(self.request.route_path('my_account_password'))
143 raise HTTPFound(self.request.route_path('my_account_password'))
144
144
145 @LoginRequired()
145 @LoginRequired()
146 @NotAnonymous()
146 @NotAnonymous()
147 @view_config(
147 @view_config(
148 route_name='my_account_auth_tokens', request_method='GET',
148 route_name='my_account_auth_tokens', request_method='GET',
149 renderer='rhodecode:templates/admin/my_account/my_account.mako')
149 renderer='rhodecode:templates/admin/my_account/my_account.mako')
150 def my_account_auth_tokens(self):
150 def my_account_auth_tokens(self):
151 _ = self.request.translate
151 _ = self.request.translate
152
152
153 c = self.load_default_context()
153 c = self.load_default_context()
154 c.active = 'auth_tokens'
154 c.active = 'auth_tokens'
155 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
155 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
156 c.role_values = [
156 c.role_values = [
157 (x, AuthTokenModel.cls._get_role_name(x))
157 (x, AuthTokenModel.cls._get_role_name(x))
158 for x in AuthTokenModel.cls.ROLES]
158 for x in AuthTokenModel.cls.ROLES]
159 c.role_options = [(c.role_values, _("Role"))]
159 c.role_options = [(c.role_values, _("Role"))]
160 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
160 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
161 c.user.user_id, show_expired=True)
161 c.user.user_id, show_expired=True)
162 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
162 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
163 return self._get_template_context(c)
163 return self._get_template_context(c)
164
164
165 def maybe_attach_token_scope(self, token):
165 def maybe_attach_token_scope(self, token):
166 # implemented in EE edition
166 # implemented in EE edition
167 pass
167 pass
168
168
169 @LoginRequired()
169 @LoginRequired()
170 @NotAnonymous()
170 @NotAnonymous()
171 @CSRFRequired()
171 @CSRFRequired()
172 @view_config(
172 @view_config(
173 route_name='my_account_auth_tokens_add', request_method='POST',)
173 route_name='my_account_auth_tokens_add', request_method='POST',)
174 def my_account_auth_tokens_add(self):
174 def my_account_auth_tokens_add(self):
175 _ = self.request.translate
175 _ = self.request.translate
176 c = self.load_default_context()
176 c = self.load_default_context()
177
177
178 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
178 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
179 description = self.request.POST.get('description')
179 description = self.request.POST.get('description')
180 role = self.request.POST.get('role')
180 role = self.request.POST.get('role')
181
181
182 token = AuthTokenModel().create(
182 token = AuthTokenModel().create(
183 c.user.user_id, description, lifetime, role)
183 c.user.user_id, description, lifetime, role)
184 token_data = token.get_api_data()
184 token_data = token.get_api_data()
185
185
186 self.maybe_attach_token_scope(token)
186 self.maybe_attach_token_scope(token)
187 audit_logger.store_web(
187 audit_logger.store_web(
188 'user.edit.token.add', action_data={
188 'user.edit.token.add', action_data={
189 'data': {'token': token_data, 'user': 'self'}},
189 'data': {'token': token_data, 'user': 'self'}},
190 user=self._rhodecode_user, )
190 user=self._rhodecode_user, )
191 Session().commit()
191 Session().commit()
192
192
193 h.flash(_("Auth token successfully created"), category='success')
193 h.flash(_("Auth token successfully created"), category='success')
194 return HTTPFound(h.route_path('my_account_auth_tokens'))
194 return HTTPFound(h.route_path('my_account_auth_tokens'))
195
195
196 @LoginRequired()
196 @LoginRequired()
197 @NotAnonymous()
197 @NotAnonymous()
198 @CSRFRequired()
198 @CSRFRequired()
199 @view_config(
199 @view_config(
200 route_name='my_account_auth_tokens_delete', request_method='POST')
200 route_name='my_account_auth_tokens_delete', request_method='POST')
201 def my_account_auth_tokens_delete(self):
201 def my_account_auth_tokens_delete(self):
202 _ = self.request.translate
202 _ = self.request.translate
203 c = self.load_default_context()
203 c = self.load_default_context()
204
204
205 del_auth_token = self.request.POST.get('del_auth_token')
205 del_auth_token = self.request.POST.get('del_auth_token')
206
206
207 if del_auth_token:
207 if del_auth_token:
208 token = UserApiKeys.get_or_404(del_auth_token)
208 token = UserApiKeys.get_or_404(del_auth_token)
209 token_data = token.get_api_data()
209 token_data = token.get_api_data()
210
210
211 AuthTokenModel().delete(del_auth_token, c.user.user_id)
211 AuthTokenModel().delete(del_auth_token, c.user.user_id)
212 audit_logger.store_web(
212 audit_logger.store_web(
213 'user.edit.token.delete', action_data={
213 'user.edit.token.delete', action_data={
214 'data': {'token': token_data, 'user': 'self'}},
214 'data': {'token': token_data, 'user': 'self'}},
215 user=self._rhodecode_user,)
215 user=self._rhodecode_user,)
216 Session().commit()
216 Session().commit()
217 h.flash(_("Auth token successfully deleted"), category='success')
217 h.flash(_("Auth token successfully deleted"), category='success')
218
218
219 return HTTPFound(h.route_path('my_account_auth_tokens'))
219 return HTTPFound(h.route_path('my_account_auth_tokens'))
220
220
221 @LoginRequired()
221 @LoginRequired()
222 @NotAnonymous()
222 @NotAnonymous()
223 @view_config(
223 @view_config(
224 route_name='my_account_emails', request_method='GET',
224 route_name='my_account_emails', request_method='GET',
225 renderer='rhodecode:templates/admin/my_account/my_account.mako')
225 renderer='rhodecode:templates/admin/my_account/my_account.mako')
226 def my_account_emails(self):
226 def my_account_emails(self):
227 _ = self.request.translate
227 _ = self.request.translate
228
228
229 c = self.load_default_context()
229 c = self.load_default_context()
230 c.active = 'emails'
230 c.active = 'emails'
231
231
232 c.user_email_map = UserEmailMap.query()\
232 c.user_email_map = UserEmailMap.query()\
233 .filter(UserEmailMap.user == c.user).all()
233 .filter(UserEmailMap.user == c.user).all()
234 return self._get_template_context(c)
234 return self._get_template_context(c)
235
235
236 @LoginRequired()
236 @LoginRequired()
237 @NotAnonymous()
237 @NotAnonymous()
238 @CSRFRequired()
238 @CSRFRequired()
239 @view_config(
239 @view_config(
240 route_name='my_account_emails_add', request_method='POST')
240 route_name='my_account_emails_add', request_method='POST')
241 def my_account_emails_add(self):
241 def my_account_emails_add(self):
242 _ = self.request.translate
242 _ = self.request.translate
243 c = self.load_default_context()
243 c = self.load_default_context()
244
244
245 email = self.request.POST.get('new_email')
245 email = self.request.POST.get('new_email')
246
246
247 try:
247 try:
248 form = UserExtraEmailForm(self.request.translate)()
248 form = UserExtraEmailForm(self.request.translate)()
249 data = form.to_python({'email': email})
249 data = form.to_python({'email': email})
250 email = data['email']
250 email = data['email']
251
251
252 UserModel().add_extra_email(c.user.user_id, email)
252 UserModel().add_extra_email(c.user.user_id, email)
253 audit_logger.store_web(
253 audit_logger.store_web(
254 'user.edit.email.add', action_data={
254 'user.edit.email.add', action_data={
255 'data': {'email': email, 'user': 'self'}},
255 'data': {'email': email, 'user': 'self'}},
256 user=self._rhodecode_user,)
256 user=self._rhodecode_user,)
257
257
258 Session().commit()
258 Session().commit()
259 h.flash(_("Added new email address `%s` for user account") % email,
259 h.flash(_("Added new email address `%s` for user account") % email,
260 category='success')
260 category='success')
261 except formencode.Invalid as error:
261 except formencode.Invalid as error:
262 h.flash(h.escape(error.error_dict['email']), category='error')
262 h.flash(h.escape(error.error_dict['email']), category='error')
263 except Exception:
263 except Exception:
264 log.exception("Exception in my_account_emails")
264 log.exception("Exception in my_account_emails")
265 h.flash(_('An error occurred during email saving'),
265 h.flash(_('An error occurred during email saving'),
266 category='error')
266 category='error')
267 return HTTPFound(h.route_path('my_account_emails'))
267 return HTTPFound(h.route_path('my_account_emails'))
268
268
269 @LoginRequired()
269 @LoginRequired()
270 @NotAnonymous()
270 @NotAnonymous()
271 @CSRFRequired()
271 @CSRFRequired()
272 @view_config(
272 @view_config(
273 route_name='my_account_emails_delete', request_method='POST')
273 route_name='my_account_emails_delete', request_method='POST')
274 def my_account_emails_delete(self):
274 def my_account_emails_delete(self):
275 _ = self.request.translate
275 _ = self.request.translate
276 c = self.load_default_context()
276 c = self.load_default_context()
277
277
278 del_email_id = self.request.POST.get('del_email_id')
278 del_email_id = self.request.POST.get('del_email_id')
279 if del_email_id:
279 if del_email_id:
280 email = UserEmailMap.get_or_404(del_email_id).email
280 email = UserEmailMap.get_or_404(del_email_id).email
281 UserModel().delete_extra_email(c.user.user_id, del_email_id)
281 UserModel().delete_extra_email(c.user.user_id, del_email_id)
282 audit_logger.store_web(
282 audit_logger.store_web(
283 'user.edit.email.delete', action_data={
283 'user.edit.email.delete', action_data={
284 'data': {'email': email, 'user': 'self'}},
284 'data': {'email': email, 'user': 'self'}},
285 user=self._rhodecode_user,)
285 user=self._rhodecode_user,)
286 Session().commit()
286 Session().commit()
287 h.flash(_("Email successfully deleted"),
287 h.flash(_("Email successfully deleted"),
288 category='success')
288 category='success')
289 return HTTPFound(h.route_path('my_account_emails'))
289 return HTTPFound(h.route_path('my_account_emails'))
290
290
291 @LoginRequired()
291 @LoginRequired()
292 @NotAnonymous()
292 @NotAnonymous()
293 @CSRFRequired()
293 @CSRFRequired()
294 @view_config(
294 @view_config(
295 route_name='my_account_notifications_test_channelstream',
295 route_name='my_account_notifications_test_channelstream',
296 request_method='POST', renderer='json_ext')
296 request_method='POST', renderer='json_ext')
297 def my_account_notifications_test_channelstream(self):
297 def my_account_notifications_test_channelstream(self):
298 message = 'Test message sent via Channelstream by user: {}, on {}'.format(
298 message = 'Test message sent via Channelstream by user: {}, on {}'.format(
299 self._rhodecode_user.username, datetime.datetime.now())
299 self._rhodecode_user.username, datetime.datetime.now())
300 payload = {
300 payload = {
301 # 'channel': 'broadcast',
301 # 'channel': 'broadcast',
302 'type': 'message',
302 'type': 'message',
303 'timestamp': datetime.datetime.utcnow(),
303 'timestamp': datetime.datetime.utcnow(),
304 'user': 'system',
304 'user': 'system',
305 'pm_users': [self._rhodecode_user.username],
305 'pm_users': [self._rhodecode_user.username],
306 'message': {
306 'message': {
307 'message': message,
307 'message': message,
308 'level': 'info',
308 'level': 'info',
309 'topic': '/notifications'
309 'topic': '/notifications'
310 }
310 }
311 }
311 }
312
312
313 registry = self.request.registry
313 registry = self.request.registry
314 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
314 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
315 channelstream_config = rhodecode_plugins.get('channelstream', {})
315 channelstream_config = rhodecode_plugins.get('channelstream', {})
316
316
317 try:
317 try:
318 channelstream_request(channelstream_config, [payload], '/message')
318 channelstream_request(channelstream_config, [payload], '/message')
319 except ChannelstreamException as e:
319 except ChannelstreamException as e:
320 log.exception('Failed to send channelstream data')
320 log.exception('Failed to send channelstream data')
321 return {"response": 'ERROR: {}'.format(e.__class__.__name__)}
321 return {"response": 'ERROR: {}'.format(e.__class__.__name__)}
322 return {"response": 'Channelstream data sent. '
322 return {"response": 'Channelstream data sent. '
323 'You should see a new live message now.'}
323 'You should see a new live message now.'}
324
324
325 def _load_my_repos_data(self, watched=False):
325 def _load_my_repos_data(self, watched=False):
326 if watched:
326 if watched:
327 admin = False
327 admin = False
328 follows_repos = Session().query(UserFollowing)\
328 follows_repos = Session().query(UserFollowing)\
329 .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
329 .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\
330 .options(joinedload(UserFollowing.follows_repository))\
330 .options(joinedload(UserFollowing.follows_repository))\
331 .all()
331 .all()
332 repo_list = [x.follows_repository for x in follows_repos]
332 repo_list = [x.follows_repository for x in follows_repos]
333 else:
333 else:
334 admin = True
334 admin = True
335 repo_list = Repository.get_all_repos(
335 repo_list = Repository.get_all_repos(
336 user_id=self._rhodecode_user.user_id)
336 user_id=self._rhodecode_user.user_id)
337 repo_list = RepoList(repo_list, perm_set=[
337 repo_list = RepoList(repo_list, perm_set=[
338 'repository.read', 'repository.write', 'repository.admin'])
338 'repository.read', 'repository.write', 'repository.admin'])
339
339
340 repos_data = RepoModel().get_repos_as_dict(
340 repos_data = RepoModel().get_repos_as_dict(
341 repo_list=repo_list, admin=admin)
341 repo_list=repo_list, admin=admin)
342 # json used to render the grid
342 # json used to render the grid
343 return json.dumps(repos_data)
343 return json.dumps(repos_data)
344
344
345 @LoginRequired()
345 @LoginRequired()
346 @NotAnonymous()
346 @NotAnonymous()
347 @view_config(
347 @view_config(
348 route_name='my_account_repos', request_method='GET',
348 route_name='my_account_repos', request_method='GET',
349 renderer='rhodecode:templates/admin/my_account/my_account.mako')
349 renderer='rhodecode:templates/admin/my_account/my_account.mako')
350 def my_account_repos(self):
350 def my_account_repos(self):
351 c = self.load_default_context()
351 c = self.load_default_context()
352 c.active = 'repos'
352 c.active = 'repos'
353
353
354 # json used to render the grid
354 # json used to render the grid
355 c.data = self._load_my_repos_data()
355 c.data = self._load_my_repos_data()
356 return self._get_template_context(c)
356 return self._get_template_context(c)
357
357
358 @LoginRequired()
358 @LoginRequired()
359 @NotAnonymous()
359 @NotAnonymous()
360 @view_config(
360 @view_config(
361 route_name='my_account_watched', request_method='GET',
361 route_name='my_account_watched', request_method='GET',
362 renderer='rhodecode:templates/admin/my_account/my_account.mako')
362 renderer='rhodecode:templates/admin/my_account/my_account.mako')
363 def my_account_watched(self):
363 def my_account_watched(self):
364 c = self.load_default_context()
364 c = self.load_default_context()
365 c.active = 'watched'
365 c.active = 'watched'
366
366
367 # json used to render the grid
367 # json used to render the grid
368 c.data = self._load_my_repos_data(watched=True)
368 c.data = self._load_my_repos_data(watched=True)
369 return self._get_template_context(c)
369 return self._get_template_context(c)
370
370
371 @LoginRequired()
371 @LoginRequired()
372 @NotAnonymous()
372 @NotAnonymous()
373 @view_config(
373 @view_config(
374 route_name='my_account_perms', request_method='GET',
374 route_name='my_account_perms', request_method='GET',
375 renderer='rhodecode:templates/admin/my_account/my_account.mako')
375 renderer='rhodecode:templates/admin/my_account/my_account.mako')
376 def my_account_perms(self):
376 def my_account_perms(self):
377 c = self.load_default_context()
377 c = self.load_default_context()
378 c.active = 'perms'
378 c.active = 'perms'
379
379
380 c.perm_user = c.auth_user
380 c.perm_user = c.auth_user
381 return self._get_template_context(c)
381 return self._get_template_context(c)
382
382
383 @LoginRequired()
383 @LoginRequired()
384 @NotAnonymous()
384 @NotAnonymous()
385 @view_config(
385 @view_config(
386 route_name='my_account_notifications', request_method='GET',
386 route_name='my_account_notifications', request_method='GET',
387 renderer='rhodecode:templates/admin/my_account/my_account.mako')
387 renderer='rhodecode:templates/admin/my_account/my_account.mako')
388 def my_notifications(self):
388 def my_notifications(self):
389 c = self.load_default_context()
389 c = self.load_default_context()
390 c.active = 'notifications'
390 c.active = 'notifications'
391
391
392 return self._get_template_context(c)
392 return self._get_template_context(c)
393
393
394 @LoginRequired()
394 @LoginRequired()
395 @NotAnonymous()
395 @NotAnonymous()
396 @CSRFRequired()
396 @CSRFRequired()
397 @view_config(
397 @view_config(
398 route_name='my_account_notifications_toggle_visibility',
398 route_name='my_account_notifications_toggle_visibility',
399 request_method='POST', renderer='json_ext')
399 request_method='POST', renderer='json_ext')
400 def my_notifications_toggle_visibility(self):
400 def my_notifications_toggle_visibility(self):
401 user = self._rhodecode_db_user
401 user = self._rhodecode_db_user
402 new_status = not user.user_data.get('notification_status', True)
402 new_status = not user.user_data.get('notification_status', True)
403 user.update_userdata(notification_status=new_status)
403 user.update_userdata(notification_status=new_status)
404 Session().commit()
404 Session().commit()
405 return user.user_data['notification_status']
405 return user.user_data['notification_status']
406
406
407 @LoginRequired()
407 @LoginRequired()
408 @NotAnonymous()
408 @NotAnonymous()
409 @view_config(
409 @view_config(
410 route_name='my_account_edit',
410 route_name='my_account_edit',
411 request_method='GET',
411 request_method='GET',
412 renderer='rhodecode:templates/admin/my_account/my_account.mako')
412 renderer='rhodecode:templates/admin/my_account/my_account.mako')
413 def my_account_edit(self):
413 def my_account_edit(self):
414 c = self.load_default_context()
414 c = self.load_default_context()
415 c.active = 'profile_edit'
415 c.active = 'profile_edit'
416
416
417 c.perm_user = c.auth_user
417 c.perm_user = c.auth_user
418 c.extern_type = c.user.extern_type
418 c.extern_type = c.user.extern_type
419 c.extern_name = c.user.extern_name
419 c.extern_name = c.user.extern_name
420
420
421 defaults = c.user.get_dict()
421 defaults = c.user.get_dict()
422
422
423 data = render('rhodecode:templates/admin/my_account/my_account.mako',
423 data = render('rhodecode:templates/admin/my_account/my_account.mako',
424 self._get_template_context(c), self.request)
424 self._get_template_context(c), self.request)
425 html = formencode.htmlfill.render(
425 html = formencode.htmlfill.render(
426 data,
426 data,
427 defaults=defaults,
427 defaults=defaults,
428 encoding="UTF-8",
428 encoding="UTF-8",
429 force_defaults=False
429 force_defaults=False
430 )
430 )
431 return Response(html)
431 return Response(html)
432
432
433 @LoginRequired()
433 @LoginRequired()
434 @NotAnonymous()
434 @NotAnonymous()
435 @CSRFRequired()
435 @CSRFRequired()
436 @view_config(
436 @view_config(
437 route_name='my_account_update',
437 route_name='my_account_update',
438 request_method='POST',
438 request_method='POST',
439 renderer='rhodecode:templates/admin/my_account/my_account.mako')
439 renderer='rhodecode:templates/admin/my_account/my_account.mako')
440 def my_account_update(self):
440 def my_account_update(self):
441 _ = self.request.translate
441 _ = self.request.translate
442 c = self.load_default_context()
442 c = self.load_default_context()
443 c.active = 'profile_edit'
443 c.active = 'profile_edit'
444
444
445 c.perm_user = c.auth_user
445 c.perm_user = c.auth_user
446 c.extern_type = c.user.extern_type
446 c.extern_type = c.user.extern_type
447 c.extern_name = c.user.extern_name
447 c.extern_name = c.user.extern_name
448
448
449 _form = UserForm(self.request.translate, edit=True,
449 _form = UserForm(self.request.translate, edit=True,
450 old_data={'user_id': self._rhodecode_user.user_id,
450 old_data={'user_id': self._rhodecode_user.user_id,
451 'email': self._rhodecode_user.email})()
451 'email': self._rhodecode_user.email})()
452 form_result = {}
452 form_result = {}
453 try:
453 try:
454 post_data = dict(self.request.POST)
454 post_data = dict(self.request.POST)
455 post_data['new_password'] = ''
455 post_data['new_password'] = ''
456 post_data['password_confirmation'] = ''
456 post_data['password_confirmation'] = ''
457 form_result = _form.to_python(post_data)
457 form_result = _form.to_python(post_data)
458 # skip updating those attrs for my account
458 # skip updating those attrs for my account
459 skip_attrs = ['admin', 'active', 'extern_type', 'extern_name',
459 skip_attrs = ['admin', 'active', 'extern_type', 'extern_name',
460 'new_password', 'password_confirmation']
460 'new_password', 'password_confirmation']
461 # TODO: plugin should define if username can be updated
461 # TODO: plugin should define if username can be updated
462 if c.extern_type != "rhodecode":
462 if c.extern_type != "rhodecode":
463 # forbid updating username for external accounts
463 # forbid updating username for external accounts
464 skip_attrs.append('username')
464 skip_attrs.append('username')
465
465
466 UserModel().update_user(
466 UserModel().update_user(
467 self._rhodecode_user.user_id, skip_attrs=skip_attrs,
467 self._rhodecode_user.user_id, skip_attrs=skip_attrs,
468 **form_result)
468 **form_result)
469 h.flash(_('Your account was updated successfully'),
469 h.flash(_('Your account was updated successfully'),
470 category='success')
470 category='success')
471 Session().commit()
471 Session().commit()
472
472
473 except formencode.Invalid as errors:
473 except formencode.Invalid as errors:
474 data = render(
474 data = render(
475 'rhodecode:templates/admin/my_account/my_account.mako',
475 'rhodecode:templates/admin/my_account/my_account.mako',
476 self._get_template_context(c), self.request)
476 self._get_template_context(c), self.request)
477
477
478 html = formencode.htmlfill.render(
478 html = formencode.htmlfill.render(
479 data,
479 data,
480 defaults=errors.value,
480 defaults=errors.value,
481 errors=errors.error_dict or {},
481 errors=errors.error_dict or {},
482 prefix_error=False,
482 prefix_error=False,
483 encoding="UTF-8",
483 encoding="UTF-8",
484 force_defaults=False)
484 force_defaults=False)
485 return Response(html)
485 return Response(html)
486
486
487 except Exception:
487 except Exception:
488 log.exception("Exception updating user")
488 log.exception("Exception updating user")
489 h.flash(_('Error occurred during update of user %s')
489 h.flash(_('Error occurred during update of user %s')
490 % form_result.get('username'), category='error')
490 % form_result.get('username'), category='error')
491 raise HTTPFound(h.route_path('my_account_profile'))
491 raise HTTPFound(h.route_path('my_account_profile'))
492
492
493 raise HTTPFound(h.route_path('my_account_profile'))
493 raise HTTPFound(h.route_path('my_account_profile'))
494
494
495 def _get_pull_requests_list(self, statuses):
495 def _get_pull_requests_list(self, statuses):
496 draw, start, limit = self._extract_chunk(self.request)
496 draw, start, limit = self._extract_chunk(self.request)
497 search_q, order_by, order_dir = self._extract_ordering(self.request)
497 search_q, order_by, order_dir = self._extract_ordering(self.request)
498 _render = self.request.get_partial_renderer(
498 _render = self.request.get_partial_renderer(
499 'rhodecode:templates/data_table/_dt_elements.mako')
499 'rhodecode:templates/data_table/_dt_elements.mako')
500
500
501 pull_requests = PullRequestModel().get_im_participating_in(
501 pull_requests = PullRequestModel().get_im_participating_in(
502 user_id=self._rhodecode_user.user_id,
502 user_id=self._rhodecode_user.user_id,
503 statuses=statuses,
503 statuses=statuses,
504 offset=start, length=limit, order_by=order_by,
504 offset=start, length=limit, order_by=order_by,
505 order_dir=order_dir)
505 order_dir=order_dir)
506
506
507 pull_requests_total_count = PullRequestModel().count_im_participating_in(
507 pull_requests_total_count = PullRequestModel().count_im_participating_in(
508 user_id=self._rhodecode_user.user_id, statuses=statuses)
508 user_id=self._rhodecode_user.user_id, statuses=statuses)
509
509
510 data = []
510 data = []
511 comments_model = CommentsModel()
511 comments_model = CommentsModel()
512 for pr in pull_requests:
512 for pr in pull_requests:
513 repo_id = pr.target_repo_id
513 repo_id = pr.target_repo_id
514 comments = comments_model.get_all_comments(
514 comments = comments_model.get_all_comments(
515 repo_id, pull_request=pr)
515 repo_id, pull_request=pr)
516 owned = pr.user_id == self._rhodecode_user.user_id
516 owned = pr.user_id == self._rhodecode_user.user_id
517
517
518 data.append({
518 data.append({
519 'target_repo': _render('pullrequest_target_repo',
519 'target_repo': _render('pullrequest_target_repo',
520 pr.target_repo.repo_name),
520 pr.target_repo.repo_name),
521 'name': _render('pullrequest_name',
521 'name': _render('pullrequest_name',
522 pr.pull_request_id, pr.target_repo.repo_name,
522 pr.pull_request_id, pr.target_repo.repo_name,
523 short=True),
523 short=True),
524 'name_raw': pr.pull_request_id,
524 'name_raw': pr.pull_request_id,
525 'status': _render('pullrequest_status',
525 'status': _render('pullrequest_status',
526 pr.calculated_review_status()),
526 pr.calculated_review_status()),
527 'title': _render(
527 'title': _render(
528 'pullrequest_title', pr.title, pr.description),
528 'pullrequest_title', pr.title, pr.description),
529 'description': h.escape(pr.description),
529 'description': h.escape(pr.description),
530 'updated_on': _render('pullrequest_updated_on',
530 'updated_on': _render('pullrequest_updated_on',
531 h.datetime_to_time(pr.updated_on)),
531 h.datetime_to_time(pr.updated_on)),
532 'updated_on_raw': h.datetime_to_time(pr.updated_on),
532 'updated_on_raw': h.datetime_to_time(pr.updated_on),
533 'created_on': _render('pullrequest_updated_on',
533 'created_on': _render('pullrequest_updated_on',
534 h.datetime_to_time(pr.created_on)),
534 h.datetime_to_time(pr.created_on)),
535 'created_on_raw': h.datetime_to_time(pr.created_on),
535 'created_on_raw': h.datetime_to_time(pr.created_on),
536 'author': _render('pullrequest_author',
536 'author': _render('pullrequest_author',
537 pr.author.full_contact, ),
537 pr.author.full_contact, ),
538 'author_raw': pr.author.full_name,
538 'author_raw': pr.author.full_name,
539 'comments': _render('pullrequest_comments', len(comments)),
539 'comments': _render('pullrequest_comments', len(comments)),
540 'comments_raw': len(comments),
540 'comments_raw': len(comments),
541 'closed': pr.is_closed(),
541 'closed': pr.is_closed(),
542 'owned': owned
542 'owned': owned
543 })
543 })
544
544
545 # json used to render the grid
545 # json used to render the grid
546 data = ({
546 data = ({
547 'draw': draw,
547 'draw': draw,
548 'data': data,
548 'data': data,
549 'recordsTotal': pull_requests_total_count,
549 'recordsTotal': pull_requests_total_count,
550 'recordsFiltered': pull_requests_total_count,
550 'recordsFiltered': pull_requests_total_count,
551 })
551 })
552 return data
552 return data

    @LoginRequired()
    @NotAnonymous()
    @view_config(
        route_name='my_account_pullrequests',
        request_method='GET',
        renderer='rhodecode:templates/admin/my_account/my_account.mako')
    def my_account_pullrequests(self):
        c = self.load_default_context()
        c.active = 'pullrequests'
        req_get = self.request.GET

        c.closed = str2bool(req_get.get('pr_show_closed'))

        return self._get_template_context(c)

    @LoginRequired()
    @NotAnonymous()
    @view_config(
        route_name='my_account_pullrequests_data',
        request_method='GET', renderer='json_ext')
    def my_account_pullrequests_data(self):
        self.load_default_context()
        req_get = self.request.GET
        closed = str2bool(req_get.get('closed'))

        statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
        if closed:
            statuses += [PullRequest.STATUS_CLOSED]

        data = self._get_pull_requests_list(statuses=statuses)
        return data

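    # Note (illustrative, not part of the original changeset):
    # `my_account_pullrequests` renders the page shell from my_account.mako,
    # while the grid on that page is fed by `my_account_pullrequests_data`,
    # which returns the JSON produced by `_get_pull_requests_list()`; passing
    # `closed=1` widens the status filter to include closed pull requests.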
@@ -1,1064 +1,1068 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os

import mock
import pytest

from rhodecode.apps.repository.views.repo_files import RepoFilesView
from rhodecode.lib import helpers as h
from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.ext_json import json
from rhodecode.lib.vcs import nodes

from rhodecode.lib.vcs.conf import settings
from rhodecode.tests import assert_session_flash
from rhodecode.tests.fixture import Fixture

fixture = Fixture()


def get_node_history(backend_type):
    return {
        'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
        'git': json.loads(fixture.load_resource('git_node_history_response.json')),
        'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
    }[backend_type]


def route_path(name, params=None, **kwargs):
    import urllib

    base_url = {
        'repo_summary': '/{repo_name}',
        'repo_archivefile': '/{repo_name}/archive/{fname}',
        'repo_files_diff': '/{repo_name}/diff/{f_path}',
        'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
        'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
        'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
        'repo_files:default_commit': '/{repo_name}/files',
        'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
        'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
        'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
        'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
        'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
        'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
        'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
        'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
        'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
        'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
        'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
        'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
        'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
        'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
        'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
        'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url
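
# Illustrative usage, not part of the original changeset (the repository name
# below is a made-up example): route_path() simply formats the template
# registered under `name` and appends any query parameters:
#
#   route_path('repo_files', repo_name='vcs_test_hg',
#              commit_id='tip', f_path='vcs/nodes.py')
#   # -> '/vcs_test_hg/files/tip/vcs/nodes.py'
#
#   route_path('repo_file_raw', repo_name='vcs_test_hg', commit_id='tip',
#              f_path='setup.py', params={'annotate': 1})
#   # -> '/vcs_test_hg/raw/tip/setup.py?annotate=1'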


def assert_files_in_response(response, files, params):
    template = (
        'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
    _assert_items_in_response(response, files, template, params)


def assert_dirs_in_response(response, dirs, params):
    template = (
        'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
    _assert_items_in_response(response, dirs, template, params)


def _assert_items_in_response(response, items, template, params):
    for item in items:
        item_params = {'name': item}
        item_params.update(params)
        response.mustcontain(template % item_params)


def assert_timeago_in_response(response, items, params):
    for item in items:
        response.mustcontain(h.age_component(params['date']))


@pytest.mark.usefixtures("app")
class TestFilesViews(object):

    def test_show_files(self, backend):
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'))
        commit = backend.repo.get_commit()

        params = {
            'repo_name': backend.repo_name,
            'commit_id': commit.raw_id,
            'date': commit.date
        }
        assert_dirs_in_response(response, ['docs', 'vcs'], params)
        files = [
            '.gitignore',
            '.hgignore',
            '.hgtags',
            # TODO: missing in Git
            # '.travis.yml',
            'MANIFEST.in',
            'README.rst',
            # TODO: File is missing in svn repository
            # 'run_test_and_report.sh',
            'setup.cfg',
            'setup.py',
            'test_and_report.sh',
            'tox.ini',
        ]
        assert_files_in_response(response, files, params)
        assert_timeago_in_response(response, files, params)

    def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
        repo = backend_hg['subrepos']
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))
        assert_response = response.assert_response()
        assert_response.contains_one_link(
            'absolute-path @ 000000000000', 'http://example.com/absolute-path')

    def test_show_files_links_submodules_with_absolute_url_subpaths(
            self, backend_hg):
        repo = backend_hg['subrepos']
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))
        assert_response = response.assert_response()
        assert_response.contains_one_link(
            'subpaths-path @ 000000000000',
            'http://sub-base.example.com/subpaths-path')

    @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
    def test_files_menu(self, backend):
        new_branch = "temp_branch_name"
        commits = [
            {'message': 'a'},
            {'message': 'b', 'branch': new_branch}
        ]
        backend.create_repo(commits)

        backend.repo.landing_rev = "branch:%s" % new_branch

        # get the response based on tip, not the new commit
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'))

        # make sure the Files menu url points at the new commit, not tip
        landing_rev = backend.repo.landing_rev[1]
        files_url = route_path('repo_files:default_path',
                               repo_name=backend.repo_name,
                               commit_id=landing_rev)

        assert landing_rev != 'tip'
        response.mustcontain(
            '<li class="active"><a class="menulink" href="%s">' % files_url)

    def test_show_files_commit(self, backend):
        commit = backend.repo.get_commit(commit_idx=32)

        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'))

        dirs = ['docs', 'tests']
        files = ['README.rst']
        params = {
            'repo_name': backend.repo_name,
            'commit_id': commit.raw_id,
        }
        assert_dirs_in_response(response, dirs, params)
        assert_files_in_response(response, files, params)

    def test_show_files_different_branch(self, backend):
        branches = dict(
            hg=(150, ['git']),
            # TODO: Git test repository does not contain other branches
            git=(633, ['master']),
            # TODO: Branch support in Subversion
            svn=(150, [])
        )
        idx, branches = branches[backend.alias]
        commit = backend.repo.get_commit(commit_idx=idx)
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'))

        assert_response = response.assert_response()
        for branch in branches:
            assert_response.element_contains('.tags .branchtag', branch)

    def test_show_files_paging(self, backend):
        repo = backend.repo
        indexes = [73, 92, 109, 1, 0]
        idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
                   for rev in indexes]

        for idx in idx_map:
            response = self.app.get(
                route_path('repo_files',
                           repo_name=backend.repo_name,
                           commit_id=idx[1], f_path='/'))

            response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))

    def test_file_source(self, backend):
        commit = backend.repo.get_commit(commit_idx=167)
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'))

        msgbox = """<div class="commit right-content">%s</div>"""
        response.mustcontain(msgbox % (commit.message, ))

        assert_response = response.assert_response()
        if commit.branch:
            assert_response.element_contains(
                '.tags.tags-main .branchtag', commit.branch)
        if commit.tags:
            for tag in commit.tags:
                assert_response.element_contains('.tags.tags-main .tagtag', tag)

    def test_file_source_annotated(self, backend):
        response = self.app.get(
            route_path('repo_files:annotated',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        expected_commits = {
            'hg': 'r356',
            'git': 'r345',
            'svn': 'r208',
        }
        response.mustcontain(expected_commits[backend.alias])

    def test_file_source_authors(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_authors_with_annotation(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_history(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_file_source_history_svn(self, backend_svn, xhr_header):
        simple_repo = backend_svn['svn-simple-layout']
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=simple_repo.repo_name,
                       commit_id='tip', f_path='trunk/example.py'),
            extra_environ=xhr_header)

        expected_data = json.loads(
            fixture.load_resource('svn_node_history_branches.json'))
        assert expected_data == response.json

    def test_file_source_history_with_annotation(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_tree_search_top_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']

    def test_tree_search_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)

    def test_tree_search_at_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api', 'type': 'dir'} in nodes
        assert {'name': 'docs/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs/api'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/docs'),
            status=404)

    def test_nodetree(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)

        assert_response = response.assert_response()

        for attr in ['data-commit-id', 'data-date', 'data-author']:
            elements = assert_response.get_elements('[{}]'.format(attr))
            assert len(elements) > 1

            for element in elements:
                assert element.get(attr)

    def test_nodetree_if_file(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='README.rst'),
            extra_environ=xhr_header)
        assert response.body == ''

    def test_nodetree_wrong_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/dont-exist'),
            extra_environ=xhr_header)

        err = 'error: There is no file nor ' \
              'directory at the given path'
        assert err in response.body

    def test_nodetree_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)


@pytest.mark.usefixtures("app", "autologin_user")
class TestRawFileHandling(object):

    def test_download_file(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_disposition == "attachment; filename=nodes.py"
        assert response.content_type == "text/x-python"

    def test_download_file_wrong_cs(self, backend):
        raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_download_file_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_file_raw(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_type == "text/plain"

    def test_file_raw_binary(self, backend):
        commit = backend.repo.get_commit()
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id,
                       f_path='docs/theme/ADC/static/breadcrumb_background.png'),)

        assert response.content_disposition == 'inline'

    def test_raw_file_wrong_cs(self, backend):
        raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_raw_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_raw_svg_should_not_be_rendered(self, backend):
        backend.create_repo()
        backend.ensure_file("xss.svg")
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='xss.svg'),)
        # If the content type were image/svg+xml, the browser could render
        # HTML and malicious SVG.
        assert response.content_type == "text/plain"
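
    # Illustrative sketch, not part of the original changeset, of the rule the
    # test above pins down; the names below are hypothetical and the real
    # logic lives in the files view code:
    #
    #   UNSAFE_RAW_MIMETYPES = {'image/svg+xml', 'text/html'}
    #
    #   def safe_raw_mimetype(detected_mimetype):
    #       # serving active content as text/plain keeps the browser from
    #       # rendering embedded markup or scripts via the raw endpoint
    #       if detected_mimetype in UNSAFE_RAW_MIMETYPES:
    #           return 'text/plain'
    #       return detected_mimetype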


@pytest.mark.usefixtures("app")
class TestRepositoryArchival(object):

    def test_archival(self, backend):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for archive, info in settings.ARCHIVE_SPECS.items():
            mime_type, arch_ext = info
            short = commit.short_id + arch_ext
            fname = commit.raw_id + arch_ext
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % mime_type),
            ]

            for header in headers:
                assert header in response.headers.items()
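
        # Illustrative note, not part of the original changeset:
        # settings.ARCHIVE_SPECS maps an archive key to a
        # (mime_type, file_extension) pair, which is why the loop above
        # unpacks `mime_type, arch_ext = info`. Hypothetical entries:
        #
        #   ARCHIVE_SPECS = {
        #       'tgz': ('application/x-gzip', '.tar.gz'),
        #       'zip': ('application/zip', '.zip'),
        #   }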

    @pytest.mark.parametrize('arch_ext', [
        'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
    def test_archival_wrong_ext(self, backend, arch_ext):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)

        fname = commit.raw_id + '.' + arch_ext

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain(
            'Unknown archive type for: `{}`'.format(fname))

    @pytest.mark.parametrize('commit_id', [
        '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
    def test_archival_wrong_commit_id(self, backend, commit_id):
        backend.enable_downloads()
        fname = '%s.zip' % commit_id

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain('Unknown commit_id')


@pytest.mark.usefixtures("app")
class TestFilesDiff(object):

    @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
    def test_file_full_diff(self, backend, diff):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
                'fulldiff': '1',
                'diff': diff,
            })

        if diff == 'diff':
            # follow the redirect, since this is the OLD view redirecting to
            # the compare page
            response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_file_binary_diff(self, backend):
        commits = [
            {'message': 'First commit'},
            {'message': 'Commit with binary',
             'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
        ]
        repo = backend.create_repo(commits=commits)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='file.bin'),
            params={
                'diff1': repo.get_commit(commit_idx=0).raw_id,
                'diff2': repo.get_commit(commit_idx=1).raw_id,
                'fulldiff': '1',
                'diff': 'diff',
            })
        # follow the redirect, since this is the OLD view redirecting to the
        # compare page
        response = response.follow()
        response.mustcontain('Expand 1 commit')
        response.mustcontain('1 file changed: 0 inserted, 0 deleted')

        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
            # TODO(marcink): SVN doesn't yet detect binary changes
        else:
            response.mustcontain('new file 100644')
            response.mustcontain('binary diff hidden')

    def test_diff_2way(self, backend):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
            })
        # follow the redirect, since this is the OLD view redirecting to the
        # compare page
        response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_requires_one_commit_id(self, backend, autologin_user):
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README.rst'),
            status=400)
        response.mustcontain(
            'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')

    def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
        repo = vcsbackend.repo
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=repo.name,
                       f_path='does-not-exist-in-any-commit'),
            params={
                'diff1': repo[0].raw_id,
                'diff2': repo[1].raw_id
            })

        response = response.follow()
        response.mustcontain('No files')

    def test_returns_redirect_if_file_not_changed(self, backend):
        commit = backend.repo.get_commit(commit_idx=-1)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit.raw_id,
                'diff2': commit.raw_id,
            })

        response = response.follow()
        response.mustcontain('No files')
        response.mustcontain('No commits in this compare')

    def test_supports_diff_to_different_path_svn(self, backend_svn):
        # TODO: check this case
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id_1 = '24'
        commit_id_2 = '26'

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'tags/v0.2/example.py@' + commit_id_1,
                'diff2': commit_id_2,
            })

        response = response.follow()
        response.mustcontain(
            # diff contains this
            "Will print out a useful message on invocation.")

        # Note: expecting that we indicate to the user what's being compared
        response.mustcontain("trunk/example.py")
        response.mustcontain("tags/v0.2/example.py")

    def test_show_rev_redirects_to_svn_path(self, backend_svn):
        # TODO: check this case
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/files/26/branches/argparse/example.py')

    def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
        # TODO: check this case
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
                'show_rev': 'Show at Revision',
                'annotate': 'true',
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')


@pytest.mark.usefixtures("app", "autologin_user")
class TestModifyFilesWithWebInterface(object):

    def test_add_file_view(self, backend):
        self.app.get(
            route_path('repo_files_add_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/')
        )

    @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
    def test_add_file_into_repo_missing_content(self, backend, csrf_token):
        repo = backend.create_repo()
        filename = 'init.py'
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "",
                'filename': filename,
                'location': "",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(response,
            'Successfully committed new file `{}`'.format(
                os.path.join(filename)))

    def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename')

    def test_add_file_into_repo_errors_and_no_commits(
            self, backend, csrf_token):
        repo = backend.create_repo()
        # Create a file with no filename; it will display an error, but the
        # repo has no commits yet
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename')

        # Not allowed, redirect to the summary
        redirected = response.follow()
        summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)

        # As there are no commits, the summary page is displayed with the
        # error about creating a file with no filename

        assert redirected.request.path == summary_url

    @pytest.mark.parametrize("location, filename", [
        ('/abs', 'foo'),
        ('../rel', 'foo'),
        ('file/../foo', 'foo'),
    ])
    def test_add_file_into_repo_bad_filenames(
            self, location, filename, backend, csrf_token):
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'filename': filename,
                'location': location,
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(
            response,
            'The location specified must be a relative path and must not '
            'contain .. in the path')

    @pytest.mark.parametrize("cnt, location, filename", [
        (1, '', 'foo.txt'),
        (2, 'dir', 'foo.rst'),
        (3, 'rel/dir', 'foo.bar'),
    ])
    def test_add_file_into_repo(self, cnt, location, filename, backend,
                                csrf_token):
        repo = backend.create_repo()
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'filename': filename,
                'location': location,
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(response,
            'Successfully committed new file `{}`'.format(
                os.path.join(location, filename)))

    def test_edit_file_view(self, backend):
        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)
        response.mustcontain("Module holding everything related to vcs nodes.")

    def test_edit_file_view_not_on_branch(self, backend):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response,
            'You can only edit files with commit being a valid branch')

    def test_edit_file_view_commit_changes(self, backend, csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': 'I committed',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'I committed'

    def test_edit_file_view_commit_changes_default_message(self, backend,
910 def test_edit_file_view_commit_changes_default_message(self, backend,
910 csrf_token):
911 csrf_token):
911 repo = backend.create_repo()
912 repo = backend.create_repo()
912 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
913 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
913
914
914 commit_id = (
915 commit_id = (
915 backend.default_branch_name or
916 backend.default_branch_name or
916 backend.repo.scm_instance().commit_ids[-1])
917 backend.repo.scm_instance().commit_ids[-1])
917
918
918 response = self.app.post(
919 response = self.app.post(
919 route_path('repo_files_update_file',
920 route_path('repo_files_update_file',
920 repo_name=repo.repo_name,
921 repo_name=repo.repo_name,
921 commit_id=commit_id,
922 commit_id=commit_id,
922 f_path='vcs/nodes.py'),
923 f_path='vcs/nodes.py'),
923 params={
924 params={
924 'content': "print 'hello world'",
925 'content': "print 'hello world'",
925 'message': '',
926 'message': '',
926 'filename': "vcs/nodes.py",
927 'filename': "vcs/nodes.py",
927 'csrf_token': csrf_token,
928 'csrf_token': csrf_token,
928 },
929 },
929 status=302)
930 status=302)
930 assert_session_flash(
931 assert_session_flash(
931 response, 'Successfully committed changes to file `vcs/nodes.py`')
932 response, 'Successfully committed changes to file `vcs/nodes.py`')
932 tip = repo.get_commit(commit_idx=-1)
933 tip = repo.get_commit(commit_idx=-1)
933 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
934 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
934
935
935 def test_delete_file_view(self, backend):
936 def test_delete_file_view(self, backend):
936 self.app.get(
937 self.app.get(
937 route_path('repo_files_remove_file',
938 route_path('repo_files_remove_file',
938 repo_name=backend.repo_name,
939 repo_name=backend.repo_name,
939 commit_id=backend.default_head_id,
940 commit_id=backend.default_head_id,
940 f_path='vcs/nodes.py'),
941 f_path='vcs/nodes.py'),
941 status=200)
942 status=200)
942
943
943 def test_delete_file_view_not_on_branch(self, backend):
944 def test_delete_file_view_not_on_branch(self, backend):
944 repo = backend.create_repo()
945 repo = backend.create_repo()
945 backend.ensure_file('vcs/nodes.py')
946 backend.ensure_file('vcs/nodes.py')
946
947
947 response = self.app.get(
948 response = self.app.get(
948 route_path('repo_files_remove_file',
949 route_path('repo_files_remove_file',
949 repo_name=repo.repo_name,
950 repo_name=repo.repo_name,
950 commit_id='tip',
951 commit_id='tip',
951 f_path='vcs/nodes.py'),
952 f_path='vcs/nodes.py'),
952 status=302)
953 status=302)
953 assert_session_flash(
954 assert_session_flash(
954 response,
955 response,
955 'You can only delete files with commit being a valid branch')
956 'You can only delete files with commit being a valid branch')
956
957
957 def test_delete_file_view_commit_changes(self, backend, csrf_token):
958 def test_delete_file_view_commit_changes(self, backend, csrf_token):
958 repo = backend.create_repo()
959 repo = backend.create_repo()
959 backend.ensure_file("vcs/nodes.py")
960 backend.ensure_file("vcs/nodes.py")
960
961
961 response = self.app.post(
962 response = self.app.post(
962 route_path('repo_files_delete_file',
963 route_path('repo_files_delete_file',
963 repo_name=repo.repo_name,
964 repo_name=repo.repo_name,
964 commit_id=backend.default_head_id,
965 commit_id=backend.default_head_id,
965 f_path='vcs/nodes.py'),
966 f_path='vcs/nodes.py'),
966 params={
967 params={
967 'message': 'I committed',
968 'message': 'I committed',
968 'csrf_token': csrf_token,
969 'csrf_token': csrf_token,
969 },
970 },
970 status=302)
971 status=302)
971 assert_session_flash(
972 assert_session_flash(
972 response, 'Successfully deleted file `vcs/nodes.py`')
973 response, 'Successfully deleted file `vcs/nodes.py`')
973
974
974
975
975 @pytest.mark.usefixtures("app")
976 @pytest.mark.usefixtures("app")
976 class TestFilesViewOtherCases(object):
977 class TestFilesViewOtherCases(object):
977
978
978 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
979 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
979 self, backend_stub, autologin_regular_user, user_regular,
980 self, backend_stub, autologin_regular_user, user_regular,
980 user_util):
981 user_util):
981
982
982 repo = backend_stub.create_repo()
983 repo = backend_stub.create_repo()
983 user_util.grant_user_permission_to_repo(
984 user_util.grant_user_permission_to_repo(
984 repo, user_regular, 'repository.write')
985 repo, user_regular, 'repository.write')
985 response = self.app.get(
986 response = self.app.get(
986 route_path('repo_files',
987 route_path('repo_files',
987 repo_name=repo.repo_name,
988 repo_name=repo.repo_name,
988 commit_id='tip', f_path='/'))
989 commit_id='tip', f_path='/'))
989
990
990 repo_file_add_url = route_path(
991 repo_file_add_url = route_path(
991 'repo_files_add_file',
992 'repo_files_add_file',
992 repo_name=repo.repo_name,
993 repo_name=repo.repo_name,
993 commit_id=0, f_path='') + '#edit'
994 commit_id=0, f_path='') + '#edit'
994
995
995 assert_session_flash(
996 assert_session_flash(
996 response,
997 response,
997 'There are no files yet. <a class="alert-link" '
998 'There are no files yet. <a class="alert-link" '
998 'href="{}">Click here to add a new file.</a>'
999 'href="{}">Click here to add a new file.</a>'
999 .format(repo_file_add_url))
1000 .format(repo_file_add_url))
1000
1001
1001 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1002 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1002 self, backend_stub, user_util):
1003 self, backend_stub, autologin_regular_user):
1003 repo = backend_stub.create_repo()
1004 repo = backend_stub.create_repo()
1005 # init session for anon user
1006 route_path('repo_summary', repo_name=repo.repo_name)
1007
1004 repo_file_add_url = route_path(
1008 repo_file_add_url = route_path(
1005 'repo_files_add_file',
1009 'repo_files_add_file',
1006 repo_name=repo.repo_name,
1010 repo_name=repo.repo_name,
1007 commit_id=0, f_path='') + '#edit'
1011 commit_id=0, f_path='') + '#edit'
1008
1012
1009 response = self.app.get(
1013 response = self.app.get(
1010 route_path('repo_files',
1014 route_path('repo_files',
1011 repo_name=repo.repo_name,
1015 repo_name=repo.repo_name,
1012 commit_id='tip', f_path='/'))
1016 commit_id='tip', f_path='/'))
1013
1017
1014 assert_session_flash(response, no_=repo_file_add_url)
1018 assert_session_flash(response, no_=repo_file_add_url)
1015
1019
1016 @pytest.mark.parametrize('file_node', [
1020 @pytest.mark.parametrize('file_node', [
1017 'archive/file.zip',
1021 'archive/file.zip',
1018 'diff/my-file.txt',
1022 'diff/my-file.txt',
1019 'render.py',
1023 'render.py',
1020 'render',
1024 'render',
1021 'remove_file',
1025 'remove_file',
1022 'remove_file/to-delete.txt',
1026 'remove_file/to-delete.txt',
1023 ])
1027 ])
1024 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1028 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1025 backend.create_repo()
1029 backend.create_repo()
1026 backend.ensure_file(file_node)
1030 backend.ensure_file(file_node)
1027
1031
1028 self.app.get(
1032 self.app.get(
1029 route_path('repo_files',
1033 route_path('repo_files',
1030 repo_name=backend.repo_name,
1034 repo_name=backend.repo_name,
1031 commit_id='tip', f_path=file_node),
1035 commit_id='tip', f_path=file_node),
1032 status=200)
1036 status=200)
1033
1037
1034
1038
1035 class TestAdjustFilePathForSvn(object):
1039 class TestAdjustFilePathForSvn(object):
1036 """
1040 """
1037 SVN specific adjustments of node history in RepoFilesView.
1041 SVN specific adjustments of node history in RepoFilesView.
1038 """
1042 """
1039
1043
1040 def test_returns_path_relative_to_matched_reference(self):
1044 def test_returns_path_relative_to_matched_reference(self):
1041 repo = self._repo(branches=['trunk'])
1045 repo = self._repo(branches=['trunk'])
1042 self.assert_file_adjustment('trunk/file', 'file', repo)
1046 self.assert_file_adjustment('trunk/file', 'file', repo)
1043
1047
1044 def test_does_not_modify_file_if_no_reference_matches(self):
1048 def test_does_not_modify_file_if_no_reference_matches(self):
1045 repo = self._repo(branches=['trunk'])
1049 repo = self._repo(branches=['trunk'])
1046 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1050 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1047
1051
1048 def test_does_not_adjust_partial_directory_names(self):
1052 def test_does_not_adjust_partial_directory_names(self):
1049 repo = self._repo(branches=['trun'])
1053 repo = self._repo(branches=['trun'])
1050 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1054 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1051
1055
1052 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1056 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1053 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1057 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1054 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1058 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1055
1059
1056 def assert_file_adjustment(self, f_path, expected, repo):
1060 def assert_file_adjustment(self, f_path, expected, repo):
1057 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1061 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1058 assert result == expected
1062 assert result == expected
1059
1063
1060 def _repo(self, branches=None):
1064 def _repo(self, branches=None):
1061 repo = mock.Mock()
1065 repo = mock.Mock()
1062 repo.branches = OrderedDict((name, '0') for name in branches or [])
1066 repo.branches = OrderedDict((name, '0') for name in branches or [])
1063 repo.tags = {}
1067 repo.tags = {}
1064 return repo
1068 return repo
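
The TestAdjustFilePathForSvn cases above pin down the expected behaviour of RepoFilesView.adjust_file_path_for_svn: strip the longest matching branch or tag name from the front of the path, match whole directory components only, and leave the path unchanged when no reference matches. A minimal sketch consistent with those assertions (an illustration of the technique only, not the actual RhodeCode implementation) could look like this:

def adjust_file_path_for_svn(f_path, repo):
    # Candidate reference names come from branches and tags; try the
    # longest names first so that 'trunk/new' wins over plain 'trunk'.
    references = list(repo.branches) + list(repo.tags)
    references.sort(key=len, reverse=True)
    for ref in references:
        if f_path == ref:
            # The path points at the reference root itself.
            return ''
        # Match only whole directory components, so a branch named 'trun'
        # never matches 'trunk/file'.
        if f_path.startswith(ref + '/'):
            return f_path[len(ref) + 1:]
    # No reference matched; keep the path as-is.
    return f_path
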
@@ -1,258 +1,259 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25
25
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.renderers import render
28 from pyramid.renderers import render
29 from pyramid.response import Response
29 from pyramid.response import Response
30
30
31 from rhodecode.apps._base import RepoAppView, DataGridAppView
31 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.lib.auth import (
32 from rhodecode.lib.auth import (
33 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
33 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
34 HasRepoPermissionAny, HasPermissionAnyDecorator, CSRFRequired)
34 HasRepoPermissionAny, HasPermissionAnyDecorator, CSRFRequired)
35 import rhodecode.lib.helpers as h
35 import rhodecode.lib.helpers as h
36 from rhodecode.model.db import coalesce, or_, Repository, RepoGroup
36 from rhodecode.model.db import coalesce, or_, Repository, RepoGroup
37 from rhodecode.model.repo import RepoModel
37 from rhodecode.model.repo import RepoModel
38 from rhodecode.model.forms import RepoForkForm
38 from rhodecode.model.forms import RepoForkForm
39 from rhodecode.model.scm import ScmModel, RepoGroupList
39 from rhodecode.model.scm import ScmModel, RepoGroupList
40 from rhodecode.lib.utils2 import safe_int, safe_unicode
40 from rhodecode.lib.utils2 import safe_int, safe_unicode
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 class RepoForksView(RepoAppView, DataGridAppView):
45 class RepoForksView(RepoAppView, DataGridAppView):
46
46
47 def load_default_context(self):
47 def load_default_context(self):
48 c = self._get_local_tmpl_context(include_app_defaults=True)
48 c = self._get_local_tmpl_context(include_app_defaults=True)
49 c.rhodecode_repo = self.rhodecode_vcs_repo
49 c.rhodecode_repo = self.rhodecode_vcs_repo
50
50
51 acl_groups = RepoGroupList(
51 acl_groups = RepoGroupList(
52 RepoGroup.query().all(),
52 RepoGroup.query().all(),
53 perm_set=['group.write', 'group.admin'])
53 perm_set=['group.write', 'group.admin'])
54 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
54 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
55 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
55 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
56 choices, c.landing_revs = ScmModel().get_repo_landing_revs()
56 choices, c.landing_revs = ScmModel().get_repo_landing_revs(
57 self.request.translate)
57 c.landing_revs_choices = choices
58 c.landing_revs_choices = choices
58 c.personal_repo_group = c.rhodecode_user.personal_repo_group
59 c.personal_repo_group = c.rhodecode_user.personal_repo_group
59
60
60
61 return c
61 return c
62
62
63 @LoginRequired()
63 @LoginRequired()
64 @HasRepoPermissionAnyDecorator(
64 @HasRepoPermissionAnyDecorator(
65 'repository.read', 'repository.write', 'repository.admin')
65 'repository.read', 'repository.write', 'repository.admin')
66 @view_config(
66 @view_config(
67 route_name='repo_forks_show_all', request_method='GET',
67 route_name='repo_forks_show_all', request_method='GET',
68 renderer='rhodecode:templates/forks/forks.mako')
68 renderer='rhodecode:templates/forks/forks.mako')
69 def repo_forks_show_all(self):
69 def repo_forks_show_all(self):
70 c = self.load_default_context()
70 c = self.load_default_context()
71 return self._get_template_context(c)
71 return self._get_template_context(c)
72
72
73 @LoginRequired()
73 @LoginRequired()
74 @HasRepoPermissionAnyDecorator(
74 @HasRepoPermissionAnyDecorator(
75 'repository.read', 'repository.write', 'repository.admin')
75 'repository.read', 'repository.write', 'repository.admin')
76 @view_config(
76 @view_config(
77 route_name='repo_forks_data', request_method='GET',
77 route_name='repo_forks_data', request_method='GET',
78 renderer='json_ext', xhr=True)
78 renderer='json_ext', xhr=True)
79 def repo_forks_data(self):
79 def repo_forks_data(self):
80 _ = self.request.translate
80 _ = self.request.translate
81 self.load_default_context()
81 column_map = {
82 column_map = {
82 'fork_name': 'repo_name',
83 'fork_name': 'repo_name',
83 'fork_date': 'created_on',
84 'fork_date': 'created_on',
84 'last_activity': 'updated_on'
85 'last_activity': 'updated_on'
85 }
86 }
86 draw, start, limit = self._extract_chunk(self.request)
87 draw, start, limit = self._extract_chunk(self.request)
87 search_q, order_by, order_dir = self._extract_ordering(
88 search_q, order_by, order_dir = self._extract_ordering(
88 self.request, column_map=column_map)
89 self.request, column_map=column_map)
89
90
90 acl_check = HasRepoPermissionAny(
91 acl_check = HasRepoPermissionAny(
91 'repository.read', 'repository.write', 'repository.admin')
92 'repository.read', 'repository.write', 'repository.admin')
92 repo_id = self.db_repo.repo_id
93 repo_id = self.db_repo.repo_id
93 allowed_ids = [-1]
94 allowed_ids = [-1]
94 for f in Repository.query().filter(Repository.fork_id == repo_id):
95 for f in Repository.query().filter(Repository.fork_id == repo_id):
95 if acl_check(f.repo_name, 'get forks check'):
96 if acl_check(f.repo_name, 'get forks check'):
96 allowed_ids.append(f.repo_id)
97 allowed_ids.append(f.repo_id)
97
98
98 forks_data_total_count = Repository.query()\
99 forks_data_total_count = Repository.query()\
99 .filter(Repository.fork_id == repo_id)\
100 .filter(Repository.fork_id == repo_id)\
100 .filter(Repository.repo_id.in_(allowed_ids))\
101 .filter(Repository.repo_id.in_(allowed_ids))\
101 .count()
102 .count()
102
103
103 # generate json data
104 # generate json data
104 base_q = Repository.query()\
105 base_q = Repository.query()\
105 .filter(Repository.fork_id == repo_id)\
106 .filter(Repository.fork_id == repo_id)\
106 .filter(Repository.repo_id.in_(allowed_ids))\
107 .filter(Repository.repo_id.in_(allowed_ids))\
107
108
108 if search_q:
109 if search_q:
109 like_expression = u'%{}%'.format(safe_unicode(search_q))
110 like_expression = u'%{}%'.format(safe_unicode(search_q))
110 base_q = base_q.filter(or_(
111 base_q = base_q.filter(or_(
111 Repository.repo_name.ilike(like_expression),
112 Repository.repo_name.ilike(like_expression),
112 Repository.description.ilike(like_expression),
113 Repository.description.ilike(like_expression),
113 ))
114 ))
114
115
115 forks_data_total_filtered_count = base_q.count()
116 forks_data_total_filtered_count = base_q.count()
116
117
117 sort_col = getattr(Repository, order_by, None)
118 sort_col = getattr(Repository, order_by, None)
118 if sort_col:
119 if sort_col:
119 if order_dir == 'asc':
120 if order_dir == 'asc':
120 # handle null values properly to order by NULL last
121 # handle null values properly to order by NULL last
121 if order_by in ['last_activity']:
122 if order_by in ['last_activity']:
122 sort_col = coalesce(sort_col, datetime.date.max)
123 sort_col = coalesce(sort_col, datetime.date.max)
123 sort_col = sort_col.asc()
124 sort_col = sort_col.asc()
124 else:
125 else:
125 # handle null values properly to order by NULL last
126 # handle null values properly to order by NULL last
126 if order_by in ['last_activity']:
127 if order_by in ['last_activity']:
127 sort_col = coalesce(sort_col, datetime.date.min)
128 sort_col = coalesce(sort_col, datetime.date.min)
128 sort_col = sort_col.desc()
129 sort_col = sort_col.desc()
129
130
130 base_q = base_q.order_by(sort_col)
131 base_q = base_q.order_by(sort_col)
131 base_q = base_q.offset(start).limit(limit)
132 base_q = base_q.offset(start).limit(limit)
132
133
133 fork_list = base_q.all()
134 fork_list = base_q.all()
134
135
135 def fork_actions(fork):
136 def fork_actions(fork):
136 url_link = h.route_path(
137 url_link = h.route_path(
137 'repo_compare',
138 'repo_compare',
138 repo_name=fork.repo_name,
139 repo_name=fork.repo_name,
139 source_ref_type=self.db_repo.landing_rev[0],
140 source_ref_type=self.db_repo.landing_rev[0],
140 source_ref=self.db_repo.landing_rev[1],
141 source_ref=self.db_repo.landing_rev[1],
141 target_ref_type=self.db_repo.landing_rev[0],
142 target_ref_type=self.db_repo.landing_rev[0],
142 target_ref=self.db_repo.landing_rev[1],
143 target_ref=self.db_repo.landing_rev[1],
143 _query=dict(merge=1, target_repo=fork.repo_name))
144 _query=dict(merge=1, target_repo=fork.repo_name))
144 return h.link_to(_('Compare fork'), url_link, class_='btn-link')
145 return h.link_to(_('Compare fork'), url_link, class_='btn-link')
145
146
146 def fork_name(fork):
147 def fork_name(fork):
147 return h.link_to(fork.repo_name,
148 return h.link_to(fork.repo_name,
148 h.route_path('repo_summary', repo_name=fork.repo_name))
149 h.route_path('repo_summary', repo_name=fork.repo_name))
149
150
150 forks_data = []
151 forks_data = []
151 for fork in fork_list:
152 for fork in fork_list:
152 forks_data.append({
153 forks_data.append({
153 "username": h.gravatar_with_user(self.request, fork.user.username),
154 "username": h.gravatar_with_user(self.request, fork.user.username),
154 "fork_name": fork_name(fork),
155 "fork_name": fork_name(fork),
155 "description": fork.description,
156 "description": fork.description,
156 "fork_date": h.age_component(fork.created_on, time_is_local=True),
157 "fork_date": h.age_component(fork.created_on, time_is_local=True),
157 "last_activity": h.format_date(fork.updated_on),
158 "last_activity": h.format_date(fork.updated_on),
158 "action": fork_actions(fork),
159 "action": fork_actions(fork),
159 })
160 })
160
161
161 data = ({
162 data = ({
162 'draw': draw,
163 'draw': draw,
163 'data': forks_data,
164 'data': forks_data,
164 'recordsTotal': forks_data_total_count,
165 'recordsTotal': forks_data_total_count,
165 'recordsFiltered': forks_data_total_filtered_count,
166 'recordsFiltered': forks_data_total_filtered_count,
166 })
167 })
167
168
168 return data
169 return data
169
170
170 @LoginRequired()
171 @LoginRequired()
171 @NotAnonymous()
172 @NotAnonymous()
172 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
173 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
173 @HasRepoPermissionAnyDecorator(
174 @HasRepoPermissionAnyDecorator(
174 'repository.read', 'repository.write', 'repository.admin')
175 'repository.read', 'repository.write', 'repository.admin')
175 @view_config(
176 @view_config(
176 route_name='repo_fork_new', request_method='GET',
177 route_name='repo_fork_new', request_method='GET',
177 renderer='rhodecode:templates/forks/forks.mako')
178 renderer='rhodecode:templates/forks/forks.mako')
178 def repo_fork_new(self):
179 def repo_fork_new(self):
179 c = self.load_default_context()
180 c = self.load_default_context()
180
181
181 defaults = RepoModel()._get_defaults(self.db_repo_name)
182 defaults = RepoModel()._get_defaults(self.db_repo_name)
182 # alter the description to indicate a fork
183 # alter the description to indicate a fork
183 defaults['description'] = (
184 defaults['description'] = (
184 'fork of repository: %s \n%s' % (
185 'fork of repository: %s \n%s' % (
185 defaults['repo_name'], defaults['description']))
186 defaults['repo_name'], defaults['description']))
186 # add suffix to fork
187 # add suffix to fork
187 defaults['repo_name'] = '%s-fork' % defaults['repo_name']
188 defaults['repo_name'] = '%s-fork' % defaults['repo_name']
188
189
189 data = render('rhodecode:templates/forks/fork.mako',
190 data = render('rhodecode:templates/forks/fork.mako',
190 self._get_template_context(c), self.request)
191 self._get_template_context(c), self.request)
191 html = formencode.htmlfill.render(
192 html = formencode.htmlfill.render(
192 data,
193 data,
193 defaults=defaults,
194 defaults=defaults,
194 encoding="UTF-8",
195 encoding="UTF-8",
195 force_defaults=False
196 force_defaults=False
196 )
197 )
197 return Response(html)
198 return Response(html)
198
199
199 @LoginRequired()
200 @LoginRequired()
200 @NotAnonymous()
201 @NotAnonymous()
201 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
202 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
202 @HasRepoPermissionAnyDecorator(
203 @HasRepoPermissionAnyDecorator(
203 'repository.read', 'repository.write', 'repository.admin')
204 'repository.read', 'repository.write', 'repository.admin')
204 @CSRFRequired()
205 @CSRFRequired()
205 @view_config(
206 @view_config(
206 route_name='repo_fork_create', request_method='POST',
207 route_name='repo_fork_create', request_method='POST',
207 renderer='rhodecode:templates/forks/fork.mako')
208 renderer='rhodecode:templates/forks/fork.mako')
208 def repo_fork_create(self):
209 def repo_fork_create(self):
209 _ = self.request.translate
210 _ = self.request.translate
210 c = self.load_default_context()
211 c = self.load_default_context()
211
212
212 _form = RepoForkForm(self.request.translate, old_data={'repo_type': self.db_repo.repo_type},
213 _form = RepoForkForm(self.request.translate, old_data={'repo_type': self.db_repo.repo_type},
213 repo_groups=c.repo_groups_choices,
214 repo_groups=c.repo_groups_choices,
214 landing_revs=c.landing_revs_choices)()
215 landing_revs=c.landing_revs_choices)()
215 post_data = dict(self.request.POST)
216 post_data = dict(self.request.POST)
216
217
217 # forbid injecting another repo by forging the request
218 # forbid injecting another repo by forging the request
218 post_data['fork_parent_id'] = self.db_repo.repo_id
219 post_data['fork_parent_id'] = self.db_repo.repo_id
219
220
220 form_result = {}
221 form_result = {}
221 task_id = None
222 task_id = None
222 try:
223 try:
223 form_result = _form.to_python(post_data)
224 form_result = _form.to_python(post_data)
224 # fork creation is sometimes done async on celery; db transaction
225 # fork creation is sometimes done async on celery; db transaction
225 # management is handled there.
226 # management is handled there.
226 task = RepoModel().create_fork(
227 task = RepoModel().create_fork(
227 form_result, c.rhodecode_user.user_id)
228 form_result, c.rhodecode_user.user_id)
228 from celery.result import BaseAsyncResult
229 from celery.result import BaseAsyncResult
229 if isinstance(task, BaseAsyncResult):
230 if isinstance(task, BaseAsyncResult):
230 task_id = task.task_id
231 task_id = task.task_id
231 except formencode.Invalid as errors:
232 except formencode.Invalid as errors:
232 c.rhodecode_db_repo = self.db_repo
233 c.rhodecode_db_repo = self.db_repo
233
234
234 data = render('rhodecode:templates/forks/fork.mako',
235 data = render('rhodecode:templates/forks/fork.mako',
235 self._get_template_context(c), self.request)
236 self._get_template_context(c), self.request)
236 html = formencode.htmlfill.render(
237 html = formencode.htmlfill.render(
237 data,
238 data,
238 defaults=errors.value,
239 defaults=errors.value,
239 errors=errors.error_dict or {},
240 errors=errors.error_dict or {},
240 prefix_error=False,
241 prefix_error=False,
241 encoding="UTF-8",
242 encoding="UTF-8",
242 force_defaults=False
243 force_defaults=False
243 )
244 )
244 return Response(html)
245 return Response(html)
245 except Exception:
246 except Exception:
246 log.exception(
247 log.exception(
247 u'Exception while trying to fork the repository %s',
248 u'Exception while trying to fork the repository %s',
248 self.db_repo_name)
249 self.db_repo_name)
249 msg = (
250 msg = (
250 _('An error occurred during repository forking %s') % (
251 _('An error occurred during repository forking %s') % (
251 self.db_repo_name, ))
252 self.db_repo_name, ))
252 h.flash(msg, category='error')
253 h.flash(msg, category='error')
253
254
254 repo_name = form_result.get('repo_name_full', self.db_repo_name)
255 repo_name = form_result.get('repo_name_full', self.db_repo_name)
255 raise HTTPFound(
256 raise HTTPFound(
256 h.route_path('repo_creating',
257 h.route_path('repo_creating',
257 repo_name=repo_name,
258 repo_name=repo_name,
258 _query=dict(task_id=task_id)))
259 _query=dict(task_id=task_id)))
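
The NULL handling in repo_forks_data above works by coalescing the nullable last-activity column to an extreme date, so forks that never saw any activity sort last in either direction. A standalone sketch of the same ordering pattern (illustrative only; the Repo model, the in-memory SQLite engine, and the SQLAlchemy 1.4+ imports are assumptions, not part of RhodeCode):

import datetime

from sqlalchemy import Column, DateTime, Integer, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Repo(Base):
    __tablename__ = 'repos'
    id = Column(Integer, primary_key=True)
    updated_on = Column(DateTime, nullable=True)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([
        Repo(updated_on=None),  # fork that never saw any activity
        Repo(updated_on=datetime.datetime(2017, 1, 1)),
        Repo(updated_on=datetime.datetime(2017, 6, 1)),
    ])
    # ascending order: NULL is coalesced to datetime.max, so it sorts last
    sort_col = func.coalesce(Repo.updated_on, datetime.datetime.max).asc()
    oldest_first = session.query(Repo).order_by(sort_col).all()
    # descending order: NULL is coalesced to datetime.min, so it is again last
    sort_col = func.coalesce(Repo.updated_on, datetime.datetime.min).desc()
    newest_first = session.query(Repo).order_by(sort_col).all()
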
@@ -1,1238 +1,1244 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34
34
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 NotAnonymous, CSRFRequired)
40 NotAnonymous, CSRFRequired)
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, NodeDoesNotExistError, EmptyRepositoryError)
44 RepositoryRequirementError, NodeDoesNotExistError, EmptyRepositoryError)
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 ChangesetComment, ChangesetStatus, Repository)
48 ChangesetComment, ChangesetStatus, Repository)
49 from rhodecode.model.forms import PullRequestForm
49 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.meta import Session
50 from rhodecode.model.meta import Session
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58
58
59 def load_default_context(self):
59 def load_default_context(self):
60 c = self._get_local_tmpl_context(include_app_defaults=True)
60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63
63
64 return c
64 return c
65
65
66 def _get_pull_requests_list(
66 def _get_pull_requests_list(
67 self, repo_name, source, filter_type, opened_by, statuses):
67 self, repo_name, source, filter_type, opened_by, statuses):
68
68
69 draw, start, limit = self._extract_chunk(self.request)
69 draw, start, limit = self._extract_chunk(self.request)
70 search_q, order_by, order_dir = self._extract_ordering(self.request)
70 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 _render = self.request.get_partial_renderer(
71 _render = self.request.get_partial_renderer(
72 'rhodecode:templates/data_table/_dt_elements.mako')
72 'rhodecode:templates/data_table/_dt_elements.mako')
73
73
74 # pagination
74 # pagination
75
75
76 if filter_type == 'awaiting_review':
76 if filter_type == 'awaiting_review':
77 pull_requests = PullRequestModel().get_awaiting_review(
77 pull_requests = PullRequestModel().get_awaiting_review(
78 repo_name, source=source, opened_by=opened_by,
78 repo_name, source=source, opened_by=opened_by,
79 statuses=statuses, offset=start, length=limit,
79 statuses=statuses, offset=start, length=limit,
80 order_by=order_by, order_dir=order_dir)
80 order_by=order_by, order_dir=order_dir)
81 pull_requests_total_count = PullRequestModel().count_awaiting_review(
81 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 repo_name, source=source, statuses=statuses,
82 repo_name, source=source, statuses=statuses,
83 opened_by=opened_by)
83 opened_by=opened_by)
84 elif filter_type == 'awaiting_my_review':
84 elif filter_type == 'awaiting_my_review':
85 pull_requests = PullRequestModel().get_awaiting_my_review(
85 pull_requests = PullRequestModel().get_awaiting_my_review(
86 repo_name, source=source, opened_by=opened_by,
86 repo_name, source=source, opened_by=opened_by,
87 user_id=self._rhodecode_user.user_id, statuses=statuses,
87 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 offset=start, length=limit, order_by=order_by,
88 offset=start, length=limit, order_by=order_by,
89 order_dir=order_dir)
89 order_dir=order_dir)
90 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
90 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 repo_name, source=source, user_id=self._rhodecode_user.user_id,
91 repo_name, source=source, user_id=self._rhodecode_user.user_id,
92 statuses=statuses, opened_by=opened_by)
92 statuses=statuses, opened_by=opened_by)
93 else:
93 else:
94 pull_requests = PullRequestModel().get_all(
94 pull_requests = PullRequestModel().get_all(
95 repo_name, source=source, opened_by=opened_by,
95 repo_name, source=source, opened_by=opened_by,
96 statuses=statuses, offset=start, length=limit,
96 statuses=statuses, offset=start, length=limit,
97 order_by=order_by, order_dir=order_dir)
97 order_by=order_by, order_dir=order_dir)
98 pull_requests_total_count = PullRequestModel().count_all(
98 pull_requests_total_count = PullRequestModel().count_all(
99 repo_name, source=source, statuses=statuses,
99 repo_name, source=source, statuses=statuses,
100 opened_by=opened_by)
100 opened_by=opened_by)
101
101
102 data = []
102 data = []
103 comments_model = CommentsModel()
103 comments_model = CommentsModel()
104 for pr in pull_requests:
104 for pr in pull_requests:
105 comments = comments_model.get_all_comments(
105 comments = comments_model.get_all_comments(
106 self.db_repo.repo_id, pull_request=pr)
106 self.db_repo.repo_id, pull_request=pr)
107
107
108 data.append({
108 data.append({
109 'name': _render('pullrequest_name',
109 'name': _render('pullrequest_name',
110 pr.pull_request_id, pr.target_repo.repo_name),
110 pr.pull_request_id, pr.target_repo.repo_name),
111 'name_raw': pr.pull_request_id,
111 'name_raw': pr.pull_request_id,
112 'status': _render('pullrequest_status',
112 'status': _render('pullrequest_status',
113 pr.calculated_review_status()),
113 pr.calculated_review_status()),
114 'title': _render(
114 'title': _render(
115 'pullrequest_title', pr.title, pr.description),
115 'pullrequest_title', pr.title, pr.description),
116 'description': h.escape(pr.description),
116 'description': h.escape(pr.description),
117 'updated_on': _render('pullrequest_updated_on',
117 'updated_on': _render('pullrequest_updated_on',
118 h.datetime_to_time(pr.updated_on)),
118 h.datetime_to_time(pr.updated_on)),
119 'updated_on_raw': h.datetime_to_time(pr.updated_on),
119 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 'created_on': _render('pullrequest_updated_on',
120 'created_on': _render('pullrequest_updated_on',
121 h.datetime_to_time(pr.created_on)),
121 h.datetime_to_time(pr.created_on)),
122 'created_on_raw': h.datetime_to_time(pr.created_on),
122 'created_on_raw': h.datetime_to_time(pr.created_on),
123 'author': _render('pullrequest_author',
123 'author': _render('pullrequest_author',
124 pr.author.full_contact, ),
124 pr.author.full_contact, ),
125 'author_raw': pr.author.full_name,
125 'author_raw': pr.author.full_name,
126 'comments': _render('pullrequest_comments', len(comments)),
126 'comments': _render('pullrequest_comments', len(comments)),
127 'comments_raw': len(comments),
127 'comments_raw': len(comments),
128 'closed': pr.is_closed(),
128 'closed': pr.is_closed(),
129 })
129 })
130
130
131 data = ({
131 data = ({
132 'draw': draw,
132 'draw': draw,
133 'data': data,
133 'data': data,
134 'recordsTotal': pull_requests_total_count,
134 'recordsTotal': pull_requests_total_count,
135 'recordsFiltered': pull_requests_total_count,
135 'recordsFiltered': pull_requests_total_count,
136 })
136 })
137 return data
137 return data
138
138
139 @LoginRequired()
139 @LoginRequired()
140 @HasRepoPermissionAnyDecorator(
140 @HasRepoPermissionAnyDecorator(
141 'repository.read', 'repository.write', 'repository.admin')
141 'repository.read', 'repository.write', 'repository.admin')
142 @view_config(
142 @view_config(
143 route_name='pullrequest_show_all', request_method='GET',
143 route_name='pullrequest_show_all', request_method='GET',
144 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
144 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
145 def pull_request_list(self):
145 def pull_request_list(self):
146 c = self.load_default_context()
146 c = self.load_default_context()
147
147
148 req_get = self.request.GET
148 req_get = self.request.GET
149 c.source = str2bool(req_get.get('source'))
149 c.source = str2bool(req_get.get('source'))
150 c.closed = str2bool(req_get.get('closed'))
150 c.closed = str2bool(req_get.get('closed'))
151 c.my = str2bool(req_get.get('my'))
151 c.my = str2bool(req_get.get('my'))
152 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
152 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
153 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
153 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
154
154
155 c.active = 'open'
155 c.active = 'open'
156 if c.my:
156 if c.my:
157 c.active = 'my'
157 c.active = 'my'
158 if c.closed:
158 if c.closed:
159 c.active = 'closed'
159 c.active = 'closed'
160 if c.awaiting_review and not c.source:
160 if c.awaiting_review and not c.source:
161 c.active = 'awaiting'
161 c.active = 'awaiting'
162 if c.source and not c.awaiting_review:
162 if c.source and not c.awaiting_review:
163 c.active = 'source'
163 c.active = 'source'
164 if c.awaiting_my_review:
164 if c.awaiting_my_review:
165 c.active = 'awaiting_my'
165 c.active = 'awaiting_my'
166
166
167 return self._get_template_context(c)
167 return self._get_template_context(c)
168
168
169 @LoginRequired()
169 @LoginRequired()
170 @HasRepoPermissionAnyDecorator(
170 @HasRepoPermissionAnyDecorator(
171 'repository.read', 'repository.write', 'repository.admin')
171 'repository.read', 'repository.write', 'repository.admin')
172 @view_config(
172 @view_config(
173 route_name='pullrequest_show_all_data', request_method='GET',
173 route_name='pullrequest_show_all_data', request_method='GET',
174 renderer='json_ext', xhr=True)
174 renderer='json_ext', xhr=True)
175 def pull_request_list_data(self):
175 def pull_request_list_data(self):
176 self.load_default_context()
176
177
177 # additional filters
178 # additional filters
178 req_get = self.request.GET
179 req_get = self.request.GET
179 source = str2bool(req_get.get('source'))
180 source = str2bool(req_get.get('source'))
180 closed = str2bool(req_get.get('closed'))
181 closed = str2bool(req_get.get('closed'))
181 my = str2bool(req_get.get('my'))
182 my = str2bool(req_get.get('my'))
182 awaiting_review = str2bool(req_get.get('awaiting_review'))
183 awaiting_review = str2bool(req_get.get('awaiting_review'))
183 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
184 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
184
185
185 filter_type = 'awaiting_review' if awaiting_review \
186 filter_type = 'awaiting_review' if awaiting_review \
186 else 'awaiting_my_review' if awaiting_my_review \
187 else 'awaiting_my_review' if awaiting_my_review \
187 else None
188 else None
188
189
189 opened_by = None
190 opened_by = None
190 if my:
191 if my:
191 opened_by = [self._rhodecode_user.user_id]
192 opened_by = [self._rhodecode_user.user_id]
192
193
193 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
194 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
194 if closed:
195 if closed:
195 statuses = [PullRequest.STATUS_CLOSED]
196 statuses = [PullRequest.STATUS_CLOSED]
196
197
197 data = self._get_pull_requests_list(
198 data = self._get_pull_requests_list(
198 repo_name=self.db_repo_name, source=source,
199 repo_name=self.db_repo_name, source=source,
199 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
200 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
200
201
201 return data
202 return data
202
203
203 def _get_pr_version(self, pull_request_id, version=None):
204 def _get_pr_version(self, pull_request_id, version=None):
204 at_version = None
205 at_version = None
205
206
206 if version and version == 'latest':
207 if version and version == 'latest':
207 pull_request_ver = PullRequest.get(pull_request_id)
208 pull_request_ver = PullRequest.get(pull_request_id)
208 pull_request_obj = pull_request_ver
209 pull_request_obj = pull_request_ver
209 _org_pull_request_obj = pull_request_obj
210 _org_pull_request_obj = pull_request_obj
210 at_version = 'latest'
211 at_version = 'latest'
211 elif version:
212 elif version:
212 pull_request_ver = PullRequestVersion.get_or_404(version)
213 pull_request_ver = PullRequestVersion.get_or_404(version)
213 pull_request_obj = pull_request_ver
214 pull_request_obj = pull_request_ver
214 _org_pull_request_obj = pull_request_ver.pull_request
215 _org_pull_request_obj = pull_request_ver.pull_request
215 at_version = pull_request_ver.pull_request_version_id
216 at_version = pull_request_ver.pull_request_version_id
216 else:
217 else:
217 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
218 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
218 pull_request_id)
219 pull_request_id)
219
220
220 pull_request_display_obj = PullRequest.get_pr_display_object(
221 pull_request_display_obj = PullRequest.get_pr_display_object(
221 pull_request_obj, _org_pull_request_obj)
222 pull_request_obj, _org_pull_request_obj)
222
223
223 return _org_pull_request_obj, pull_request_obj, \
224 return _org_pull_request_obj, pull_request_obj, \
224 pull_request_display_obj, at_version
225 pull_request_display_obj, at_version
225
226
226 def _get_diffset(self, source_repo_name, source_repo,
227 def _get_diffset(self, source_repo_name, source_repo,
227 source_ref_id, target_ref_id,
228 source_ref_id, target_ref_id,
228 target_commit, source_commit, diff_limit, fulldiff,
229 target_commit, source_commit, diff_limit, fulldiff,
229 file_limit, display_inline_comments):
230 file_limit, display_inline_comments):
230
231
231 vcs_diff = PullRequestModel().get_diff(
232 vcs_diff = PullRequestModel().get_diff(
232 source_repo, source_ref_id, target_ref_id)
233 source_repo, source_ref_id, target_ref_id)
233
234
234 diff_processor = diffs.DiffProcessor(
235 diff_processor = diffs.DiffProcessor(
235 vcs_diff, format='newdiff', diff_limit=diff_limit,
236 vcs_diff, format='newdiff', diff_limit=diff_limit,
236 file_limit=file_limit, show_full_diff=fulldiff)
237 file_limit=file_limit, show_full_diff=fulldiff)
237
238
238 _parsed = diff_processor.prepare()
239 _parsed = diff_processor.prepare()
239
240
240 def _node_getter(commit):
241 def _node_getter(commit):
241 def get_node(fname):
242 def get_node(fname):
242 try:
243 try:
243 return commit.get_node(fname)
244 return commit.get_node(fname)
244 except NodeDoesNotExistError:
245 except NodeDoesNotExistError:
245 return None
246 return None
246
247
247 return get_node
248 return get_node
248
249
249 diffset = codeblocks.DiffSet(
250 diffset = codeblocks.DiffSet(
250 repo_name=self.db_repo_name,
251 repo_name=self.db_repo_name,
251 source_repo_name=source_repo_name,
252 source_repo_name=source_repo_name,
252 source_node_getter=_node_getter(target_commit),
253 source_node_getter=_node_getter(target_commit),
253 target_node_getter=_node_getter(source_commit),
254 target_node_getter=_node_getter(source_commit),
254 comments=display_inline_comments
255 comments=display_inline_comments
255 )
256 )
256 diffset = diffset.render_patchset(
257 diffset = diffset.render_patchset(
257 _parsed, target_commit.raw_id, source_commit.raw_id)
258 _parsed, target_commit.raw_id, source_commit.raw_id)
258
259
259 return diffset
260 return diffset
260
261
261 @LoginRequired()
262 @LoginRequired()
262 @HasRepoPermissionAnyDecorator(
263 @HasRepoPermissionAnyDecorator(
263 'repository.read', 'repository.write', 'repository.admin')
264 'repository.read', 'repository.write', 'repository.admin')
264 @view_config(
265 @view_config(
265 route_name='pullrequest_show', request_method='GET',
266 route_name='pullrequest_show', request_method='GET',
266 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 def pull_request_show(self):
268 def pull_request_show(self):
268 pull_request_id = self.request.matchdict['pull_request_id']
269 pull_request_id = self.request.matchdict['pull_request_id']
269
270
270 c = self.load_default_context()
271 c = self.load_default_context()
271
272
272 version = self.request.GET.get('version')
273 version = self.request.GET.get('version')
273 from_version = self.request.GET.get('from_version') or version
274 from_version = self.request.GET.get('from_version') or version
274 merge_checks = self.request.GET.get('merge_checks')
275 merge_checks = self.request.GET.get('merge_checks')
275 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
276 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
276
277
277 (pull_request_latest,
278 (pull_request_latest,
278 pull_request_at_ver,
279 pull_request_at_ver,
279 pull_request_display_obj,
280 pull_request_display_obj,
280 at_version) = self._get_pr_version(
281 at_version) = self._get_pr_version(
281 pull_request_id, version=version)
282 pull_request_id, version=version)
282 pr_closed = pull_request_latest.is_closed()
283 pr_closed = pull_request_latest.is_closed()
283
284
284 if pr_closed and (version or from_version):
285 if pr_closed and (version or from_version):
285 # do not allow browsing versions of a closed pull request
286 # do not allow browsing versions of a closed pull request
286 raise HTTPFound(h.route_path(
287 raise HTTPFound(h.route_path(
287 'pullrequest_show', repo_name=self.db_repo_name,
288 'pullrequest_show', repo_name=self.db_repo_name,
288 pull_request_id=pull_request_id))
289 pull_request_id=pull_request_id))
289
290
290 versions = pull_request_display_obj.versions()
291 versions = pull_request_display_obj.versions()
291
292
292 c.at_version = at_version
293 c.at_version = at_version
293 c.at_version_num = (at_version
294 c.at_version_num = (at_version
294 if at_version and at_version != 'latest'
295 if at_version and at_version != 'latest'
295 else None)
296 else None)
296 c.at_version_pos = ChangesetComment.get_index_from_version(
297 c.at_version_pos = ChangesetComment.get_index_from_version(
297 c.at_version_num, versions)
298 c.at_version_num, versions)
298
299
299 (prev_pull_request_latest,
300 (prev_pull_request_latest,
300 prev_pull_request_at_ver,
301 prev_pull_request_at_ver,
301 prev_pull_request_display_obj,
302 prev_pull_request_display_obj,
302 prev_at_version) = self._get_pr_version(
303 prev_at_version) = self._get_pr_version(
303 pull_request_id, version=from_version)
304 pull_request_id, version=from_version)
304
305
305 c.from_version = prev_at_version
306 c.from_version = prev_at_version
306 c.from_version_num = (prev_at_version
307 c.from_version_num = (prev_at_version
307 if prev_at_version and prev_at_version != 'latest'
308 if prev_at_version and prev_at_version != 'latest'
308 else None)
309 else None)
309 c.from_version_pos = ChangesetComment.get_index_from_version(
310 c.from_version_pos = ChangesetComment.get_index_from_version(
310 c.from_version_num, versions)
311 c.from_version_num, versions)
311
312
312 # define if we're in COMPARE mode or VIEW at version mode
313 # define if we're in COMPARE mode or VIEW at version mode
313 compare = at_version != prev_at_version
314 compare = at_version != prev_at_version
314
315
315 # the repo_name this pull request was opened against,
316 # the repo_name this pull request was opened against,
316 # i.e. the target_repo must match
317 # i.e. the target_repo must match
317 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
318 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
318 raise HTTPNotFound()
319 raise HTTPNotFound()
319
320
320 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
321 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
321 pull_request_at_ver)
322 pull_request_at_ver)
322
323
323 c.pull_request = pull_request_display_obj
324 c.pull_request = pull_request_display_obj
324 c.pull_request_latest = pull_request_latest
325 c.pull_request_latest = pull_request_latest
325
326
326 if compare or (at_version and not at_version == 'latest'):
327 if compare or (at_version and not at_version == 'latest'):
327 c.allowed_to_change_status = False
328 c.allowed_to_change_status = False
328 c.allowed_to_update = False
329 c.allowed_to_update = False
329 c.allowed_to_merge = False
330 c.allowed_to_merge = False
330 c.allowed_to_delete = False
331 c.allowed_to_delete = False
331 c.allowed_to_comment = False
332 c.allowed_to_comment = False
332 c.allowed_to_close = False
333 c.allowed_to_close = False
333 else:
334 else:
334 can_change_status = PullRequestModel().check_user_change_status(
335 can_change_status = PullRequestModel().check_user_change_status(
335 pull_request_at_ver, self._rhodecode_user)
336 pull_request_at_ver, self._rhodecode_user)
336 c.allowed_to_change_status = can_change_status and not pr_closed
337 c.allowed_to_change_status = can_change_status and not pr_closed
337
338
338 c.allowed_to_update = PullRequestModel().check_user_update(
339 c.allowed_to_update = PullRequestModel().check_user_update(
339 pull_request_latest, self._rhodecode_user) and not pr_closed
340 pull_request_latest, self._rhodecode_user) and not pr_closed
340 c.allowed_to_merge = PullRequestModel().check_user_merge(
341 c.allowed_to_merge = PullRequestModel().check_user_merge(
341 pull_request_latest, self._rhodecode_user) and not pr_closed
342 pull_request_latest, self._rhodecode_user) and not pr_closed
342 c.allowed_to_delete = PullRequestModel().check_user_delete(
343 c.allowed_to_delete = PullRequestModel().check_user_delete(
343 pull_request_latest, self._rhodecode_user) and not pr_closed
344 pull_request_latest, self._rhodecode_user) and not pr_closed
344 c.allowed_to_comment = not pr_closed
345 c.allowed_to_comment = not pr_closed
345 c.allowed_to_close = c.allowed_to_merge and not pr_closed
346 c.allowed_to_close = c.allowed_to_merge and not pr_closed
346
347
347 c.forbid_adding_reviewers = False
348 c.forbid_adding_reviewers = False
348 c.forbid_author_to_review = False
349 c.forbid_author_to_review = False
349 c.forbid_commit_author_to_review = False
350 c.forbid_commit_author_to_review = False
350
351
351 if pull_request_latest.reviewer_data and \
352 if pull_request_latest.reviewer_data and \
352 'rules' in pull_request_latest.reviewer_data:
353 'rules' in pull_request_latest.reviewer_data:
353 rules = pull_request_latest.reviewer_data['rules'] or {}
354 rules = pull_request_latest.reviewer_data['rules'] or {}
354 try:
355 try:
355 c.forbid_adding_reviewers = rules.get(
356 c.forbid_adding_reviewers = rules.get(
356 'forbid_adding_reviewers')
357 'forbid_adding_reviewers')
357 c.forbid_author_to_review = rules.get(
358 c.forbid_author_to_review = rules.get(
358 'forbid_author_to_review')
359 'forbid_author_to_review')
359 c.forbid_commit_author_to_review = rules.get(
360 c.forbid_commit_author_to_review = rules.get(
360 'forbid_commit_author_to_review')
361 'forbid_commit_author_to_review')
361 except Exception:
362 except Exception:
362 pass
363 pass
363
364
364 # check merge capabilities
365 # check merge capabilities
365 _merge_check = MergeCheck.validate(
366 _merge_check = MergeCheck.validate(
366 pull_request_latest, user=self._rhodecode_user,
367 pull_request_latest, user=self._rhodecode_user,
367 translator=self.request.translate)
368 translator=self.request.translate)
368 c.pr_merge_errors = _merge_check.error_details
369 c.pr_merge_errors = _merge_check.error_details
369 c.pr_merge_possible = not _merge_check.failed
370 c.pr_merge_possible = not _merge_check.failed
370 c.pr_merge_message = _merge_check.merge_msg
371 c.pr_merge_message = _merge_check.merge_msg
371
372
372 c.pr_merge_info = MergeCheck.get_merge_conditions(
373 c.pr_merge_info = MergeCheck.get_merge_conditions(
373 pull_request_latest, translator=self.request.translate)
374 pull_request_latest, translator=self.request.translate)
374
375
375 c.pull_request_review_status = _merge_check.review_status
376 c.pull_request_review_status = _merge_check.review_status
376 if merge_checks:
377 if merge_checks:
377 self.request.override_renderer = \
378 self.request.override_renderer = \
378 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
379 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
379 return self._get_template_context(c)
380 return self._get_template_context(c)
380
381
381 comments_model = CommentsModel()
382 comments_model = CommentsModel()
382
383
383 # reviewers and statuses
384 # reviewers and statuses
384 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
385 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
385 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
386 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
386
387
387 # GENERAL COMMENTS with versions #
388 # GENERAL COMMENTS with versions #
388 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
389 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
389 q = q.order_by(ChangesetComment.comment_id.asc())
390 q = q.order_by(ChangesetComment.comment_id.asc())
390 general_comments = q
391 general_comments = q
391
392
392 # pick comments we want to render at current version
393 # pick comments we want to render at current version
393 c.comment_versions = comments_model.aggregate_comments(
394 c.comment_versions = comments_model.aggregate_comments(
394 general_comments, versions, c.at_version_num)
395 general_comments, versions, c.at_version_num)
395 c.comments = c.comment_versions[c.at_version_num]['until']
396 c.comments = c.comment_versions[c.at_version_num]['until']
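# Judging from the lookup above, `aggregate_comments` returns a mapping of
# PR version number to buckets of general comments, roughly:
#   c.comment_versions = {at_version_num: {'until': [...], ...}, ...}
# so `c.comments` holds every general comment made up to the displayed version.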
396
397
397 # INLINE COMMENTS with versions #
398 # INLINE COMMENTS with versions #
398 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
399 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
399 q = q.order_by(ChangesetComment.comment_id.asc())
400 q = q.order_by(ChangesetComment.comment_id.asc())
400 inline_comments = q
401 inline_comments = q
401
402
402 c.inline_versions = comments_model.aggregate_comments(
403 c.inline_versions = comments_model.aggregate_comments(
403 inline_comments, versions, c.at_version_num, inline=True)
404 inline_comments, versions, c.at_version_num, inline=True)
404
405
405 # inject latest version
406 # inject latest version
406 latest_ver = PullRequest.get_pr_display_object(
407 latest_ver = PullRequest.get_pr_display_object(
407 pull_request_latest, pull_request_latest)
408 pull_request_latest, pull_request_latest)
408
409
409 c.versions = versions + [latest_ver]
410 c.versions = versions + [latest_ver]
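# A synthetic display object for the live head is appended, so the list of
# selectable versions always includes 'latest' next to the saved versions.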
410
411
411 # when viewing at a specific version, do not show comments
412 # when viewing at a specific version, do not show comments
412 # made after that version
413 # made after that version
413 display_inline_comments = collections.defaultdict(
414 display_inline_comments = collections.defaultdict(
414 lambda: collections.defaultdict(list))
415 lambda: collections.defaultdict(list))
415 for co in inline_comments:
416 for co in inline_comments:
416 if c.at_version_num:
417 if c.at_version_num:
417 # pick comments created up to the given version, so we
418 # pick comments created up to the given version, so we
418 # don't render comments that belong to a later version
419 # don't render comments that belong to a later version
419 should_render = co.pull_request_version_id and \
420 should_render = co.pull_request_version_id and \
420 co.pull_request_version_id <= c.at_version_num
421 co.pull_request_version_id <= c.at_version_num
421 else:
422 else:
422 # showing all, for 'latest'
423 # showing all, for 'latest'
423 should_render = True
424 should_render = True
424
425
425 if should_render:
426 if should_render:
426 display_inline_comments[co.f_path][co.line_no].append(co)
427 display_inline_comments[co.f_path][co.line_no].append(co)
427
428
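# At this point `display_inline_comments` is a nested mapping of
#   {file_path: {line_no: [comments, ...]}}
# containing only the inline comments that should be rendered for the
# displayed version.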
428 # load diff data into the template context; in compare mode the
429 # load diff data into the template context; in compare mode the
429 # diff is calculated from the changes between two versions of the PR
430 # diff is calculated from the changes between two versions of the PR
430
431
431 source_repo = pull_request_at_ver.source_repo
432 source_repo = pull_request_at_ver.source_repo
432 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
433 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
433
434
434 target_repo = pull_request_at_ver.target_repo
435 target_repo = pull_request_at_ver.target_repo
435 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
436 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
436
437
437 if compare:
438 if compare:
438 # in compare mode, switch the diff base to the latest commit of the previous version
439 # in compare mode, switch the diff base to the latest commit of the previous version
439 target_ref_id = prev_pull_request_display_obj.revisions[0]
440 target_ref_id = prev_pull_request_display_obj.revisions[0]
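# With the base moved to the tip of the previous version, the rendered diff
# presumably shows only what changed between the two PR versions.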
440
441
441 # even if the pull request was opened against a bookmark/branch/tag,
442 # even if the pull request was opened against a bookmark/branch/tag,
442 # we always pin it to a revision so later ref changes do not alter the diff
443 # we always pin it to a revision so later ref changes do not alter the diff
443 c.source_ref_type = 'rev'
444 c.source_ref_type = 'rev'
444 c.source_ref = source_ref_id
445 c.source_ref = source_ref_id
445
446
446 c.target_ref_type = 'rev'
447 c.target_ref_type = 'rev'
447 c.target_ref = target_ref_id
448 c.target_ref = target_ref_id
448
449
449 c.source_repo = source_repo
450 c.source_repo = source_repo
450 c.target_repo = target_repo
451 c.target_repo = target_repo
451
452
452 c.commit_ranges = []
453 c.commit_ranges = []
453 source_commit = EmptyCommit()
454 source_commit = EmptyCommit()
454 target_commit = EmptyCommit()
455 target_commit = EmptyCommit()
455 c.missing_requirements = False
456 c.missing_requirements = False
456
457
457 source_scm = source_repo.scm_instance()
458 source_scm = source_repo.scm_instance()
458 target_scm = target_repo.scm_instance()
459 target_scm = target_repo.scm_instance()
459
460
460 # try the shadow repo first, fall back to the regular repo
461 # try the shadow repo first, fall back to the regular repo
461 try:
462 try:
462 commits_source_repo = pull_request_latest.get_shadow_repo()
463 commits_source_repo = pull_request_latest.get_shadow_repo()
463 except Exception:
464 except Exception:
464 log.debug('Failed to get shadow repo', exc_info=True)
465 log.debug('Failed to get shadow repo', exc_info=True)
465 commits_source_repo = source_scm
466 commits_source_repo = source_scm
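# Note: the shadow repository is preferred here, presumably because it also
# contains the merge-preview commits; if it cannot be loaded, the plain
# source repo is a safe fallback for looking up the PR commits.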
466
467
467 c.commits_source_repo = commits_source_repo
468 c.commits_source_repo = commits_source_repo
468 commit_cache = {}
469 commit_cache = {}
469 try:
470 try:
470 pre_load = ["author", "branch", "date", "message"]
471 pre_load = ["author", "branch", "date", "message"]
471 show_revs = pull_request_at_ver.revisions
472 show_revs = pull_request_at_ver.revisions
472 for rev in show_revs:
473 for rev in show_revs:
473 comm = commits_source_repo.get_commit(
474 comm = commits_source_repo.get_commit(
474 commit_id=rev, pre_load=pre_load)
475 commit_id=rev, pre_load=pre_load)
475 c.commit_ranges.append(comm)
476 c.commit_ranges.append(comm)
476 commit_cache[comm.raw_id] = comm
477 commit_cache[comm.raw_id] = comm
477
478
478 # Order matters here: we first need to get the target, and
479 # Order matters here: we first need to get the target, and
479 # then the source
480 # then the source
480 target_commit = commits_source_repo.get_commit(
481 target_commit = commits_source_repo.get_commit(
481 commit_id=safe_str(target_ref_id))
482 commit_id=safe_str(target_ref_id))
482
483
483 source_commit = commits_source_repo.get_commit(
484 source_commit = commits_source_repo.get_commit(
484 commit_id=safe_str(source_ref_id))
485 commit_id=safe_str(source_ref_id))
485
486
486 except CommitDoesNotExistError:
487 except CommitDoesNotExistError:
487 log.warning(
488 log.warning(
488 'Failed to get commit from `{}` repo'.format(
489 'Failed to get commit from `{}` repo'.format(
489 commits_source_repo), exc_info=True)
490 commits_source_repo), exc_info=True)
490 except RepositoryRequirementError:
491 except RepositoryRequirementError:
491 log.warning(
492 log.warning(
492 'Failed to get all required data from repo', exc_info=True)
493 'Failed to get all required data from repo', exc_info=True)
493 c.missing_requirements = True
494 c.missing_requirements = True
494
495
495 c.ancestor = None # set it to None, to hide it from PR view
496 c.ancestor = None # set it to None, to hide it from PR view
496
497
497 try:
498 try:
498 ancestor_id = source_scm.get_common_ancestor(
499 ancestor_id = source_scm.get_common_ancestor(
499 source_commit.raw_id, target_commit.raw_id, target_scm)
500 source_commit.raw_id, target_commit.raw_id, target_scm)
500 c.ancestor_commit = source_scm.get_commit(ancestor_id)
501 c.ancestor_commit = source_scm.get_commit(ancestor_id)
501 except Exception:
502 except Exception:
502 c.ancestor_commit = None
503 c.ancestor_commit = None
503
504
504 c.statuses = source_repo.statuses(
505 c.statuses = source_repo.statuses(
505 [x.raw_id for x in c.commit_ranges])
506 [x.raw_id for x in c.commit_ranges])
506
507
507 # auto-collapse if we have more commits than the limit
508 # auto-collapse if we have more commits than the limit
508 collapse_limit = diffs.DiffProcessor._collapse_commits_over
509 collapse_limit = diffs.DiffProcessor._collapse_commits_over
509 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
510 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
510 c.compare_mode = compare
511 c.compare_mode = compare
511
512
512 # diff_limit is the old behavior: when the limit is hit, the whole
513 # diff_limit is the old behavior: when the limit is hit, the whole
513 # diff is cut off, otherwise only the big files are hidden
514 # diff is cut off, otherwise only the big files are hidden
514 # from the front-end
515 # from the front-end
515 diff_limit = c.visual.cut_off_limit_diff
516 diff_limit = c.visual.cut_off_limit_diff
516 file_limit = c.visual.cut_off_limit_file
517 file_limit = c.visual.cut_off_limit_file
517
518
518 c.missing_commits = False
519 c.missing_commits = False
519 if (c.missing_requirements
520 if (c.missing_requirements
520 or isinstance(source_commit, EmptyCommit)
521 or isinstance(source_commit, EmptyCommit)
521 or source_commit == target_commit):
522 or source_commit == target_commit):
522
523
523 c.missing_commits = True
524 c.missing_commits = True
524 else:
525 else:
525
526
526 c.diffset = self._get_diffset(
527 c.diffset = self._get_diffset(
527 c.source_repo.repo_name, commits_source_repo,
528 c.source_repo.repo_name, commits_source_repo,
528 source_ref_id, target_ref_id,
529 source_ref_id, target_ref_id,
529 target_commit, source_commit,
530 target_commit, source_commit,
530 diff_limit, c.fulldiff, file_limit, display_inline_comments)
531 diff_limit, c.fulldiff, file_limit, display_inline_comments)
531
532
532 c.limited_diff = c.diffset.limited_diff
533 c.limited_diff = c.diffset.limited_diff
533
534
534 # collect removed files that still have comments bound to them
535 # collect removed files that still have comments bound to them
535 comment_deleted_files = [
536 comment_deleted_files = [
536 fname for fname in display_inline_comments
537 fname for fname in display_inline_comments
537 if fname not in c.diffset.file_stats]
538 if fname not in c.diffset.file_stats]
538
539
539 c.deleted_files_comments = collections.defaultdict(dict)
540 c.deleted_files_comments = collections.defaultdict(dict)
540 for fname, per_line_comments in display_inline_comments.items():
541 for fname, per_line_comments in display_inline_comments.items():
541 if fname in comment_deleted_files:
542 if fname in comment_deleted_files:
542 c.deleted_files_comments[fname]['stats'] = 0
543 c.deleted_files_comments[fname]['stats'] = 0
543 c.deleted_files_comments[fname]['comments'] = list()
544 c.deleted_files_comments[fname]['comments'] = list()
544 for lno, comments in per_line_comments.items():
545 for lno, comments in per_line_comments.items():
545 c.deleted_files_comments[fname]['comments'].extend(
546 c.deleted_files_comments[fname]['comments'].extend(
546 comments)
547 comments)
547
548
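# `c.deleted_files_comments` now maps each file that is absent from the diff
# to its orphaned inline comments, presumably so the template can still
# surface them even though the file has no rendered hunk.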
548 # this is a hack to display links properly: when creating a PR, the
549 # this is a hack to display links properly: when creating a PR, the
549 # compare view and others use a different notation, and
550 # compare view and others use a different notation, and
550 # compare_commits.mako renders links based on the target_repo.
551 # compare_commits.mako renders links based on the target_repo.
551 # We need to swap that here to generate the links properly on the HTML side
552 # We need to swap that here to generate the links properly on the HTML side
552 c.target_repo = c.source_repo
553 c.target_repo = c.source_repo
553
554
554 c.commit_statuses = ChangesetStatus.STATUSES
555 c.commit_statuses = ChangesetStatus.STATUSES
555
556
556 c.show_version_changes = not pr_closed
557 c.show_version_changes = not pr_closed
557 if c.show_version_changes:
558 if c.show_version_changes:
558 cur_obj = pull_request_at_ver
559 cur_obj = pull_request_at_ver
559 prev_obj = prev_pull_request_at_ver
560 prev_obj = prev_pull_request_at_ver
560
561
561 old_commit_ids = prev_obj.revisions
562 old_commit_ids = prev_obj.revisions
562 new_commit_ids = cur_obj.revisions
563 new_commit_ids = cur_obj.revisions
563 commit_changes = PullRequestModel()._calculate_commit_id_changes(
564 commit_changes = PullRequestModel()._calculate_commit_id_changes(
564 old_commit_ids, new_commit_ids)
565 old_commit_ids, new_commit_ids)
565 c.commit_changes_summary = commit_changes
566 c.commit_changes_summary = commit_changes
566
567
567 # calculate the diff for commits between versions
568 # calculate the diff for commits between versions
568 c.commit_changes = []
569 c.commit_changes = []
569 mark = lambda cs, fw: list(
570 mark = lambda cs, fw: list(
570 h.itertools.izip_longest([], cs, fillvalue=fw))
571 h.itertools.izip_longest([], cs, fillvalue=fw))
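# `mark` pairs every commit id with a single-character flag, e.g.
#   mark(['abc', 'def'], 'a') == [('a', 'abc'), ('a', 'def')]
# which lets the loop below tag commits as added ('a'), removed ('r')
# or common ('c').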
571 for c_type, raw_id in mark(commit_changes.added, 'a') \
572 for c_type, raw_id in mark(commit_changes.added, 'a') \
572 + mark(commit_changes.removed, 'r') \
573 + mark(commit_changes.removed, 'r') \
573 + mark(commit_changes.common, 'c'):
574 + mark(commit_changes.common, 'c'):
574
575
575 if raw_id in commit_cache:
576 if raw_id in commit_cache:
576 commit = commit_cache[raw_id]
577 commit = commit_cache[raw_id]
577 else:
578 else:
578 try:
579 try:
579 commit = commits_source_repo.get_commit(raw_id)
580 commit = commits_source_repo.get_commit(raw_id)
580 except CommitDoesNotExistError:
581 except CommitDoesNotExistError:
581 # in case extraction fails, still use a "dummy" commit
582 # in case extraction fails, still use a "dummy" commit
582 # for display in the commit diff
583 # for display in the commit diff
583 commit = h.AttributeDict(
584 commit = h.AttributeDict(
584 {'raw_id': raw_id,
585 {'raw_id': raw_id,
585 'message': 'EMPTY or MISSING COMMIT'})
586 'message': 'EMPTY or MISSING COMMIT'})
586 c.commit_changes.append([c_type, commit])
587 c.commit_changes.append([c_type, commit])
587
588
588 # current user review statuses for each version
589 # current user review statuses for each version
589 c.review_versions = {}
590 c.review_versions = {}
590 if self._rhodecode_user.user_id in allowed_reviewers:
591 if self._rhodecode_user.user_id in allowed_reviewers:
591 for co in general_comments:
592 for co in general_comments:
592 if co.author.user_id == self._rhodecode_user.user_id:
593 if co.author.user_id == self._rhodecode_user.user_id:
593 # each comment has a status change
594 # each comment has a status change
594 status = co.status_change
595 status = co.status_change
595 if status:
596 if status:
596 _ver_pr = status[0].comment.pull_request_version_id
597 _ver_pr = status[0].comment.pull_request_version_id
597 c.review_versions[_ver_pr] = status[0]
598 c.review_versions[_ver_pr] = status[0]
598
599
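# `c.review_versions` ends up mapping a PR version id to the status change
# the current reviewer made at that version; presumably the UI uses it to
# show how their vote evolved across versions.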
599 return self._get_template_context(c)
600 return self._get_template_context(c)
600
601
601 def assure_not_empty_repo(self):
602 def assure_not_empty_repo(self):
602 _ = self.request.translate
603 _ = self.request.translate
603
604
604 try:
605 try:
605 self.db_repo.scm_instance().get_commit()
606 self.db_repo.scm_instance().get_commit()
606 except EmptyRepositoryError:
607 except EmptyRepositoryError:
607 h.flash(h.literal(_('There are no commits yet')),
608 h.flash(h.literal(_('There are no commits yet')),
608 category='warning')
609 category='warning')
609 raise HTTPFound(
610 raise HTTPFound(
610 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
611 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
611
612
612 @LoginRequired()
613 @LoginRequired()
613 @NotAnonymous()
614 @NotAnonymous()
614 @HasRepoPermissionAnyDecorator(
615 @HasRepoPermissionAnyDecorator(
615 'repository.read', 'repository.write', 'repository.admin')
616 'repository.read', 'repository.write', 'repository.admin')
616 @view_config(
617 @view_config(
617 route_name='pullrequest_new', request_method='GET',
618 route_name='pullrequest_new', request_method='GET',
618 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
619 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
619 def pull_request_new(self):
620 def pull_request_new(self):
620 _ = self.request.translate
621 _ = self.request.translate
621 c = self.load_default_context()
622 c = self.load_default_context()
622
623
623 self.assure_not_empty_repo()
624 self.assure_not_empty_repo()
624 source_repo = self.db_repo
625 source_repo = self.db_repo
625
626
626 commit_id = self.request.GET.get('commit')
627 commit_id = self.request.GET.get('commit')
627 branch_ref = self.request.GET.get('branch')
628 branch_ref = self.request.GET.get('branch')
628 bookmark_ref = self.request.GET.get('bookmark')
629 bookmark_ref = self.request.GET.get('bookmark')
629
630
630 try:
631 try:
631 source_repo_data = PullRequestModel().generate_repo_data(
632 source_repo_data = PullRequestModel().generate_repo_data(
632 source_repo, commit_id=commit_id,
633 source_repo, commit_id=commit_id,
633 branch=branch_ref, bookmark=bookmark_ref, translator=self.request.translate)
634 branch=branch_ref, bookmark=bookmark_ref, translator=self.request.translate)
634 except CommitDoesNotExistError as e:
635 except CommitDoesNotExistError as e:
635 log.exception(e)
636 log.exception(e)
636 h.flash(_('Commit does not exist'), 'error')
637 h.flash(_('Commit does not exist'), 'error')
637 raise HTTPFound(
638 raise HTTPFound(
638 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
639 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
639
640
640 default_target_repo = source_repo
641 default_target_repo = source_repo
641
642
642 if source_repo.parent:
643 if source_repo.parent:
643 parent_vcs_obj = source_repo.parent.scm_instance()
644 parent_vcs_obj = source_repo.parent.scm_instance()
644 if parent_vcs_obj and not parent_vcs_obj.is_empty():
645 if parent_vcs_obj and not parent_vcs_obj.is_empty():
645 # change default if we have a parent repo
646 # change default if we have a parent repo
646 default_target_repo = source_repo.parent
647 default_target_repo = source_repo.parent
647
648
648 target_repo_data = PullRequestModel().generate_repo_data(
649 target_repo_data = PullRequestModel().generate_repo_data(
649 default_target_repo, translator=self.request.translate)
650 default_target_repo, translator=self.request.translate)
650
651
651 selected_source_ref = source_repo_data['refs']['selected_ref']
652 selected_source_ref = source_repo_data['refs']['selected_ref']
652
653
653 title_source_ref = selected_source_ref.split(':', 2)[1]
654 title_source_ref = selected_source_ref.split(':', 2)[1]
654 c.default_title = PullRequestModel().generate_pullrequest_title(
655 c.default_title = PullRequestModel().generate_pullrequest_title(
655 source=source_repo.repo_name,
656 source=source_repo.repo_name,
656 source_ref=title_source_ref,
657 source_ref=title_source_ref,
657 target=default_target_repo.repo_name
658 target=default_target_repo.repo_name
658 )
659 )
659
660
660 c.default_repo_data = {
661 c.default_repo_data = {
661 'source_repo_name': source_repo.repo_name,
662 'source_repo_name': source_repo.repo_name,
662 'source_refs_json': json.dumps(source_repo_data),
663 'source_refs_json': json.dumps(source_repo_data),
663 'target_repo_name': default_target_repo.repo_name,
664 'target_repo_name': default_target_repo.repo_name,
664 'target_refs_json': json.dumps(target_repo_data),
665 'target_refs_json': json.dumps(target_repo_data),
665 }
666 }
666 c.default_source_ref = selected_source_ref
667 c.default_source_ref = selected_source_ref
667
668
668 return self._get_template_context(c)
669 return self._get_template_context(c)
669
670
670 @LoginRequired()
671 @LoginRequired()
671 @NotAnonymous()
672 @NotAnonymous()
672 @HasRepoPermissionAnyDecorator(
673 @HasRepoPermissionAnyDecorator(
673 'repository.read', 'repository.write', 'repository.admin')
674 'repository.read', 'repository.write', 'repository.admin')
674 @view_config(
675 @view_config(
675 route_name='pullrequest_repo_refs', request_method='GET',
676 route_name='pullrequest_repo_refs', request_method='GET',
676 renderer='json_ext', xhr=True)
677 renderer='json_ext', xhr=True)
677 def pull_request_repo_refs(self):
678 def pull_request_repo_refs(self):
679 self.load_default_context()
678 target_repo_name = self.request.matchdict['target_repo_name']
680 target_repo_name = self.request.matchdict['target_repo_name']
679 repo = Repository.get_by_repo_name(target_repo_name)
681 repo = Repository.get_by_repo_name(target_repo_name)
680 if not repo:
682 if not repo:
681 raise HTTPNotFound()
683 raise HTTPNotFound()
682 return PullRequestModel().generate_repo_data(
684 return PullRequestModel().generate_repo_data(
683 repo, translator=self.request.translate)
685 repo, translator=self.request.translate)
684
686
685 @LoginRequired()
687 @LoginRequired()
686 @NotAnonymous()
688 @NotAnonymous()
687 @HasRepoPermissionAnyDecorator(
689 @HasRepoPermissionAnyDecorator(
688 'repository.read', 'repository.write', 'repository.admin')
690 'repository.read', 'repository.write', 'repository.admin')
689 @view_config(
691 @view_config(
690 route_name='pullrequest_repo_destinations', request_method='GET',
692 route_name='pullrequest_repo_destinations', request_method='GET',
691 renderer='json_ext', xhr=True)
693 renderer='json_ext', xhr=True)
692 def pull_request_repo_destinations(self):
694 def pull_request_repo_destinations(self):
693 _ = self.request.translate
695 _ = self.request.translate
694 filter_query = self.request.GET.get('query')
696 filter_query = self.request.GET.get('query')
695
697
696 query = Repository.query() \
698 query = Repository.query() \
697 .order_by(func.length(Repository.repo_name)) \
699 .order_by(func.length(Repository.repo_name)) \
698 .filter(
700 .filter(
699 or_(Repository.repo_name == self.db_repo.repo_name,
701 or_(Repository.repo_name == self.db_repo.repo_name,
700 Repository.fork_id == self.db_repo.repo_id))
702 Repository.fork_id == self.db_repo.repo_id))
701
703
702 if filter_query:
704 if filter_query:
703 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
705 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
704 query = query.filter(
706 query = query.filter(
705 Repository.repo_name.ilike(ilike_expression))
707 Repository.repo_name.ilike(ilike_expression))
706
708
707 add_parent = False
709 add_parent = False
708 if self.db_repo.parent:
710 if self.db_repo.parent:
709 if filter_query in self.db_repo.parent.repo_name:
711 if filter_query in self.db_repo.parent.repo_name:
710 parent_vcs_obj = self.db_repo.parent.scm_instance()
712 parent_vcs_obj = self.db_repo.parent.scm_instance()
711 if parent_vcs_obj and not parent_vcs_obj.is_empty():
713 if parent_vcs_obj and not parent_vcs_obj.is_empty():
712 add_parent = True
714 add_parent = True
713
715
714 limit = 20 - 1 if add_parent else 20
716 limit = 20 - 1 if add_parent else 20
715 all_repos = query.limit(limit).all()
717 all_repos = query.limit(limit).all()
716 if add_parent:
718 if add_parent:
717 all_repos += [self.db_repo.parent]
719 all_repos += [self.db_repo.parent]
718
720
719 repos = []
721 repos = []
720 for obj in ScmModel().get_repos(all_repos):
722 for obj in ScmModel().get_repos(all_repos):
721 repos.append({
723 repos.append({
722 'id': obj['name'],
724 'id': obj['name'],
723 'text': obj['name'],
725 'text': obj['name'],
724 'type': 'repo',
726 'type': 'repo',
725 'obj': obj['dbrepo']
727 'obj': obj['dbrepo']
726 })
728 })
727
729
728 data = {
730 data = {
729 'more': False,
731 'more': False,
730 'results': [{
732 'results': [{
731 'text': _('Repositories'),
733 'text': _('Repositories'),
732 'children': repos
734 'children': repos
733 }] if repos else []
735 }] if repos else []
734 }
736 }
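# The response is shaped for the repository picker on the pull request form:
# a single 'Repositories' group whose children are {id, text, type, obj}
# entries, presumably consumed by a select2-style dropdown.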
735 return data
737 return data
736
738
737 @LoginRequired()
739 @LoginRequired()
738 @NotAnonymous()
740 @NotAnonymous()
739 @HasRepoPermissionAnyDecorator(
741 @HasRepoPermissionAnyDecorator(
740 'repository.read', 'repository.write', 'repository.admin')
742 'repository.read', 'repository.write', 'repository.admin')
741 @CSRFRequired()
743 @CSRFRequired()
742 @view_config(
744 @view_config(
743 route_name='pullrequest_create', request_method='POST',
745 route_name='pullrequest_create', request_method='POST',
744 renderer=None)
746 renderer=None)
745 def pull_request_create(self):
747 def pull_request_create(self):
746 _ = self.request.translate
748 _ = self.request.translate
747 self.assure_not_empty_repo()
749 self.assure_not_empty_repo()
750 self.load_default_context()
748
751
749 controls = peppercorn.parse(self.request.POST.items())
752 controls = peppercorn.parse(self.request.POST.items())
750
753
751 try:
754 try:
752 form = PullRequestForm(
755 form = PullRequestForm(
753 self.request.translate, self.db_repo.repo_id)()
756 self.request.translate, self.db_repo.repo_id)()
754 _form = form.to_python(controls)
757 _form = form.to_python(controls)
755 except formencode.Invalid as errors:
758 except formencode.Invalid as errors:
756 if errors.error_dict.get('revisions'):
759 if errors.error_dict.get('revisions'):
757 msg = 'Revisions: %s' % errors.error_dict['revisions']
760 msg = 'Revisions: %s' % errors.error_dict['revisions']
758 elif errors.error_dict.get('pullrequest_title'):
761 elif errors.error_dict.get('pullrequest_title'):
759 msg = _('Pull request requires a title with min. 3 chars')
762 msg = _('Pull request requires a title with min. 3 chars')
760 else:
763 else:
761 msg = _('Error creating pull request: {}').format(errors)
764 msg = _('Error creating pull request: {}').format(errors)
762 log.exception(msg)
765 log.exception(msg)
763 h.flash(msg, 'error')
766 h.flash(msg, 'error')
764
767
765 # would rather just go back to form ...
768 # would rather just go back to form ...
766 raise HTTPFound(
769 raise HTTPFound(
767 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
770 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
768
771
769 source_repo = _form['source_repo']
772 source_repo = _form['source_repo']
770 source_ref = _form['source_ref']
773 source_ref = _form['source_ref']
771 target_repo = _form['target_repo']
774 target_repo = _form['target_repo']
772 target_ref = _form['target_ref']
775 target_ref = _form['target_ref']
773 commit_ids = _form['revisions'][::-1]
776 commit_ids = _form['revisions'][::-1]
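# reverse the submitted revisions; presumably the form lists them
# newest-first, while the pull request stores commits oldest-first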
774
777
775 # find the ancestor for this pr
778 # find the ancestor for this pr
776 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
779 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
777 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
780 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
778
781
779 # re-check permissions here
782 # re-check permissions here
780 # we must have at least read permissions on the source repo
783 # we must have at least read permissions on the source repo
781
784
782 source_perm = HasRepoPermissionAny(
785 source_perm = HasRepoPermissionAny(
783 'repository.read',
786 'repository.read',
784 'repository.write', 'repository.admin')(source_db_repo.repo_name)
787 'repository.write', 'repository.admin')(source_db_repo.repo_name)
785 if not source_perm:
788 if not source_perm:
786 msg = _('Not enough permissions to source repo `{}`.').format(
789 msg = _('Not enough permissions to source repo `{}`.').format(
787 source_db_repo.repo_name)
790 source_db_repo.repo_name)
788 h.flash(msg, category='error')
791 h.flash(msg, category='error')
789 # copy the args back to redirect
792 # copy the args back to redirect
790 org_query = self.request.GET.mixed()
793 org_query = self.request.GET.mixed()
791 raise HTTPFound(
794 raise HTTPFound(
792 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
795 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
793 _query=org_query))
796 _query=org_query))
794
797
795 # we must have read permissions on the target repo as well, and later on
798 # we must have read permissions on the target repo as well, and later on
796 # we also want to check branch permissions here
799 # we also want to check branch permissions here
797 target_perm = HasRepoPermissionAny(
800 target_perm = HasRepoPermissionAny(
798 'repository.read',
801 'repository.read',
799 'repository.write', 'repository.admin')(target_db_repo.repo_name)
802 'repository.write', 'repository.admin')(target_db_repo.repo_name)
800 if not target_perm:
803 if not target_perm:
801 msg = _('Not enough permissions to target repo `{}`.').format(
804 msg = _('Not enough permissions to target repo `{}`.').format(
802 target_db_repo.repo_name)
805 target_db_repo.repo_name)
803 h.flash(msg, category='error')
806 h.flash(msg, category='error')
804 # copy the args back to redirect
807 # copy the args back to redirect
805 org_query = self.request.GET.mixed()
808 org_query = self.request.GET.mixed()
806 raise HTTPFound(
809 raise HTTPFound(
807 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
810 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
808 _query=org_query))
811 _query=org_query))
809
812
810 source_scm = source_db_repo.scm_instance()
813 source_scm = source_db_repo.scm_instance()
811 target_scm = target_db_repo.scm_instance()
814 target_scm = target_db_repo.scm_instance()
812
815
813 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
816 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
814 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
817 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
815
818
816 ancestor = source_scm.get_common_ancestor(
819 ancestor = source_scm.get_common_ancestor(
817 source_commit.raw_id, target_commit.raw_id, target_scm)
820 source_commit.raw_id, target_commit.raw_id, target_scm)
818
821
819 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
822 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
820 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
823 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
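# The target ref is re-encoded as 'type:name:commit_id', pinning the common
# ancestor as the commit id, so the pull request effectively records the
# merge base it was created against.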
821
824
822 pullrequest_title = _form['pullrequest_title']
825 pullrequest_title = _form['pullrequest_title']
823 title_source_ref = source_ref.split(':', 2)[1]
826 title_source_ref = source_ref.split(':', 2)[1]
824 if not pullrequest_title:
827 if not pullrequest_title:
825 pullrequest_title = PullRequestModel().generate_pullrequest_title(
828 pullrequest_title = PullRequestModel().generate_pullrequest_title(
826 source=source_repo,
829 source=source_repo,
827 source_ref=title_source_ref,
830 source_ref=title_source_ref,
828 target=target_repo
831 target=target_repo
829 )
832 )
830
833
831 description = _form['pullrequest_desc']
834 description = _form['pullrequest_desc']
832
835
833 get_default_reviewers_data, validate_default_reviewers = \
836 get_default_reviewers_data, validate_default_reviewers = \
834 PullRequestModel().get_reviewer_functions()
837 PullRequestModel().get_reviewer_functions()
835
838
836 # recalculate the reviewer rules, so we can validate the submitted reviewers
839 # recalculate the reviewer rules, so we can validate the submitted reviewers
837 reviewer_rules = get_default_reviewers_data(
840 reviewer_rules = get_default_reviewers_data(
838 self._rhodecode_db_user, source_db_repo,
841 self._rhodecode_db_user, source_db_repo,
839 source_commit, target_db_repo, target_commit)
842 source_commit, target_db_repo, target_commit)
840
843
841 given_reviewers = _form['review_members']
844 given_reviewers = _form['review_members']
842 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
845 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
843
846
844 try:
847 try:
845 pull_request = PullRequestModel().create(
848 pull_request = PullRequestModel().create(
846 self._rhodecode_user.user_id, source_repo, source_ref,
849 self._rhodecode_user.user_id, source_repo, source_ref,
847 target_repo, target_ref, commit_ids, reviewers,
850 target_repo, target_ref, commit_ids, reviewers,
848 pullrequest_title, description, reviewer_rules
851 pullrequest_title, description, reviewer_rules
849 )
852 )
850 Session().commit()
853 Session().commit()
851
854
852 h.flash(_('Successfully opened new pull request'),
855 h.flash(_('Successfully opened new pull request'),
853 category='success')
856 category='success')
854 except Exception:
857 except Exception:
855 msg = _('Error occurred during creation of this pull request.')
858 msg = _('Error occurred during creation of this pull request.')
856 log.exception(msg)
859 log.exception(msg)
857 h.flash(msg, category='error')
860 h.flash(msg, category='error')
858
861
859 # copy the args back to redirect
862 # copy the args back to redirect
860 org_query = self.request.GET.mixed()
863 org_query = self.request.GET.mixed()
861 raise HTTPFound(
864 raise HTTPFound(
862 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
865 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
863 _query=org_query))
866 _query=org_query))
864
867
865 raise HTTPFound(
868 raise HTTPFound(
866 h.route_path('pullrequest_show', repo_name=target_repo,
869 h.route_path('pullrequest_show', repo_name=target_repo,
867 pull_request_id=pull_request.pull_request_id))
870 pull_request_id=pull_request.pull_request_id))
868
871
869 @LoginRequired()
872 @LoginRequired()
870 @NotAnonymous()
873 @NotAnonymous()
871 @HasRepoPermissionAnyDecorator(
874 @HasRepoPermissionAnyDecorator(
872 'repository.read', 'repository.write', 'repository.admin')
875 'repository.read', 'repository.write', 'repository.admin')
873 @CSRFRequired()
876 @CSRFRequired()
874 @view_config(
877 @view_config(
875 route_name='pullrequest_update', request_method='POST',
878 route_name='pullrequest_update', request_method='POST',
876 renderer='json_ext')
879 renderer='json_ext')
877 def pull_request_update(self):
880 def pull_request_update(self):
878 pull_request = PullRequest.get_or_404(
881 pull_request = PullRequest.get_or_404(
879 self.request.matchdict['pull_request_id'])
882 self.request.matchdict['pull_request_id'])
880
883
884 self.load_default_context()
881 # only owner or admin can update it
885 # only owner or admin can update it
882 allowed_to_update = PullRequestModel().check_user_update(
886 allowed_to_update = PullRequestModel().check_user_update(
883 pull_request, self._rhodecode_user)
887 pull_request, self._rhodecode_user)
884 if allowed_to_update:
888 if allowed_to_update:
885 controls = peppercorn.parse(self.request.POST.items())
889 controls = peppercorn.parse(self.request.POST.items())
886
890
887 if 'review_members' in controls:
891 if 'review_members' in controls:
888 self._update_reviewers(
892 self._update_reviewers(
889 pull_request, controls['review_members'],
893 pull_request, controls['review_members'],
890 pull_request.reviewer_data)
894 pull_request.reviewer_data)
891 elif str2bool(self.request.POST.get('update_commits', 'false')):
895 elif str2bool(self.request.POST.get('update_commits', 'false')):
892 self._update_commits(pull_request)
896 self._update_commits(pull_request)
893 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
897 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
894 self._edit_pull_request(pull_request)
898 self._edit_pull_request(pull_request)
895 else:
899 else:
896 raise HTTPBadRequest()
900 raise HTTPBadRequest()
897 return True
901 return True
898 raise HTTPForbidden()
902 raise HTTPForbidden()
899
903
900 def _edit_pull_request(self, pull_request):
904 def _edit_pull_request(self, pull_request):
901 _ = self.request.translate
905 _ = self.request.translate
902 try:
906 try:
903 PullRequestModel().edit(
907 PullRequestModel().edit(
904 pull_request, self.request.POST.get('title'),
908 pull_request, self.request.POST.get('title'),
905 self.request.POST.get('description'), self._rhodecode_user)
909 self.request.POST.get('description'), self._rhodecode_user)
906 except ValueError:
910 except ValueError:
907 msg = _(u'Cannot update closed pull requests.')
911 msg = _(u'Cannot update closed pull requests.')
908 h.flash(msg, category='error')
912 h.flash(msg, category='error')
909 return
913 return
910 else:
914 else:
911 Session().commit()
915 Session().commit()
912
916
913 msg = _(u'Pull request title & description updated.')
917 msg = _(u'Pull request title & description updated.')
914 h.flash(msg, category='success')
918 h.flash(msg, category='success')
915 return
919 return
916
920
917 def _update_commits(self, pull_request):
921 def _update_commits(self, pull_request):
918 _ = self.request.translate
922 _ = self.request.translate
919 resp = PullRequestModel().update_commits(pull_request)
923 resp = PullRequestModel().update_commits(pull_request)
920
924
921 if resp.executed:
925 if resp.executed:
922
926
923 if resp.target_changed and resp.source_changed:
927 if resp.target_changed and resp.source_changed:
924 changed = 'target and source repositories'
928 changed = 'target and source repositories'
925 elif resp.target_changed and not resp.source_changed:
929 elif resp.target_changed and not resp.source_changed:
926 changed = 'target repository'
930 changed = 'target repository'
927 elif not resp.target_changed and resp.source_changed:
931 elif not resp.target_changed and resp.source_changed:
928 changed = 'source repository'
932 changed = 'source repository'
929 else:
933 else:
930 changed = 'nothing'
934 changed = 'nothing'
931
935
932 msg = _(
936 msg = _(
933 u'Pull request updated to "{source_commit_id}" with '
937 u'Pull request updated to "{source_commit_id}" with '
934 u'{count_added} added, {count_removed} removed commits. '
938 u'{count_added} added, {count_removed} removed commits. '
935 u'Source of changes: {change_source}')
939 u'Source of changes: {change_source}')
936 msg = msg.format(
940 msg = msg.format(
937 source_commit_id=pull_request.source_ref_parts.commit_id,
941 source_commit_id=pull_request.source_ref_parts.commit_id,
938 count_added=len(resp.changes.added),
942 count_added=len(resp.changes.added),
939 count_removed=len(resp.changes.removed),
943 count_removed=len(resp.changes.removed),
940 change_source=changed)
944 change_source=changed)
941 h.flash(msg, category='success')
945 h.flash(msg, category='success')
942
946
943 channel = '/repo${}$/pr/{}'.format(
947 channel = '/repo${}$/pr/{}'.format(
944 pull_request.target_repo.repo_name,
948 pull_request.target_repo.repo_name,
945 pull_request.pull_request_id)
949 pull_request.pull_request_id)
946 message = msg + (
950 message = msg + (
947 ' - <a onclick="window.location.reload()">'
951 ' - <a onclick="window.location.reload()">'
948 '<strong>{}</strong></a>'.format(_('Reload page')))
952 '<strong>{}</strong></a>'.format(_('Reload page')))
949 channelstream.post_message(
953 channelstream.post_message(
950 channel, message, self._rhodecode_user.username,
954 channel, message, self._rhodecode_user.username,
951 registry=self.request.registry)
955 registry=self.request.registry)
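# The live notification is pushed over channelstream; the channel name
# follows the '/repo$<target_repo>$/pr/<pull_request_id>' pattern built
# above, so presumably only clients viewing this pull request receive it.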
952 else:
956 else:
953 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
957 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
954 warning_reasons = [
958 warning_reasons = [
955 UpdateFailureReason.NO_CHANGE,
959 UpdateFailureReason.NO_CHANGE,
956 UpdateFailureReason.WRONG_REF_TYPE,
960 UpdateFailureReason.WRONG_REF_TYPE,
957 ]
961 ]
958 category = 'warning' if resp.reason in warning_reasons else 'error'
962 category = 'warning' if resp.reason in warning_reasons else 'error'
959 h.flash(msg, category=category)
963 h.flash(msg, category=category)
960
964
961 @LoginRequired()
965 @LoginRequired()
962 @NotAnonymous()
966 @NotAnonymous()
963 @HasRepoPermissionAnyDecorator(
967 @HasRepoPermissionAnyDecorator(
964 'repository.read', 'repository.write', 'repository.admin')
968 'repository.read', 'repository.write', 'repository.admin')
965 @CSRFRequired()
969 @CSRFRequired()
966 @view_config(
970 @view_config(
967 route_name='pullrequest_merge', request_method='POST',
971 route_name='pullrequest_merge', request_method='POST',
968 renderer='json_ext')
972 renderer='json_ext')
969 def pull_request_merge(self):
973 def pull_request_merge(self):
970 """
974 """
971 Merge will perform a server-side merge of the specified
975 Merge will perform a server-side merge of the specified
972 pull request, if the pull request is approved and mergeable.
976 pull request, if the pull request is approved and mergeable.
973 After successful merging, the pull request is automatically
977 After successful merging, the pull request is automatically
974 closed, with a relevant comment.
978 closed, with a relevant comment.
975 """
979 """
976 pull_request = PullRequest.get_or_404(
980 pull_request = PullRequest.get_or_404(
977 self.request.matchdict['pull_request_id'])
981 self.request.matchdict['pull_request_id'])
978
982
983 self.load_default_context()
979 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
984 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
980 translator=self.request.translate)
985 translator=self.request.translate)
981 merge_possible = not check.failed
986 merge_possible = not check.failed
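# MergeCheck.validate bundles the pre-merge conditions (such as review
# status and mergeability) into a single result; every collected error is
# flashed to the user below and the merge is skipped if any check failed.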
982
987
983 for err_type, error_msg in check.errors:
988 for err_type, error_msg in check.errors:
984 h.flash(error_msg, category=err_type)
989 h.flash(error_msg, category=err_type)
985
990
986 if merge_possible:
991 if merge_possible:
987 log.debug("Pre-conditions checked, trying to merge.")
992 log.debug("Pre-conditions checked, trying to merge.")
988 extras = vcs_operation_context(
993 extras = vcs_operation_context(
989 self.request.environ, repo_name=pull_request.target_repo.repo_name,
994 self.request.environ, repo_name=pull_request.target_repo.repo_name,
990 username=self._rhodecode_db_user.username, action='push',
995 username=self._rhodecode_db_user.username, action='push',
991 scm=pull_request.target_repo.repo_type)
996 scm=pull_request.target_repo.repo_type)
992 self._merge_pull_request(
997 self._merge_pull_request(
993 pull_request, self._rhodecode_db_user, extras)
998 pull_request, self._rhodecode_db_user, extras)
994 else:
999 else:
995 log.debug("Pre-conditions failed, NOT merging.")
1000 log.debug("Pre-conditions failed, NOT merging.")
996
1001
997 raise HTTPFound(
1002 raise HTTPFound(
998 h.route_path('pullrequest_show',
1003 h.route_path('pullrequest_show',
999 repo_name=pull_request.target_repo.repo_name,
1004 repo_name=pull_request.target_repo.repo_name,
1000 pull_request_id=pull_request.pull_request_id))
1005 pull_request_id=pull_request.pull_request_id))
1001
1006
1002 def _merge_pull_request(self, pull_request, user, extras):
1007 def _merge_pull_request(self, pull_request, user, extras):
1003 _ = self.request.translate
1008 _ = self.request.translate
1004 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1009 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1005
1010
1006 if merge_resp.executed:
1011 if merge_resp.executed:
1007 log.debug("The merge was successful, closing the pull request.")
1012 log.debug("The merge was successful, closing the pull request.")
1008 PullRequestModel().close_pull_request(
1013 PullRequestModel().close_pull_request(
1009 pull_request.pull_request_id, user)
1014 pull_request.pull_request_id, user)
1010 Session().commit()
1015 Session().commit()
1011 msg = _('Pull request was successfully merged and closed.')
1016 msg = _('Pull request was successfully merged and closed.')
1012 h.flash(msg, category='success')
1017 h.flash(msg, category='success')
1013 else:
1018 else:
1014 log.debug(
1019 log.debug(
1015 "The merge was not successful. Merge response: %s",
1020 "The merge was not successful. Merge response: %s",
1016 merge_resp)
1021 merge_resp)
1017 msg = PullRequestModel().merge_status_message(
1022 msg = PullRequestModel().merge_status_message(
1018 merge_resp.failure_reason)
1023 merge_resp.failure_reason)
1019 h.flash(msg, category='error')
1024 h.flash(msg, category='error')
1020
1025
1021 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1026 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1022 _ = self.request.translate
1027 _ = self.request.translate
1023 get_default_reviewers_data, validate_default_reviewers = \
1028 get_default_reviewers_data, validate_default_reviewers = \
1024 PullRequestModel().get_reviewer_functions()
1029 PullRequestModel().get_reviewer_functions()
1025
1030
1026 try:
1031 try:
1027 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1032 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1028 except ValueError as e:
1033 except ValueError as e:
1029 log.error('Reviewers Validation: {}'.format(e))
1034 log.error('Reviewers Validation: {}'.format(e))
1030 h.flash(e, category='error')
1035 h.flash(e, category='error')
1031 return
1036 return
1032
1037
1033 PullRequestModel().update_reviewers(
1038 PullRequestModel().update_reviewers(
1034 pull_request, reviewers, self._rhodecode_user)
1039 pull_request, reviewers, self._rhodecode_user)
1035 h.flash(_('Pull request reviewers updated.'), category='success')
1040 h.flash(_('Pull request reviewers updated.'), category='success')
1036 Session().commit()
1041 Session().commit()
1037
1042
1038 @LoginRequired()
1043 @LoginRequired()
1039 @NotAnonymous()
1044 @NotAnonymous()
1040 @HasRepoPermissionAnyDecorator(
1045 @HasRepoPermissionAnyDecorator(
1041 'repository.read', 'repository.write', 'repository.admin')
1046 'repository.read', 'repository.write', 'repository.admin')
1042 @CSRFRequired()
1047 @CSRFRequired()
1043 @view_config(
1048 @view_config(
1044 route_name='pullrequest_delete', request_method='POST',
1049 route_name='pullrequest_delete', request_method='POST',
1045 renderer='json_ext')
1050 renderer='json_ext')
1046 def pull_request_delete(self):
1051 def pull_request_delete(self):
1047 _ = self.request.translate
1052 _ = self.request.translate
1048
1053
1049 pull_request = PullRequest.get_or_404(
1054 pull_request = PullRequest.get_or_404(
1050 self.request.matchdict['pull_request_id'])
1055 self.request.matchdict['pull_request_id'])
1056 self.load_default_context()
1051
1057
1052 pr_closed = pull_request.is_closed()
1058 pr_closed = pull_request.is_closed()
1053 allowed_to_delete = PullRequestModel().check_user_delete(
1059 allowed_to_delete = PullRequestModel().check_user_delete(
1054 pull_request, self._rhodecode_user) and not pr_closed
1060 pull_request, self._rhodecode_user) and not pr_closed
1055
1061
1056 # only the owner can delete it!
1062 # only the owner can delete it!
1057 if allowed_to_delete:
1063 if allowed_to_delete:
1058 PullRequestModel().delete(pull_request, self._rhodecode_user)
1064 PullRequestModel().delete(pull_request, self._rhodecode_user)
1059 Session().commit()
1065 Session().commit()
1060 h.flash(_('Successfully deleted pull request'),
1066 h.flash(_('Successfully deleted pull request'),
1061 category='success')
1067 category='success')
1062 raise HTTPFound(h.route_path('pullrequest_show_all',
1068 raise HTTPFound(h.route_path('pullrequest_show_all',
1063 repo_name=self.db_repo_name))
1069 repo_name=self.db_repo_name))
1064
1070
1065 log.warning('user %s tried to delete pull request without access',
1071 log.warning('user %s tried to delete pull request without access',
1066 self._rhodecode_user)
1072 self._rhodecode_user)
1067 raise HTTPNotFound()
1073 raise HTTPNotFound()
1068
1074
1069 @LoginRequired()
1075 @LoginRequired()
1070 @NotAnonymous()
1076 @NotAnonymous()
1071 @HasRepoPermissionAnyDecorator(
1077 @HasRepoPermissionAnyDecorator(
1072 'repository.read', 'repository.write', 'repository.admin')
1078 'repository.read', 'repository.write', 'repository.admin')
1073 @CSRFRequired()
1079 @CSRFRequired()
1074 @view_config(
1080 @view_config(
1075 route_name='pullrequest_comment_create', request_method='POST',
1081 route_name='pullrequest_comment_create', request_method='POST',
1076 renderer='json_ext')
1082 renderer='json_ext')
1077 def pull_request_comment_create(self):
1083 def pull_request_comment_create(self):
1078 _ = self.request.translate
1084 _ = self.request.translate
1079
1085
1080 pull_request = PullRequest.get_or_404(
1086 pull_request = PullRequest.get_or_404(
1081 self.request.matchdict['pull_request_id'])
1087 self.request.matchdict['pull_request_id'])
1082 pull_request_id = pull_request.pull_request_id
1088 pull_request_id = pull_request.pull_request_id
1083
1089
1084 if pull_request.is_closed():
1090 if pull_request.is_closed():
1085 log.debug('comment: forbidden because pull request is closed')
1091 log.debug('comment: forbidden because pull request is closed')
1086 raise HTTPForbidden()
1092 raise HTTPForbidden()
1087
1093
1088 allowed_to_comment = PullRequestModel().check_user_comment(
1094 allowed_to_comment = PullRequestModel().check_user_comment(
1089 pull_request, self._rhodecode_user)
1095 pull_request, self._rhodecode_user)
1090 if not allowed_to_comment:
1096 if not allowed_to_comment:
1091 log.debug(
1097 log.debug(
1092 'comment: forbidden because pull request is from forbidden repo')
1098 'comment: forbidden because pull request is from forbidden repo')
1093 raise HTTPForbidden()
1099 raise HTTPForbidden()
1094
1100
1095 c = self.load_default_context()
1101 c = self.load_default_context()
1096
1102
1097 status = self.request.POST.get('changeset_status', None)
1103 status = self.request.POST.get('changeset_status', None)
1098 text = self.request.POST.get('text')
1104 text = self.request.POST.get('text')
1099 comment_type = self.request.POST.get('comment_type')
1105 comment_type = self.request.POST.get('comment_type')
1100 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1106 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1101 close_pull_request = self.request.POST.get('close_pull_request')
1107 close_pull_request = self.request.POST.get('close_pull_request')
1102
1108
1103 # the logic here works as follows: if we submit a close-PR
1109 # the logic here works as follows: if we submit a close-PR
1104 # comment, use the `close_pull_request_with_comment` function,
1110 # comment, use the `close_pull_request_with_comment` function,
1105 # otherwise handle the regular comment logic
1111 # otherwise handle the regular comment logic
1106
1112
1107 if close_pull_request:
1113 if close_pull_request:
1108 # only owner or admin or person with write permissions
1114 # only owner or admin or person with write permissions
1109 allowed_to_close = PullRequestModel().check_user_update(
1115 allowed_to_close = PullRequestModel().check_user_update(
1110 pull_request, self._rhodecode_user)
1116 pull_request, self._rhodecode_user)
1111 if not allowed_to_close:
1117 if not allowed_to_close:
1112 log.debug('comment: forbidden because not allowed to close '
1118 log.debug('comment: forbidden because not allowed to close '
1113 'pull request %s', pull_request_id)
1119 'pull request %s', pull_request_id)
1114 raise HTTPForbidden()
1120 raise HTTPForbidden()
1115 comment, status = PullRequestModel().close_pull_request_with_comment(
1121 comment, status = PullRequestModel().close_pull_request_with_comment(
1116 pull_request, self._rhodecode_user, self.db_repo, message=text)
1122 pull_request, self._rhodecode_user, self.db_repo, message=text)
1117 Session().flush()
1123 Session().flush()
1118 events.trigger(
1124 events.trigger(
1119 events.PullRequestCommentEvent(pull_request, comment))
1125 events.PullRequestCommentEvent(pull_request, comment))
1120
1126
1121 else:
1127 else:
1122 # regular comment case: it could be inline, or one with a status change.
1128 # regular comment case: it could be inline, or one with a status change.
1123 # for that one we also check permissions
1129 # for that one we also check permissions
1124
1130
1125 allowed_to_change_status = PullRequestModel().check_user_change_status(
1131 allowed_to_change_status = PullRequestModel().check_user_change_status(
1126 pull_request, self._rhodecode_user)
1132 pull_request, self._rhodecode_user)
1127
1133
1128 if status and allowed_to_change_status:
1134 if status and allowed_to_change_status:
1129 message = (_('Status change %(transition_icon)s %(status)s')
1135 message = (_('Status change %(transition_icon)s %(status)s')
1130 % {'transition_icon': '>',
1136 % {'transition_icon': '>',
1131 'status': ChangesetStatus.get_status_lbl(status)})
1137 'status': ChangesetStatus.get_status_lbl(status)})
1132 text = text or message
1138 text = text or message
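# If the reviewer only changed the status without writing anything, a default
# message such as 'Status change > Approved' is stored as the comment text.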
1133
1139
1134 comment = CommentsModel().create(
1140 comment = CommentsModel().create(
1135 text=text,
1141 text=text,
1136 repo=self.db_repo.repo_id,
1142 repo=self.db_repo.repo_id,
1137 user=self._rhodecode_user.user_id,
1143 user=self._rhodecode_user.user_id,
1138 pull_request=pull_request,
1144 pull_request=pull_request,
1139 f_path=self.request.POST.get('f_path'),
1145 f_path=self.request.POST.get('f_path'),
1140 line_no=self.request.POST.get('line'),
1146 line_no=self.request.POST.get('line'),
1141 status_change=(ChangesetStatus.get_status_lbl(status)
1147 status_change=(ChangesetStatus.get_status_lbl(status)
1142 if status and allowed_to_change_status else None),
1148 if status and allowed_to_change_status else None),
1143 status_change_type=(status
1149 status_change_type=(status
1144 if status and allowed_to_change_status else None),
1150 if status and allowed_to_change_status else None),
1145 comment_type=comment_type,
1151 comment_type=comment_type,
1146 resolves_comment_id=resolves_comment_id
1152 resolves_comment_id=resolves_comment_id
1147 )
1153 )
1148
1154
1149 if allowed_to_change_status:
1155 if allowed_to_change_status:
1150 # calculate old status before we change it
1156 # calculate old status before we change it
1151 old_calculated_status = pull_request.calculated_review_status()
1157 old_calculated_status = pull_request.calculated_review_status()
1152
1158
1153 # get the status, if set
1159 # get the status, if set
1154 if status:
1160 if status:
1155 ChangesetStatusModel().set_status(
1161 ChangesetStatusModel().set_status(
1156 self.db_repo.repo_id,
1162 self.db_repo.repo_id,
1157 status,
1163 status,
1158 self._rhodecode_user.user_id,
1164 self._rhodecode_user.user_id,
1159 comment,
1165 comment,
1160 pull_request=pull_request
1166 pull_request=pull_request
1161 )
1167 )
1162
1168
1163 Session().flush()
1169 Session().flush()
1164 events.trigger(
1170 events.trigger(
1165 events.PullRequestCommentEvent(pull_request, comment))
1171 events.PullRequestCommentEvent(pull_request, comment))
1166
1172
1167 # we now calculate the status of the pull request, and based on
1173 # we now calculate the status of the pull request, and based on
1168 # that calculation we set the status of the commits
1174 # that calculation we set the status of the commits
1169 calculated_status = pull_request.calculated_review_status()
1175 calculated_status = pull_request.calculated_review_status()
1170 if old_calculated_status != calculated_status:
1176 if old_calculated_status != calculated_status:
1171 PullRequestModel()._trigger_pull_request_hook(
1177 PullRequestModel()._trigger_pull_request_hook(
1172 pull_request, self._rhodecode_user, 'review_status_change')
1178 pull_request, self._rhodecode_user, 'review_status_change')
1173
1179
1174 Session().commit()
1180 Session().commit()
1175
1181
1176 data = {
1182 data = {
1177 'target_id': h.safeid(h.safe_unicode(
1183 'target_id': h.safeid(h.safe_unicode(
1178 self.request.POST.get('f_path'))),
1184 self.request.POST.get('f_path'))),
1179 }
1185 }
1180 if comment:
1186 if comment:
1181 c.co = comment
1187 c.co = comment
1182 rendered_comment = render(
1188 rendered_comment = render(
1183 'rhodecode:templates/changeset/changeset_comment_block.mako',
1189 'rhodecode:templates/changeset/changeset_comment_block.mako',
1184 self._get_template_context(c), self.request)
1190 self._get_template_context(c), self.request)
1185
1191
1186 data.update(comment.get_dict())
1192 data.update(comment.get_dict())
1187 data.update({'rendered_text': rendered_comment})
1193 data.update({'rendered_text': rendered_comment})
1188
1194
1189 return data
1195 return data
1190
1196
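The comment view above is driven entirely by a handful of POST fields. A minimal sketch of the payload a client might submit, assuming the field names read by the handler; the endpoint URL, the CSRF field name, and the example values are assumptions, not taken from this changeset:

# Hypothetical POST payload for the pull request comment view above.
comment_payload = {
    'text': 'Looks good to me',       # comment body; falls back to the
                                      # status-change message when empty
    'changeset_status': 'approved',   # assumed name of the status field
    'comment_type': 'note',           # assumed default comment type
    'resolves_comment_id': None,      # id of a TODO comment to resolve
    'close_pull_request': '',         # any non-empty value triggers the
                                      # close-with-comment branch
    'f_path': None,                   # file path for inline comments
    'line': None,                     # line number for inline comments
    'csrf_token': '<token>',          # assumed; required by @CSRFRequired()
}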
1191 @LoginRequired()
1197 @LoginRequired()
1192 @NotAnonymous()
1198 @NotAnonymous()
1193 @HasRepoPermissionAnyDecorator(
1199 @HasRepoPermissionAnyDecorator(
1194 'repository.read', 'repository.write', 'repository.admin')
1200 'repository.read', 'repository.write', 'repository.admin')
1195 @CSRFRequired()
1201 @CSRFRequired()
1196 @view_config(
1202 @view_config(
1197 route_name='pullrequest_comment_delete', request_method='POST',
1203 route_name='pullrequest_comment_delete', request_method='POST',
1198 renderer='json_ext')
1204 renderer='json_ext')
1199 def pull_request_comment_delete(self):
1205 def pull_request_comment_delete(self):
1200 pull_request = PullRequest.get_or_404(
1206 pull_request = PullRequest.get_or_404(
1201 self.request.matchdict['pull_request_id'])
1207 self.request.matchdict['pull_request_id'])
1202
1208
1203 comment = ChangesetComment.get_or_404(
1209 comment = ChangesetComment.get_or_404(
1204 self.request.matchdict['comment_id'])
1210 self.request.matchdict['comment_id'])
1205 comment_id = comment.comment_id
1211 comment_id = comment.comment_id
1206
1212
1207 if pull_request.is_closed():
1213 if pull_request.is_closed():
1208 log.debug('comment: forbidden because pull request is closed')
1214 log.debug('comment: forbidden because pull request is closed')
1209 raise HTTPForbidden()
1215 raise HTTPForbidden()
1210
1216
1211 if not comment:
1217 if not comment:
1212 log.debug('Comment with id:%s not found, skipping', comment_id)
1218 log.debug('Comment with id:%s not found, skipping', comment_id)
1213 # the comment was probably already deleted in another call
1219 # the comment was probably already deleted in another call
1214 return True
1220 return True
1215
1221
1216 if comment.pull_request.is_closed():
1222 if comment.pull_request.is_closed():
1217 # don't allow deleting comments on closed pull request
1223 # don't allow deleting comments on closed pull request
1218 raise HTTPForbidden()
1224 raise HTTPForbidden()
1219
1225
1220 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1226 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1221 super_admin = h.HasPermissionAny('hg.admin')()
1227 super_admin = h.HasPermissionAny('hg.admin')()
1222 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1228 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1223 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1229 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1224 comment_repo_admin = is_repo_admin and is_repo_comment
1230 comment_repo_admin = is_repo_admin and is_repo_comment
1225
1231
1226 if super_admin or comment_owner or comment_repo_admin:
1232 if super_admin or comment_owner or comment_repo_admin:
1227 old_calculated_status = comment.pull_request.calculated_review_status()
1233 old_calculated_status = comment.pull_request.calculated_review_status()
1228 CommentsModel().delete(comment=comment, user=self._rhodecode_user)
1234 CommentsModel().delete(comment=comment, user=self._rhodecode_user)
1229 Session().commit()
1235 Session().commit()
1230 calculated_status = comment.pull_request.calculated_review_status()
1236 calculated_status = comment.pull_request.calculated_review_status()
1231 if old_calculated_status != calculated_status:
1237 if old_calculated_status != calculated_status:
1232 PullRequestModel()._trigger_pull_request_hook(
1238 PullRequestModel()._trigger_pull_request_hook(
1233 comment.pull_request, self._rhodecode_user, 'review_status_change')
1239 comment.pull_request, self._rhodecode_user, 'review_status_change')
1234 return True
1240 return True
1235 else:
1241 else:
1236 log.warning('No permissions for user %s to delete comment_id: %s',
1242 log.warning('No permissions for user %s to delete comment_id: %s',
1237 self._rhodecode_db_user, comment_id)
1243 self._rhodecode_db_user, comment_id)
1238 raise HTTPNotFound()
1244 raise HTTPNotFound()
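The delete branch above reduces to one permission predicate: a super admin, the comment author, or a repository admin of the repository the comment belongs to. A minimal standalone sketch of that rule, using the boolean inputs computed in the view (this helper is illustrative, not part of RhodeCode):

def may_delete_comment(super_admin, comment_owner,
                       is_repo_admin, is_repo_comment):
    # Mirrors the check in pull_request_comment_delete(): repo admins may
    # only delete comments that belong to the repository they administer.
    comment_repo_admin = is_repo_admin and is_repo_comment
    return super_admin or comment_owner or comment_repo_admin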
@@ -1,251 +1,252 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import deform
23 import deform
24 from pyramid.httpexceptions import HTTPFound
24 from pyramid.httpexceptions import HTTPFound
25 from pyramid.view import view_config
25 from pyramid.view import view_config
26
26
27 from rhodecode.apps._base import RepoAppView
27 from rhodecode.apps._base import RepoAppView
28 from rhodecode.forms import RcForm
28 from rhodecode.forms import RcForm
29 from rhodecode.lib import helpers as h
29 from rhodecode.lib import helpers as h
30 from rhodecode.lib import audit_logger
30 from rhodecode.lib import audit_logger
31 from rhodecode.lib.auth import (
31 from rhodecode.lib.auth import (
32 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
32 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
33 from rhodecode.model.db import RepositoryField, RepoGroup, Repository
33 from rhodecode.model.db import RepositoryField, RepoGroup, Repository
34 from rhodecode.model.meta import Session
34 from rhodecode.model.meta import Session
35 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
36 from rhodecode.model.scm import RepoGroupList, ScmModel
36 from rhodecode.model.scm import RepoGroupList, ScmModel
37 from rhodecode.model.validation_schema.schemas import repo_schema
37 from rhodecode.model.validation_schema.schemas import repo_schema
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 class RepoSettingsView(RepoAppView):
42 class RepoSettingsView(RepoAppView):
43
43
44 def load_default_context(self):
44 def load_default_context(self):
45 c = self._get_local_tmpl_context()
45 c = self._get_local_tmpl_context()
46
46
47 acl_groups = RepoGroupList(
47 acl_groups = RepoGroupList(
48 RepoGroup.query().all(),
48 RepoGroup.query().all(),
49 perm_set=['group.write', 'group.admin'])
49 perm_set=['group.write', 'group.admin'])
50 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
50 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
51 c.repo_groups_choices = map(lambda k: k[0], c.repo_groups)
51 c.repo_groups_choices = map(lambda k: k[0], c.repo_groups)
52
52
53 # in case someone no longer has group.write access to a repository,
53 # in case someone no longer has group.write access to a repository,
54 # pre-fill the list with this entry; we don't care if it is the same,
54 # pre-fill the list with this entry; we don't care if it is the same,
55 # but it will allow saving the repo data properly.
55 # but it will allow saving the repo data properly.
56 repo_group = self.db_repo.group
56 repo_group = self.db_repo.group
57 if repo_group and repo_group.group_id not in c.repo_groups_choices:
57 if repo_group and repo_group.group_id not in c.repo_groups_choices:
58 c.repo_groups_choices.append(repo_group.group_id)
58 c.repo_groups_choices.append(repo_group.group_id)
59 c.repo_groups.append(RepoGroup._generate_choice(repo_group))
59 c.repo_groups.append(RepoGroup._generate_choice(repo_group))
60
60
61 if c.repository_requirements_missing or self.rhodecode_vcs_repo is None:
61 if c.repository_requirements_missing or self.rhodecode_vcs_repo is None:
62 # we might be in missing requirement state, so we load things
62 # we might be in missing requirement state, so we load things
63 # without touching scm_instance()
63 # without touching scm_instance()
64 c.landing_revs_choices, c.landing_revs = \
64 c.landing_revs_choices, c.landing_revs = \
65 ScmModel().get_repo_landing_revs()
65 ScmModel().get_repo_landing_revs(self.request.translate)
66 else:
66 else:
67 c.landing_revs_choices, c.landing_revs = \
67 c.landing_revs_choices, c.landing_revs = \
68 ScmModel().get_repo_landing_revs(self.db_repo)
68 ScmModel().get_repo_landing_revs(
69 self.request.translate, self.db_repo)
69
70
70 c.personal_repo_group = c.auth_user.personal_repo_group
71 c.personal_repo_group = c.auth_user.personal_repo_group
71 c.repo_fields = RepositoryField.query()\
72 c.repo_fields = RepositoryField.query()\
72 .filter(RepositoryField.repository == self.db_repo).all()
73 .filter(RepositoryField.repository == self.db_repo).all()
73
74
74
75
75 return c
76 return c
76
77
77 def _get_schema(self, c, old_values=None):
78 def _get_schema(self, c, old_values=None):
78 return repo_schema.RepoSettingsSchema().bind(
79 return repo_schema.RepoSettingsSchema().bind(
79 repo_type=self.db_repo.repo_type,
80 repo_type=self.db_repo.repo_type,
80 repo_type_options=[self.db_repo.repo_type],
81 repo_type_options=[self.db_repo.repo_type],
81 repo_ref_options=c.landing_revs_choices,
82 repo_ref_options=c.landing_revs_choices,
82 repo_ref_items=c.landing_revs,
83 repo_ref_items=c.landing_revs,
83 repo_repo_group_options=c.repo_groups_choices,
84 repo_repo_group_options=c.repo_groups_choices,
84 repo_repo_group_items=c.repo_groups,
85 repo_repo_group_items=c.repo_groups,
85 # user caller
86 # user caller
86 user=self._rhodecode_user,
87 user=self._rhodecode_user,
87 old_values=old_values
88 old_values=old_values
88 )
89 )
89
90
90 @LoginRequired()
91 @LoginRequired()
91 @HasRepoPermissionAnyDecorator('repository.admin')
92 @HasRepoPermissionAnyDecorator('repository.admin')
92 @view_config(
93 @view_config(
93 route_name='edit_repo', request_method='GET',
94 route_name='edit_repo', request_method='GET',
94 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
95 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
95 def edit_settings(self):
96 def edit_settings(self):
96 c = self.load_default_context()
97 c = self.load_default_context()
97 c.active = 'settings'
98 c.active = 'settings'
98
99
99 defaults = RepoModel()._get_defaults(self.db_repo_name)
100 defaults = RepoModel()._get_defaults(self.db_repo_name)
100 defaults['repo_owner'] = defaults['user']
101 defaults['repo_owner'] = defaults['user']
101 defaults['repo_landing_commit_ref'] = defaults['repo_landing_rev']
102 defaults['repo_landing_commit_ref'] = defaults['repo_landing_rev']
102
103
103 schema = self._get_schema(c)
104 schema = self._get_schema(c)
104 c.form = RcForm(schema, appstruct=defaults)
105 c.form = RcForm(schema, appstruct=defaults)
105 return self._get_template_context(c)
106 return self._get_template_context(c)
106
107
107 @LoginRequired()
108 @LoginRequired()
108 @HasRepoPermissionAnyDecorator('repository.admin')
109 @HasRepoPermissionAnyDecorator('repository.admin')
109 @CSRFRequired()
110 @CSRFRequired()
110 @view_config(
111 @view_config(
111 route_name='edit_repo', request_method='POST',
112 route_name='edit_repo', request_method='POST',
112 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
113 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
113 def edit_settings_update(self):
114 def edit_settings_update(self):
114 _ = self.request.translate
115 _ = self.request.translate
115 c = self.load_default_context()
116 c = self.load_default_context()
116 c.active = 'settings'
117 c.active = 'settings'
117 old_repo_name = self.db_repo_name
118 old_repo_name = self.db_repo_name
118
119
119 old_values = self.db_repo.get_api_data()
120 old_values = self.db_repo.get_api_data()
120 schema = self._get_schema(c, old_values=old_values)
121 schema = self._get_schema(c, old_values=old_values)
121
122
122 c.form = RcForm(schema)
123 c.form = RcForm(schema)
123 pstruct = self.request.POST.items()
124 pstruct = self.request.POST.items()
124 pstruct.append(('repo_type', self.db_repo.repo_type))
125 pstruct.append(('repo_type', self.db_repo.repo_type))
125 try:
126 try:
126 schema_data = c.form.validate(pstruct)
127 schema_data = c.form.validate(pstruct)
127 except deform.ValidationFailure as err_form:
128 except deform.ValidationFailure as err_form:
128 return self._get_template_context(c)
129 return self._get_template_context(c)
129
130
130 # data is now VALID, proceed with updates
131 # data is now VALID, proceed with updates
131 # save validated data back into the updates dict
132 # save validated data back into the updates dict
132 validated_updates = dict(
133 validated_updates = dict(
133 repo_name=schema_data['repo_group']['repo_name_without_group'],
134 repo_name=schema_data['repo_group']['repo_name_without_group'],
134 repo_group=schema_data['repo_group']['repo_group_id'],
135 repo_group=schema_data['repo_group']['repo_group_id'],
135
136
136 user=schema_data['repo_owner'],
137 user=schema_data['repo_owner'],
137 repo_description=schema_data['repo_description'],
138 repo_description=schema_data['repo_description'],
138 repo_private=schema_data['repo_private'],
139 repo_private=schema_data['repo_private'],
139 clone_uri=schema_data['repo_clone_uri'],
140 clone_uri=schema_data['repo_clone_uri'],
140 repo_landing_rev=schema_data['repo_landing_commit_ref'],
141 repo_landing_rev=schema_data['repo_landing_commit_ref'],
141 repo_enable_statistics=schema_data['repo_enable_statistics'],
142 repo_enable_statistics=schema_data['repo_enable_statistics'],
142 repo_enable_locking=schema_data['repo_enable_locking'],
143 repo_enable_locking=schema_data['repo_enable_locking'],
143 repo_enable_downloads=schema_data['repo_enable_downloads'],
144 repo_enable_downloads=schema_data['repo_enable_downloads'],
144 )
145 )
145 # detect if the CLONE URI changed; if we get OLD it means we keep the old value
146 # detect if the CLONE URI changed; if we get OLD it means we keep the old value
146 if schema_data['repo_clone_uri_change'] == 'OLD':
147 if schema_data['repo_clone_uri_change'] == 'OLD':
147 validated_updates['clone_uri'] = self.db_repo.clone_uri
148 validated_updates['clone_uri'] = self.db_repo.clone_uri
148
149
149 # use the new full name for redirect
150 # use the new full name for redirect
150 new_repo_name = schema_data['repo_group']['repo_name_with_group']
151 new_repo_name = schema_data['repo_group']['repo_name_with_group']
151
152
152 # save extra fields into our validated data
153 # save extra fields into our validated data
153 for key, value in pstruct:
154 for key, value in pstruct:
154 if key.startswith(RepositoryField.PREFIX):
155 if key.startswith(RepositoryField.PREFIX):
155 validated_updates[key] = value
156 validated_updates[key] = value
156
157
157 try:
158 try:
158 RepoModel().update(self.db_repo, **validated_updates)
159 RepoModel().update(self.db_repo, **validated_updates)
159 ScmModel().mark_for_invalidation(new_repo_name)
160 ScmModel().mark_for_invalidation(new_repo_name)
160
161
161 audit_logger.store_web(
162 audit_logger.store_web(
162 'repo.edit', action_data={'old_data': old_values},
163 'repo.edit', action_data={'old_data': old_values},
163 user=self._rhodecode_user, repo=self.db_repo)
164 user=self._rhodecode_user, repo=self.db_repo)
164
165
165 Session().commit()
166 Session().commit()
166
167
167 h.flash(_('Repository `{}` updated successfully').format(
168 h.flash(_('Repository `{}` updated successfully').format(
168 old_repo_name), category='success')
169 old_repo_name), category='success')
169 except Exception:
170 except Exception:
170 log.exception("Exception during update of repository")
171 log.exception("Exception during update of repository")
171 h.flash(_('Error occurred during update of repository {}').format(
172 h.flash(_('Error occurred during update of repository {}').format(
172 old_repo_name), category='error')
173 old_repo_name), category='error')
173
174
174 raise HTTPFound(
175 raise HTTPFound(
175 h.route_path('edit_repo', repo_name=new_repo_name))
176 h.route_path('edit_repo', repo_name=new_repo_name))
176
177
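Custom repository fields survive validation because the raw POST is re-scanned for the field prefix after the schema pass. A minimal sketch of that pattern, using a hypothetical prefix value (the real one lives on RepositoryField.PREFIX):

# Hypothetical illustration of how extra fields are merged back in.
FIELD_PREFIX = 'ex_'  # assumed; the real value is RepositoryField.PREFIX

pstruct = [('repo_description', 'demo'), ('ex_ticket_url', 'https://t/{id}')]
validated_updates = {'repo_description': 'demo'}

for key, value in pstruct:
    if key.startswith(FIELD_PREFIX):
        validated_updates[key] = value

print(validated_updates)
# {'repo_description': 'demo', 'ex_ticket_url': 'https://t/{id}'}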
177 @LoginRequired()
178 @LoginRequired()
178 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
179 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
179 @view_config(
180 @view_config(
180 route_name='repo_edit_toggle_locking', request_method='GET',
181 route_name='repo_edit_toggle_locking', request_method='GET',
181 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
182 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
182 def toggle_locking(self):
183 def toggle_locking(self):
183 """
184 """
184 Toggle locking of a repository via a simple GET call to this URL
185 Toggle locking of a repository via a simple GET call to this URL
185 """
186 """
186 _ = self.request.translate
187 _ = self.request.translate
187 repo = self.db_repo
188 repo = self.db_repo
188
189
189 try:
190 try:
190 if repo.enable_locking:
191 if repo.enable_locking:
191 if repo.locked[0]:
192 if repo.locked[0]:
192 Repository.unlock(repo)
193 Repository.unlock(repo)
193 action = _('Unlocked')
194 action = _('Unlocked')
194 else:
195 else:
195 Repository.lock(
196 Repository.lock(
196 repo, self._rhodecode_user.user_id,
197 repo, self._rhodecode_user.user_id,
197 lock_reason=Repository.LOCK_WEB)
198 lock_reason=Repository.LOCK_WEB)
198 action = _('Locked')
199 action = _('Locked')
199
200
200 h.flash(_('Repository has been %s') % action,
201 h.flash(_('Repository has been %s') % action,
201 category='success')
202 category='success')
202 except Exception:
203 except Exception:
203 log.exception("Exception during unlocking")
204 log.exception("Exception during unlocking")
204 h.flash(_('An error occurred during unlocking'),
205 h.flash(_('An error occurred during unlocking'),
205 category='error')
206 category='error')
206 raise HTTPFound(
207 raise HTTPFound(
207 h.route_path('repo_summary', repo_name=self.db_repo_name))
208 h.route_path('repo_summary', repo_name=self.db_repo_name))
208
209
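For reference, the lock toggle above can be read as a small decision helper. A sketch under the assumption that `repo` exposes the same `enable_locking` and `locked` attributes used in the view (the helper itself is hypothetical):

from rhodecode.model.db import Repository  # same import as this module

def toggle_repo_lock(repo, user_id):
    # Mirrors toggle_locking(): do nothing when locking is disabled,
    # otherwise flip the current lock state.
    if not repo.enable_locking:
        return None
    if repo.locked[0]:
        Repository.unlock(repo)
        return 'Unlocked'
    Repository.lock(repo, user_id, lock_reason=Repository.LOCK_WEB)
    return 'Locked'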
209 @LoginRequired()
210 @LoginRequired()
210 @HasRepoPermissionAnyDecorator('repository.admin')
211 @HasRepoPermissionAnyDecorator('repository.admin')
211 @view_config(
212 @view_config(
212 route_name='edit_repo_statistics', request_method='GET',
213 route_name='edit_repo_statistics', request_method='GET',
213 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
214 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
214 def edit_statistics_form(self):
215 def edit_statistics_form(self):
215 c = self.load_default_context()
216 c = self.load_default_context()
216
217
217 if self.db_repo.stats:
218 if self.db_repo.stats:
218 # this is the revision we ended up on, so we add +1 for the count
219 # this is the revision we ended up on, so we add +1 for the count
219 last_rev = self.db_repo.stats.stat_on_revision + 1
220 last_rev = self.db_repo.stats.stat_on_revision + 1
220 else:
221 else:
221 last_rev = 0
222 last_rev = 0
222
223
223 c.active = 'statistics'
224 c.active = 'statistics'
224 c.stats_revision = last_rev
225 c.stats_revision = last_rev
225 c.repo_last_rev = self.rhodecode_vcs_repo.count()
226 c.repo_last_rev = self.rhodecode_vcs_repo.count()
226
227
227 if last_rev == 0 or c.repo_last_rev == 0:
228 if last_rev == 0 or c.repo_last_rev == 0:
228 c.stats_percentage = 0
229 c.stats_percentage = 0
229 else:
230 else:
230 c.stats_percentage = '%.2f' % (
231 c.stats_percentage = '%.2f' % (
231 (float((last_rev)) / c.repo_last_rev) * 100)
232 (float((last_rev)) / c.repo_last_rev) * 100)
232 return self._get_template_context(c)
233 return self._get_template_context(c)
233
234
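The percentage shown on the statistics page is simply the indexed revision count over the repository's total commit count, guarded against division by zero. A worked sketch with hypothetical numbers:

# Hypothetical values: stats were gathered up to revision index 149,
# so last_rev = 149 + 1 = 150, out of 200 commits in the repository.
last_rev = 150
repo_last_rev = 200

if last_rev == 0 or repo_last_rev == 0:
    stats_percentage = 0
else:
    stats_percentage = '%.2f' % (float(last_rev) / repo_last_rev * 100)

print(stats_percentage)  # '75.00'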
234 @LoginRequired()
235 @LoginRequired()
235 @HasRepoPermissionAnyDecorator('repository.admin')
236 @HasRepoPermissionAnyDecorator('repository.admin')
236 @CSRFRequired()
237 @CSRFRequired()
237 @view_config(
238 @view_config(
238 route_name='edit_repo_statistics_reset', request_method='POST',
239 route_name='edit_repo_statistics_reset', request_method='POST',
239 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
240 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
240 def repo_statistics_reset(self):
241 def repo_statistics_reset(self):
241 _ = self.request.translate
242 _ = self.request.translate
242
243
243 try:
244 try:
244 RepoModel().delete_stats(self.db_repo_name)
245 RepoModel().delete_stats(self.db_repo_name)
245 Session().commit()
246 Session().commit()
246 except Exception:
247 except Exception:
247 log.exception('Edit statistics failure')
248 log.exception('Edit statistics failure')
248 h.flash(_('An error occurred during deletion of repository stats'),
249 h.flash(_('An error occurred during deletion of repository stats'),
249 category='error')
250 category='error')
250 raise HTTPFound(
251 raise HTTPFound(
251 h.route_path('edit_repo_statistics', repo_name=self.db_repo_name))
252 h.route_path('edit_repo_statistics', repo_name=self.db_repo_name))
@@ -1,546 +1,546 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import peppercorn
23 import peppercorn
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.response import Response
28 from pyramid.response import Response
29 from pyramid.renderers import render
29 from pyramid.renderers import render
30
30
31 from rhodecode.lib.exceptions import (
31 from rhodecode.lib.exceptions import (
32 RepoGroupAssignmentError, UserGroupAssignedException)
32 RepoGroupAssignmentError, UserGroupAssignedException)
33 from rhodecode.model.forms import (
33 from rhodecode.model.forms import (
34 UserGroupPermsForm, UserGroupForm, UserIndividualPermissionsForm,
34 UserGroupPermsForm, UserGroupForm, UserIndividualPermissionsForm,
35 UserPermissionsForm)
35 UserPermissionsForm)
36 from rhodecode.model.permission import PermissionModel
36 from rhodecode.model.permission import PermissionModel
37
37
38 from rhodecode.apps._base import UserGroupAppView
38 from rhodecode.apps._base import UserGroupAppView
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasUserGroupPermissionAnyDecorator, CSRFRequired)
40 LoginRequired, HasUserGroupPermissionAnyDecorator, CSRFRequired)
41 from rhodecode.lib import helpers as h, audit_logger
41 from rhodecode.lib import helpers as h, audit_logger
42 from rhodecode.lib.utils2 import str2bool
42 from rhodecode.lib.utils2 import str2bool
43 from rhodecode.model.db import (
43 from rhodecode.model.db import (
44 joinedload, User, UserGroupRepoToPerm, UserGroupRepoGroupToPerm)
44 joinedload, User, UserGroupRepoToPerm, UserGroupRepoGroupToPerm)
45 from rhodecode.model.meta import Session
45 from rhodecode.model.meta import Session
46 from rhodecode.model.user_group import UserGroupModel
46 from rhodecode.model.user_group import UserGroupModel
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 class UserGroupsView(UserGroupAppView):
51 class UserGroupsView(UserGroupAppView):
52
52
53 def load_default_context(self):
53 def load_default_context(self):
54 c = self._get_local_tmpl_context()
54 c = self._get_local_tmpl_context()
55
55
56 PermissionModel().set_global_permission_choices(
56 PermissionModel().set_global_permission_choices(
57 c, gettext_translator=self.request.translate)
57 c, gettext_translator=self.request.translate)
58
58
59
59
60 return c
60 return c
61
61
62 def _get_perms_summary(self, user_group_id):
62 def _get_perms_summary(self, user_group_id):
63 permissions = {
63 permissions = {
64 'repositories': {},
64 'repositories': {},
65 'repositories_groups': {},
65 'repositories_groups': {},
66 }
66 }
67 ugroup_repo_perms = UserGroupRepoToPerm.query()\
67 ugroup_repo_perms = UserGroupRepoToPerm.query()\
68 .options(joinedload(UserGroupRepoToPerm.permission))\
68 .options(joinedload(UserGroupRepoToPerm.permission))\
69 .options(joinedload(UserGroupRepoToPerm.repository))\
69 .options(joinedload(UserGroupRepoToPerm.repository))\
70 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
70 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
71 .all()
71 .all()
72
72
73 for gr in ugroup_repo_perms:
73 for gr in ugroup_repo_perms:
74 permissions['repositories'][gr.repository.repo_name] \
74 permissions['repositories'][gr.repository.repo_name] \
75 = gr.permission.permission_name
75 = gr.permission.permission_name
76
76
77 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
77 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
78 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
78 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
79 .options(joinedload(UserGroupRepoGroupToPerm.group))\
79 .options(joinedload(UserGroupRepoGroupToPerm.group))\
80 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
80 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
81 .all()
81 .all()
82
82
83 for gr in ugroup_group_perms:
83 for gr in ugroup_group_perms:
84 permissions['repositories_groups'][gr.group.group_name] \
84 permissions['repositories_groups'][gr.group.group_name] \
85 = gr.permission.permission_name
85 = gr.permission.permission_name
86 return permissions
86 return permissions
87
87
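The summary built by _get_perms_summary() maps repository and repository-group names to permission names. An illustrative return value; all names and permission levels below are hypothetical:

# Hypothetical shape of the dict returned by _get_perms_summary().
perms_summary = {
    'repositories': {
        'project/backend': 'repository.write',
        'project/docs': 'repository.read',
    },
    'repositories_groups': {
        'project': 'group.read',
    },
}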
88 @LoginRequired()
88 @LoginRequired()
89 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
89 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
90 @view_config(
90 @view_config(
91 route_name='user_group_members_data', request_method='GET',
91 route_name='user_group_members_data', request_method='GET',
92 renderer='json_ext', xhr=True)
92 renderer='json_ext', xhr=True)
93 def user_group_members(self):
93 def user_group_members(self):
94 """
94 """
95 Return the members of the given user group
95 Return the members of the given user group
96 """
96 """
97 self.load_default_context()
97 self.load_default_context()
98 user_group = self.db_user_group
98 user_group = self.db_user_group
99 group_members_obj = sorted((x.user for x in user_group.members),
99 group_members_obj = sorted((x.user for x in user_group.members),
100 key=lambda u: u.username.lower())
100 key=lambda u: u.username.lower())
101
101
102 group_members = [
102 group_members = [
103 {
103 {
104 'id': user.user_id,
104 'id': user.user_id,
105 'first_name': user.first_name,
105 'first_name': user.first_name,
106 'last_name': user.last_name,
106 'last_name': user.last_name,
107 'username': user.username,
107 'username': user.username,
108 'icon_link': h.gravatar_url(user.email, 30),
108 'icon_link': h.gravatar_url(user.email, 30),
109 'value_display': h.person(user.email),
109 'value_display': h.person(user.email),
110 'value': user.username,
110 'value': user.username,
111 'value_type': 'user',
111 'value_type': 'user',
112 'active': user.active,
112 'active': user.active,
113 }
113 }
114 for user in group_members_obj
114 for user in group_members_obj
115 ]
115 ]
116
116
117 return {
117 return {
118 'members': group_members
118 'members': group_members
119 }
119 }
120
120
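The JSON produced by user_group_members() therefore looks roughly like the following; the key names come from the code above, while every value is hypothetical:

# Hypothetical response body for the user_group_members_data endpoint.
members_response = {
    'members': [
        {
            'id': 42,
            'first_name': 'Jane',
            'last_name': 'Doe',
            'username': 'jdoe',
            'icon_link': 'https://example.com/avatar/jdoe?s=30',
            'value_display': 'Jane Doe (jdoe)',
            'value': 'jdoe',
            'value_type': 'user',
            'active': True,
        },
    ],
}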
121 @LoginRequired()
121 @LoginRequired()
122 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
122 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
123 @view_config(
123 @view_config(
124 route_name='edit_user_group_perms_summary', request_method='GET',
124 route_name='edit_user_group_perms_summary', request_method='GET',
125 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
125 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
126 def user_group_perms_summary(self):
126 def user_group_perms_summary(self):
127 c = self.load_default_context()
127 c = self.load_default_context()
128 c.user_group = self.db_user_group
128 c.user_group = self.db_user_group
129 c.active = 'perms_summary'
129 c.active = 'perms_summary'
130 c.permissions = self._get_perms_summary(c.user_group.users_group_id)
130 c.permissions = self._get_perms_summary(c.user_group.users_group_id)
131 return self._get_template_context(c)
131 return self._get_template_context(c)
132
132
133 @LoginRequired()
133 @LoginRequired()
134 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
134 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
135 @view_config(
135 @view_config(
136 route_name='edit_user_group_perms_summary_json', request_method='GET',
136 route_name='edit_user_group_perms_summary_json', request_method='GET',
137 renderer='json_ext')
137 renderer='json_ext')
138 def user_group_perms_summary_json(self):
138 def user_group_perms_summary_json(self):
139 self.load_default_context()
139 self.load_default_context()
140 user_group = self.db_user_group
140 user_group = self.db_user_group
141 return self._get_perms_summary(user_group.users_group_id)
141 return self._get_perms_summary(user_group.users_group_id)
142
142
143 def _revoke_perms_on_yourself(self, form_result):
143 def _revoke_perms_on_yourself(self, form_result):
144 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
144 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
145 form_result['perm_updates'])
145 form_result['perm_updates'])
146 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
146 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
147 form_result['perm_additions'])
147 form_result['perm_additions'])
148 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
148 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
149 form_result['perm_deletions'])
149 form_result['perm_deletions'])
150 admin_perm = 'usergroup.admin'
150 admin_perm = 'usergroup.admin'
151 if _updates and _updates[0][1] != admin_perm or \
151 if _updates and _updates[0][1] != admin_perm or \
152 _additions and _additions[0][1] != admin_perm or \
152 _additions and _additions[0][1] != admin_perm or \
153 _deletions and _deletions[0][1] != admin_perm:
153 _deletions and _deletions[0][1] != admin_perm:
154 return True
154 return True
155 return False
155 return False
156
156
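The chained condition in _revoke_perms_on_yourself() relies on Python's `and` binding tighter than `or`. A sketch of the same guard with explicit grouping, assuming list-valued inputs as produced by filter() on Python 2 (the helper is illustrative only):

def revokes_own_admin(updates, additions, deletions,
                      admin_perm='usergroup.admin'):
    # Each argument is assumed to hold (user_id, permission_name, ...)
    # entries that target the current user; a first entry that is not
    # usergroup.admin means the user would strip their own admin rights.
    return bool(
        (updates and updates[0][1] != admin_perm) or
        (additions and additions[0][1] != admin_perm) or
        (deletions and deletions[0][1] != admin_perm))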
157 @LoginRequired()
157 @LoginRequired()
158 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
158 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
159 @CSRFRequired()
159 @CSRFRequired()
160 @view_config(
160 @view_config(
161 route_name='user_groups_update', request_method='POST',
161 route_name='user_groups_update', request_method='POST',
162 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
162 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
163 def user_group_update(self):
163 def user_group_update(self):
164 _ = self.request.translate
164 _ = self.request.translate
165
165
166 user_group = self.db_user_group
166 user_group = self.db_user_group
167 user_group_id = user_group.users_group_id
167 user_group_id = user_group.users_group_id
168
168
169 c = self.load_default_context()
169 c = self.load_default_context()
170 c.user_group = user_group
170 c.user_group = user_group
171 c.group_members_obj = [x.user for x in c.user_group.members]
171 c.group_members_obj = [x.user for x in c.user_group.members]
172 c.group_members_obj.sort(key=lambda u: u.username.lower())
172 c.group_members_obj.sort(key=lambda u: u.username.lower())
173 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
173 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
174 c.active = 'settings'
174 c.active = 'settings'
175
175
176 users_group_form = UserGroupForm(
176 users_group_form = UserGroupForm(
177 self.request.translate, edit=True,
177 self.request.translate, edit=True,
178 old_data=c.user_group.get_dict(), allow_disabled=True)()
178 old_data=c.user_group.get_dict(), allow_disabled=True)()
179
179
180 old_values = c.user_group.get_api_data()
180 old_values = c.user_group.get_api_data()
181 user_group_name = self.request.POST.get('users_group_name')
181 user_group_name = self.request.POST.get('users_group_name')
182 try:
182 try:
183 form_result = users_group_form.to_python(self.request.POST)
183 form_result = users_group_form.to_python(self.request.POST)
184 pstruct = peppercorn.parse(self.request.POST.items())
184 pstruct = peppercorn.parse(self.request.POST.items())
185 form_result['users_group_members'] = pstruct['user_group_members']
185 form_result['users_group_members'] = pstruct['user_group_members']
186
186
187 user_group, added_members, removed_members = \
187 user_group, added_members, removed_members = \
188 UserGroupModel().update(c.user_group, form_result)
188 UserGroupModel().update(c.user_group, form_result)
189 updated_user_group = form_result['users_group_name']
189 updated_user_group = form_result['users_group_name']
190
190
191 for user_id in added_members:
191 for user_id in added_members:
192 user = User.get(user_id)
192 user = User.get(user_id)
193 user_data = user.get_api_data()
193 user_data = user.get_api_data()
194 audit_logger.store_web(
194 audit_logger.store_web(
195 'user_group.edit.member.add',
195 'user_group.edit.member.add',
196 action_data={'user': user_data, 'old_data': old_values},
196 action_data={'user': user_data, 'old_data': old_values},
197 user=self._rhodecode_user)
197 user=self._rhodecode_user)
198
198
199 for user_id in removed_members:
199 for user_id in removed_members:
200 user = User.get(user_id)
200 user = User.get(user_id)
201 user_data = user.get_api_data()
201 user_data = user.get_api_data()
202 audit_logger.store_web(
202 audit_logger.store_web(
203 'user_group.edit.member.delete',
203 'user_group.edit.member.delete',
204 action_data={'user': user_data, 'old_data': old_values},
204 action_data={'user': user_data, 'old_data': old_values},
205 user=self._rhodecode_user)
205 user=self._rhodecode_user)
206
206
207 audit_logger.store_web(
207 audit_logger.store_web(
208 'user_group.edit', action_data={'old_data': old_values},
208 'user_group.edit', action_data={'old_data': old_values},
209 user=self._rhodecode_user)
209 user=self._rhodecode_user)
210
210
211 h.flash(_('Updated user group %s') % updated_user_group,
211 h.flash(_('Updated user group %s') % updated_user_group,
212 category='success')
212 category='success')
213 Session().commit()
213 Session().commit()
214 except formencode.Invalid as errors:
214 except formencode.Invalid as errors:
215 defaults = errors.value
215 defaults = errors.value
216 e = errors.error_dict or {}
216 e = errors.error_dict or {}
217
217
218 data = render(
218 data = render(
219 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
219 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
220 self._get_template_context(c), self.request)
220 self._get_template_context(c), self.request)
221 html = formencode.htmlfill.render(
221 html = formencode.htmlfill.render(
222 data,
222 data,
223 defaults=defaults,
223 defaults=defaults,
224 errors=e,
224 errors=e,
225 prefix_error=False,
225 prefix_error=False,
226 encoding="UTF-8",
226 encoding="UTF-8",
227 force_defaults=False
227 force_defaults=False
228 )
228 )
229 return Response(html)
229 return Response(html)
230
230
231 except Exception:
231 except Exception:
232 log.exception("Exception during update of user group")
232 log.exception("Exception during update of user group")
233 h.flash(_('Error occurred during update of user group %s')
233 h.flash(_('Error occurred during update of user group %s')
234 % user_group_name, category='error')
234 % user_group_name, category='error')
235
235
236 raise HTTPFound(
236 raise HTTPFound(
237 h.route_path('edit_user_group', user_group_id=user_group_id))
237 h.route_path('edit_user_group', user_group_id=user_group_id))
238
238
239 @LoginRequired()
239 @LoginRequired()
240 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
240 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
241 @CSRFRequired()
241 @CSRFRequired()
242 @view_config(
242 @view_config(
243 route_name='user_groups_delete', request_method='POST',
243 route_name='user_groups_delete', request_method='POST',
244 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
244 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
245 def user_group_delete(self):
245 def user_group_delete(self):
246 _ = self.request.translate
246 _ = self.request.translate
247 user_group = self.db_user_group
247 user_group = self.db_user_group
248
248
249 self.load_default_context()
249 self.load_default_context()
250 force = str2bool(self.request.POST.get('force'))
250 force = str2bool(self.request.POST.get('force'))
251
251
252 old_values = user_group.get_api_data()
252 old_values = user_group.get_api_data()
253 try:
253 try:
254 UserGroupModel().delete(user_group, force=force)
254 UserGroupModel().delete(user_group, force=force)
255 audit_logger.store_web(
255 audit_logger.store_web(
256 'user.delete', action_data={'old_data': old_values},
256 'user.delete', action_data={'old_data': old_values},
257 user=self._rhodecode_user)
257 user=self._rhodecode_user)
258 Session().commit()
258 Session().commit()
259 h.flash(_('Successfully deleted user group'), category='success')
259 h.flash(_('Successfully deleted user group'), category='success')
260 except UserGroupAssignedException as e:
260 except UserGroupAssignedException as e:
261 h.flash(str(e), category='error')
261 h.flash(str(e), category='error')
262 except Exception:
262 except Exception:
263 log.exception("Exception during deletion of user group")
263 log.exception("Exception during deletion of user group")
264 h.flash(_('An error occurred during deletion of user group'),
264 h.flash(_('An error occurred during deletion of user group'),
265 category='error')
265 category='error')
266 raise HTTPFound(h.route_path('user_groups'))
266 raise HTTPFound(h.route_path('user_groups'))
267
267
268 @LoginRequired()
268 @LoginRequired()
269 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
269 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
270 @view_config(
270 @view_config(
271 route_name='edit_user_group', request_method='GET',
271 route_name='edit_user_group', request_method='GET',
272 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
272 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
273 def user_group_edit(self):
273 def user_group_edit(self):
274 user_group = self.db_user_group
274 user_group = self.db_user_group
275
275
276 c = self.load_default_context()
276 c = self.load_default_context()
277 c.user_group = user_group
277 c.user_group = user_group
278 c.group_members_obj = [x.user for x in c.user_group.members]
278 c.group_members_obj = [x.user for x in c.user_group.members]
279 c.group_members_obj.sort(key=lambda u: u.username.lower())
279 c.group_members_obj.sort(key=lambda u: u.username.lower())
280 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
280 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
281
281
282 c.active = 'settings'
282 c.active = 'settings'
283
283
284 defaults = user_group.get_dict()
284 defaults = user_group.get_dict()
285 # fill owner
285 # fill owner
286 if user_group.user:
286 if user_group.user:
287 defaults.update({'user': user_group.user.username})
287 defaults.update({'user': user_group.user.username})
288 else:
288 else:
289 replacement_user = User.get_first_super_admin().username
289 replacement_user = User.get_first_super_admin().username
290 defaults.update({'user': replacement_user})
290 defaults.update({'user': replacement_user})
291
291
292 data = render(
292 data = render(
293 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
293 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
294 self._get_template_context(c), self.request)
294 self._get_template_context(c), self.request)
295 html = formencode.htmlfill.render(
295 html = formencode.htmlfill.render(
296 data,
296 data,
297 defaults=defaults,
297 defaults=defaults,
298 encoding="UTF-8",
298 encoding="UTF-8",
299 force_defaults=False
299 force_defaults=False
300 )
300 )
301 return Response(html)
301 return Response(html)
302
302
303 @LoginRequired()
303 @LoginRequired()
304 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
304 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
305 @view_config(
305 @view_config(
306 route_name='edit_user_group_perms', request_method='GET',
306 route_name='edit_user_group_perms', request_method='GET',
307 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
307 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
308 def user_group_edit_perms(self):
308 def user_group_edit_perms(self):
309 user_group = self.db_user_group
309 user_group = self.db_user_group
310 c = self.load_default_context()
310 c = self.load_default_context()
311 c.user_group = user_group
311 c.user_group = user_group
312 c.active = 'perms'
312 c.active = 'perms'
313
313
314 defaults = {}
314 defaults = {}
315 # fill user group users
315 # fill user group users
316 for p in c.user_group.user_user_group_to_perm:
316 for p in c.user_group.user_user_group_to_perm:
317 defaults.update({'u_perm_%s' % p.user.user_id:
317 defaults.update({'u_perm_%s' % p.user.user_id:
318 p.permission.permission_name})
318 p.permission.permission_name})
319
319
320 for p in c.user_group.user_group_user_group_to_perm:
320 for p in c.user_group.user_group_user_group_to_perm:
321 defaults.update({'g_perm_%s' % p.user_group.users_group_id:
321 defaults.update({'g_perm_%s' % p.user_group.users_group_id:
322 p.permission.permission_name})
322 p.permission.permission_name})
323
323
324 data = render(
324 data = render(
325 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
325 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
326 self._get_template_context(c), self.request)
326 self._get_template_context(c), self.request)
327 html = formencode.htmlfill.render(
327 html = formencode.htmlfill.render(
328 data,
328 data,
329 defaults=defaults,
329 defaults=defaults,
330 encoding="UTF-8",
330 encoding="UTF-8",
331 force_defaults=False
331 force_defaults=False
332 )
332 )
333 return Response(html)
333 return Response(html)
334
334
335 @LoginRequired()
335 @LoginRequired()
336 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
336 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
337 @CSRFRequired()
337 @CSRFRequired()
338 @view_config(
338 @view_config(
339 route_name='edit_user_group_perms_update', request_method='POST',
339 route_name='edit_user_group_perms_update', request_method='POST',
340 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
340 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
341 def user_group_update_perms(self):
341 def user_group_update_perms(self):
342 """
342 """
343 Grant permissions for the given user group
343 Grant permissions for the given user group
344 """
344 """
345 _ = self.request.translate
345 _ = self.request.translate
346
346
347 user_group = self.db_user_group
347 user_group = self.db_user_group
348 user_group_id = user_group.users_group_id
348 user_group_id = user_group.users_group_id
349 c = self.load_default_context()
349 c = self.load_default_context()
350 c.user_group = user_group
350 c.user_group = user_group
351 form = UserGroupPermsForm(self.request.translate)().to_python(self.request.POST)
351 form = UserGroupPermsForm(self.request.translate)().to_python(self.request.POST)
352
352
353 if not self._rhodecode_user.is_admin:
353 if not self._rhodecode_user.is_admin:
354 if self._revoke_perms_on_yourself(form):
354 if self._revoke_perms_on_yourself(form):
355 msg = _('Cannot change permission for yourself as admin')
355 msg = _('Cannot change permission for yourself as admin')
356 h.flash(msg, category='warning')
356 h.flash(msg, category='warning')
357 raise HTTPFound(
357 raise HTTPFound(
358 h.route_path('edit_user_group_perms',
358 h.route_path('edit_user_group_perms',
359 user_group_id=user_group_id))
359 user_group_id=user_group_id))
360
360
361 try:
361 try:
362 changes = UserGroupModel().update_permissions(
362 changes = UserGroupModel().update_permissions(
363 user_group_id,
363 user_group_id,
364 form['perm_additions'], form['perm_updates'],
364 form['perm_additions'], form['perm_updates'],
365 form['perm_deletions'])
365 form['perm_deletions'])
366
366
367 except RepoGroupAssignmentError:
367 except RepoGroupAssignmentError:
368 h.flash(_('Target group cannot be the same'), category='error')
368 h.flash(_('Target group cannot be the same'), category='error')
369 raise HTTPFound(
369 raise HTTPFound(
370 h.route_path('edit_user_group_perms',
370 h.route_path('edit_user_group_perms',
371 user_group_id=user_group_id))
371 user_group_id=user_group_id))
372
372
373 action_data = {
373 action_data = {
374 'added': changes['added'],
374 'added': changes['added'],
375 'updated': changes['updated'],
375 'updated': changes['updated'],
376 'deleted': changes['deleted'],
376 'deleted': changes['deleted'],
377 }
377 }
378 audit_logger.store_web(
378 audit_logger.store_web(
379 'user_group.edit.permissions', action_data=action_data,
379 'user_group.edit.permissions', action_data=action_data,
380 user=self._rhodecode_user)
380 user=self._rhodecode_user)
381
381
382 Session().commit()
382 Session().commit()
383 h.flash(_('User Group permissions updated'), category='success')
383 h.flash(_('User Group permissions updated'), category='success')
384 raise HTTPFound(
384 raise HTTPFound(
385 h.route_path('edit_user_group_perms', user_group_id=user_group_id))
385 h.route_path('edit_user_group_perms', user_group_id=user_group_id))
386
386
387 @LoginRequired()
387 @LoginRequired()
388 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
388 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
389 @view_config(
389 @view_config(
390 route_name='edit_user_group_global_perms', request_method='GET',
390 route_name='edit_user_group_global_perms', request_method='GET',
391 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
391 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
392 def user_group_global_perms_edit(self):
392 def user_group_global_perms_edit(self):
393 user_group = self.db_user_group
393 user_group = self.db_user_group
394 c = self.load_default_context()
394 c = self.load_default_context()
395 c.user_group = user_group
395 c.user_group = user_group
396 c.active = 'global_perms'
396 c.active = 'global_perms'
397
397
398 c.default_user = User.get_default_user()
398 c.default_user = User.get_default_user()
399 defaults = c.user_group.get_dict()
399 defaults = c.user_group.get_dict()
400 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
400 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
401 defaults.update(c.user_group.get_default_perms())
401 defaults.update(c.user_group.get_default_perms())
402
402
403 data = render(
403 data = render(
404 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
404 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
405 self._get_template_context(c), self.request)
405 self._get_template_context(c), self.request)
406 html = formencode.htmlfill.render(
406 html = formencode.htmlfill.render(
407 data,
407 data,
408 defaults=defaults,
408 defaults=defaults,
409 encoding="UTF-8",
409 encoding="UTF-8",
410 force_defaults=False
410 force_defaults=False
411 )
411 )
412 return Response(html)
412 return Response(html)
413
413
414 @LoginRequired()
414 @LoginRequired()
415 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
415 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
416 @CSRFRequired()
416 @CSRFRequired()
417 @view_config(
417 @view_config(
418 route_name='edit_user_group_global_perms_update', request_method='POST',
418 route_name='edit_user_group_global_perms_update', request_method='POST',
419 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
419 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
420 def user_group_global_perms_update(self):
420 def user_group_global_perms_update(self):
421 _ = self.request.translate
421 _ = self.request.translate
422 user_group = self.db_user_group
422 user_group = self.db_user_group
423 user_group_id = self.db_user_group.users_group_id
423 user_group_id = self.db_user_group.users_group_id
424
424
425 c = self.load_default_context()
425 c = self.load_default_context()
426 c.user_group = user_group
426 c.user_group = user_group
427 c.active = 'global_perms'
427 c.active = 'global_perms'
428
428
429 try:
429 try:
430 # first stage that verifies the checkbox
430 # first stage that verifies the checkbox
431 _form = UserIndividualPermissionsForm()
431 _form = UserIndividualPermissionsForm(self.request.translate)
432 form_result = _form.to_python(dict(self.request.POST))
432 form_result = _form.to_python(dict(self.request.POST))
433 inherit_perms = form_result['inherit_default_permissions']
433 inherit_perms = form_result['inherit_default_permissions']
434 user_group.inherit_default_permissions = inherit_perms
434 user_group.inherit_default_permissions = inherit_perms
435 Session().add(user_group)
435 Session().add(user_group)
436
436
437 if not inherit_perms:
437 if not inherit_perms:
438 # only update the individual ones if we uncheck the flag
438 # only update the individual ones if we uncheck the flag
439 _form = UserPermissionsForm(
439 _form = UserPermissionsForm(
440 self.request.translate,
440 self.request.translate,
441 [x[0] for x in c.repo_create_choices],
441 [x[0] for x in c.repo_create_choices],
442 [x[0] for x in c.repo_create_on_write_choices],
442 [x[0] for x in c.repo_create_on_write_choices],
443 [x[0] for x in c.repo_group_create_choices],
443 [x[0] for x in c.repo_group_create_choices],
444 [x[0] for x in c.user_group_create_choices],
444 [x[0] for x in c.user_group_create_choices],
445 [x[0] for x in c.fork_choices],
445 [x[0] for x in c.fork_choices],
446 [x[0] for x in c.inherit_default_permission_choices])()
446 [x[0] for x in c.inherit_default_permission_choices])()
447
447
448 form_result = _form.to_python(dict(self.request.POST))
448 form_result = _form.to_python(dict(self.request.POST))
449 form_result.update(
449 form_result.update(
450 {'perm_user_group_id': user_group.users_group_id})
450 {'perm_user_group_id': user_group.users_group_id})
451
451
452 PermissionModel().update_user_group_permissions(form_result)
452 PermissionModel().update_user_group_permissions(form_result)
453
453
454 Session().commit()
454 Session().commit()
455 h.flash(_('User Group global permissions updated successfully'),
455 h.flash(_('User Group global permissions updated successfully'),
456 category='success')
456 category='success')
457
457
458 except formencode.Invalid as errors:
458 except formencode.Invalid as errors:
459 defaults = errors.value
459 defaults = errors.value
460
460
461 data = render(
461 data = render(
462 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
462 'rhodecode:templates/admin/user_groups/user_group_edit.mako',
463 self._get_template_context(c), self.request)
463 self._get_template_context(c), self.request)
464 html = formencode.htmlfill.render(
464 html = formencode.htmlfill.render(
465 data,
465 data,
466 defaults=defaults,
466 defaults=defaults,
467 errors=errors.error_dict or {},
467 errors=errors.error_dict or {},
468 prefix_error=False,
468 prefix_error=False,
469 encoding="UTF-8",
469 encoding="UTF-8",
470 force_defaults=False
470 force_defaults=False
471 )
471 )
472 return Response(html)
472 return Response(html)
473 except Exception:
473 except Exception:
474 log.exception("Exception during permissions saving")
474 log.exception("Exception during permissions saving")
475 h.flash(_('An error occurred during permissions saving'),
475 h.flash(_('An error occurred during permissions saving'),
476 category='error')
476 category='error')
477
477
478 raise HTTPFound(
478 raise HTTPFound(
479 h.route_path('edit_user_group_global_perms',
479 h.route_path('edit_user_group_global_perms',
480 user_group_id=user_group_id))
480 user_group_id=user_group_id))
481
481
482 @LoginRequired()
482 @LoginRequired()
483 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
483 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
484 @view_config(
484 @view_config(
485 route_name='edit_user_group_advanced', request_method='GET',
485 route_name='edit_user_group_advanced', request_method='GET',
486 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
486 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
487 def user_group_edit_advanced(self):
487 def user_group_edit_advanced(self):
488 user_group = self.db_user_group
488 user_group = self.db_user_group
489
489
490 c = self.load_default_context()
490 c = self.load_default_context()
491 c.user_group = user_group
491 c.user_group = user_group
492 c.active = 'advanced'
492 c.active = 'advanced'
493 c.group_members_obj = sorted(
493 c.group_members_obj = sorted(
494 (x.user for x in c.user_group.members),
494 (x.user for x in c.user_group.members),
495 key=lambda u: u.username.lower())
495 key=lambda u: u.username.lower())
496
496
497 c.group_to_repos = sorted(
497 c.group_to_repos = sorted(
498 (x.repository for x in c.user_group.users_group_repo_to_perm),
498 (x.repository for x in c.user_group.users_group_repo_to_perm),
499 key=lambda u: u.repo_name.lower())
499 key=lambda u: u.repo_name.lower())
500
500
501 c.group_to_repo_groups = sorted(
501 c.group_to_repo_groups = sorted(
502 (x.group for x in c.user_group.users_group_repo_group_to_perm),
502 (x.group for x in c.user_group.users_group_repo_group_to_perm),
503 key=lambda u: u.group_name.lower())
503 key=lambda u: u.group_name.lower())
504
504
505 c.group_to_review_rules = sorted(
505 c.group_to_review_rules = sorted(
506 (x.users_group for x in c.user_group.user_group_review_rules),
506 (x.users_group for x in c.user_group.user_group_review_rules),
507 key=lambda u: u.users_group_name.lower())
507 key=lambda u: u.users_group_name.lower())
508
508
509 return self._get_template_context(c)
509 return self._get_template_context(c)
510
510
511 @LoginRequired()
511 @LoginRequired()
512 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
512 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
513 @CSRFRequired()
513 @CSRFRequired()
514 @view_config(
514 @view_config(
515 route_name='edit_user_group_advanced_sync', request_method='POST',
515 route_name='edit_user_group_advanced_sync', request_method='POST',
516 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
516 renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako')
517 def user_group_edit_advanced_set_synchronization(self):
517 def user_group_edit_advanced_set_synchronization(self):
518 _ = self.request.translate
518 _ = self.request.translate
519 user_group = self.db_user_group
519 user_group = self.db_user_group
520 user_group_id = user_group.users_group_id
520 user_group_id = user_group.users_group_id
521
521
522 existing = user_group.group_data.get('extern_type')
522 existing = user_group.group_data.get('extern_type')
523
523
524 if existing:
524 if existing:
525 new_state = user_group.group_data
525 new_state = user_group.group_data
526 new_state['extern_type'] = None
526 new_state['extern_type'] = None
527 else:
527 else:
528 new_state = user_group.group_data
528 new_state = user_group.group_data
529 new_state['extern_type'] = 'manual'
529 new_state['extern_type'] = 'manual'
530 new_state['extern_type_set_by'] = self._rhodecode_user.username
530 new_state['extern_type_set_by'] = self._rhodecode_user.username
531
531
532 try:
532 try:
533 user_group.group_data = new_state
533 user_group.group_data = new_state
534 Session().add(user_group)
534 Session().add(user_group)
535 Session().commit()
535 Session().commit()
536
536
537 h.flash(_('User Group synchronization updated successfully'),
537 h.flash(_('User Group synchronization updated successfully'),
538 category='success')
538 category='success')
539 except Exception:
539 except Exception:
540 log.exception("Exception during sync settings saving")
540 log.exception("Exception during sync settings saving")
541 h.flash(_('An error occurred during synchronization update'),
541 h.flash(_('An error occurred during synchronization update'),
542 category='error')
542 category='error')
543
543
544 raise HTTPFound(
544 raise HTTPFound(
545 h.route_path('edit_user_group_advanced',
545 h.route_path('edit_user_group_advanced',
546 user_group_id=user_group_id))
546 user_group_id=user_group_id))
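The two POST handlers above follow the same formencode pattern: validate the submitted POST body with a schema's to_python(), and on formencode.Invalid re-render the same template through htmlfill so the form comes back pre-filled with the submitted values and the error dict. Below is a minimal sketch of that pattern, assuming only the formencode library; the schema and the handle_post helper are illustrative stand-ins, not RhodeCode code.

import formencode
from formencode import htmlfill, validators

class InheritPermsForm(formencode.Schema):
    # hypothetical schema standing in for UserIndividualPermissionsForm
    allow_extra_fields = True
    filter_extra_fields = True
    inherit_default_permissions = validators.StringBool(if_missing=False)

def handle_post(post_dict, form_html):
    try:
        form_result = InheritPermsForm().to_python(post_dict)
        return 'saved', form_result
    except formencode.Invalid as errors:
        # re-render the original form markup with submitted values and errors
        html = htmlfill.render(
            form_html,
            defaults=errors.value,
            errors=errors.error_dict or {},
            prefix_error=False,
            encoding="UTF-8",
            force_defaults=False)
        return 'form', html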
@@ -1,109 +1,106 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import os
22 import os
23 import logging
23 import logging
24 import rhodecode
24 import rhodecode
25
25
26 # ------------------------------------------------------------------------------
26 # ------------------------------------------------------------------------------
27 # CELERY magic until refactor - issue #4163 - import order matters here:
27 # CELERY magic until refactor - issue #4163 - import order matters here:
28 #from rhodecode.lib import celerypylons # this must be first, celerypylons
28 #from rhodecode.lib import celerypylons # this must be first, celerypylons
29 # sets config settings upon import
29 # sets config settings upon import
30
30
31 import rhodecode.integrations # any modules using celery task
31 import rhodecode.integrations # any modules using celery task
32 # decorators should be added afterwards:
32 # decorators should be added afterwards:
33 # ------------------------------------------------------------------------------
33 # ------------------------------------------------------------------------------
34
34
35 from rhodecode.config import utils
35 from rhodecode.config import utils
36
36
37 from rhodecode.lib.utils import load_rcextensions
37 from rhodecode.lib.utils import load_rcextensions
38 from rhodecode.lib.utils2 import str2bool
38 from rhodecode.lib.utils2 import str2bool
39 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
39 from rhodecode.lib.vcs import connect_vcs, start_vcs_server
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 def load_pyramid_environment(global_config, settings):
44 def load_pyramid_environment(global_config, settings):
45 # Some parts of the code expect a merge of global and app settings.
45 # Some parts of the code expect a merge of global and app settings.
46 settings_merged = global_config.copy()
46 settings_merged = global_config.copy()
47 settings_merged.update(settings)
47 settings_merged.update(settings)
48
48
49 # TODO(marcink): probably not required anymore
49 # TODO(marcink): probably not required anymore
50 # configure channelstream,
50 # configure channelstream,
51 settings_merged['channelstream_config'] = {
51 settings_merged['channelstream_config'] = {
52 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
52 'enabled': str2bool(settings_merged.get('channelstream.enabled', False)),
53 'server': settings_merged.get('channelstream.server'),
53 'server': settings_merged.get('channelstream.server'),
54 'secret': settings_merged.get('channelstream.secret')
54 'secret': settings_merged.get('channelstream.secret')
55 }
55 }
56
56
57
57
58 # TODO(marcink): celery
58 # TODO(marcink): celery
59 # # store some globals into rhodecode
59 # # store some globals into rhodecode
60 # rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
60 # rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery'))
61 # rhodecode.CELERY_EAGER = str2bool(
61 # rhodecode.CELERY_EAGER = str2bool(
62 # config['app_conf'].get('celery.always.eager'))
62 # config['app_conf'].get('celery.always.eager'))
63
63
64
64
65 # If this is a test run we prepare the test environment like
65 # If this is a test run we prepare the test environment like
66 # creating a test database, test search index and test repositories.
66 # creating a test database, test search index and test repositories.
67 # This has to be done before the database connection is initialized.
67 # This has to be done before the database connection is initialized.
68 if settings['is_test']:
68 if settings['is_test']:
69 rhodecode.is_test = True
69 rhodecode.is_test = True
70 rhodecode.disable_error_handler = True
70 rhodecode.disable_error_handler = True
71
71
72 utils.initialize_test_environment(settings_merged)
72 utils.initialize_test_environment(settings_merged)
73
73
74 # Initialize the database connection.
74 # Initialize the database connection.
75 utils.initialize_database(settings_merged)
75 utils.initialize_database(settings_merged)
76
76
77 # TODO(marcink): base_path handling ?
78 # repos_path = list(db_cfg.items('paths'))[0][1]
79
80 load_rcextensions(root_path=settings_merged['here'])
77 load_rcextensions(root_path=settings_merged['here'])
81
78
82 # Limit backends to `vcs.backends` from configuration
79 # Limit backends to `vcs.backends` from configuration
83 for alias in rhodecode.BACKENDS.keys():
80 for alias in rhodecode.BACKENDS.keys():
84 if alias not in settings['vcs.backends']:
81 if alias not in settings['vcs.backends']:
85 del rhodecode.BACKENDS[alias]
82 del rhodecode.BACKENDS[alias]
86 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
83 log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys())
87
84
88 # initialize vcs client and optionally run the server if enabled
85 # initialize vcs client and optionally run the server if enabled
89 vcs_server_uri = settings['vcs.server']
86 vcs_server_uri = settings['vcs.server']
90 vcs_server_enabled = settings['vcs.server.enable']
87 vcs_server_enabled = settings['vcs.server.enable']
91 start_server = (
88 start_server = (
92 settings['vcs.start_server'] and
89 settings['vcs.start_server'] and
93 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
90 not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0')))
94
91
95 if vcs_server_enabled and start_server:
92 if vcs_server_enabled and start_server:
96 log.info("Starting vcsserver")
93 log.info("Starting vcsserver")
97 start_vcs_server(server_and_port=vcs_server_uri,
94 start_vcs_server(server_and_port=vcs_server_uri,
98 protocol=utils.get_vcs_server_protocol(settings),
95 protocol=utils.get_vcs_server_protocol(settings),
99 log_level=settings['vcs.server.log_level'])
96 log_level=settings['vcs.server.log_level'])
100
97
101 utils.configure_vcs(settings)
98 utils.configure_vcs(settings)
102
99
103 # Store the settings to make them available to other modules.
100 # Store the settings to make them available to other modules.
104
101
105 rhodecode.PYRAMID_SETTINGS = settings_merged
102 rhodecode.PYRAMID_SETTINGS = settings_merged
106 rhodecode.CONFIG = settings_merged
103 rhodecode.CONFIG = settings_merged
107
104
108 if vcs_server_enabled:
105 if vcs_server_enabled:
109 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
106 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
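The backend-limiting loop above mutates the module-level BACKENDS dict in place, dropping any alias not listed in the `vcs.backends` setting. A small sketch of the same idea, assuming a plain dict and a list of enabled aliases; the helper name and values here are illustrative, not part of RhodeCode's API.

def limit_backends(backends, enabled_aliases):
    # copy the keys first, since we delete entries while iterating
    for alias in list(backends):
        if alias not in enabled_aliases:
            del backends[alias]
    return backends

backends = {'hg': 'Mercurial repository', 'git': 'Git repository',
            'svn': 'Subversion repository'}
limit_backends(backends, enabled_aliases=['hg', 'git'])
# -> {'hg': 'Mercurial repository', 'git': 'Git repository'}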
@@ -1,543 +1,540 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 The base Controller API
22 The base Controller API
23 Provides the BaseController class for subclassing, and usage in different
23 Provides the BaseController class for subclassing, and usage in different
24 controllers.
24 controllers.
25 """
25 """
26
26
27 import logging
27 import logging
28 import socket
28 import socket
29
29
30 import markupsafe
30 import markupsafe
31 import ipaddress
31 import ipaddress
32 import pyramid.threadlocal
33
32
34 from paste.auth.basic import AuthBasicAuthenticator
33 from paste.auth.basic import AuthBasicAuthenticator
35 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
36 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
37
36
38 import rhodecode
37 import rhodecode
39 from rhodecode.authentication.base import VCS_TYPE
38 from rhodecode.authentication.base import VCS_TYPE
40 from rhodecode.lib import auth, utils2
39 from rhodecode.lib import auth, utils2
41 from rhodecode.lib import helpers as h
40 from rhodecode.lib import helpers as h
42 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
41 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
43 from rhodecode.lib.exceptions import UserCreationError
42 from rhodecode.lib.exceptions import UserCreationError
44 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
45 get_repo_slug, set_rhodecode_config, password_changed,
46 get_enabled_hook_classes)
47 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
48 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist, safe_str)
45 str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist, safe_str)
49 from rhodecode.model import meta
50 from rhodecode.model.db import Repository, User, ChangesetComment
46 from rhodecode.model.db import Repository, User, ChangesetComment
51 from rhodecode.model.notification import NotificationModel
47 from rhodecode.model.notification import NotificationModel
52 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
48 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
54
49
55 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
56
51
57
52
58 def _filter_proxy(ip):
53 def _filter_proxy(ip):
59 """
54 """
60 IP addresses passed in headers can come as a comma separated list of
55 IP addresses passed in headers can come as a comma separated list of
61 multiple IPs, appended by the various proxies in the chain of request
56 multiple IPs, appended by the various proxies in the chain of request
62 processing, with the left-most entry being the original client.
57 processing, with the left-most entry being the original client.
63 We only care about that first IP, which came from the original client.
58 We only care about that first IP, which came from the original client.
64
59
65 :param ip: ip string from headers
60 :param ip: ip string from headers
66 """
61 """
67 if ',' in ip:
62 if ',' in ip:
68 _ips = ip.split(',')
63 _ips = ip.split(',')
69 _first_ip = _ips[0].strip()
64 _first_ip = _ips[0].strip()
70 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
65 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
71 return _first_ip
66 return _first_ip
72 return ip
67 return ip
73
68
74
69
75 def _filter_port(ip):
70 def _filter_port(ip):
76 """
71 """
77 Removes a port from the given ip; there are 4 main cases to handle here.
72 Removes a port from the given ip; there are 4 main cases to handle here.
78 - ipv4 eg. 127.0.0.1
73 - ipv4 eg. 127.0.0.1
79 - ipv6 eg. ::1
74 - ipv6 eg. ::1
80 - ipv4+port eg. 127.0.0.1:8080
75 - ipv4+port eg. 127.0.0.1:8080
81 - ipv6+port eg. [::1]:8080
76 - ipv6+port eg. [::1]:8080
82
77
83 :param ip:
78 :param ip:
84 """
79 """
85 def is_ipv6(ip_addr):
80 def is_ipv6(ip_addr):
86 if hasattr(socket, 'inet_pton'):
81 if hasattr(socket, 'inet_pton'):
87 try:
82 try:
88 socket.inet_pton(socket.AF_INET6, ip_addr)
83 socket.inet_pton(socket.AF_INET6, ip_addr)
89 except socket.error:
84 except socket.error:
90 return False
85 return False
91 else:
86 else:
92 # fallback to ipaddress
87 # fallback to ipaddress
93 try:
88 try:
94 ipaddress.IPv6Address(safe_unicode(ip_addr))
89 ipaddress.IPv6Address(safe_unicode(ip_addr))
95 except Exception:
90 except Exception:
96 return False
91 return False
97 return True
92 return True
98
93
99 if ':' not in ip: # must be ipv4 pure ip
94 if ':' not in ip: # must be ipv4 pure ip
100 return ip
95 return ip
101
96
102 if '[' in ip and ']' in ip: # ipv6 with port
97 if '[' in ip and ']' in ip: # ipv6 with port
103 return ip.split(']')[0][1:].lower()
98 return ip.split(']')[0][1:].lower()
104
99
105 # must be ipv6 or ipv4 with port
100 # must be ipv6 or ipv4 with port
106 if is_ipv6(ip):
101 if is_ipv6(ip):
107 return ip
102 return ip
108 else:
103 else:
109 ip, _port = ip.split(':')[:2] # means ipv4+port
104 ip, _port = ip.split(':')[:2] # means ipv4+port
110 return ip
105 return ip
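Taken together, _filter_proxy and _filter_port normalize whatever arrives in the proxy headers down to a bare client address. A few illustrative inputs and the values the code above would return; the expected outputs are my reading of the code, not test output from the repository.

_filter_port(_filter_proxy('127.0.0.1'))              # '127.0.0.1'
_filter_port(_filter_proxy('127.0.0.1:8080'))         # '127.0.0.1'
_filter_port(_filter_proxy('[::1]:8080'))             # '::1'
_filter_port(_filter_proxy('::1'))                    # '::1'
_filter_port(_filter_proxy('10.0.0.5, 192.168.1.1'))  # '10.0.0.5' (left-most, original client)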
111
106
112
107
113 def get_ip_addr(environ):
108 def get_ip_addr(environ):
114 proxy_key = 'HTTP_X_REAL_IP'
109 proxy_key = 'HTTP_X_REAL_IP'
115 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
110 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
116 def_key = 'REMOTE_ADDR'
111 def_key = 'REMOTE_ADDR'
117 _filters = lambda x: _filter_port(_filter_proxy(x))
112 _filters = lambda x: _filter_port(_filter_proxy(x))
118
113
119 ip = environ.get(proxy_key)
114 ip = environ.get(proxy_key)
120 if ip:
115 if ip:
121 return _filters(ip)
116 return _filters(ip)
122
117
123 ip = environ.get(proxy_key2)
118 ip = environ.get(proxy_key2)
124 if ip:
119 if ip:
125 return _filters(ip)
120 return _filters(ip)
126
121
127 ip = environ.get(def_key, '0.0.0.0')
122 ip = environ.get(def_key, '0.0.0.0')
128 return _filters(ip)
123 return _filters(ip)
129
124
130
125
131 def get_server_ip_addr(environ, log_errors=True):
126 def get_server_ip_addr(environ, log_errors=True):
132 hostname = environ.get('SERVER_NAME')
127 hostname = environ.get('SERVER_NAME')
133 try:
128 try:
134 return socket.gethostbyname(hostname)
129 return socket.gethostbyname(hostname)
135 except Exception as e:
130 except Exception as e:
136 if log_errors:
131 if log_errors:
137 # in some cases this lookup is not possible, and we don't want to
132 # in some cases this lookup is not possible, and we don't want to
138 # make it an exception in logs
133 # make it an exception in logs
139 log.exception('Could not retrieve server ip address: %s', e)
134 log.exception('Could not retrieve server ip address: %s', e)
140 return hostname
135 return hostname
141
136
142
137
143 def get_server_port(environ):
138 def get_server_port(environ):
144 return environ.get('SERVER_PORT')
139 return environ.get('SERVER_PORT')
145
140
146
141
147 def get_access_path(environ):
142 def get_access_path(environ):
148 path = environ.get('PATH_INFO')
143 path = environ.get('PATH_INFO')
149 org_req = environ.get('pylons.original_request')
144 org_req = environ.get('pylons.original_request')
150 if org_req:
145 if org_req:
151 path = org_req.environ.get('PATH_INFO')
146 path = org_req.environ.get('PATH_INFO')
152 return path
147 return path
153
148
154
149
155 def get_user_agent(environ):
150 def get_user_agent(environ):
156 return environ.get('HTTP_USER_AGENT')
151 return environ.get('HTTP_USER_AGENT')
157
152
158
153
159 def vcs_operation_context(
154 def vcs_operation_context(
160 environ, repo_name, username, action, scm, check_locking=True,
155 environ, repo_name, username, action, scm, check_locking=True,
161 is_shadow_repo=False):
156 is_shadow_repo=False):
162 """
157 """
163 Generate the context for a vcs operation, e.g. push or pull.
158 Generate the context for a vcs operation, e.g. push or pull.
164
159
165 This context is passed over the layers so that hooks triggered by the
160 This context is passed over the layers so that hooks triggered by the
166 vcs operation know details like the user, the user's IP address etc.
161 vcs operation know details like the user, the user's IP address etc.
167
162
168 :param check_locking: Allows switching off the computation of the locking
163 :param check_locking: Allows switching off the computation of the locking
169 data. This serves mainly the need of the simplevcs middleware to be
164 data. This serves mainly the need of the simplevcs middleware to be
170 able to disable this for certain operations.
165 able to disable this for certain operations.
171
166
172 """
167 """
173 # Tri-state value: False: unlock, None: nothing, True: lock
168 # Tri-state value: False: unlock, None: nothing, True: lock
174 make_lock = None
169 make_lock = None
175 locked_by = [None, None, None]
170 locked_by = [None, None, None]
176 is_anonymous = username == User.DEFAULT_USER
171 is_anonymous = username == User.DEFAULT_USER
177 if not is_anonymous and check_locking:
172 if not is_anonymous and check_locking:
178 log.debug('Checking locking on repository "%s"', repo_name)
173 log.debug('Checking locking on repository "%s"', repo_name)
179 user = User.get_by_username(username)
174 user = User.get_by_username(username)
180 repo = Repository.get_by_repo_name(repo_name)
175 repo = Repository.get_by_repo_name(repo_name)
181 make_lock, __, locked_by = repo.get_locking_state(
176 make_lock, __, locked_by = repo.get_locking_state(
182 action, user.user_id)
177 action, user.user_id)
183
178
184 settings_model = VcsSettingsModel(repo=repo_name)
179 settings_model = VcsSettingsModel(repo=repo_name)
185 ui_settings = settings_model.get_ui_settings()
180 ui_settings = settings_model.get_ui_settings()
186
181
187 extras = {
182 extras = {
188 'ip': get_ip_addr(environ),
183 'ip': get_ip_addr(environ),
189 'username': username,
184 'username': username,
190 'action': action,
185 'action': action,
191 'repository': repo_name,
186 'repository': repo_name,
192 'scm': scm,
187 'scm': scm,
193 'config': rhodecode.CONFIG['__file__'],
188 'config': rhodecode.CONFIG['__file__'],
194 'make_lock': make_lock,
189 'make_lock': make_lock,
195 'locked_by': locked_by,
190 'locked_by': locked_by,
196 'server_url': utils2.get_server_url(environ),
191 'server_url': utils2.get_server_url(environ),
197 'user_agent': get_user_agent(environ),
192 'user_agent': get_user_agent(environ),
198 'hooks': get_enabled_hook_classes(ui_settings),
193 'hooks': get_enabled_hook_classes(ui_settings),
199 'is_shadow_repo': is_shadow_repo,
194 'is_shadow_repo': is_shadow_repo,
200 }
195 }
201 return extras
196 return extras
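The extras dict built above is the contract handed down to the VCS hooks layer. A hedged call sketch, assuming a WSGI environ is already available; the repository name, user and action values are illustrative only.

extras = vcs_operation_context(
    environ, repo_name='some-group/some-repo', username='developer',
    action='push', scm='git', check_locking=False)
# hooks later read e.g. extras['username'], extras['ip'], extras['server_url']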
202
197
203
198
204 class BasicAuth(AuthBasicAuthenticator):
199 class BasicAuth(AuthBasicAuthenticator):
205
200
206 def __init__(self, realm, authfunc, registry, auth_http_code=None,
201 def __init__(self, realm, authfunc, registry, auth_http_code=None,
207 initial_call_detection=False, acl_repo_name=None):
202 initial_call_detection=False, acl_repo_name=None):
208 self.realm = realm
203 self.realm = realm
209 self.initial_call = initial_call_detection
204 self.initial_call = initial_call_detection
210 self.authfunc = authfunc
205 self.authfunc = authfunc
211 self.registry = registry
206 self.registry = registry
212 self.acl_repo_name = acl_repo_name
207 self.acl_repo_name = acl_repo_name
213 self._rc_auth_http_code = auth_http_code
208 self._rc_auth_http_code = auth_http_code
214
209
215 def _get_response_from_code(self, http_code):
210 def _get_response_from_code(self, http_code):
216 try:
211 try:
217 return get_exception(safe_int(http_code))
212 return get_exception(safe_int(http_code))
218 except Exception:
213 except Exception:
219 log.exception('Failed to fetch response for code %s' % http_code)
214 log.exception('Failed to fetch response for code %s' % http_code)
220 return HTTPForbidden
215 return HTTPForbidden
221
216
222 def get_rc_realm(self):
217 def get_rc_realm(self):
223 return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm'))
218 return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm'))
224
219
225 def build_authentication(self):
220 def build_authentication(self):
226 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
221 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
227 if self._rc_auth_http_code and not self.initial_call:
222 if self._rc_auth_http_code and not self.initial_call:
228 # return alternative HTTP code if alternative http return code
223 # return alternative HTTP code if alternative http return code
229 # is specified in RhodeCode config, but ONLY if it's not the
224 # is specified in RhodeCode config, but ONLY if it's not the
230 # FIRST call
225 # FIRST call
231 custom_response_klass = self._get_response_from_code(
226 custom_response_klass = self._get_response_from_code(
232 self._rc_auth_http_code)
227 self._rc_auth_http_code)
233 return custom_response_klass(headers=head)
228 return custom_response_klass(headers=head)
234 return HTTPUnauthorized(headers=head)
229 return HTTPUnauthorized(headers=head)
235
230
236 def authenticate(self, environ):
231 def authenticate(self, environ):
237 authorization = AUTHORIZATION(environ)
232 authorization = AUTHORIZATION(environ)
238 if not authorization:
233 if not authorization:
239 return self.build_authentication()
234 return self.build_authentication()
240 (authmeth, auth) = authorization.split(' ', 1)
235 (authmeth, auth) = authorization.split(' ', 1)
241 if 'basic' != authmeth.lower():
236 if 'basic' != authmeth.lower():
242 return self.build_authentication()
237 return self.build_authentication()
243 auth = auth.strip().decode('base64')
238 auth = auth.strip().decode('base64')
244 _parts = auth.split(':', 1)
239 _parts = auth.split(':', 1)
245 if len(_parts) == 2:
240 if len(_parts) == 2:
246 username, password = _parts
241 username, password = _parts
247 auth_data = self.authfunc(
242 auth_data = self.authfunc(
248 username, password, environ, VCS_TYPE,
243 username, password, environ, VCS_TYPE,
249 registry=self.registry, acl_repo_name=self.acl_repo_name)
244 registry=self.registry, acl_repo_name=self.acl_repo_name)
250 if auth_data:
245 if auth_data:
251 return {'username': username, 'auth_data': auth_data}
246 return {'username': username, 'auth_data': auth_data}
252 if username and password:
247 if username and password:
253 # we mark that we actually executed authentication once, at
248 # we mark that we actually executed authentication once, at
254 # that point we can use the alternative auth code
249 # that point we can use the alternative auth code
255 self.initial_call = False
250 self.initial_call = False
256
251
257 return self.build_authentication()
252 return self.build_authentication()
258
253
259 __call__ = authenticate
254 __call__ = authenticate
260
255
261
256
262 def calculate_version_hash(config):
257 def calculate_version_hash(config):
263 return md5(
258 return md5(
264 config.get('beaker.session.secret', '') +
259 config.get('beaker.session.secret', '') +
265 rhodecode.__version__)[:8]
260 rhodecode.__version__)[:8]
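calculate_version_hash simply truncates an md5 of the session secret plus the running version, so version-hashed URLs change after an upgrade without exposing the version string itself. An equivalent sketch using only the standard library, with illustrative input values (the md5 helper used above comes from rhodecode.lib.utils2; hashlib is used here purely for illustration).

import hashlib

secret, version = 'beaker-session-secret', '4.10.0'   # illustrative values
version_hash = hashlib.md5(secret + version).hexdigest()[:8]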
266
261
267
262
268 def get_current_lang(request):
263 def get_current_lang(request):
269 # NOTE(marcink): remove after pyramid move
264 # NOTE(marcink): remove after pyramid move
270 try:
265 try:
271 return translation.get_lang()[0]
266 return translation.get_lang()[0]
272 except:
267 except:
273 pass
268 pass
274
269
275 return getattr(request, '_LOCALE_', request.locale_name)
270 return getattr(request, '_LOCALE_', request.locale_name)
276
271
277
272
278 def attach_context_attributes(context, request, user_id):
273 def attach_context_attributes(context, request, user_id):
279 """
274 """
280 Attach variables into template context called `c`.
275 Attach variables into template context called `c`.
281 """
276 """
282 config = request.registry.settings
277 config = request.registry.settings
283
278
284
279
285 rc_config = SettingsModel().get_all_settings(cache=True)
280 rc_config = SettingsModel().get_all_settings(cache=True)
286
281
287 context.rhodecode_version = rhodecode.__version__
282 context.rhodecode_version = rhodecode.__version__
288 context.rhodecode_edition = config.get('rhodecode.edition')
283 context.rhodecode_edition = config.get('rhodecode.edition')
289 # unique secret + version does not leak the version but keeps it consistent
284 # unique secret + version does not leak the version but keeps it consistent
290 context.rhodecode_version_hash = calculate_version_hash(config)
285 context.rhodecode_version_hash = calculate_version_hash(config)
291
286
292 # Default language set for the incoming request
287 # Default language set for the incoming request
293 context.language = get_current_lang(request)
288 context.language = get_current_lang(request)
294
289
295 # Visual options
290 # Visual options
296 context.visual = AttributeDict({})
291 context.visual = AttributeDict({})
297
292
298 # DB stored Visual Items
293 # DB stored Visual Items
299 context.visual.show_public_icon = str2bool(
294 context.visual.show_public_icon = str2bool(
300 rc_config.get('rhodecode_show_public_icon'))
295 rc_config.get('rhodecode_show_public_icon'))
301 context.visual.show_private_icon = str2bool(
296 context.visual.show_private_icon = str2bool(
302 rc_config.get('rhodecode_show_private_icon'))
297 rc_config.get('rhodecode_show_private_icon'))
303 context.visual.stylify_metatags = str2bool(
298 context.visual.stylify_metatags = str2bool(
304 rc_config.get('rhodecode_stylify_metatags'))
299 rc_config.get('rhodecode_stylify_metatags'))
305 context.visual.dashboard_items = safe_int(
300 context.visual.dashboard_items = safe_int(
306 rc_config.get('rhodecode_dashboard_items', 100))
301 rc_config.get('rhodecode_dashboard_items', 100))
307 context.visual.admin_grid_items = safe_int(
302 context.visual.admin_grid_items = safe_int(
308 rc_config.get('rhodecode_admin_grid_items', 100))
303 rc_config.get('rhodecode_admin_grid_items', 100))
309 context.visual.repository_fields = str2bool(
304 context.visual.repository_fields = str2bool(
310 rc_config.get('rhodecode_repository_fields'))
305 rc_config.get('rhodecode_repository_fields'))
311 context.visual.show_version = str2bool(
306 context.visual.show_version = str2bool(
312 rc_config.get('rhodecode_show_version'))
307 rc_config.get('rhodecode_show_version'))
313 context.visual.use_gravatar = str2bool(
308 context.visual.use_gravatar = str2bool(
314 rc_config.get('rhodecode_use_gravatar'))
309 rc_config.get('rhodecode_use_gravatar'))
315 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
310 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
316 context.visual.default_renderer = rc_config.get(
311 context.visual.default_renderer = rc_config.get(
317 'rhodecode_markup_renderer', 'rst')
312 'rhodecode_markup_renderer', 'rst')
318 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
313 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
319 context.visual.rhodecode_support_url = \
314 context.visual.rhodecode_support_url = \
320 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
315 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
321
316
322 context.visual.affected_files_cut_off = 60
317 context.visual.affected_files_cut_off = 60
323
318
324 context.pre_code = rc_config.get('rhodecode_pre_code')
319 context.pre_code = rc_config.get('rhodecode_pre_code')
325 context.post_code = rc_config.get('rhodecode_post_code')
320 context.post_code = rc_config.get('rhodecode_post_code')
326 context.rhodecode_name = rc_config.get('rhodecode_title')
321 context.rhodecode_name = rc_config.get('rhodecode_title')
327 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
322 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
328 # if we have specified default_encoding in the request, it takes
323 # if we have specified default_encoding in the request, it takes
329 # priority
324 # priority
330 if request.GET.get('default_encoding'):
325 if request.GET.get('default_encoding'):
331 context.default_encodings.insert(0, request.GET.get('default_encoding'))
326 context.default_encodings.insert(0, request.GET.get('default_encoding'))
332 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
327 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
333
328
334 # INI stored
329 # INI stored
335 context.labs_active = str2bool(
330 context.labs_active = str2bool(
336 config.get('labs_settings_active', 'false'))
331 config.get('labs_settings_active', 'false'))
337 context.visual.allow_repo_location_change = str2bool(
332 context.visual.allow_repo_location_change = str2bool(
338 config.get('allow_repo_location_change', True))
333 config.get('allow_repo_location_change', True))
339 context.visual.allow_custom_hooks_settings = str2bool(
334 context.visual.allow_custom_hooks_settings = str2bool(
340 config.get('allow_custom_hooks_settings', True))
335 config.get('allow_custom_hooks_settings', True))
341 context.debug_style = str2bool(config.get('debug_style', False))
336 context.debug_style = str2bool(config.get('debug_style', False))
342
337
343 context.rhodecode_instanceid = config.get('instance_id')
338 context.rhodecode_instanceid = config.get('instance_id')
344
339
345 context.visual.cut_off_limit_diff = safe_int(
340 context.visual.cut_off_limit_diff = safe_int(
346 config.get('cut_off_limit_diff'))
341 config.get('cut_off_limit_diff'))
347 context.visual.cut_off_limit_file = safe_int(
342 context.visual.cut_off_limit_file = safe_int(
348 config.get('cut_off_limit_file'))
343 config.get('cut_off_limit_file'))
349
344
350 # AppEnlight
345 # AppEnlight
351 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
346 context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
352 context.appenlight_api_public_key = config.get(
347 context.appenlight_api_public_key = config.get(
353 'appenlight.api_public_key', '')
348 'appenlight.api_public_key', '')
354 context.appenlight_server_url = config.get('appenlight.server_url', '')
349 context.appenlight_server_url = config.get('appenlight.server_url', '')
355
350
356 # JS template context
351 # JS template context
357 context.template_context = {
352 context.template_context = {
358 'repo_name': None,
353 'repo_name': None,
359 'repo_type': None,
354 'repo_type': None,
360 'repo_landing_commit': None,
355 'repo_landing_commit': None,
361 'rhodecode_user': {
356 'rhodecode_user': {
362 'username': None,
357 'username': None,
363 'email': None,
358 'email': None,
364 'notification_status': False
359 'notification_status': False
365 },
360 },
366 'visual': {
361 'visual': {
367 'default_renderer': None
362 'default_renderer': None
368 },
363 },
369 'commit_data': {
364 'commit_data': {
370 'commit_id': None
365 'commit_id': None
371 },
366 },
372 'pull_request_data': {'pull_request_id': None},
367 'pull_request_data': {'pull_request_id': None},
373 'timeago': {
368 'timeago': {
374 'refresh_time': 120 * 1000,
369 'refresh_time': 120 * 1000,
375 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
370 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
376 },
371 },
377 'pyramid_dispatch': {
372 'pyramid_dispatch': {
378
373
379 },
374 },
380 'extra': {'plugins': {}}
375 'extra': {'plugins': {}}
381 }
376 }
382 # END CONFIG VARS
377 # END CONFIG VARS
383
378
384 diffmode = 'sideside'
379 diffmode = 'sideside'
385 if request.GET.get('diffmode'):
380 if request.GET.get('diffmode'):
386 if request.GET['diffmode'] == 'unified':
381 if request.GET['diffmode'] == 'unified':
387 diffmode = 'unified'
382 diffmode = 'unified'
388 elif request.session.get('diffmode'):
383 elif request.session.get('diffmode'):
389 diffmode = request.session['diffmode']
384 diffmode = request.session['diffmode']
390
385
391 context.diffmode = diffmode
386 context.diffmode = diffmode
392
387
393 if request.session.get('diffmode') != diffmode:
388 if request.session.get('diffmode') != diffmode:
394 request.session['diffmode'] = diffmode
389 request.session['diffmode'] = diffmode
395
390
396 context.csrf_token = auth.get_csrf_token(session=request.session)
391 context.csrf_token = auth.get_csrf_token(session=request.session)
397 context.backends = rhodecode.BACKENDS.keys()
392 context.backends = rhodecode.BACKENDS.keys()
398 context.backends.sort()
393 context.backends.sort()
399 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
394 context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)
400
395
401 # web case
396 # web case
402 if hasattr(request, 'user'):
397 if hasattr(request, 'user'):
403 context.auth_user = request.user
398 context.auth_user = request.user
404 context.rhodecode_user = request.user
399 context.rhodecode_user = request.user
405
400
406 # api case
401 # api case
407 if hasattr(request, 'rpc_user'):
402 if hasattr(request, 'rpc_user'):
408 context.auth_user = request.rpc_user
403 context.auth_user = request.rpc_user
409 context.rhodecode_user = request.rpc_user
404 context.rhodecode_user = request.rpc_user
410
405
411 # attach the whole call context to the request
406 # attach the whole call context to the request
412 request.call_context = context
407 request.call_context = context
413
408
414
409
415 def get_auth_user(request):
410 def get_auth_user(request):
416 environ = request.environ
411 environ = request.environ
417 session = request.session
412 session = request.session
418
413
419 ip_addr = get_ip_addr(environ)
414 ip_addr = get_ip_addr(environ)
420 # make sure that we update permissions each time we call controller
415 # make sure that we update permissions each time we call controller
421 _auth_token = (request.GET.get('auth_token', '') or
416 _auth_token = (request.GET.get('auth_token', '') or
422 request.GET.get('api_key', ''))
417 request.GET.get('api_key', ''))
423
418
424 if _auth_token:
419 if _auth_token:
425 # when using API_KEY we assume user exists, and
420 # when using API_KEY we assume user exists, and
426 # doesn't need auth based on cookies.
421 # doesn't need auth based on cookies.
427 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
422 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
428 authenticated = False
423 authenticated = False
429 else:
424 else:
430 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
425 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
431 try:
426 try:
432 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
427 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
433 ip_addr=ip_addr)
428 ip_addr=ip_addr)
434 except UserCreationError as e:
429 except UserCreationError as e:
435 h.flash(e, 'error')
430 h.flash(e, 'error')
436 # container auth or other auth functions that create users
431 # container auth or other auth functions that create users
437 # on the fly can throw this exception, signaling that there's an
432 # on the fly can throw this exception, signaling that there's an
438 # issue with user creation; an explanation should be provided
433 # issue with user creation; an explanation should be provided
439 # in Exception itself. We then create a simple blank
434 # in Exception itself. We then create a simple blank
440 # AuthUser
435 # AuthUser
441 auth_user = AuthUser(ip_addr=ip_addr)
436 auth_user = AuthUser(ip_addr=ip_addr)
442
437
438 # in case someone changes a password for user it triggers session
439 # flush and forces a re-login
443 if password_changed(auth_user, session):
440 if password_changed(auth_user, session):
444 session.invalidate()
441 session.invalidate()
445 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
442 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
446 auth_user = AuthUser(ip_addr=ip_addr)
443 auth_user = AuthUser(ip_addr=ip_addr)
447
444
448 authenticated = cookie_store.get('is_authenticated')
445 authenticated = cookie_store.get('is_authenticated')
449
446
450 if not auth_user.is_authenticated and auth_user.is_user_object:
447 if not auth_user.is_authenticated and auth_user.is_user_object:
451 # user is not authenticated and not empty
448 # user is not authenticated and not empty
452 auth_user.set_authenticated(authenticated)
449 auth_user.set_authenticated(authenticated)
453
450
454 return auth_user
451 return auth_user
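get_auth_user resolves the effective user with a clear precedence: an auth_token/api_key query parameter wins, otherwise the cookie-backed session is used, and a changed password invalidates that session and falls back to a blank AuthUser. A minimal usage sketch, assuming request is a Pyramid request exposing environ, GET and session as above.

auth_user = get_auth_user(request)
if auth_user.is_authenticated:
    log.debug('request authenticated as %s from IP %s',
              auth_user, get_ip_addr(request.environ))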
455
452
456
453
457 def h_filter(s):
454 def h_filter(s):
458 """
455 """
459 Custom filter for Mako templates. Mako uses `markupsafe.escape` by default;
456 Custom filter for Mako templates. Mako uses `markupsafe.escape` by default;
460 we wrap this with additional functionality that converts None to empty
457 we wrap this with additional functionality that converts None to empty
461 strings.
458 strings.
462 """
459 """
463 if s is None:
460 if s is None:
464 return markupsafe.Markup()
461 return markupsafe.Markup()
465 return markupsafe.escape(s)
462 return markupsafe.escape(s)
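A quick illustration of what h_filter adds on top of plain markupsafe.escape; the expected values are my reading of the code above, not output captured from a running instance.

h_filter(None)               # Markup(u'') instead of the text 'None'
h_filter('<b>bold</b>')      # Markup(u'&lt;b&gt;bold&lt;/b&gt;')
markupsafe.escape(None)      # Markup(u'None') -- the behaviour h_filter avoids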
466
463
467
464
468 def add_events_routes(config):
465 def add_events_routes(config):
469 """
466 """
470 Adds routing that can be used in events. Because some events are triggered
467 Adds routing that can be used in events. Because some events are triggered
471 outside of the pyramid context, we need to bootstrap the request with some
468 outside of the pyramid context, we need to bootstrap the request with some
472 routing registered.
469 routing registered.
473 """
470 """
474
471
475 from rhodecode.apps._base import ADMIN_PREFIX
472 from rhodecode.apps._base import ADMIN_PREFIX
476
473
477 config.add_route(name='home', pattern='/')
474 config.add_route(name='home', pattern='/')
478
475
479 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
476 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
480 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
477 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
481 config.add_route(name='repo_summary', pattern='/{repo_name}')
478 config.add_route(name='repo_summary', pattern='/{repo_name}')
482 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
479 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
483 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
480 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
484
481
485 config.add_route(name='pullrequest_show',
482 config.add_route(name='pullrequest_show',
486 pattern='/{repo_name}/pull-request/{pull_request_id}')
483 pattern='/{repo_name}/pull-request/{pull_request_id}')
487 config.add_route(name='pull_requests_global',
484 config.add_route(name='pull_requests_global',
488 pattern='/pull-request/{pull_request_id}')
485 pattern='/pull-request/{pull_request_id}')
489 config.add_route(name='repo_commit',
486 config.add_route(name='repo_commit',
490 pattern='/{repo_name}/changeset/{commit_id}')
487 pattern='/{repo_name}/changeset/{commit_id}')
491
488
492 config.add_route(name='repo_files',
489 config.add_route(name='repo_files',
493 pattern='/{repo_name}/files/{commit_id}/{f_path}')
490 pattern='/{repo_name}/files/{commit_id}/{f_path}')
494
491
495
492
496 def bootstrap_config(request):
493 def bootstrap_config(request):
497 import pyramid.testing
494 import pyramid.testing
498 registry = pyramid.testing.Registry('RcTestRegistry')
495 registry = pyramid.testing.Registry('RcTestRegistry')
499
496
500 config = pyramid.testing.setUp(registry=registry, request=request)
497 config = pyramid.testing.setUp(registry=registry, request=request)
501
498
502 # allow pyramid lookup in testing
499 # allow pyramid lookup in testing
503 config.include('pyramid_mako')
500 config.include('pyramid_mako')
504 config.include('pyramid_beaker')
501 config.include('pyramid_beaker')
505
502
506 add_events_routes(config)
503 add_events_routes(config)
507
504
508 return config
505 return config
509
506
510
507
511 def bootstrap_request(**kwargs):
508 def bootstrap_request(**kwargs):
512 import pyramid.testing
509 import pyramid.testing
513
510
514 class TestRequest(pyramid.testing.DummyRequest):
511 class TestRequest(pyramid.testing.DummyRequest):
515 application_url = kwargs.pop('application_url', 'http://example.com')
512 application_url = kwargs.pop('application_url', 'http://example.com')
516 host = kwargs.pop('host', 'example.com:80')
513 host = kwargs.pop('host', 'example.com:80')
517 domain = kwargs.pop('domain', 'example.com')
514 domain = kwargs.pop('domain', 'example.com')
518
515
519 def translate(self, msg):
516 def translate(self, msg):
520 return msg
517 return msg
521
518
522 def plularize(self, singular, plural, n):
519 def plularize(self, singular, plural, n):
523 return singular
520 return singular
524
521
525 def get_partial_renderer(self, tmpl_name):
522 def get_partial_renderer(self, tmpl_name):
526
523
527 from rhodecode.lib.partial_renderer import get_partial_renderer
524 from rhodecode.lib.partial_renderer import get_partial_renderer
528 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
525 return get_partial_renderer(request=self, tmpl_name=tmpl_name)
529
526
530 _call_context = {}
527 _call_context = {}
531 @property
528 @property
532 def call_context(self):
529 def call_context(self):
533 return self._call_context
530 return self._call_context
534
531
535 class TestDummySession(pyramid.testing.DummySession):
532 class TestDummySession(pyramid.testing.DummySession):
536 def save(*arg, **kw):
533 def save(*arg, **kw):
537 pass
534 pass
538
535
539 request = TestRequest(**kwargs)
536 request = TestRequest(**kwargs)
540 request.session = TestDummySession()
537 request.session = TestDummySession()
541
538
542 return request
539 return request
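bootstrap_request and bootstrap_config exist so events and other code running outside a full Pyramid app can still be exercised: the former builds a DummyRequest subclass with a pass-through translator and a no-op session, the latter registers pyramid_mako, pyramid_beaker and the event routes against it. A hedged test-style sketch; the asserted values are assumptions based on the defaults above, not recorded output.

request = bootstrap_request(application_url='http://rc.example.com')
config = bootstrap_config(request)

assert request.translate('hello') == 'hello'   # translator is a pass-through
assert request.call_context == {}              # empty until something attaches a context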
543
540
@@ -1,1170 +1,1107 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of diffing helpers, previously part of vcs
23 Set of diffing helpers, previously part of vcs
24 """
24 """
25
25
26 import re
26 import collections
27 import collections
27 import re
28 import difflib
28 import difflib
29 import logging
29 import logging
30
30
31 from itertools import tee, imap
31 from itertools import tee, imap
32
32
33 from rhodecode.translation import temp_translation_factory as _
34
35 from rhodecode.lib.vcs.exceptions import VCSError
33 from rhodecode.lib.vcs.exceptions import VCSError
36 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
34 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib.helpers import escape
39 from rhodecode.lib.utils2 import safe_unicode
35 from rhodecode.lib.utils2 import safe_unicode
40
36
41 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
42
38
43 # define max context, a file with more than this number of lines is unusable
39 # define max context, a file with more than this number of lines is unusable
44 # in browser anyway
40 # in browser anyway
45 MAX_CONTEXT = 1024 * 1014
41 MAX_CONTEXT = 1024 * 1014
46
42
47
43
48 class OPS(object):
44 class OPS(object):
49 ADD = 'A'
45 ADD = 'A'
50 MOD = 'M'
46 MOD = 'M'
51 DEL = 'D'
47 DEL = 'D'
52
48
53
49
54 def wrap_to_table(str_):
55 return '''<table class="code-difftable">
56 <tr class="line no-comment">
57 <td class="add-comment-line tooltip" title="%s"><span class="add-comment-content"></span></td>
58 <td></td>
59 <td class="lineno new"></td>
60 <td class="code no-comment"><pre>%s</pre></td>
61 </tr>
62 </table>''' % (_('Click to comment'), str_)
63
64
65 def wrapped_diff(filenode_old, filenode_new, diff_limit=None, file_limit=None,
66 show_full_diff=False, ignore_whitespace=True, line_context=3,
67 enable_comments=False):
68 """
69 Returns a diff wrapped into a table, checks cut_off_limit for the file and
70 the whole diff, and presents a proper message
71 """
72
73 if filenode_old is None:
74 filenode_old = FileNode(filenode_new.path, '', EmptyCommit())
75
76 if filenode_old.is_binary or filenode_new.is_binary:
77 diff = wrap_to_table(_('Binary file'))
78 stats = None
79 size = 0
80 data = None
81
82 elif diff_limit != -1 and (diff_limit is None or
83 (filenode_old.size < diff_limit and filenode_new.size < diff_limit)):
84
85 f_gitdiff = get_gitdiff(filenode_old, filenode_new,
86 ignore_whitespace=ignore_whitespace,
87 context=line_context)
88 diff_processor = DiffProcessor(
89 f_gitdiff, format='gitdiff', diff_limit=diff_limit,
90 file_limit=file_limit, show_full_diff=show_full_diff)
91 _parsed = diff_processor.prepare()
92
93 diff = diff_processor.as_html(enable_comments=enable_comments)
94 stats = _parsed[0]['stats'] if _parsed else None
95 size = len(diff or '')
96 data = _parsed[0] if _parsed else None
97 else:
98 diff = wrap_to_table(_('Changeset was too big and was cut off, use '
99 'diff menu to display this diff'))
100 stats = None
101 size = 0
102 data = None
103 if not diff:
104 submodules = filter(lambda o: isinstance(o, SubModuleNode),
105 [filenode_new, filenode_old])
106 if submodules:
107 diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
108 else:
109 diff = wrap_to_table(_('No changes detected'))
110
111 cs1 = filenode_old.commit.raw_id
112 cs2 = filenode_new.commit.raw_id
113
114 return size, cs1, cs2, diff, stats, data
115
116
117 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
50 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
118 """
51 """
119 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
52 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
120
53
121 :param ignore_whitespace: ignore whitespaces in diff
54 :param ignore_whitespace: ignore whitespaces in diff
122 """
55 """
123 # make sure we pass in default context
56 # make sure we pass in default context
124 context = context or 3
57 context = context or 3
125 # protect against IntOverflow when passing HUGE context
58 # protect against IntOverflow when passing HUGE context
126 if context > MAX_CONTEXT:
59 if context > MAX_CONTEXT:
127 context = MAX_CONTEXT
60 context = MAX_CONTEXT
128
61
129 submodules = filter(lambda o: isinstance(o, SubModuleNode),
62 submodules = filter(lambda o: isinstance(o, SubModuleNode),
130 [filenode_new, filenode_old])
63 [filenode_new, filenode_old])
131 if submodules:
64 if submodules:
132 return ''
65 return ''
133
66
134 for filenode in (filenode_old, filenode_new):
67 for filenode in (filenode_old, filenode_new):
135 if not isinstance(filenode, FileNode):
68 if not isinstance(filenode, FileNode):
136 raise VCSError(
69 raise VCSError(
137 "Given object should be FileNode object, not %s"
70 "Given object should be FileNode object, not %s"
138 % filenode.__class__)
71 % filenode.__class__)
139
72
140 repo = filenode_new.commit.repository
73 repo = filenode_new.commit.repository
141 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
74 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
142 new_commit = filenode_new.commit
75 new_commit = filenode_new.commit
143
76
144 vcs_gitdiff = repo.get_diff(
77 vcs_gitdiff = repo.get_diff(
145 old_commit, new_commit, filenode_new.path,
78 old_commit, new_commit, filenode_new.path,
146 ignore_whitespace, context, path1=filenode_old.path)
79 ignore_whitespace, context, path1=filenode_old.path)
147 return vcs_gitdiff
80 return vcs_gitdiff
148
81
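# Minimal usage sketch for get_gitdiff(), assuming two FileNode objects bound
# to commits of the same repository; the commit objects, the get_node() call
# and the path are assumptions for illustration only:
#
#   old_node = old_commit.get_node('setup.py')   # assumed vcs commit API
#   new_node = new_commit.get_node('setup.py')
#   raw_git_diff = get_gitdiff(old_node, new_node,
#                              ignore_whitespace=True, context=3)
#   # returns '' when either side is a SubModuleNode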
149 NEW_FILENODE = 1
82 NEW_FILENODE = 1
150 DEL_FILENODE = 2
83 DEL_FILENODE = 2
151 MOD_FILENODE = 3
84 MOD_FILENODE = 3
152 RENAMED_FILENODE = 4
85 RENAMED_FILENODE = 4
153 COPIED_FILENODE = 5
86 COPIED_FILENODE = 5
154 CHMOD_FILENODE = 6
87 CHMOD_FILENODE = 6
155 BIN_FILENODE = 7
88 BIN_FILENODE = 7
156
89
157
90
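# These *_FILENODE constants key the per-file `stats['ops']` dict built by the
# parsers below, mapping an operation code to a human-readable message,
# e.g. (illustrative values only):
#
#   stats['ops'] = {
#       NEW_FILENODE: 'new file 100644',
#       CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
#   }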
158 class LimitedDiffContainer(object):
91 class LimitedDiffContainer(object):
159
92
160 def __init__(self, diff_limit, cur_diff_size, diff):
93 def __init__(self, diff_limit, cur_diff_size, diff):
161 self.diff = diff
94 self.diff = diff
162 self.diff_limit = diff_limit
95 self.diff_limit = diff_limit
163 self.cur_diff_size = cur_diff_size
96 self.cur_diff_size = cur_diff_size
164
97
165 def __getitem__(self, key):
98 def __getitem__(self, key):
166 return self.diff.__getitem__(key)
99 return self.diff.__getitem__(key)
167
100
168 def __iter__(self):
101 def __iter__(self):
169 for l in self.diff:
102 for l in self.diff:
170 yield l
103 yield l
171
104
172
105
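# Usage sketch: when DiffLimitExceeded is caught, the parsers below swap their
# plain-list result for this thin wrapper, so templates can still iterate the
# files while also seeing the limit that was hit:
#
#   limited = LimitedDiffContainer(diff_limit, cur_diff_size, parsed_files)
#   for file_info in limited:      # behaves like the wrapped list
#       ...
#   limited.diff_limit             # extra metadata for the "diff too big" UI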
173 class Action(object):
106 class Action(object):
174 """
107 """
175 Contains constants for the action value of the lines in a parsed diff.
108 Contains constants for the action value of the lines in a parsed diff.
176 """
109 """
177
110
178 ADD = 'add'
111 ADD = 'add'
179 DELETE = 'del'
112 DELETE = 'del'
180 UNMODIFIED = 'unmod'
113 UNMODIFIED = 'unmod'
181
114
182 CONTEXT = 'context'
115 CONTEXT = 'context'
183 OLD_NO_NL = 'old-no-nl'
116 OLD_NO_NL = 'old-no-nl'
184 NEW_NO_NL = 'new-no-nl'
117 NEW_NO_NL = 'new-no-nl'
185
118
186
119
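# Each parsed diff line produced below is a plain dict tagged with one of these
# Action values, roughly (illustrative example):
#
#   {'old_lineno': '', 'new_lineno': 42,
#    'action': Action.ADD, 'line': 'print("hello")'}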
187 class DiffProcessor(object):
120 class DiffProcessor(object):
188 """
121 """
189 Give it a unified or git diff and it returns a list of the files that were
122 Give it a unified or git diff and it returns a list of the files that were
190 mentioned in the diff together with a dict of meta information that
123 mentioned in the diff together with a dict of meta information that
191 can be used to render it in an HTML template.
124 can be used to render it in an HTML template.
192
125
193 .. note:: Unicode handling
126 .. note:: Unicode handling
194
127
195 The original diffs are a byte sequence and can contain filenames
128 The original diffs are a byte sequence and can contain filenames
196 in mixed encodings. This class generally returns `unicode` objects
129 in mixed encodings. This class generally returns `unicode` objects
197 since the result is intended for presentation to the user.
130 since the result is intended for presentation to the user.
198
131
199 """
132 """
200 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
133 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
201 _newline_marker = re.compile(r'^\\ No newline at end of file')
134 _newline_marker = re.compile(r'^\\ No newline at end of file')
202
135
203 # used for inline highlighter word split
136 # used for inline highlighter word split
204 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
137 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
205
138
206 # collapse ranges of commits over given number
139 # collapse ranges of commits over given number
207 _collapse_commits_over = 5
140 _collapse_commits_over = 5
208
141
209 def __init__(self, diff, format='gitdiff', diff_limit=None,
142 def __init__(self, diff, format='gitdiff', diff_limit=None,
210 file_limit=None, show_full_diff=True):
143 file_limit=None, show_full_diff=True):
211 """
144 """
212 :param diff: A `Diff` object representing a diff from a vcs backend
145 :param diff: A `Diff` object representing a diff from a vcs backend
213 :param format: format of diff passed, `udiff` or `gitdiff`
146 :param format: format of diff passed, `udiff` or `gitdiff`
214 :param diff_limit: defines the size of a diff that is considered "big";
147 :param diff_limit: defines the size of a diff that is considered "big";
215 the cut-off is triggered based on this parameter. Set to None
148 the cut-off is triggered based on this parameter. Set to None
216 to show the full diff
149 to show the full diff
217 """
150 """
218 self._diff = diff
151 self._diff = diff
219 self._format = format
152 self._format = format
220 self.adds = 0
153 self.adds = 0
221 self.removes = 0
154 self.removes = 0
222 # calculate diff size
155 # calculate diff size
223 self.diff_limit = diff_limit
156 self.diff_limit = diff_limit
224 self.file_limit = file_limit
157 self.file_limit = file_limit
225 self.show_full_diff = show_full_diff
158 self.show_full_diff = show_full_diff
226 self.cur_diff_size = 0
159 self.cur_diff_size = 0
227 self.parsed = False
160 self.parsed = False
228 self.parsed_diff = []
161 self.parsed_diff = []
229
162
230 log.debug('Initialized DiffProcessor with %s mode', format)
163 log.debug('Initialized DiffProcessor with %s mode', format)
231 if format == 'gitdiff':
164 if format == 'gitdiff':
232 self.differ = self._highlight_line_difflib
165 self.differ = self._highlight_line_difflib
233 self._parser = self._parse_gitdiff
166 self._parser = self._parse_gitdiff
234 else:
167 else:
235 self.differ = self._highlight_line_udiff
168 self.differ = self._highlight_line_udiff
236 self._parser = self._new_parse_gitdiff
169 self._parser = self._new_parse_gitdiff
237
170
238 def _copy_iterator(self):
171 def _copy_iterator(self):
239 """
172 """
240 make a fresh copy of the generator; we should not iterate through
173 make a fresh copy of the generator; we should not iterate through
241 the original, as it is needed for repeated operations on
174 the original, as it is needed for repeated operations on
242 this instance of DiffProcessor
175 this instance of DiffProcessor
243 """
176 """
244 self.__udiff, iterator_copy = tee(self.__udiff)
177 self.__udiff, iterator_copy = tee(self.__udiff)
245 return iterator_copy
178 return iterator_copy
246
179
247 def _escaper(self, string):
180 def _escaper(self, string):
248 """
181 """
249 Escaper for the diff: escapes special HTML chars and checks the diff limit
182 Escaper for the diff: escapes special HTML chars and checks the diff limit
250
183
251 :param string:
184 :param string:
252 """
185 """
253
186
254 self.cur_diff_size += len(string)
187 self.cur_diff_size += len(string)
255
188
256 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
189 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
257 raise DiffLimitExceeded('Diff Limit Exceeded')
190 raise DiffLimitExceeded('Diff Limit Exceeded')
258
191
259 return safe_unicode(string)\
192 return safe_unicode(string)\
260 .replace('&', '&amp;')\
193 .replace('&', '&amp;')\
261 .replace('<', '&lt;')\
194 .replace('<', '&lt;')\
262 .replace('>', '&gt;')
195 .replace('>', '&gt;')
263
196
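# Illustrative note: _escaper() HTML-escapes one raw diff line and adds its
# length to cur_diff_size, raising DiffLimitExceeded once the running total
# passes diff_limit, e.g.:
#
#   self._escaper('+<div class="x">')  ->  u'+&lt;div class="x"&gt;'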
264 def _line_counter(self, l):
197 def _line_counter(self, l):
265 """
198 """
266 Checks each line and bumps total adds/removes for this diff
199 Checks each line and bumps total adds/removes for this diff
267
200
268 :param l:
201 :param l:
269 """
202 """
270 if l.startswith('+') and not l.startswith('+++'):
203 if l.startswith('+') and not l.startswith('+++'):
271 self.adds += 1
204 self.adds += 1
272 elif l.startswith('-') and not l.startswith('---'):
205 elif l.startswith('-') and not l.startswith('---'):
273 self.removes += 1
206 self.removes += 1
274 return safe_unicode(l)
207 return safe_unicode(l)
275
208
276 def _highlight_line_difflib(self, line, next_):
209 def _highlight_line_difflib(self, line, next_):
277 """
210 """
278 Highlight inline changes in both lines.
211 Highlight inline changes in both lines.
279 """
212 """
280
213
281 if line['action'] == Action.DELETE:
214 if line['action'] == Action.DELETE:
282 old, new = line, next_
215 old, new = line, next_
283 else:
216 else:
284 old, new = next_, line
217 old, new = next_, line
285
218
286 oldwords = self._token_re.split(old['line'])
219 oldwords = self._token_re.split(old['line'])
287 newwords = self._token_re.split(new['line'])
220 newwords = self._token_re.split(new['line'])
288 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
221 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
289
222
290 oldfragments, newfragments = [], []
223 oldfragments, newfragments = [], []
291 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
224 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
292 oldfrag = ''.join(oldwords[i1:i2])
225 oldfrag = ''.join(oldwords[i1:i2])
293 newfrag = ''.join(newwords[j1:j2])
226 newfrag = ''.join(newwords[j1:j2])
294 if tag != 'equal':
227 if tag != 'equal':
295 if oldfrag:
228 if oldfrag:
296 oldfrag = '<del>%s</del>' % oldfrag
229 oldfrag = '<del>%s</del>' % oldfrag
297 if newfrag:
230 if newfrag:
298 newfrag = '<ins>%s</ins>' % newfrag
231 newfrag = '<ins>%s</ins>' % newfrag
299 oldfragments.append(oldfrag)
232 oldfragments.append(oldfrag)
300 newfragments.append(newfrag)
233 newfragments.append(newfrag)
301
234
302 old['line'] = "".join(oldfragments)
235 old['line'] = "".join(oldfragments)
303 new['line'] = "".join(newfragments)
236 new['line'] = "".join(newfragments)
304
237
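# Illustrative note: given a DELETE/ADD pair such as
#   old['line'] = 'result = compute(x)'
#   new['line'] = 'result = compute(x, y)'
# _highlight_line_difflib() rewrites both lines in place with inline markers,
# roughly 'result = compute(x<ins>, y</ins>)' on the new side, while the old
# side stays unwrapped where nothing was removed.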
305 def _highlight_line_udiff(self, line, next_):
238 def _highlight_line_udiff(self, line, next_):
306 """
239 """
307 Highlight inline changes in both lines.
240 Highlight inline changes in both lines.
308 """
241 """
309 start = 0
242 start = 0
310 limit = min(len(line['line']), len(next_['line']))
243 limit = min(len(line['line']), len(next_['line']))
311 while start < limit and line['line'][start] == next_['line'][start]:
244 while start < limit and line['line'][start] == next_['line'][start]:
312 start += 1
245 start += 1
313 end = -1
246 end = -1
314 limit -= start
247 limit -= start
315 while -end <= limit and line['line'][end] == next_['line'][end]:
248 while -end <= limit and line['line'][end] == next_['line'][end]:
316 end -= 1
249 end -= 1
317 end += 1
250 end += 1
318 if start or end:
251 if start or end:
319 def do(l):
252 def do(l):
320 last = end + len(l['line'])
253 last = end + len(l['line'])
321 if l['action'] == Action.ADD:
254 if l['action'] == Action.ADD:
322 tag = 'ins'
255 tag = 'ins'
323 else:
256 else:
324 tag = 'del'
257 tag = 'del'
325 l['line'] = '%s<%s>%s</%s>%s' % (
258 l['line'] = '%s<%s>%s</%s>%s' % (
326 l['line'][:start],
259 l['line'][:start],
327 tag,
260 tag,
328 l['line'][start:last],
261 l['line'][start:last],
329 tag,
262 tag,
330 l['line'][last:]
263 l['line'][last:]
331 )
264 )
332 do(line)
265 do(line)
333 do(next_)
266 do(next_)
334
267
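# Illustrative note: the udiff variant only wraps the middle span that differs
# after trimming the common prefix and suffix, e.g.
#   'foo bar baz'  vs  'foo qux baz'
# becomes 'foo <del>bar</del> baz' and 'foo <ins>qux</ins> baz'.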
335 def _clean_line(self, line, command):
268 def _clean_line(self, line, command):
336 if command in ['+', '-', ' ']:
269 if command in ['+', '-', ' ']:
337 # only modify the line if it's actually a diff thing
270 # only modify the line if it's actually a diff thing
338 line = line[1:]
271 line = line[1:]
339 return line
272 return line
340
273
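# Illustrative note: _clean_line() strips the one-character diff command prefix
# from real diff content lines and leaves everything else untouched, e.g.
#   self._clean_line('+added text', '+')     -> 'added text'
#   self._clean_line('@@ -1,2 +1,2 @@', '@') -> '@@ -1,2 +1,2 @@'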
341 def _parse_gitdiff(self, inline_diff=True):
274 def _parse_gitdiff(self, inline_diff=True):
342 _files = []
275 _files = []
343 diff_container = lambda arg: arg
276 diff_container = lambda arg: arg
344
277
345 for chunk in self._diff.chunks():
278 for chunk in self._diff.chunks():
346 head = chunk.header
279 head = chunk.header
347
280
348 diff = imap(self._escaper, chunk.diff.splitlines(1))
281 diff = imap(self._escaper, chunk.diff.splitlines(1))
349 raw_diff = chunk.raw
282 raw_diff = chunk.raw
350 limited_diff = False
283 limited_diff = False
351 exceeds_limit = False
284 exceeds_limit = False
352
285
353 op = None
286 op = None
354 stats = {
287 stats = {
355 'added': 0,
288 'added': 0,
356 'deleted': 0,
289 'deleted': 0,
357 'binary': False,
290 'binary': False,
358 'ops': {},
291 'ops': {},
359 }
292 }
360
293
361 if head['deleted_file_mode']:
294 if head['deleted_file_mode']:
362 op = OPS.DEL
295 op = OPS.DEL
363 stats['binary'] = True
296 stats['binary'] = True
364 stats['ops'][DEL_FILENODE] = 'deleted file'
297 stats['ops'][DEL_FILENODE] = 'deleted file'
365
298
366 elif head['new_file_mode']:
299 elif head['new_file_mode']:
367 op = OPS.ADD
300 op = OPS.ADD
368 stats['binary'] = True
301 stats['binary'] = True
369 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
302 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
370 else: # modify operation, can be copy, rename or chmod
303 else: # modify operation, can be copy, rename or chmod
371
304
372 # CHMOD
305 # CHMOD
373 if head['new_mode'] and head['old_mode']:
306 if head['new_mode'] and head['old_mode']:
374 op = OPS.MOD
307 op = OPS.MOD
375 stats['binary'] = True
308 stats['binary'] = True
376 stats['ops'][CHMOD_FILENODE] = (
309 stats['ops'][CHMOD_FILENODE] = (
377 'modified file chmod %s => %s' % (
310 'modified file chmod %s => %s' % (
378 head['old_mode'], head['new_mode']))
311 head['old_mode'], head['new_mode']))
379 # RENAME
312 # RENAME
380 if head['rename_from'] != head['rename_to']:
313 if head['rename_from'] != head['rename_to']:
381 op = OPS.MOD
314 op = OPS.MOD
382 stats['binary'] = True
315 stats['binary'] = True
383 stats['ops'][RENAMED_FILENODE] = (
316 stats['ops'][RENAMED_FILENODE] = (
384 'file renamed from %s to %s' % (
317 'file renamed from %s to %s' % (
385 head['rename_from'], head['rename_to']))
318 head['rename_from'], head['rename_to']))
386 # COPY
319 # COPY
387 if head.get('copy_from') and head.get('copy_to'):
320 if head.get('copy_from') and head.get('copy_to'):
388 op = OPS.MOD
321 op = OPS.MOD
389 stats['binary'] = True
322 stats['binary'] = True
390 stats['ops'][COPIED_FILENODE] = (
323 stats['ops'][COPIED_FILENODE] = (
391 'file copied from %s to %s' % (
324 'file copied from %s to %s' % (
392 head['copy_from'], head['copy_to']))
325 head['copy_from'], head['copy_to']))
393
326
394 # If our newly parsed headers didn't match anything, fall back to
327 # If our newly parsed headers didn't match anything, fall back to
395 # old-style detection
328 # old-style detection
396 if op is None:
329 if op is None:
397 if not head['a_file'] and head['b_file']:
330 if not head['a_file'] and head['b_file']:
398 op = OPS.ADD
331 op = OPS.ADD
399 stats['binary'] = True
332 stats['binary'] = True
400 stats['ops'][NEW_FILENODE] = 'new file'
333 stats['ops'][NEW_FILENODE] = 'new file'
401
334
402 elif head['a_file'] and not head['b_file']:
335 elif head['a_file'] and not head['b_file']:
403 op = OPS.DEL
336 op = OPS.DEL
404 stats['binary'] = True
337 stats['binary'] = True
405 stats['ops'][DEL_FILENODE] = 'deleted file'
338 stats['ops'][DEL_FILENODE] = 'deleted file'
406
339
407 # it's neither ADD nor DELETE
340 # it's neither ADD nor DELETE
408 if op is None:
341 if op is None:
409 op = OPS.MOD
342 op = OPS.MOD
410 stats['binary'] = True
343 stats['binary'] = True
411 stats['ops'][MOD_FILENODE] = 'modified file'
344 stats['ops'][MOD_FILENODE] = 'modified file'
412
345
413 # a real non-binary diff
346 # a real non-binary diff
414 if head['a_file'] or head['b_file']:
347 if head['a_file'] or head['b_file']:
415 try:
348 try:
416 raw_diff, chunks, _stats = self._parse_lines(diff)
349 raw_diff, chunks, _stats = self._parse_lines(diff)
417 stats['binary'] = False
350 stats['binary'] = False
418 stats['added'] = _stats[0]
351 stats['added'] = _stats[0]
419 stats['deleted'] = _stats[1]
352 stats['deleted'] = _stats[1]
420 # explicit mark that it's a modified file
353 # explicit mark that it's a modified file
421 if op == OPS.MOD:
354 if op == OPS.MOD:
422 stats['ops'][MOD_FILENODE] = 'modified file'
355 stats['ops'][MOD_FILENODE] = 'modified file'
423 exceeds_limit = len(raw_diff) > self.file_limit
356 exceeds_limit = len(raw_diff) > self.file_limit
424
357
425 # unlike the _escaper function, we validate the size of
358 # unlike the _escaper function, we validate the size of
426 # each file instead of the whole diff;
359 # each file instead of the whole diff;
427 # the diff will hide big files but still show small ones.
360 # the diff will hide big files but still show small ones.
428 # From testing, big files are fairly safe to parse,
361 # From testing, big files are fairly safe to parse,
429 # but the browser is the bottleneck.
362 # but the browser is the bottleneck.
430 if not self.show_full_diff and exceeds_limit:
363 if not self.show_full_diff and exceeds_limit:
431 raise DiffLimitExceeded('File Limit Exceeded')
364 raise DiffLimitExceeded('File Limit Exceeded')
432
365
433 except DiffLimitExceeded:
366 except DiffLimitExceeded:
434 diff_container = lambda _diff: \
367 diff_container = lambda _diff: \
435 LimitedDiffContainer(
368 LimitedDiffContainer(
436 self.diff_limit, self.cur_diff_size, _diff)
369 self.diff_limit, self.cur_diff_size, _diff)
437
370
438 exceeds_limit = len(raw_diff) > self.file_limit
371 exceeds_limit = len(raw_diff) > self.file_limit
439 limited_diff = True
372 limited_diff = True
440 chunks = []
373 chunks = []
441
374
442 else: # GIT format binary patch, or possibly empty diff
375 else: # GIT format binary patch, or possibly empty diff
443 if head['bin_patch']:
376 if head['bin_patch']:
444 # we have operation already extracted, but we mark simply
377 # we have operation already extracted, but we mark simply
445 # it's a diff we wont show for binary files
378 # it's a diff we wont show for binary files
446 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
379 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
447 chunks = []
380 chunks = []
448
381
449 if chunks and not self.show_full_diff and op == OPS.DEL:
382 if chunks and not self.show_full_diff and op == OPS.DEL:
450 # if not full diff mode show deleted file contents
383 # if not full diff mode show deleted file contents
451 # TODO: anderson: if the view is not too big, there is no way
384 # TODO: anderson: if the view is not too big, there is no way
452 # to see the content of the file
385 # to see the content of the file
453 chunks = []
386 chunks = []
454
387
455 chunks.insert(0, [{
388 chunks.insert(0, [{
456 'old_lineno': '',
389 'old_lineno': '',
457 'new_lineno': '',
390 'new_lineno': '',
458 'action': Action.CONTEXT,
391 'action': Action.CONTEXT,
459 'line': msg,
392 'line': msg,
460 } for _op, msg in stats['ops'].iteritems()
393 } for _op, msg in stats['ops'].iteritems()
461 if _op not in [MOD_FILENODE]])
394 if _op not in [MOD_FILENODE]])
462
395
463 _files.append({
396 _files.append({
464 'filename': safe_unicode(head['b_path']),
397 'filename': safe_unicode(head['b_path']),
465 'old_revision': head['a_blob_id'],
398 'old_revision': head['a_blob_id'],
466 'new_revision': head['b_blob_id'],
399 'new_revision': head['b_blob_id'],
467 'chunks': chunks,
400 'chunks': chunks,
468 'raw_diff': safe_unicode(raw_diff),
401 'raw_diff': safe_unicode(raw_diff),
469 'operation': op,
402 'operation': op,
470 'stats': stats,
403 'stats': stats,
471 'exceeds_limit': exceeds_limit,
404 'exceeds_limit': exceeds_limit,
472 'is_limited_diff': limited_diff,
405 'is_limited_diff': limited_diff,
473 })
406 })
474
407
475 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
408 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
476 OPS.DEL: 2}.get(info['operation'])
409 OPS.DEL: 2}.get(info['operation'])
477
410
478 if not inline_diff:
411 if not inline_diff:
479 return diff_container(sorted(_files, key=sorter))
412 return diff_container(sorted(_files, key=sorter))
480
413
481 # highlight inline changes
414 # highlight inline changes
482 for diff_data in _files:
415 for diff_data in _files:
483 for chunk in diff_data['chunks']:
416 for chunk in diff_data['chunks']:
484 lineiter = iter(chunk)
417 lineiter = iter(chunk)
485 try:
418 try:
486 while 1:
419 while 1:
487 line = lineiter.next()
420 line = lineiter.next()
488 if line['action'] not in (
421 if line['action'] not in (
489 Action.UNMODIFIED, Action.CONTEXT):
422 Action.UNMODIFIED, Action.CONTEXT):
490 nextline = lineiter.next()
423 nextline = lineiter.next()
491 if nextline['action'] in ['unmod', 'context'] or \
424 if nextline['action'] in ['unmod', 'context'] or \
492 nextline['action'] == line['action']:
425 nextline['action'] == line['action']:
493 continue
426 continue
494 self.differ(line, nextline)
427 self.differ(line, nextline)
495 except StopIteration:
428 except StopIteration:
496 pass
429 pass
497
430
498 return diff_container(sorted(_files, key=sorter))
431 return diff_container(sorted(_files, key=sorter))
499
432
500 def _check_large_diff(self):
433 def _check_large_diff(self):
501 log.debug('Diff exceeds current diff_limit of %s', self.diff_limit)
434 log.debug('Diff exceeds current diff_limit of %s', self.diff_limit)
502 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
435 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
503 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
436 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
504
437
505 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
438 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
506 def _new_parse_gitdiff(self, inline_diff=True):
439 def _new_parse_gitdiff(self, inline_diff=True):
507 _files = []
440 _files = []
508
441
509 # this can be overridden later with a LimitedDiffContainer type
442 # this can be overridden later with a LimitedDiffContainer type
510 diff_container = lambda arg: arg
443 diff_container = lambda arg: arg
511
444
512 for chunk in self._diff.chunks():
445 for chunk in self._diff.chunks():
513 head = chunk.header
446 head = chunk.header
514 log.debug('parsing diff %r', head)
447 log.debug('parsing diff %r', head)
515
448
516 raw_diff = chunk.raw
449 raw_diff = chunk.raw
517 limited_diff = False
450 limited_diff = False
518 exceeds_limit = False
451 exceeds_limit = False
519
452
520 op = None
453 op = None
521 stats = {
454 stats = {
522 'added': 0,
455 'added': 0,
523 'deleted': 0,
456 'deleted': 0,
524 'binary': False,
457 'binary': False,
525 'old_mode': None,
458 'old_mode': None,
526 'new_mode': None,
459 'new_mode': None,
527 'ops': {},
460 'ops': {},
528 }
461 }
529 if head['old_mode']:
462 if head['old_mode']:
530 stats['old_mode'] = head['old_mode']
463 stats['old_mode'] = head['old_mode']
531 if head['new_mode']:
464 if head['new_mode']:
532 stats['new_mode'] = head['new_mode']
465 stats['new_mode'] = head['new_mode']
533 if head['b_mode']:
466 if head['b_mode']:
534 stats['new_mode'] = head['b_mode']
467 stats['new_mode'] = head['b_mode']
535
468
536 # delete file
469 # delete file
537 if head['deleted_file_mode']:
470 if head['deleted_file_mode']:
538 op = OPS.DEL
471 op = OPS.DEL
539 stats['binary'] = True
472 stats['binary'] = True
540 stats['ops'][DEL_FILENODE] = 'deleted file'
473 stats['ops'][DEL_FILENODE] = 'deleted file'
541
474
542 # new file
475 # new file
543 elif head['new_file_mode']:
476 elif head['new_file_mode']:
544 op = OPS.ADD
477 op = OPS.ADD
545 stats['binary'] = True
478 stats['binary'] = True
546 stats['old_mode'] = None
479 stats['old_mode'] = None
547 stats['new_mode'] = head['new_file_mode']
480 stats['new_mode'] = head['new_file_mode']
548 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
481 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
549
482
550 # modify operation, can be copy, rename or chmod
483 # modify operation, can be copy, rename or chmod
551 else:
484 else:
552 # CHMOD
485 # CHMOD
553 if head['new_mode'] and head['old_mode']:
486 if head['new_mode'] and head['old_mode']:
554 op = OPS.MOD
487 op = OPS.MOD
555 stats['binary'] = True
488 stats['binary'] = True
556 stats['ops'][CHMOD_FILENODE] = (
489 stats['ops'][CHMOD_FILENODE] = (
557 'modified file chmod %s => %s' % (
490 'modified file chmod %s => %s' % (
558 head['old_mode'], head['new_mode']))
491 head['old_mode'], head['new_mode']))
559
492
560 # RENAME
493 # RENAME
561 if head['rename_from'] != head['rename_to']:
494 if head['rename_from'] != head['rename_to']:
562 op = OPS.MOD
495 op = OPS.MOD
563 stats['binary'] = True
496 stats['binary'] = True
564 stats['renamed'] = (head['rename_from'], head['rename_to'])
497 stats['renamed'] = (head['rename_from'], head['rename_to'])
565 stats['ops'][RENAMED_FILENODE] = (
498 stats['ops'][RENAMED_FILENODE] = (
566 'file renamed from %s to %s' % (
499 'file renamed from %s to %s' % (
567 head['rename_from'], head['rename_to']))
500 head['rename_from'], head['rename_to']))
568 # COPY
501 # COPY
569 if head.get('copy_from') and head.get('copy_to'):
502 if head.get('copy_from') and head.get('copy_to'):
570 op = OPS.MOD
503 op = OPS.MOD
571 stats['binary'] = True
504 stats['binary'] = True
572 stats['copied'] = (head['copy_from'], head['copy_to'])
505 stats['copied'] = (head['copy_from'], head['copy_to'])
573 stats['ops'][COPIED_FILENODE] = (
506 stats['ops'][COPIED_FILENODE] = (
574 'file copied from %s to %s' % (
507 'file copied from %s to %s' % (
575 head['copy_from'], head['copy_to']))
508 head['copy_from'], head['copy_to']))
576
509
577 # If our newly parsed headers didn't match anything, fall back to
510 # If our newly parsed headers didn't match anything, fall back to
578 # old-style detection
511 # old-style detection
579 if op is None:
512 if op is None:
580 if not head['a_file'] and head['b_file']:
513 if not head['a_file'] and head['b_file']:
581 op = OPS.ADD
514 op = OPS.ADD
582 stats['binary'] = True
515 stats['binary'] = True
583 stats['new_file'] = True
516 stats['new_file'] = True
584 stats['ops'][NEW_FILENODE] = 'new file'
517 stats['ops'][NEW_FILENODE] = 'new file'
585
518
586 elif head['a_file'] and not head['b_file']:
519 elif head['a_file'] and not head['b_file']:
587 op = OPS.DEL
520 op = OPS.DEL
588 stats['binary'] = True
521 stats['binary'] = True
589 stats['ops'][DEL_FILENODE] = 'deleted file'
522 stats['ops'][DEL_FILENODE] = 'deleted file'
590
523
591 # it's neither ADD nor DELETE
524 # it's neither ADD nor DELETE
592 if op is None:
525 if op is None:
593 op = OPS.MOD
526 op = OPS.MOD
594 stats['binary'] = True
527 stats['binary'] = True
595 stats['ops'][MOD_FILENODE] = 'modified file'
528 stats['ops'][MOD_FILENODE] = 'modified file'
596
529
597 # a real non-binary diff
530 # a real non-binary diff
598 if head['a_file'] or head['b_file']:
531 if head['a_file'] or head['b_file']:
599 diff = iter(chunk.diff.splitlines(1))
532 diff = iter(chunk.diff.splitlines(1))
600
533
601 # append each file to the diff size
534 # append each file to the diff size
602 raw_chunk_size = len(raw_diff)
535 raw_chunk_size = len(raw_diff)
603
536
604 exceeds_limit = raw_chunk_size > self.file_limit
537 exceeds_limit = raw_chunk_size > self.file_limit
605 self.cur_diff_size += raw_chunk_size
538 self.cur_diff_size += raw_chunk_size
606
539
607 try:
540 try:
608 # Check each file instead of the whole diff.
541 # Check each file instead of the whole diff.
609 # Diff will hide big files but still show small ones.
542 # Diff will hide big files but still show small ones.
610 # From the tests big files are fairly safe to be parsed
543 # From the tests big files are fairly safe to be parsed
611 # but the browser is the bottleneck.
544 # but the browser is the bottleneck.
612 if not self.show_full_diff and exceeds_limit:
545 if not self.show_full_diff and exceeds_limit:
613 log.debug('File `%s` exceeds current file_limit of %s',
546 log.debug('File `%s` exceeds current file_limit of %s',
614 safe_unicode(head['b_path']), self.file_limit)
547 safe_unicode(head['b_path']), self.file_limit)
615 raise DiffLimitExceeded(
548 raise DiffLimitExceeded(
616 'File Limit %s Exceeded', self.file_limit)
549 'File Limit %s Exceeded', self.file_limit)
617
550
618 self._check_large_diff()
551 self._check_large_diff()
619
552
620 raw_diff, chunks, _stats = self._new_parse_lines(diff)
553 raw_diff, chunks, _stats = self._new_parse_lines(diff)
621 stats['binary'] = False
554 stats['binary'] = False
622 stats['added'] = _stats[0]
555 stats['added'] = _stats[0]
623 stats['deleted'] = _stats[1]
556 stats['deleted'] = _stats[1]
624 # explicit mark that it's a modified file
557 # explicit mark that it's a modified file
625 if op == OPS.MOD:
558 if op == OPS.MOD:
626 stats['ops'][MOD_FILENODE] = 'modified file'
559 stats['ops'][MOD_FILENODE] = 'modified file'
627
560
628 except DiffLimitExceeded:
561 except DiffLimitExceeded:
629 diff_container = lambda _diff: \
562 diff_container = lambda _diff: \
630 LimitedDiffContainer(
563 LimitedDiffContainer(
631 self.diff_limit, self.cur_diff_size, _diff)
564 self.diff_limit, self.cur_diff_size, _diff)
632
565
633 limited_diff = True
566 limited_diff = True
634 chunks = []
567 chunks = []
635
568
636 else: # GIT format binary patch, or possibly empty diff
569 else: # GIT format binary patch, or possibly empty diff
637 if head['bin_patch']:
570 if head['bin_patch']:
638 # we have the operation already extracted, but we simply mark
571 # we have the operation already extracted, but we simply mark
639 # it as a diff we won't show for binary files
572 # it as a diff we won't show for binary files
640 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
573 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
641 chunks = []
574 chunks = []
642
575
643 # Hide content of deleted node by setting empty chunks
576 # Hide content of deleted node by setting empty chunks
644 if chunks and not self.show_full_diff and op == OPS.DEL:
577 if chunks and not self.show_full_diff and op == OPS.DEL:
645 # if not full diff mode show deleted file contents
578 # if not full diff mode show deleted file contents
646 # TODO: anderson: if the view is not too big, there is no way
579 # TODO: anderson: if the view is not too big, there is no way
647 # to see the content of the file
580 # to see the content of the file
648 chunks = []
581 chunks = []
649
582
650 chunks.insert(
583 chunks.insert(
651 0, [{'old_lineno': '',
584 0, [{'old_lineno': '',
652 'new_lineno': '',
585 'new_lineno': '',
653 'action': Action.CONTEXT,
586 'action': Action.CONTEXT,
654 'line': msg,
587 'line': msg,
655 } for _op, msg in stats['ops'].iteritems()
588 } for _op, msg in stats['ops'].iteritems()
656 if _op not in [MOD_FILENODE]])
589 if _op not in [MOD_FILENODE]])
657
590
658 original_filename = safe_unicode(head['a_path'])
591 original_filename = safe_unicode(head['a_path'])
659 _files.append({
592 _files.append({
660 'original_filename': original_filename,
593 'original_filename': original_filename,
661 'filename': safe_unicode(head['b_path']),
594 'filename': safe_unicode(head['b_path']),
662 'old_revision': head['a_blob_id'],
595 'old_revision': head['a_blob_id'],
663 'new_revision': head['b_blob_id'],
596 'new_revision': head['b_blob_id'],
664 'chunks': chunks,
597 'chunks': chunks,
665 'raw_diff': safe_unicode(raw_diff),
598 'raw_diff': safe_unicode(raw_diff),
666 'operation': op,
599 'operation': op,
667 'stats': stats,
600 'stats': stats,
668 'exceeds_limit': exceeds_limit,
601 'exceeds_limit': exceeds_limit,
669 'is_limited_diff': limited_diff,
602 'is_limited_diff': limited_diff,
670 })
603 })
671
604
672 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
605 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
673 OPS.DEL: 2}.get(info['operation'])
606 OPS.DEL: 2}.get(info['operation'])
674
607
675 return diff_container(sorted(_files, key=sorter))
608 return diff_container(sorted(_files, key=sorter))
676
609
677 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
610 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
678 def _parse_lines(self, diff):
611 def _parse_lines(self, diff):
679 """
612 """
680 Parse the diff and return data for the template.
613 Parse the diff and return data for the template.
681 """
614 """
682
615
683 lineiter = iter(diff)
616 lineiter = iter(diff)
684 stats = [0, 0]
617 stats = [0, 0]
685 chunks = []
618 chunks = []
686 raw_diff = []
619 raw_diff = []
687
620
688 try:
621 try:
689 line = lineiter.next()
622 line = lineiter.next()
690
623
691 while line:
624 while line:
692 raw_diff.append(line)
625 raw_diff.append(line)
693 lines = []
626 lines = []
694 chunks.append(lines)
627 chunks.append(lines)
695
628
696 match = self._chunk_re.match(line)
629 match = self._chunk_re.match(line)
697
630
698 if not match:
631 if not match:
699 break
632 break
700
633
701 gr = match.groups()
634 gr = match.groups()
702 (old_line, old_end,
635 (old_line, old_end,
703 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
636 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
704 old_line -= 1
637 old_line -= 1
705 new_line -= 1
638 new_line -= 1
706
639
707 context = len(gr) == 5
640 context = len(gr) == 5
708 old_end += old_line
641 old_end += old_line
709 new_end += new_line
642 new_end += new_line
710
643
711 if context:
644 if context:
712 # skip context only if it's the first line
645 # skip context only if it's the first line
713 if int(gr[0]) > 1:
646 if int(gr[0]) > 1:
714 lines.append({
647 lines.append({
715 'old_lineno': '...',
648 'old_lineno': '...',
716 'new_lineno': '...',
649 'new_lineno': '...',
717 'action': Action.CONTEXT,
650 'action': Action.CONTEXT,
718 'line': line,
651 'line': line,
719 })
652 })
720
653
721 line = lineiter.next()
654 line = lineiter.next()
722
655
723 while old_line < old_end or new_line < new_end:
656 while old_line < old_end or new_line < new_end:
724 command = ' '
657 command = ' '
725 if line:
658 if line:
726 command = line[0]
659 command = line[0]
727
660
728 affects_old = affects_new = False
661 affects_old = affects_new = False
729
662
730 # ignore those if we don't expect them
663 # ignore those if we don't expect them
731 if command in '#@':
664 if command in '#@':
732 continue
665 continue
733 elif command == '+':
666 elif command == '+':
734 affects_new = True
667 affects_new = True
735 action = Action.ADD
668 action = Action.ADD
736 stats[0] += 1
669 stats[0] += 1
737 elif command == '-':
670 elif command == '-':
738 affects_old = True
671 affects_old = True
739 action = Action.DELETE
672 action = Action.DELETE
740 stats[1] += 1
673 stats[1] += 1
741 else:
674 else:
742 affects_old = affects_new = True
675 affects_old = affects_new = True
743 action = Action.UNMODIFIED
676 action = Action.UNMODIFIED
744
677
745 if not self._newline_marker.match(line):
678 if not self._newline_marker.match(line):
746 old_line += affects_old
679 old_line += affects_old
747 new_line += affects_new
680 new_line += affects_new
748 lines.append({
681 lines.append({
749 'old_lineno': affects_old and old_line or '',
682 'old_lineno': affects_old and old_line or '',
750 'new_lineno': affects_new and new_line or '',
683 'new_lineno': affects_new and new_line or '',
751 'action': action,
684 'action': action,
752 'line': self._clean_line(line, command)
685 'line': self._clean_line(line, command)
753 })
686 })
754 raw_diff.append(line)
687 raw_diff.append(line)
755
688
756 line = lineiter.next()
689 line = lineiter.next()
757
690
758 if self._newline_marker.match(line):
691 if self._newline_marker.match(line):
759 # we need to append to lines, since this is not
692 # we need to append to lines, since this is not
760 # counted in the line specs of diff
693 # counted in the line specs of diff
761 lines.append({
694 lines.append({
762 'old_lineno': '...',
695 'old_lineno': '...',
763 'new_lineno': '...',
696 'new_lineno': '...',
764 'action': Action.CONTEXT,
697 'action': Action.CONTEXT,
765 'line': self._clean_line(line, command)
698 'line': self._clean_line(line, command)
766 })
699 })
767
700
768 except StopIteration:
701 except StopIteration:
769 pass
702 pass
770 return ''.join(raw_diff), chunks, stats
703 return ''.join(raw_diff), chunks, stats
771
704
772 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
705 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
773 def _new_parse_lines(self, diff_iter):
706 def _new_parse_lines(self, diff_iter):
774 """
707 """
775 Parse the diff and return data for the template.
708 Parse the diff and return data for the template.
776 """
709 """
777
710
778 stats = [0, 0]
711 stats = [0, 0]
779 chunks = []
712 chunks = []
780 raw_diff = []
713 raw_diff = []
781
714
782 diff_iter = imap(lambda s: safe_unicode(s), diff_iter)
715 diff_iter = imap(lambda s: safe_unicode(s), diff_iter)
783
716
784 try:
717 try:
785 line = diff_iter.next()
718 line = diff_iter.next()
786
719
787 while line:
720 while line:
788 raw_diff.append(line)
721 raw_diff.append(line)
789 match = self._chunk_re.match(line)
722 match = self._chunk_re.match(line)
790
723
791 if not match:
724 if not match:
792 break
725 break
793
726
794 gr = match.groups()
727 gr = match.groups()
795 (old_line, old_end,
728 (old_line, old_end,
796 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
729 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
797
730
798 lines = []
731 lines = []
799 hunk = {
732 hunk = {
800 'section_header': gr[-1],
733 'section_header': gr[-1],
801 'source_start': old_line,
734 'source_start': old_line,
802 'source_length': old_end,
735 'source_length': old_end,
803 'target_start': new_line,
736 'target_start': new_line,
804 'target_length': new_end,
737 'target_length': new_end,
805 'lines': lines,
738 'lines': lines,
806 }
739 }
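# Illustrative example: for a header line '@@ -1,3 +1,4 @@ def foo():' this
# hunk dict would look like:
#   {'section_header': ' def foo():', 'source_start': 1, 'source_length': 3,
#    'target_start': 1, 'target_length': 4, 'lines': []}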
807 chunks.append(hunk)
740 chunks.append(hunk)
808
741
809 old_line -= 1
742 old_line -= 1
810 new_line -= 1
743 new_line -= 1
811
744
812 context = len(gr) == 5
745 context = len(gr) == 5
813 old_end += old_line
746 old_end += old_line
814 new_end += new_line
747 new_end += new_line
815
748
816 line = diff_iter.next()
749 line = diff_iter.next()
817
750
818 while old_line < old_end or new_line < new_end:
751 while old_line < old_end or new_line < new_end:
819 command = ' '
752 command = ' '
820 if line:
753 if line:
821 command = line[0]
754 command = line[0]
822
755
823 affects_old = affects_new = False
756 affects_old = affects_new = False
824
757
825 # ignore those if we don't expect them
758 # ignore those if we don't expect them
826 if command in '#@':
759 if command in '#@':
827 continue
760 continue
828 elif command == '+':
761 elif command == '+':
829 affects_new = True
762 affects_new = True
830 action = Action.ADD
763 action = Action.ADD
831 stats[0] += 1
764 stats[0] += 1
832 elif command == '-':
765 elif command == '-':
833 affects_old = True
766 affects_old = True
834 action = Action.DELETE
767 action = Action.DELETE
835 stats[1] += 1
768 stats[1] += 1
836 else:
769 else:
837 affects_old = affects_new = True
770 affects_old = affects_new = True
838 action = Action.UNMODIFIED
771 action = Action.UNMODIFIED
839
772
840 if not self._newline_marker.match(line):
773 if not self._newline_marker.match(line):
841 old_line += affects_old
774 old_line += affects_old
842 new_line += affects_new
775 new_line += affects_new
843 lines.append({
776 lines.append({
844 'old_lineno': affects_old and old_line or '',
777 'old_lineno': affects_old and old_line or '',
845 'new_lineno': affects_new and new_line or '',
778 'new_lineno': affects_new and new_line or '',
846 'action': action,
779 'action': action,
847 'line': self._clean_line(line, command)
780 'line': self._clean_line(line, command)
848 })
781 })
849 raw_diff.append(line)
782 raw_diff.append(line)
850
783
851 line = diff_iter.next()
784 line = diff_iter.next()
852
785
853 if self._newline_marker.match(line):
786 if self._newline_marker.match(line):
854 # we need to append to lines, since this is not
787 # we need to append to lines, since this is not
855 # counted in the line specs of diff
788 # counted in the line specs of diff
856 if affects_old:
789 if affects_old:
857 action = Action.OLD_NO_NL
790 action = Action.OLD_NO_NL
858 elif affects_new:
791 elif affects_new:
859 action = Action.NEW_NO_NL
792 action = Action.NEW_NO_NL
860 else:
793 else:
861 raise Exception('invalid context for no newline')
794 raise Exception('invalid context for no newline')
862
795
863 lines.append({
796 lines.append({
864 'old_lineno': None,
797 'old_lineno': None,
865 'new_lineno': None,
798 'new_lineno': None,
866 'action': action,
799 'action': action,
867 'line': self._clean_line(line, command)
800 'line': self._clean_line(line, command)
868 })
801 })
869
802
870 except StopIteration:
803 except StopIteration:
871 pass
804 pass
872
805
873 return ''.join(raw_diff), chunks, stats
806 return ''.join(raw_diff), chunks, stats
874
807
875 def _safe_id(self, idstring):
808 def _safe_id(self, idstring):
876 """Make a string safe for including in an id attribute.
809 """Make a string safe for including in an id attribute.
877
810
878 The HTML spec says that id attributes 'must begin with
811 The HTML spec says that id attributes 'must begin with
879 a letter ([A-Za-z]) and may be followed by any number
812 a letter ([A-Za-z]) and may be followed by any number
880 of letters, digits ([0-9]), hyphens ("-"), underscores
813 of letters, digits ([0-9]), hyphens ("-"), underscores
881 ("_"), colons (":"), and periods (".")'. These regexps
814 ("_"), colons (":"), and periods (".")'. These regexps
882 are slightly over-zealous, in that they remove colons
815 are slightly over-zealous, in that they remove colons
883 and periods unnecessarily.
816 and periods unnecessarily.
884
817
885 Whitespace is transformed into underscores, and then
818 Whitespace is transformed into underscores, and then
886 anything which is not a hyphen or a character that
819 anything which is not a hyphen or a character that
887 matches \w (alphanumerics and underscore) is removed.
820 matches \w (alphanumerics and underscore) is removed.
888
821
889 """
822 """
890 # Transform all whitespace to underscore
823 # Transform all whitespace to underscore
891 idstring = re.sub(r'\s', "_", '%s' % idstring)
824 idstring = re.sub(r'\s', "_", '%s' % idstring)
892 # Remove everything that is not a hyphen or a member of \w
825 # Remove everything that is not a hyphen or a member of \w
893 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
826 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
894 return idstring
827 return idstring
895
828
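# Illustrative example of _safe_id():
#   self._safe_id('My File.txt')  ->  'my_filetxt'
# (whitespace becomes '_', the '.' is dropped, and the result is lowercased)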
896 def prepare(self, inline_diff=True):
829 def prepare(self, inline_diff=True):
897 """
830 """
898 Prepare the passed udiff for HTML rendering.
831 Prepare the passed udiff for HTML rendering.
899
832
900 :return: A list of dicts with diff information.
833 :return: A list of dicts with diff information.
901 """
834 """
902 parsed = self._parser(inline_diff=inline_diff)
835 parsed = self._parser(inline_diff=inline_diff)
903 self.parsed = True
836 self.parsed = True
904 self.parsed_diff = parsed
837 self.parsed_diff = parsed
905 return parsed
838 return parsed
906
839
907 def as_raw(self, diff_lines=None):
840 def as_raw(self, diff_lines=None):
908 """
841 """
909 Returns raw diff as a byte string
842 Returns raw diff as a byte string
910 """
843 """
911 return self._diff.raw
844 return self._diff.raw
912
845
913 def as_html(self, table_class='code-difftable', line_class='line',
846 def as_html(self, table_class='code-difftable', line_class='line',
914 old_lineno_class='lineno old', new_lineno_class='lineno new',
847 old_lineno_class='lineno old', new_lineno_class='lineno new',
915 code_class='code', enable_comments=False, parsed_lines=None):
848 code_class='code', enable_comments=False, parsed_lines=None):
916 """
849 """
917 Return given diff as html table with customized css classes
850 Return given diff as html table with customized css classes
918 """
851 """
852 # TODO(marcink): not sure how to pass in translator
853 # here in an efficient way, leave the _ for proper gettext extraction
854 _ = lambda s: s
855
919 def _link_to_if(condition, label, url):
856 def _link_to_if(condition, label, url):
920 """
857 """
921 Generates a link if the condition is met, or just the label if not.
858 Generates a link if the condition is met, or just the label if not.
922 """
859 """
923
860
924 if condition:
861 if condition:
925 return '''<a href="%(url)s" class="tooltip"
862 return '''<a href="%(url)s" class="tooltip"
926 title="%(title)s">%(label)s</a>''' % {
863 title="%(title)s">%(label)s</a>''' % {
927 'title': _('Click to select line'),
864 'title': _('Click to select line'),
928 'url': url,
865 'url': url,
929 'label': label
866 'label': label
930 }
867 }
931 else:
868 else:
932 return label
869 return label
933 if not self.parsed:
870 if not self.parsed:
934 self.prepare()
871 self.prepare()
935
872
936 diff_lines = self.parsed_diff
873 diff_lines = self.parsed_diff
937 if parsed_lines:
874 if parsed_lines:
938 diff_lines = parsed_lines
875 diff_lines = parsed_lines
939
876
940 _html_empty = True
877 _html_empty = True
941 _html = []
878 _html = []
942 _html.append('''<table class="%(table_class)s">\n''' % {
879 _html.append('''<table class="%(table_class)s">\n''' % {
943 'table_class': table_class
880 'table_class': table_class
944 })
881 })
945
882
946 for diff in diff_lines:
883 for diff in diff_lines:
947 for line in diff['chunks']:
884 for line in diff['chunks']:
948 _html_empty = False
885 _html_empty = False
949 for change in line:
886 for change in line:
950 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
887 _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
951 'lc': line_class,
888 'lc': line_class,
952 'action': change['action']
889 'action': change['action']
953 })
890 })
954 anchor_old_id = ''
891 anchor_old_id = ''
955 anchor_new_id = ''
892 anchor_new_id = ''
956 anchor_old = "%(filename)s_o%(oldline_no)s" % {
893 anchor_old = "%(filename)s_o%(oldline_no)s" % {
957 'filename': self._safe_id(diff['filename']),
894 'filename': self._safe_id(diff['filename']),
958 'oldline_no': change['old_lineno']
895 'oldline_no': change['old_lineno']
959 }
896 }
960 anchor_new = "%(filename)s_n%(oldline_no)s" % {
897 anchor_new = "%(filename)s_n%(oldline_no)s" % {
961 'filename': self._safe_id(diff['filename']),
898 'filename': self._safe_id(diff['filename']),
962 'oldline_no': change['new_lineno']
899 'oldline_no': change['new_lineno']
963 }
900 }
964 cond_old = (change['old_lineno'] != '...' and
901 cond_old = (change['old_lineno'] != '...' and
965 change['old_lineno'])
902 change['old_lineno'])
966 cond_new = (change['new_lineno'] != '...' and
903 cond_new = (change['new_lineno'] != '...' and
967 change['new_lineno'])
904 change['new_lineno'])
968 if cond_old:
905 if cond_old:
969 anchor_old_id = 'id="%s"' % anchor_old
906 anchor_old_id = 'id="%s"' % anchor_old
970 if cond_new:
907 if cond_new:
971 anchor_new_id = 'id="%s"' % anchor_new
908 anchor_new_id = 'id="%s"' % anchor_new
972
909
973 if change['action'] != Action.CONTEXT:
910 if change['action'] != Action.CONTEXT:
974 anchor_link = True
911 anchor_link = True
975 else:
912 else:
976 anchor_link = False
913 anchor_link = False
977
914
978 ###########################################################
915 ###########################################################
979 # COMMENT ICONS
916 # COMMENT ICONS
980 ###########################################################
917 ###########################################################
981 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
918 _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')
982
919
983 if enable_comments and change['action'] != Action.CONTEXT:
920 if enable_comments and change['action'] != Action.CONTEXT:
984 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
921 _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')
985
922
986 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
923 _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')
987
924
988 ###########################################################
925 ###########################################################
989 # OLD LINE NUMBER
926 # OLD LINE NUMBER
990 ###########################################################
927 ###########################################################
991 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
928 _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
992 'a_id': anchor_old_id,
929 'a_id': anchor_old_id,
993 'olc': old_lineno_class
930 'olc': old_lineno_class
994 })
931 })
995
932
996 _html.append('''%(link)s''' % {
933 _html.append('''%(link)s''' % {
997 'link': _link_to_if(anchor_link, change['old_lineno'],
934 'link': _link_to_if(anchor_link, change['old_lineno'],
998 '#%s' % anchor_old)
935 '#%s' % anchor_old)
999 })
936 })
1000 _html.append('''</td>\n''')
937 _html.append('''</td>\n''')
1001 ###########################################################
938 ###########################################################
1002 # NEW LINE NUMBER
939 # NEW LINE NUMBER
1003 ###########################################################
940 ###########################################################
1004
941
1005 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
942 _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
1006 'a_id': anchor_new_id,
943 'a_id': anchor_new_id,
1007 'nlc': new_lineno_class
944 'nlc': new_lineno_class
1008 })
945 })
1009
946
1010 _html.append('''%(link)s''' % {
947 _html.append('''%(link)s''' % {
1011 'link': _link_to_if(anchor_link, change['new_lineno'],
948 'link': _link_to_if(anchor_link, change['new_lineno'],
1012 '#%s' % anchor_new)
949 '#%s' % anchor_new)
1013 })
950 })
1014 _html.append('''</td>\n''')
951 _html.append('''</td>\n''')
1015 ###########################################################
952 ###########################################################
1016 # CODE
953 # CODE
1017 ###########################################################
954 ###########################################################
1018 code_classes = [code_class]
955 code_classes = [code_class]
1019 if (not enable_comments or
956 if (not enable_comments or
1020 change['action'] == Action.CONTEXT):
957 change['action'] == Action.CONTEXT):
1021 code_classes.append('no-comment')
958 code_classes.append('no-comment')
1022 _html.append('\t<td class="%s">' % ' '.join(code_classes))
959 _html.append('\t<td class="%s">' % ' '.join(code_classes))
1023 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
960 _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
1024 'code': change['line']
961 'code': change['line']
1025 })
962 })
1026
963
1027 _html.append('''\t</td>''')
964 _html.append('''\t</td>''')
1028 _html.append('''\n</tr>\n''')
965 _html.append('''\n</tr>\n''')
1029 _html.append('''</table>''')
966 _html.append('''</table>''')
1030 if _html_empty:
967 if _html_empty:
1031 return None
968 return None
1032 return ''.join(_html)
969 return ''.join(_html)
1033
970
1034 def stat(self):
971 def stat(self):
1035 """
972 """
1036 Returns tuple of added, and removed lines for this instance
973 Returns tuple of added, and removed lines for this instance
1037 """
974 """
1038 return self.adds, self.removes
975 return self.adds, self.removes
1039
976
1040 def get_context_of_line(
977 def get_context_of_line(
1041 self, path, diff_line=None, context_before=3, context_after=3):
978 self, path, diff_line=None, context_before=3, context_after=3):
1042 """
979 """
1043 Returns the context lines for the specified diff line.
980 Returns the context lines for the specified diff line.
1044
981
1045 :type diff_line: :class:`DiffLineNumber`
982 :type diff_line: :class:`DiffLineNumber`
1046 """
983 """
1047 assert self.parsed, "DiffProcessor is not initialized."
984 assert self.parsed, "DiffProcessor is not initialized."
1048
985
1049 if None not in diff_line:
986 if None not in diff_line:
1050 raise ValueError(
987 raise ValueError(
1051 "Cannot specify both line numbers: {}".format(diff_line))
988 "Cannot specify both line numbers: {}".format(diff_line))
1052
989
1053 file_diff = self._get_file_diff(path)
990 file_diff = self._get_file_diff(path)
1054 chunk, idx = self._find_chunk_line_index(file_diff, diff_line)
991 chunk, idx = self._find_chunk_line_index(file_diff, diff_line)
1055
992
1056 first_line_to_include = max(idx - context_before, 0)
993 first_line_to_include = max(idx - context_before, 0)
1057 first_line_after_context = idx + context_after + 1
994 first_line_after_context = idx + context_after + 1
1058 context_lines = chunk[first_line_to_include:first_line_after_context]
995 context_lines = chunk[first_line_to_include:first_line_after_context]
1059
996
1060 line_contents = [
997 line_contents = [
1061 _context_line(line) for line in context_lines
998 _context_line(line) for line in context_lines
1062 if _is_diff_content(line)]
999 if _is_diff_content(line)]
1063 # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
1000 # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
1064 # Once they are fixed, we can drop this line here.
1001 # Once they are fixed, we can drop this line here.
1065 if line_contents:
1002 if line_contents:
1066 line_contents[-1] = (
1003 line_contents[-1] = (
1067 line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
1004 line_contents[-1][0], line_contents[-1][1].rstrip('\n') + '\n')
1068 return line_contents
1005 return line_contents
1069
1006
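# Usage sketch: get_context_of_line() expects a DiffLineNumber with exactly one
# side set; the path and line number below are placeholders. For example, to
# fetch three lines of context around new line 42 of a file in the parsed diff:
#
#   processor.prepare()
#   context = processor.get_context_of_line(
#       'setup.py', DiffLineNumber(old=None, new=42))
#   # -> list of (action, line) tuples, e.g. [('unmod', u'import os\n'), ...]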
1070 def find_context(self, path, context, offset=0):
1007 def find_context(self, path, context, offset=0):
1071 """
1008 """
1072 Finds the given `context` inside of the diff.
1009 Finds the given `context` inside of the diff.
1073
1010
1074 Use the parameter `offset` to specify which offset the target line has
1011 Use the parameter `offset` to specify which offset the target line has
1075 inside of the given `context`. This way the correct diff line will be
1012 inside of the given `context`. This way the correct diff line will be
1076 returned.
1013 returned.
1077
1014
1078 :param offset: Shall be used to specify the offset of the main line
1015 :param offset: Shall be used to specify the offset of the main line
1079 within the given `context`.
1016 within the given `context`.
1080 """
1017 """
1081 if offset < 0 or offset >= len(context):
1018 if offset < 0 or offset >= len(context):
1082 raise ValueError(
1019 raise ValueError(
1083 "Only positive values up to the length of the context "
1020 "Only positive values up to the length of the context "
1084 "minus one are allowed.")
1021 "minus one are allowed.")
1085
1022
1086 matches = []
1023 matches = []
1087 file_diff = self._get_file_diff(path)
1024 file_diff = self._get_file_diff(path)
1088
1025
1089 for chunk in file_diff['chunks']:
1026 for chunk in file_diff['chunks']:
1090 context_iter = iter(context)
1027 context_iter = iter(context)
1091 for line_idx, line in enumerate(chunk):
1028 for line_idx, line in enumerate(chunk):
1092 try:
1029 try:
1093 if _context_line(line) == context_iter.next():
1030 if _context_line(line) == context_iter.next():
1094 continue
1031 continue
1095 except StopIteration:
1032 except StopIteration:
1096 matches.append((line_idx, chunk))
1033 matches.append((line_idx, chunk))
1097 context_iter = iter(context)
1034 context_iter = iter(context)
1098
1035
1099 # Increment position and trigger StopIteration
1036 # Increment position and trigger StopIteration
1100 # if we had a match at the end
1037 # if we had a match at the end
1101 line_idx += 1
1038 line_idx += 1
1102 try:
1039 try:
1103 context_iter.next()
1040 context_iter.next()
1104 except StopIteration:
1041 except StopIteration:
1105 matches.append((line_idx, chunk))
1042 matches.append((line_idx, chunk))
1106
1043
1107 effective_offset = len(context) - offset
1044 effective_offset = len(context) - offset
1108 found_at_diff_lines = [
1045 found_at_diff_lines = [
1109 _line_to_diff_line_number(chunk[idx - effective_offset])
1046 _line_to_diff_line_number(chunk[idx - effective_offset])
1110 for idx, chunk in matches]
1047 for idx, chunk in matches]
1111
1048
1112 return found_at_diff_lines
1049 return found_at_diff_lines
1113
1050
1114 def _get_file_diff(self, path):
1051 def _get_file_diff(self, path):
1115 for file_diff in self.parsed_diff:
1052 for file_diff in self.parsed_diff:
1116 if file_diff['filename'] == path:
1053 if file_diff['filename'] == path:
1117 break
1054 break
1118 else:
1055 else:
1119 raise FileNotInDiffException("File {} not in diff".format(path))
1056 raise FileNotInDiffException("File {} not in diff".format(path))
1120 return file_diff
1057 return file_diff
1121
1058
1122 def _find_chunk_line_index(self, file_diff, diff_line):
1059 def _find_chunk_line_index(self, file_diff, diff_line):
1123 for chunk in file_diff['chunks']:
1060 for chunk in file_diff['chunks']:
1124 for idx, line in enumerate(chunk):
1061 for idx, line in enumerate(chunk):
1125 if line['old_lineno'] == diff_line.old:
1062 if line['old_lineno'] == diff_line.old:
1126 return chunk, idx
1063 return chunk, idx
1127 if line['new_lineno'] == diff_line.new:
1064 if line['new_lineno'] == diff_line.new:
1128 return chunk, idx
1065 return chunk, idx
1129 raise LineNotInDiffException(
1066 raise LineNotInDiffException(
1130 "The line {} is not part of the diff.".format(diff_line))
1067 "The line {} is not part of the diff.".format(diff_line))
1131
1068
1132
1069
1133 def _is_diff_content(line):
1070 def _is_diff_content(line):
1134 return line['action'] in (
1071 return line['action'] in (
1135 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1072 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1136
1073
1137
1074
1138 def _context_line(line):
1075 def _context_line(line):
1139 return (line['action'], line['line'])
1076 return (line['action'], line['line'])
1140
1077
1141
1078
1142 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1079 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1143
1080
1144
1081
1145 def _line_to_diff_line_number(line):
1082 def _line_to_diff_line_number(line):
1146 new_line_no = line['new_lineno'] or None
1083 new_line_no = line['new_lineno'] or None
1147 old_line_no = line['old_lineno'] or None
1084 old_line_no = line['old_lineno'] or None
1148 return DiffLineNumber(old=old_line_no, new=new_line_no)
1085 return DiffLineNumber(old=old_line_no, new=new_line_no)
1149
1086
1150
1087
1151 class FileNotInDiffException(Exception):
1088 class FileNotInDiffException(Exception):
1152 """
1089 """
1153 Raised when the context for a missing file is requested.
1090 Raised when the context for a missing file is requested.
1154
1091
1155 If you request the context for a line in a file which is not part of the
1092 If you request the context for a line in a file which is not part of the
1156 given diff, then this exception is raised.
1093 given diff, then this exception is raised.
1157 """
1094 """
1158
1095
1159
1096
1160 class LineNotInDiffException(Exception):
1097 class LineNotInDiffException(Exception):
1161 """
1098 """
1162 Raised when the context for a missing line is requested.
1099 Raised when the context for a missing line is requested.
1163
1100
1164 If you request the context for a line in a file and this line is not
1101 If you request the context for a line in a file and this line is not
1165 part of the given diff, then this exception is raised.
1102 part of the given diff, then this exception is raised.
1166 """
1103 """
1167
1104
1168
1105
1169 class DiffLimitExceeded(Exception):
1106 class DiffLimitExceeded(Exception):
1170 pass
1107 pass
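# A minimal standalone sketch of the sliding context match implemented
# above: walk a chunk line by line, advance through `context` on every
# consecutive match, and record a hit once the whole context has been
# consumed; `len(context) - offset` then maps the hit back to the target
# line. `find_context_offsets` and the sample data are illustrative only,
# not part of the RhodeCode API.
def find_context_offsets(chunk, context, offset):
    if offset < 0 or offset >= len(context):
        raise ValueError(
            "Only non-negative values up to the length of the context "
            "minus one are allowed.")
    hits = []
    matched = 0
    for idx, line in enumerate(chunk):
        if line == context[matched]:
            matched += 1
            if matched == len(context):
                # the match ends at `idx`; step back to the target line
                hits.append(idx + 1 - (len(context) - offset))
                matched = 0
        else:
            matched = 1 if line == context[0] else 0
    return hits

# the target line 'c' (offset 1 in the context) sits at indices 2 and 5
assert find_context_offsets(
    ['a', 'b', 'c', 'd', 'b', 'c', 'd'], ['b', 'c', 'd'], offset=1) == [2, 5]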
@@ -1,64 +1,64 b''
1 import datetime
1 import datetime
2 import decimal
2 import decimal
3 import functools
3 import functools
4
4
5 import simplejson as json
5 import simplejson as json
6
6
7 from rhodecode.lib.datelib import is_aware
7 from rhodecode.lib.datelib import is_aware
8
8
9 try:
9 try:
10 import rhodecode.translation
10 import rhodecode.translation
11 except ImportError:
11 except ImportError:
12 rhodecode = None
12 rhodecode = None
13
13
14 __all__ = ['json']
14 __all__ = ['json']
15
15
16
16
17 def _obj_dump(obj):
17 def _obj_dump(obj):
18 """
18 """
19 Custom function for dumping objects to JSON; if obj has a __json__ attribute
19 Custom function for dumping objects to JSON; if obj has a __json__ attribute
20 or method defined, it will be used for serialization
20 or method defined, it will be used for serialization
21
21
22 :param obj:
22 :param obj:
23 """
23 """
24
24
25 # See "Date Time String Format" in the ECMA-262 specification.
25 # See "Date Time String Format" in the ECMA-262 specification.
26 # some code borrowed from django 1.4
26 # some code borrowed from django 1.4
27 if isinstance(obj, set):
27 if isinstance(obj, set):
28 return list(obj)
28 return list(obj)
29 elif isinstance(obj, datetime.datetime):
29 elif isinstance(obj, datetime.datetime):
30 r = obj.isoformat()
30 r = obj.isoformat()
31 if isinstance(obj.microsecond, (int, long)):
31 if isinstance(obj.microsecond, (int, long)):
32 r = r[:23] + r[26:]
32 r = r[:23] + r[26:]
33 if r.endswith('+00:00'):
33 if r.endswith('+00:00'):
34 r = r[:-6] + 'Z'
34 r = r[:-6] + 'Z'
35 return r
35 return r
36 elif isinstance(obj, datetime.date):
36 elif isinstance(obj, datetime.date):
37 return obj.isoformat()
37 return obj.isoformat()
38 elif isinstance(obj, datetime.time):
38 elif isinstance(obj, datetime.time):
39 if is_aware(obj):
39 if is_aware(obj):
40 raise TypeError("Time-zone aware times are not JSON serializable")
40 raise TypeError("Time-zone aware times are not JSON serializable")
41 r = obj.isoformat()
41 r = obj.isoformat()
42 if isinstance(obj.microsecond, (int, long)):
42 if isinstance(obj.microsecond, (int, long)):
43 r = r[:12]
43 r = r[:12]
44 return r
44 return r
45 elif hasattr(obj, '__json__'):
45 elif hasattr(obj, '__json__'):
46 if callable(obj.__json__):
46 if callable(obj.__json__):
47 return obj.__json__()
47 return obj.__json__()
48 else:
48 else:
49 return obj.__json__
49 return obj.__json__
50 elif isinstance(obj, decimal.Decimal):
50 elif isinstance(obj, decimal.Decimal):
51 return str(obj)
51 return str(obj)
52 elif isinstance(obj, complex):
52 elif isinstance(obj, complex):
53 return [obj.real, obj.imag]
53 return [obj.real, obj.imag]
54 elif rhodecode and isinstance(obj, rhodecode.translation.LazyString):
54 elif rhodecode and isinstance(obj, rhodecode.translation._LazyString):
55 return obj.eval()
55 return obj.eval()
56 else:
56 else:
57 raise TypeError(repr(obj) + " is not JSON serializable")
57 raise TypeError(repr(obj) + " is not JSON serializable")
58
58
59
59
60 json.dumps = functools.partial(json.dumps, default=_obj_dump, use_decimal=False)
60 json.dumps = functools.partial(json.dumps, default=_obj_dump, use_decimal=False)
61 json.dump = functools.partial(json.dump, default=_obj_dump, use_decimal=False)
61 json.dump = functools.partial(json.dump, default=_obj_dump, use_decimal=False)
62
62
63 # alias for formatted json
63 # alias for formatted json
64 formatted_json = functools.partial(json.dumps, indent=4, sort_keys=True)
64 formatted_json = functools.partial(json.dumps, indent=4, sort_keys=True)
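# A standalone sketch of the same `default=` hook wired up above, using the
# stdlib json module instead of simplejson; the Money class and _fallback
# helper below are illustrative only.
import datetime as _datetime
import decimal as _decimal
import functools as _functools
import json as _std_json


class Money(object):
    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency

    def __json__(self):
        # objects opt into serialization by exposing __json__
        return {'amount': str(self.amount), 'currency': self.currency}


def _fallback(obj):
    if hasattr(obj, '__json__'):
        return obj.__json__() if callable(obj.__json__) else obj.__json__
    if isinstance(obj, (_datetime.datetime, _datetime.date)):
        return obj.isoformat()
    if isinstance(obj, _decimal.Decimal):
        return str(obj)
    raise TypeError(repr(obj) + " is not JSON serializable")


_demo_dumps = _functools.partial(
    _std_json.dumps, default=_fallback, sort_keys=True)
assert _demo_dumps(
    {'price': Money(_decimal.Decimal('9.99'), 'EUR'),
     'when': _datetime.date(2017, 1, 1)}) == (
    '{"price": {"amount": "9.99", "currency": "EUR"}, "when": "2017-01-01"}')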
@@ -1,2064 +1,2064 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions to typically be used within templates, but also
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27
27
28 import random
28 import random
29 import hashlib
29 import hashlib
30 import StringIO
30 import StringIO
31 import urllib
31 import urllib
32 import math
32 import math
33 import logging
33 import logging
34 import re
34 import re
35 import urlparse
35 import urlparse
36 import time
36 import time
37 import string
37 import string
38 import hashlib
38 import hashlib
39 from collections import OrderedDict
39 from collections import OrderedDict
40
40
41 import pygments
41 import pygments
42 import itertools
42 import itertools
43 import fnmatch
43 import fnmatch
44
44
45 from datetime import datetime
45 from datetime import datetime
46 from functools import partial
46 from functools import partial
47 from pygments.formatters.html import HtmlFormatter
47 from pygments.formatters.html import HtmlFormatter
48 from pygments import highlight as code_highlight
48 from pygments import highlight as code_highlight
49 from pygments.lexers import (
49 from pygments.lexers import (
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51
51
52 from pyramid.threadlocal import get_current_request
52 from pyramid.threadlocal import get_current_request
53
53
54 from webhelpers.html import literal, HTML, escape
54 from webhelpers.html import literal, HTML, escape
55 from webhelpers.html.tools import *
55 from webhelpers.html.tools import *
56 from webhelpers.html.builder import make_tag
56 from webhelpers.html.builder import make_tag
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 submit, text, password, textarea, title, ul, xml_declaration, radio
60 submit, text, password, textarea, title, ul, xml_declaration, radio
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 replace_whitespace, urlify, truncate, wrap_paragraphs
65 replace_whitespace, urlify, truncate, wrap_paragraphs
66 from webhelpers.date import time_ago_in_words
66 from webhelpers.date import time_ago_in_words
67 from webhelpers.paginate import Page as _Page
67 from webhelpers.paginate import Page as _Page
68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 convert_boolean_attrs, NotGiven, _make_safe_id_component
69 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 from webhelpers2.number import format_byte_size
70 from webhelpers2.number import format_byte_size
71
71
72 from rhodecode.lib.action_parser import action_parser
72 from rhodecode.lib.action_parser import action_parser
73 from rhodecode.lib.ext_json import json
73 from rhodecode.lib.ext_json import json
74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 AttributeDict, safe_int, md5, md5_safe
77 AttributeDict, safe_int, md5, md5_safe
78 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
78 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 from rhodecode.model.changeset_status import ChangesetStatusModel
82 from rhodecode.model.changeset_status import ChangesetStatusModel
83 from rhodecode.model.db import Permission, User, Repository
83 from rhodecode.model.db import Permission, User, Repository
84 from rhodecode.model.repo_group import RepoGroupModel
84 from rhodecode.model.repo_group import RepoGroupModel
85 from rhodecode.model.settings import IssueTrackerSettingsModel
85 from rhodecode.model.settings import IssueTrackerSettingsModel
86
86
87 log = logging.getLogger(__name__)
87 log = logging.getLogger(__name__)
88
88
89
89
90 DEFAULT_USER = User.DEFAULT_USER
90 DEFAULT_USER = User.DEFAULT_USER
91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92
92
93
93
94 def asset(path, ver=None, **kwargs):
94 def asset(path, ver=None, **kwargs):
95 """
95 """
96 Helper to generate a static asset file path for rhodecode assets
96 Helper to generate a static asset file path for rhodecode assets
97
97
98 eg. h.asset('images/image.png', ver='3923')
98 eg. h.asset('images/image.png', ver='3923')
99
99
100 :param path: path of asset
100 :param path: path of asset
101 :param ver: optional version query param to append as ?ver=
101 :param ver: optional version query param to append as ?ver=
102 """
102 """
103 request = get_current_request()
103 request = get_current_request()
104 query = {}
104 query = {}
105 query.update(kwargs)
105 query.update(kwargs)
106 if ver:
106 if ver:
107 query = {'ver': ver}
107 query = {'ver': ver}
108 return request.static_path(
108 return request.static_path(
109 'rhodecode:public/{}'.format(path), _query=query)
109 'rhodecode:public/{}'.format(path), _query=query)
110
110
111
111
112 default_html_escape_table = {
112 default_html_escape_table = {
113 ord('&'): u'&amp;',
113 ord('&'): u'&amp;',
114 ord('<'): u'&lt;',
114 ord('<'): u'&lt;',
115 ord('>'): u'&gt;',
115 ord('>'): u'&gt;',
116 ord('"'): u'&quot;',
116 ord('"'): u'&quot;',
117 ord("'"): u'&#39;',
117 ord("'"): u'&#39;',
118 }
118 }
119
119
120
120
121 def html_escape(text, html_escape_table=default_html_escape_table):
121 def html_escape(text, html_escape_table=default_html_escape_table):
122 """Produce entities within text."""
122 """Produce entities within text."""
123 return text.translate(html_escape_table)
123 return text.translate(html_escape_table)
124
124
125
125
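# Quick illustration of the ord()-keyed translate table consumed by
# html_escape above: (unicode/str).translate maps each code point to its
# replacement string in a single pass (the sample markup is made up).
assert html_escape(u'<a href="/x?a=1&b=2">it\'s here</a>') == (
    u'&lt;a href=&quot;/x?a=1&amp;b=2&quot;&gt;'
    u'it&#39;s here&lt;/a&gt;')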
126 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
126 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
127 """
127 """
128 Truncate string ``s`` at the first occurrence of ``sub``.
128 Truncate string ``s`` at the first occurrence of ``sub``.
129
129
130 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
130 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
131 """
131 """
132 suffix_if_chopped = suffix_if_chopped or ''
132 suffix_if_chopped = suffix_if_chopped or ''
133 pos = s.find(sub)
133 pos = s.find(sub)
134 if pos == -1:
134 if pos == -1:
135 return s
135 return s
136
136
137 if inclusive:
137 if inclusive:
138 pos += len(sub)
138 pos += len(sub)
139
139
140 chopped = s[:pos]
140 chopped = s[:pos]
141 left = s[pos:].strip()
141 left = s[pos:].strip()
142
142
143 if left and suffix_if_chopped:
143 if left and suffix_if_chopped:
144 chopped += suffix_if_chopped
144 chopped += suffix_if_chopped
145
145
146 return chopped
146 return chopped
147
147
148
148
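# Illustrative module-level sanity checks for chop_at_smart; the sample
# strings are made up, the expected values follow from the code above.
assert chop_at_smart('rhodecode uses pyramid', ' uses') == 'rhodecode'
assert chop_at_smart('rhodecode uses pyramid', ' uses', inclusive=True,
                     suffix_if_chopped='...') == 'rhodecode uses...'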
149 def shorter(text, size=20):
149 def shorter(text, size=20):
150 postfix = '...'
150 postfix = '...'
151 if len(text) > size:
151 if len(text) > size:
152 return text[:size - len(postfix)] + postfix
152 return text[:size - len(postfix)] + postfix
153 return text
153 return text
154
154
155
155
156 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
156 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
157 """
157 """
158 Reset button
158 Reset button
159 """
159 """
160 _set_input_attrs(attrs, type, name, value)
160 _set_input_attrs(attrs, type, name, value)
161 _set_id_attr(attrs, id, name)
161 _set_id_attr(attrs, id, name)
162 convert_boolean_attrs(attrs, ["disabled"])
162 convert_boolean_attrs(attrs, ["disabled"])
163 return HTML.input(**attrs)
163 return HTML.input(**attrs)
164
164
165 reset = _reset
165 reset = _reset
166 safeid = _make_safe_id_component
166 safeid = _make_safe_id_component
167
167
168
168
169 def branding(name, length=40):
169 def branding(name, length=40):
170 return truncate(name, length, indicator="")
170 return truncate(name, length, indicator="")
171
171
172
172
173 def FID(raw_id, path):
173 def FID(raw_id, path):
174 """
174 """
175 Creates a unique ID for a filenode based on a hash of its path and commit;
175 Creates a unique ID for a filenode based on a hash of its path and commit;
176 it is safe to use in URLs
176 it is safe to use in URLs
177
177
178 :param raw_id:
178 :param raw_id:
179 :param path:
179 :param path:
180 """
180 """
181
181
182 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
182 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
183
183
184
184
185 class _GetError(object):
185 class _GetError(object):
186 """Get error from form_errors, and represent it as span wrapped error
186 """Get error from form_errors, and represent it as span wrapped error
187 message
187 message
188
188
189 :param field_name: field to fetch errors for
189 :param field_name: field to fetch errors for
190 :param form_errors: form errors dict
190 :param form_errors: form errors dict
191 """
191 """
192
192
193 def __call__(self, field_name, form_errors):
193 def __call__(self, field_name, form_errors):
194 tmpl = """<span class="error_msg">%s</span>"""
194 tmpl = """<span class="error_msg">%s</span>"""
195 if form_errors and field_name in form_errors:
195 if form_errors and field_name in form_errors:
196 return literal(tmpl % form_errors.get(field_name))
196 return literal(tmpl % form_errors.get(field_name))
197
197
198 get_error = _GetError()
198 get_error = _GetError()
199
199
200
200
201 class _ToolTip(object):
201 class _ToolTip(object):
202
202
203 def __call__(self, tooltip_title, trim_at=50):
203 def __call__(self, tooltip_title, trim_at=50):
204 """
204 """
205 Helper that wraps the given text into nicely formatted,
205 Helper that wraps the given text into nicely formatted,
206 auto-wrapped tooltip text
206 auto-wrapped tooltip text
207
207
208 :param tooltip_title:
208 :param tooltip_title:
209 """
209 """
210 tooltip_title = escape(tooltip_title)
210 tooltip_title = escape(tooltip_title)
211 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
211 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
212 return tooltip_title
212 return tooltip_title
213 tooltip = _ToolTip()
213 tooltip = _ToolTip()
214
214
215
215
216 def files_breadcrumbs(repo_name, commit_id, file_path):
216 def files_breadcrumbs(repo_name, commit_id, file_path):
217 if isinstance(file_path, str):
217 if isinstance(file_path, str):
218 file_path = safe_unicode(file_path)
218 file_path = safe_unicode(file_path)
219
219
220 # TODO: johbo: Is this always a url like path, or is this operating
220 # TODO: johbo: Is this always a url like path, or is this operating
221 # system dependent?
221 # system dependent?
222 path_segments = file_path.split('/')
222 path_segments = file_path.split('/')
223
223
224 repo_name_html = escape(repo_name)
224 repo_name_html = escape(repo_name)
225 if len(path_segments) == 1 and path_segments[0] == '':
225 if len(path_segments) == 1 and path_segments[0] == '':
226 url_segments = [repo_name_html]
226 url_segments = [repo_name_html]
227 else:
227 else:
228 url_segments = [
228 url_segments = [
229 link_to(
229 link_to(
230 repo_name_html,
230 repo_name_html,
231 route_path(
231 route_path(
232 'repo_files',
232 'repo_files',
233 repo_name=repo_name,
233 repo_name=repo_name,
234 commit_id=commit_id,
234 commit_id=commit_id,
235 f_path=''),
235 f_path=''),
236 class_='pjax-link')]
236 class_='pjax-link')]
237
237
238 last_cnt = len(path_segments) - 1
238 last_cnt = len(path_segments) - 1
239 for cnt, segment in enumerate(path_segments):
239 for cnt, segment in enumerate(path_segments):
240 if not segment:
240 if not segment:
241 continue
241 continue
242 segment_html = escape(segment)
242 segment_html = escape(segment)
243
243
244 if cnt != last_cnt:
244 if cnt != last_cnt:
245 url_segments.append(
245 url_segments.append(
246 link_to(
246 link_to(
247 segment_html,
247 segment_html,
248 route_path(
248 route_path(
249 'repo_files',
249 'repo_files',
250 repo_name=repo_name,
250 repo_name=repo_name,
251 commit_id=commit_id,
251 commit_id=commit_id,
252 f_path='/'.join(path_segments[:cnt + 1])),
252 f_path='/'.join(path_segments[:cnt + 1])),
253 class_='pjax-link'))
253 class_='pjax-link'))
254 else:
254 else:
255 url_segments.append(segment_html)
255 url_segments.append(segment_html)
256
256
257 return literal('/'.join(url_segments))
257 return literal('/'.join(url_segments))
258
258
259
259
260 class CodeHtmlFormatter(HtmlFormatter):
260 class CodeHtmlFormatter(HtmlFormatter):
261 """
261 """
262 My code HTML formatter for source code
262 My code HTML formatter for source code
263 """
263 """
264
264
265 def wrap(self, source, outfile):
265 def wrap(self, source, outfile):
266 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
266 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
267
267
268 def _wrap_code(self, source):
268 def _wrap_code(self, source):
269 for cnt, it in enumerate(source):
269 for cnt, it in enumerate(source):
270 i, t = it
270 i, t = it
271 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
271 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
272 yield i, t
272 yield i, t
273
273
274 def _wrap_tablelinenos(self, inner):
274 def _wrap_tablelinenos(self, inner):
275 dummyoutfile = StringIO.StringIO()
275 dummyoutfile = StringIO.StringIO()
276 lncount = 0
276 lncount = 0
277 for t, line in inner:
277 for t, line in inner:
278 if t:
278 if t:
279 lncount += 1
279 lncount += 1
280 dummyoutfile.write(line)
280 dummyoutfile.write(line)
281
281
282 fl = self.linenostart
282 fl = self.linenostart
283 mw = len(str(lncount + fl - 1))
283 mw = len(str(lncount + fl - 1))
284 sp = self.linenospecial
284 sp = self.linenospecial
285 st = self.linenostep
285 st = self.linenostep
286 la = self.lineanchors
286 la = self.lineanchors
287 aln = self.anchorlinenos
287 aln = self.anchorlinenos
288 nocls = self.noclasses
288 nocls = self.noclasses
289 if sp:
289 if sp:
290 lines = []
290 lines = []
291
291
292 for i in range(fl, fl + lncount):
292 for i in range(fl, fl + lncount):
293 if i % st == 0:
293 if i % st == 0:
294 if i % sp == 0:
294 if i % sp == 0:
295 if aln:
295 if aln:
296 lines.append('<a href="#%s%d" class="special">%*d</a>' %
296 lines.append('<a href="#%s%d" class="special">%*d</a>' %
297 (la, i, mw, i))
297 (la, i, mw, i))
298 else:
298 else:
299 lines.append('<span class="special">%*d</span>' % (mw, i))
299 lines.append('<span class="special">%*d</span>' % (mw, i))
300 else:
300 else:
301 if aln:
301 if aln:
302 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
302 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
303 else:
303 else:
304 lines.append('%*d' % (mw, i))
304 lines.append('%*d' % (mw, i))
305 else:
305 else:
306 lines.append('')
306 lines.append('')
307 ls = '\n'.join(lines)
307 ls = '\n'.join(lines)
308 else:
308 else:
309 lines = []
309 lines = []
310 for i in range(fl, fl + lncount):
310 for i in range(fl, fl + lncount):
311 if i % st == 0:
311 if i % st == 0:
312 if aln:
312 if aln:
313 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
313 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
314 else:
314 else:
315 lines.append('%*d' % (mw, i))
315 lines.append('%*d' % (mw, i))
316 else:
316 else:
317 lines.append('')
317 lines.append('')
318 ls = '\n'.join(lines)
318 ls = '\n'.join(lines)
319
319
320 # in case you wonder about the seemingly redundant <div> here: since the
320 # in case you wonder about the seemingly redundant <div> here: since the
321 # content in the other cell also is wrapped in a div, some browsers in
321 # content in the other cell also is wrapped in a div, some browsers in
322 # some configurations seem to mess up the formatting...
322 # some configurations seem to mess up the formatting...
323 if nocls:
323 if nocls:
324 yield 0, ('<table class="%stable">' % self.cssclass +
324 yield 0, ('<table class="%stable">' % self.cssclass +
325 '<tr><td><div class="linenodiv" '
325 '<tr><td><div class="linenodiv" '
326 'style="background-color: #f0f0f0; padding-right: 10px">'
326 'style="background-color: #f0f0f0; padding-right: 10px">'
327 '<pre style="line-height: 125%">' +
327 '<pre style="line-height: 125%">' +
328 ls + '</pre></div></td><td id="hlcode" class="code">')
328 ls + '</pre></div></td><td id="hlcode" class="code">')
329 else:
329 else:
330 yield 0, ('<table class="%stable">' % self.cssclass +
330 yield 0, ('<table class="%stable">' % self.cssclass +
331 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
331 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
332 ls + '</pre></div></td><td id="hlcode" class="code">')
332 ls + '</pre></div></td><td id="hlcode" class="code">')
333 yield 0, dummyoutfile.getvalue()
333 yield 0, dummyoutfile.getvalue()
334 yield 0, '</td></tr></table>'
334 yield 0, '</td></tr></table>'
335
335
336
336
337 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
337 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
338 def __init__(self, **kw):
338 def __init__(self, **kw):
339 # only show these line numbers if set
339 # only show these line numbers if set
340 self.only_lines = kw.pop('only_line_numbers', [])
340 self.only_lines = kw.pop('only_line_numbers', [])
341 self.query_terms = kw.pop('query_terms', [])
341 self.query_terms = kw.pop('query_terms', [])
342 self.max_lines = kw.pop('max_lines', 5)
342 self.max_lines = kw.pop('max_lines', 5)
343 self.line_context = kw.pop('line_context', 3)
343 self.line_context = kw.pop('line_context', 3)
344 self.url = kw.pop('url', None)
344 self.url = kw.pop('url', None)
345
345
346 super(CodeHtmlFormatter, self).__init__(**kw)
346 super(CodeHtmlFormatter, self).__init__(**kw)
347
347
348 def _wrap_code(self, source):
348 def _wrap_code(self, source):
349 for cnt, it in enumerate(source):
349 for cnt, it in enumerate(source):
350 i, t = it
350 i, t = it
351 t = '<pre>%s</pre>' % t
351 t = '<pre>%s</pre>' % t
352 yield i, t
352 yield i, t
353
353
354 def _wrap_tablelinenos(self, inner):
354 def _wrap_tablelinenos(self, inner):
355 yield 0, '<table class="code-highlight %stable">' % self.cssclass
355 yield 0, '<table class="code-highlight %stable">' % self.cssclass
356
356
357 last_shown_line_number = 0
357 last_shown_line_number = 0
358 current_line_number = 1
358 current_line_number = 1
359
359
360 for t, line in inner:
360 for t, line in inner:
361 if not t:
361 if not t:
362 yield t, line
362 yield t, line
363 continue
363 continue
364
364
365 if current_line_number in self.only_lines:
365 if current_line_number in self.only_lines:
366 if last_shown_line_number + 1 != current_line_number:
366 if last_shown_line_number + 1 != current_line_number:
367 yield 0, '<tr>'
367 yield 0, '<tr>'
368 yield 0, '<td class="line">...</td>'
368 yield 0, '<td class="line">...</td>'
369 yield 0, '<td id="hlcode" class="code"></td>'
369 yield 0, '<td id="hlcode" class="code"></td>'
370 yield 0, '</tr>'
370 yield 0, '</tr>'
371
371
372 yield 0, '<tr>'
372 yield 0, '<tr>'
373 if self.url:
373 if self.url:
374 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
374 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
375 self.url, current_line_number, current_line_number)
375 self.url, current_line_number, current_line_number)
376 else:
376 else:
377 yield 0, '<td class="line"><a href="">%i</a></td>' % (
377 yield 0, '<td class="line"><a href="">%i</a></td>' % (
378 current_line_number)
378 current_line_number)
379 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
379 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
380 yield 0, '</tr>'
380 yield 0, '</tr>'
381
381
382 last_shown_line_number = current_line_number
382 last_shown_line_number = current_line_number
383
383
384 current_line_number += 1
384 current_line_number += 1
385
385
386
386
387 yield 0, '</table>'
387 yield 0, '</table>'
388
388
389
389
390 def extract_phrases(text_query):
390 def extract_phrases(text_query):
391 """
391 """
392 Extracts phrases from a search term string, making sure phrases
392 Extracts phrases from a search term string, making sure phrases
393 contained in double quotes are kept together, and discarding empty values
393 contained in double quotes are kept together, and discarding empty values
394 or whitespace-only values, eg.
394 or whitespace-only values, eg.
395
395
396 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
396 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
397
397
398 """
398 """
399
399
400 in_phrase = False
400 in_phrase = False
401 buf = ''
401 buf = ''
402 phrases = []
402 phrases = []
403 for char in text_query:
403 for char in text_query:
404 if in_phrase:
404 if in_phrase:
405 if char == '"': # end phrase
405 if char == '"': # end phrase
406 phrases.append(buf)
406 phrases.append(buf)
407 buf = ''
407 buf = ''
408 in_phrase = False
408 in_phrase = False
409 continue
409 continue
410 else:
410 else:
411 buf += char
411 buf += char
412 continue
412 continue
413 else:
413 else:
414 if char == '"': # start phrase
414 if char == '"': # start phrase
415 in_phrase = True
415 in_phrase = True
416 phrases.append(buf)
416 phrases.append(buf)
417 buf = ''
417 buf = ''
418 continue
418 continue
419 elif char == ' ':
419 elif char == ' ':
420 phrases.append(buf)
420 phrases.append(buf)
421 buf = ''
421 buf = ''
422 continue
422 continue
423 else:
423 else:
424 buf += char
424 buf += char
425
425
426 phrases.append(buf)
426 phrases.append(buf)
427 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
427 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
428 return phrases
428 return phrases
429
429
430
430
431 def get_matching_offsets(text, phrases):
431 def get_matching_offsets(text, phrases):
432 """
432 """
433 Returns a list of string offsets in `text` that the list of `phrases` match
433 Returns a list of string offsets in `text` that the list of `phrases` match
434
434
435 >>> get_matching_offsets('some text here', ['some', 'here'])
435 >>> get_matching_offsets('some text here', ['some', 'here'])
436 [(0, 4), (10, 14)]
436 [(0, 4), (10, 14)]
437
437
438 """
438 """
439 offsets = []
439 offsets = []
440 for phrase in phrases:
440 for phrase in phrases:
441 for match in re.finditer(phrase, text):
441 for match in re.finditer(phrase, text):
442 offsets.append((match.start(), match.end()))
442 offsets.append((match.start(), match.end()))
443
443
444 return offsets
444 return offsets
445
445
446
446
447 def normalize_text_for_matching(x):
447 def normalize_text_for_matching(x):
448 """
448 """
449 Replaces all non-alphanumeric characters with spaces and lower-cases the string,
449 Replaces all non-alphanumeric characters with spaces and lower-cases the string,
450 useful for comparing two text strings without punctuation
450 useful for comparing two text strings without punctuation
451 """
451 """
452 return re.sub(r'[^\w]', ' ', x.lower())
452 return re.sub(r'[^\w]', ' ', x.lower())
453
453
454
454
455 def get_matching_line_offsets(lines, terms):
455 def get_matching_line_offsets(lines, terms):
456 """ Return a set of `lines` indices (starting from 1) matching a
456 """ Return a set of `lines` indices (starting from 1) matching a
457 text search query, along with `context` lines above/below matching lines
457 text search query, along with `context` lines above/below matching lines
458
458
459 :param lines: list of strings representing lines
459 :param lines: list of strings representing lines
460 :param terms: search term string to match in lines eg. 'some text'
460 :param terms: search term string to match in lines eg. 'some text'
461 :param context: number of lines above/below a matching line to add to result
461 :param context: number of lines above/below a matching line to add to result
462 :param max_lines: cut off for lines of interest
462 :param max_lines: cut off for lines of interest
463 eg.
463 eg.
464
464
465 text = '''
465 text = '''
466 words words words
466 words words words
467 words words words
467 words words words
468 some text some
468 some text some
469 words words words
469 words words words
470 words words words
470 words words words
471 text here what
471 text here what
472 '''
472 '''
473 get_matching_line_offsets(text, 'text', context=1)
473 get_matching_line_offsets(text, 'text', context=1)
474 {3: [(5, 9)], 6: [(0, 4)]}
474 {3: [(5, 9)], 6: [(0, 4)]}
475
475
476 """
476 """
477 matching_lines = {}
477 matching_lines = {}
478 phrases = [normalize_text_for_matching(phrase)
478 phrases = [normalize_text_for_matching(phrase)
479 for phrase in extract_phrases(terms)]
479 for phrase in extract_phrases(terms)]
480
480
481 for line_index, line in enumerate(lines, start=1):
481 for line_index, line in enumerate(lines, start=1):
482 match_offsets = get_matching_offsets(
482 match_offsets = get_matching_offsets(
483 normalize_text_for_matching(line), phrases)
483 normalize_text_for_matching(line), phrases)
484 if match_offsets:
484 if match_offsets:
485 matching_lines[line_index] = match_offsets
485 matching_lines[line_index] = match_offsets
486
486
487 return matching_lines
487 return matching_lines
488
488
489
489
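# Putting the search helpers above together (illustrative sanity checks,
# the sample query and lines are made up): phrases come from the raw query,
# each line is normalized, and the result maps 1-based line numbers to
# (start, end) offsets of every hit.
assert extract_phrases('some "text here"') == ['some', 'text here']
assert get_matching_line_offsets(
    ['words words words', 'some text some', 'text here what'],
    'some "text here"') == {2: [(0, 4), (10, 14)], 3: [(0, 9)]}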
490 def hsv_to_rgb(h, s, v):
490 def hsv_to_rgb(h, s, v):
491 """ Convert hsv color values to rgb """
491 """ Convert hsv color values to rgb """
492
492
493 if s == 0.0:
493 if s == 0.0:
494 return v, v, v
494 return v, v, v
495 i = int(h * 6.0) # XXX assume int() truncates!
495 i = int(h * 6.0) # XXX assume int() truncates!
496 f = (h * 6.0) - i
496 f = (h * 6.0) - i
497 p = v * (1.0 - s)
497 p = v * (1.0 - s)
498 q = v * (1.0 - s * f)
498 q = v * (1.0 - s * f)
499 t = v * (1.0 - s * (1.0 - f))
499 t = v * (1.0 - s * (1.0 - f))
500 i = i % 6
500 i = i % 6
501 if i == 0:
501 if i == 0:
502 return v, t, p
502 return v, t, p
503 if i == 1:
503 if i == 1:
504 return q, v, p
504 return q, v, p
505 if i == 2:
505 if i == 2:
506 return p, v, t
506 return p, v, t
507 if i == 3:
507 if i == 3:
508 return p, q, v
508 return p, q, v
509 if i == 4:
509 if i == 4:
510 return t, p, v
510 return t, p, v
511 if i == 5:
511 if i == 5:
512 return v, p, q
512 return v, p, q
513
513
514
514
515 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
515 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
516 """
516 """
517 Generator for getting n evenly distributed colors using
517 Generator for getting n evenly distributed colors using
518 HSV color space and the golden ratio. It always returns the same order of colors
518 HSV color space and the golden ratio. It always returns the same order of colors
519
519
520 :param n: number of colors to generate
520 :param n: number of colors to generate
521 :param saturation: saturation of returned colors
521 :param saturation: saturation of returned colors
522 :param lightness: lightness of returned colors
522 :param lightness: lightness of returned colors
523 :returns: RGB tuple
523 :returns: RGB tuple
524 """
524 """
525
525
526 golden_ratio = 0.618033988749895
526 golden_ratio = 0.618033988749895
527 h = 0.22717784590367374
527 h = 0.22717784590367374
528
528
529 for _ in xrange(n):
529 for _ in xrange(n):
530 h += golden_ratio
530 h += golden_ratio
531 h %= 1
531 h %= 1
532 HSV_tuple = [h, saturation, lightness]
532 HSV_tuple = [h, saturation, lightness]
533 RGB_tuple = hsv_to_rgb(*HSV_tuple)
533 RGB_tuple = hsv_to_rgb(*HSV_tuple)
534 yield map(lambda x: str(int(x * 256)), RGB_tuple)
534 yield map(lambda x: str(int(x * 256)), RGB_tuple)
535
535
536
536
537 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
537 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
538 """
538 """
539 Returns a function which when called with an argument returns a unique
539 Returns a function which when called with an argument returns a unique
540 color for that argument, eg.
540 color for that argument, eg.
541
541
542 :param n: number of colors to generate
542 :param n: number of colors to generate
543 :param saturation: saturation of returned colors
543 :param saturation: saturation of returned colors
544 :param lightness: lightness of returned colors
544 :param lightness: lightness of returned colors
545 :returns: css RGB string
545 :returns: css RGB string
546
546
547 >>> color_hash = color_hasher()
547 >>> color_hash = color_hasher()
548 >>> color_hash('hello')
548 >>> color_hash('hello')
549 'rgb(34, 12, 59)'
549 'rgb(34, 12, 59)'
550 >>> color_hash('hello')
550 >>> color_hash('hello')
551 'rgb(34, 12, 59)'
551 'rgb(34, 12, 59)'
552 >>> color_hash('other')
552 >>> color_hash('other')
553 'rgb(90, 224, 159)'
553 'rgb(90, 224, 159)'
554 """
554 """
555
555
556 color_dict = {}
556 color_dict = {}
557 cgenerator = unique_color_generator(
557 cgenerator = unique_color_generator(
558 saturation=saturation, lightness=lightness)
558 saturation=saturation, lightness=lightness)
559
559
560 def get_color_string(thing):
560 def get_color_string(thing):
561 if thing in color_dict:
561 if thing in color_dict:
562 col = color_dict[thing]
562 col = color_dict[thing]
563 else:
563 else:
564 col = color_dict[thing] = cgenerator.next()
564 col = color_dict[thing] = cgenerator.next()
565 return "rgb(%s)" % (', '.join(col))
565 return "rgb(%s)" % (', '.join(col))
566
566
567 return get_color_string
567 return get_color_string
568
568
569
569
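# A standalone sketch of the golden-ratio hue stepping used by
# unique_color_generator/color_hasher above, here with the stdlib colorsys
# module instead of the local hsv_to_rgb (golden_colors is illustrative
# only, not part of this module's API).
import colorsys

def golden_colors(n, saturation=0.10, lightness=0.95):
    golden_ratio = 0.618033988749895
    h = 0.22717784590367374
    for _ in range(n):
        h = (h + golden_ratio) % 1
        r, g, b = colorsys.hsv_to_rgb(h, saturation, lightness)
        yield 'rgb(%d, %d, %d)' % (int(r * 256), int(g * 256), int(b * 256))

# deterministic: the same sequence of CSS rgb() strings on every run
assert list(golden_colors(3)) == list(golden_colors(3))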
570 def get_lexer_safe(mimetype=None, filepath=None):
570 def get_lexer_safe(mimetype=None, filepath=None):
571 """
571 """
572 Tries to return a relevant pygments lexer using mimetype/filepath name,
572 Tries to return a relevant pygments lexer using mimetype/filepath name,
573 defaulting to plain text if none could be found
573 defaulting to plain text if none could be found
574 """
574 """
575 lexer = None
575 lexer = None
576 try:
576 try:
577 if mimetype:
577 if mimetype:
578 lexer = get_lexer_for_mimetype(mimetype)
578 lexer = get_lexer_for_mimetype(mimetype)
579 if not lexer:
579 if not lexer:
580 lexer = get_lexer_for_filename(filepath)
580 lexer = get_lexer_for_filename(filepath)
581 except pygments.util.ClassNotFound:
581 except pygments.util.ClassNotFound:
582 pass
582 pass
583
583
584 if not lexer:
584 if not lexer:
585 lexer = get_lexer_by_name('text')
585 lexer = get_lexer_by_name('text')
586
586
587 return lexer
587 return lexer
588
588
589
589
590 def get_lexer_for_filenode(filenode):
590 def get_lexer_for_filenode(filenode):
591 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
591 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
592 return lexer
592 return lexer
593
593
594
594
595 def pygmentize(filenode, **kwargs):
595 def pygmentize(filenode, **kwargs):
596 """
596 """
597 pygmentize function using pygments
597 pygmentize function using pygments
598
598
599 :param filenode:
599 :param filenode:
600 """
600 """
601 lexer = get_lexer_for_filenode(filenode)
601 lexer = get_lexer_for_filenode(filenode)
602 return literal(code_highlight(filenode.content, lexer,
602 return literal(code_highlight(filenode.content, lexer,
603 CodeHtmlFormatter(**kwargs)))
603 CodeHtmlFormatter(**kwargs)))
604
604
605
605
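# Standalone illustration of the lexer fallback implemented by
# get_lexer_safe/pygmentize above, reusing the pygments imports from the
# top of this module (the file name and code snippet are made up).
try:
    _demo_lexer = get_lexer_for_filename('example.py')
except pygments.util.ClassNotFound:
    _demo_lexer = get_lexer_by_name('text')  # plain-text fallback
_demo_html = code_highlight(
    'print("hello")\n', _demo_lexer, HtmlFormatter(linenos='table'))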
606 def is_following_repo(repo_name, user_id):
606 def is_following_repo(repo_name, user_id):
607 from rhodecode.model.scm import ScmModel
607 from rhodecode.model.scm import ScmModel
608 return ScmModel().is_following_repo(repo_name, user_id)
608 return ScmModel().is_following_repo(repo_name, user_id)
609
609
610
610
611 class _Message(object):
611 class _Message(object):
612 """A message returned by ``Flash.pop_messages()``.
612 """A message returned by ``Flash.pop_messages()``.
613
613
614 Converting the message to a string returns the message text. Instances
614 Converting the message to a string returns the message text. Instances
615 also have the following attributes:
615 also have the following attributes:
616
616
617 * ``message``: the message text.
617 * ``message``: the message text.
618 * ``category``: the category specified when the message was created.
618 * ``category``: the category specified when the message was created.
619 """
619 """
620
620
621 def __init__(self, category, message):
621 def __init__(self, category, message):
622 self.category = category
622 self.category = category
623 self.message = message
623 self.message = message
624
624
625 def __str__(self):
625 def __str__(self):
626 return self.message
626 return self.message
627
627
628 __unicode__ = __str__
628 __unicode__ = __str__
629
629
630 def __html__(self):
630 def __html__(self):
631 return escape(safe_unicode(self.message))
631 return escape(safe_unicode(self.message))
632
632
633
633
634 class Flash(object):
634 class Flash(object):
635 # List of allowed categories. If None, allow any category.
635 # List of allowed categories. If None, allow any category.
636 categories = ["warning", "notice", "error", "success"]
636 categories = ["warning", "notice", "error", "success"]
637
637
638 # Default category if none is specified.
638 # Default category if none is specified.
639 default_category = "notice"
639 default_category = "notice"
640
640
641 def __init__(self, session_key="flash", categories=None,
641 def __init__(self, session_key="flash", categories=None,
642 default_category=None):
642 default_category=None):
643 """
643 """
644 Instantiate a ``Flash`` object.
644 Instantiate a ``Flash`` object.
645
645
646 ``session_key`` is the key to save the messages under in the user's
646 ``session_key`` is the key to save the messages under in the user's
647 session.
647 session.
648
648
649 ``categories`` is an optional list which overrides the default list
649 ``categories`` is an optional list which overrides the default list
650 of categories.
650 of categories.
651
651
652 ``default_category`` overrides the default category used for messages
652 ``default_category`` overrides the default category used for messages
653 when none is specified.
653 when none is specified.
654 """
654 """
655 self.session_key = session_key
655 self.session_key = session_key
656 if categories is not None:
656 if categories is not None:
657 self.categories = categories
657 self.categories = categories
658 if default_category is not None:
658 if default_category is not None:
659 self.default_category = default_category
659 self.default_category = default_category
660 if self.categories and self.default_category not in self.categories:
660 if self.categories and self.default_category not in self.categories:
661 raise ValueError(
661 raise ValueError(
662 "unrecognized default category %r" % (self.default_category,))
662 "unrecognized default category %r" % (self.default_category,))
663
663
664 def pop_messages(self, session=None, request=None):
664 def pop_messages(self, session=None, request=None):
665 """
665 """
666 Return all accumulated messages and delete them from the session.
666 Return all accumulated messages and delete them from the session.
667
667
668 The return value is a list of ``Message`` objects.
668 The return value is a list of ``Message`` objects.
669 """
669 """
670 messages = []
670 messages = []
671
671
672 if not session:
672 if not session:
673 if not request:
673 if not request:
674 request = get_current_request()
674 request = get_current_request()
675 session = request.session
675 session = request.session
676
676
677 # Pop the 'old' pylons flash messages. They are tuples of the form
677 # Pop the 'old' pylons flash messages. They are tuples of the form
678 # (category, message)
678 # (category, message)
679 for cat, msg in session.pop(self.session_key, []):
679 for cat, msg in session.pop(self.session_key, []):
680 messages.append(_Message(cat, msg))
680 messages.append(_Message(cat, msg))
681
681
682 # Pop the 'new' pyramid flash messages for each category as list
682 # Pop the 'new' pyramid flash messages for each category as list
683 # of strings.
683 # of strings.
684 for cat in self.categories:
684 for cat in self.categories:
685 for msg in session.pop_flash(queue=cat):
685 for msg in session.pop_flash(queue=cat):
686 messages.append(_Message(cat, msg))
686 messages.append(_Message(cat, msg))
687 # Map messages from the default queue to the 'notice' category.
687 # Map messages from the default queue to the 'notice' category.
688 for msg in session.pop_flash():
688 for msg in session.pop_flash():
689 messages.append(_Message('notice', msg))
689 messages.append(_Message('notice', msg))
690
690
691 session.save()
691 session.save()
692 return messages
692 return messages
693
693
694 def json_alerts(self, session=None, request=None):
694 def json_alerts(self, session=None, request=None):
695 payloads = []
695 payloads = []
696 messages = flash.pop_messages(session=session, request=request)
696 messages = flash.pop_messages(session=session, request=request)
697 if messages:
697 if messages:
698 for message in messages:
698 for message in messages:
699 subdata = {}
699 subdata = {}
700 if hasattr(message.message, 'rsplit'):
700 if hasattr(message.message, 'rsplit'):
701 flash_data = message.message.rsplit('|DELIM|', 1)
701 flash_data = message.message.rsplit('|DELIM|', 1)
702 org_message = flash_data[0]
702 org_message = flash_data[0]
703 if len(flash_data) > 1:
703 if len(flash_data) > 1:
704 subdata = json.loads(flash_data[1])
704 subdata = json.loads(flash_data[1])
705 else:
705 else:
706 org_message = message.message
706 org_message = message.message
707 payloads.append({
707 payloads.append({
708 'message': {
708 'message': {
709 'message': u'{}'.format(org_message),
709 'message': u'{}'.format(org_message),
710 'level': message.category,
710 'level': message.category,
711 'force': True,
711 'force': True,
712 'subdata': subdata
712 'subdata': subdata
713 }
713 }
714 })
714 })
715 return json.dumps(payloads)
715 return json.dumps(payloads)
716
716
717 def __call__(self, message, category=None, ignore_duplicate=False,
717 def __call__(self, message, category=None, ignore_duplicate=False,
718 session=None, request=None):
718 session=None, request=None):
719
719
720 if not session:
720 if not session:
721 if not request:
721 if not request:
722 request = get_current_request()
722 request = get_current_request()
723 session = request.session
723 session = request.session
724
724
725 session.flash(
725 session.flash(
726 message, queue=category, allow_duplicate=not ignore_duplicate)
726 message, queue=category, allow_duplicate=not ignore_duplicate)
727
727
728
728
729 flash = Flash()
729 flash = Flash()
730
730
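# Sketch of the '|DELIM|' convention consumed by Flash.json_alerts above:
# a flash message may carry an optional JSON payload after the delimiter,
# which ends up in the alert's 'subdata' field (the sample values are
# made up).
_raw_flash = u'Repository created|DELIM|{"repo_name": "my-repo"}'
_flash_parts = _raw_flash.rsplit('|DELIM|', 1)
_flash_text = _flash_parts[0]
_flash_subdata = json.loads(_flash_parts[1]) if len(_flash_parts) > 1 else {}
assert _flash_text == u'Repository created'
assert _flash_subdata == {'repo_name': 'my-repo'}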
731 #==============================================================================
731 #==============================================================================
732 # SCM FILTERS available via h.
732 # SCM FILTERS available via h.
733 #==============================================================================
733 #==============================================================================
734 from rhodecode.lib.vcs.utils import author_name, author_email
734 from rhodecode.lib.vcs.utils import author_name, author_email
735 from rhodecode.lib.utils2 import credentials_filter, age as _age
735 from rhodecode.lib.utils2 import credentials_filter, age as _age
736 from rhodecode.model.db import User, ChangesetStatus
736 from rhodecode.model.db import User, ChangesetStatus
737
737
738 age = _age
738 age = _age
739 capitalize = lambda x: x.capitalize()
739 capitalize = lambda x: x.capitalize()
740 email = author_email
740 email = author_email
741 short_id = lambda x: x[:12]
741 short_id = lambda x: x[:12]
742 hide_credentials = lambda x: ''.join(credentials_filter(x))
742 hide_credentials = lambda x: ''.join(credentials_filter(x))
743
743
744
744
745 def age_component(datetime_iso, value=None, time_is_local=False):
745 def age_component(datetime_iso, value=None, time_is_local=False):
746 title = value or format_date(datetime_iso)
746 title = value or format_date(datetime_iso)
747 tzinfo = '+00:00'
747 tzinfo = '+00:00'
748
748
749 # detect if we have timezone info, otherwise add it
749 # detect if we have timezone info, otherwise add it
750 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
750 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
751 if time_is_local:
751 if time_is_local:
752 tzinfo = time.strftime("+%H:%M",
752 tzinfo = time.strftime("+%H:%M",
753 time.gmtime(
753 time.gmtime(
754 (datetime.now() - datetime.utcnow()).seconds + 1
754 (datetime.now() - datetime.utcnow()).seconds + 1
755 )
755 )
756 )
756 )
757
757
758 return literal(
758 return literal(
759 '<time class="timeago tooltip" '
759 '<time class="timeago tooltip" '
760 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
760 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
761 datetime_iso, title, tzinfo))
761 datetime_iso, title, tzinfo))
762
762
763
763
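# Sketch of the local-offset handling above: the difference between
# datetime.now() and datetime.utcnow() approximates the local UTC offset,
# which is rendered as '+HH:MM' for the <time> datetime attribute
# (illustrative only; e.g. '+02:00' on a host two hours ahead of UTC).
_local_offset_seconds = (datetime.now() - datetime.utcnow()).seconds + 1
_local_tzinfo = time.strftime("+%H:%M", time.gmtime(_local_offset_seconds))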
764 def _shorten_commit_id(commit_id):
764 def _shorten_commit_id(commit_id):
765 from rhodecode import CONFIG
765 from rhodecode import CONFIG
766 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
766 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
767 return commit_id[:def_len]
767 return commit_id[:def_len]
768
768
769
769
770 def show_id(commit):
770 def show_id(commit):
771 """
771 """
772 Configurable function that shows the ID;
772 Configurable function that shows the ID;
773 by default it's r123:fffeeefffeee
773 by default it's r123:fffeeefffeee
774
774
775 :param commit: commit instance
775 :param commit: commit instance
776 """
776 """
777 from rhodecode import CONFIG
777 from rhodecode import CONFIG
778 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
778 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
779
779
780 raw_id = _shorten_commit_id(commit.raw_id)
780 raw_id = _shorten_commit_id(commit.raw_id)
781 if show_idx:
781 if show_idx:
782 return 'r%s:%s' % (commit.idx, raw_id)
782 return 'r%s:%s' % (commit.idx, raw_id)
783 else:
783 else:
784 return '%s' % (raw_id, )
784 return '%s' % (raw_id, )
785
785
786
786
787 def format_date(date):
787 def format_date(date):
788 """
788 """
789 use a standardized formatting for dates used in RhodeCode
789 use a standardized formatting for dates used in RhodeCode
790
790
791 :param date: date/datetime object
791 :param date: date/datetime object
792 :return: formatted date
792 :return: formatted date
793 """
793 """
794
794
795 if date:
795 if date:
796 _fmt = "%a, %d %b %Y %H:%M:%S"
796 _fmt = "%a, %d %b %Y %H:%M:%S"
797 return safe_unicode(date.strftime(_fmt))
797 return safe_unicode(date.strftime(_fmt))
798
798
799 return u""
799 return u""
800
800
801
801
802 class _RepoChecker(object):
802 class _RepoChecker(object):
803
803
804 def __init__(self, backend_alias):
804 def __init__(self, backend_alias):
805 self._backend_alias = backend_alias
805 self._backend_alias = backend_alias
806
806
807 def __call__(self, repository):
807 def __call__(self, repository):
808 if hasattr(repository, 'alias'):
808 if hasattr(repository, 'alias'):
809 _type = repository.alias
809 _type = repository.alias
810 elif hasattr(repository, 'repo_type'):
810 elif hasattr(repository, 'repo_type'):
811 _type = repository.repo_type
811 _type = repository.repo_type
812 else:
812 else:
813 _type = repository
813 _type = repository
814 return _type == self._backend_alias
814 return _type == self._backend_alias
815
815
816 is_git = _RepoChecker('git')
816 is_git = _RepoChecker('git')
817 is_hg = _RepoChecker('hg')
817 is_hg = _RepoChecker('hg')
818 is_svn = _RepoChecker('svn')
818 is_svn = _RepoChecker('svn')
819
819
820
820
821 def get_repo_type_by_name(repo_name):
821 def get_repo_type_by_name(repo_name):
822 repo = Repository.get_by_repo_name(repo_name)
822 repo = Repository.get_by_repo_name(repo_name)
823 return repo.repo_type
823 return repo.repo_type
824
824
825
825
826 def is_svn_without_proxy(repository):
826 def is_svn_without_proxy(repository):
827 if is_svn(repository):
827 if is_svn(repository):
828 from rhodecode.model.settings import VcsSettingsModel
828 from rhodecode.model.settings import VcsSettingsModel
829 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
829 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
830 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
830 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
831 return False
831 return False
832
832
833
833
834 def discover_user(author):
834 def discover_user(author):
835 """
835 """
836 Tries to discover a RhodeCode User based on the author string. The author string
836 Tries to discover a RhodeCode User based on the author string. The author string
837 is typically `FirstName LastName <email@address.com>`
837 is typically `FirstName LastName <email@address.com>`
838 """
838 """
839
839
840 # if author is already an instance use it for extraction
840 # if author is already an instance use it for extraction
841 if isinstance(author, User):
841 if isinstance(author, User):
842 return author
842 return author
843
843
844 # A valid email in the passed attribute; see if that user is in the system
844 # A valid email in the passed attribute; see if that user is in the system
845 _email = author_email(author)
845 _email = author_email(author)
846 if _email != '':
846 if _email != '':
847 user = User.get_by_email(_email, case_insensitive=True, cache=True)
847 user = User.get_by_email(_email, case_insensitive=True, cache=True)
848 if user is not None:
848 if user is not None:
849 return user
849 return user
850
850
851 # Maybe it's a username? Try to extract it and fetch the user by username.
851 # Maybe it's a username? Try to extract it and fetch the user by username.
852 _author = author_name(author)
852 _author = author_name(author)
853 user = User.get_by_username(_author, case_insensitive=True, cache=True)
853 user = User.get_by_username(_author, case_insensitive=True, cache=True)
854 if user is not None:
854 if user is not None:
855 return user
855 return user
856
856
857 return None
857 return None
858
858
859
859
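# A standalone sketch of pulling the e-mail out of a typical author string
# of the form 'FirstName LastName <email@address.com>'. discover_user above
# relies on author_email/author_name from rhodecode.lib.vcs.utils; the
# regex below is only an illustration of the idea, not that implementation.
def _demo_author_email(author):
    match = re.search(r'<([^<>]+@[^<>]+)>', author)
    return match.group(1) if match else ''

assert _demo_author_email('Marcin K <marcin@example.com>') == 'marcin@example.com'
assert _demo_author_email('no-email-here') == ''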
860 def email_or_none(author):
860 def email_or_none(author):
861 # extract email from the commit string
861 # extract email from the commit string
862 _email = author_email(author)
862 _email = author_email(author)
863
863
864 # If we have an email, use it, otherwise
864 # If we have an email, use it, otherwise
865 # see if it contains a username we can get an email from
865 # see if it contains a username we can get an email from
866 if _email != '':
866 if _email != '':
867 return _email
867 return _email
868 else:
868 else:
869 user = User.get_by_username(
869 user = User.get_by_username(
870 author_name(author), case_insensitive=True, cache=True)
870 author_name(author), case_insensitive=True, cache=True)
871
871
872 if user is not None:
872 if user is not None:
873 return user.email
873 return user.email
874
874
875 # No valid email, not a valid user in the system, none!
875 # No valid email, not a valid user in the system, none!
876 return None
876 return None
877
877
878
878
879 def link_to_user(author, length=0, **kwargs):
879 def link_to_user(author, length=0, **kwargs):
880 user = discover_user(author)
880 user = discover_user(author)
881 # user can be None, but if we already have it we can re-use it
881 # user can be None, but if we already have it we can re-use it
882 # in the person() function, saving one expensive query
882 # in the person() function, saving one expensive query
883 if user:
883 if user:
884 author = user
884 author = user
885
885
886 display_person = person(author, 'username_or_name_or_email')
886 display_person = person(author, 'username_or_name_or_email')
887 if length:
887 if length:
888 display_person = shorter(display_person, length)
888 display_person = shorter(display_person, length)
889
889
890 if user:
890 if user:
891 return link_to(
891 return link_to(
892 escape(display_person),
892 escape(display_person),
893 route_path('user_profile', username=user.username),
893 route_path('user_profile', username=user.username),
894 **kwargs)
894 **kwargs)
895 else:
895 else:
896 return escape(display_person)
896 return escape(display_person)
897
897
898
898
899 def person(author, show_attr="username_and_name"):
899 def person(author, show_attr="username_and_name"):
900 user = discover_user(author)
900 user = discover_user(author)
901 if user:
901 if user:
902 return getattr(user, show_attr)
902 return getattr(user, show_attr)
903 else:
903 else:
904 _author = author_name(author)
904 _author = author_name(author)
905 _email = email(author)
905 _email = email(author)
906 return _author or _email
906 return _author or _email
907
907
908
908
909 def author_string(email):
909 def author_string(email):
910 if email:
910 if email:
911 user = User.get_by_email(email, case_insensitive=True, cache=True)
911 user = User.get_by_email(email, case_insensitive=True, cache=True)
912 if user:
912 if user:
913 if user.first_name or user.last_name:
913 if user.first_name or user.last_name:
914 return '%s %s &lt;%s&gt;' % (
914 return '%s %s &lt;%s&gt;' % (
915 user.first_name, user.last_name, email)
915 user.first_name, user.last_name, email)
916 else:
916 else:
917 return email
917 return email
918 else:
918 else:
919 return email
919 return email
920 else:
920 else:
921 return None
921 return None
922
922
923
923
924 def person_by_id(id_, show_attr="username_and_name"):
924 def person_by_id(id_, show_attr="username_and_name"):
925 # attr to return from fetched user
925 # attr to return from fetched user
926 person_getter = lambda usr: getattr(usr, show_attr)
926 person_getter = lambda usr: getattr(usr, show_attr)
927
927
928 # maybe it's an ID?
928 # maybe it's an ID?
929 if str(id_).isdigit() or isinstance(id_, int):
929 if str(id_).isdigit() or isinstance(id_, int):
930 id_ = int(id_)
930 id_ = int(id_)
931 user = User.get(id_)
931 user = User.get(id_)
932 if user is not None:
932 if user is not None:
933 return person_getter(user)
933 return person_getter(user)
934 return id_
934 return id_
935
935
936
936
937 def gravatar_with_user(request, author, show_disabled=False):
937 def gravatar_with_user(request, author, show_disabled=False):
938 _render = request.get_partial_renderer(
938 _render = request.get_partial_renderer(
939 'rhodecode:templates/base/base.mako')
939 'rhodecode:templates/base/base.mako')
940 return _render('gravatar_with_user', author, show_disabled=show_disabled)
940 return _render('gravatar_with_user', author, show_disabled=show_disabled)
941
941
942
942
943 tags_paterns = OrderedDict((
943 tags_paterns = OrderedDict((
944 ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
944 ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
945 '<div class="metatag" tag="lang">\\2</div>')),
945 '<div class="metatag" tag="lang">\\2</div>')),
946
946
947 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
947 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
948 '<div class="metatag" tag="see">see: \\1 </div>')),
948 '<div class="metatag" tag="see">see: \\1 </div>')),
949
949
950 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((.*?)\)\]'),
950 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((.*?)\)\]'),
951 '<div class="metatag" tag="url"> <a href="\\2">\\1</a> </div>')),
951 '<div class="metatag" tag="url"> <a href="\\2">\\1</a> </div>')),
952
952
953 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
953 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
954 '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>')),
954 '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>')),
955
955
956 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
956 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
957 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
957 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
958
958
959 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
959 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
960 '<div class="metatag" tag="state \\1">\\1</div>')),
960 '<div class="metatag" tag="state \\1">\\1</div>')),
961
961
962 # label in grey
962 # label in grey
963 ('label', (re.compile(r'\[([a-z]+)\]'),
963 ('label', (re.compile(r'\[([a-z]+)\]'),
964 '<div class="metatag" tag="label">\\1</div>')),
964 '<div class="metatag" tag="label">\\1</div>')),
965
965
966 # generic catch all in grey
966 # generic catch all in grey
967 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
967 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
968 '<div class="metatag" tag="generic">\\1</div>')),
968 '<div class="metatag" tag="generic">\\1</div>')),
969 ))
969 ))
970
970
971
971
972 def extract_metatags(value):
972 def extract_metatags(value):
973 """
973 """
974 Extract supported meta-tags from given text value
974 Extract supported meta-tags from given text value
975 """
975 """
976 tags = []
976 tags = []
977 if not value:
977 if not value:
978 return tags, ''
978 return tags, ''
979
979
980 for key, val in tags_paterns.items():
980 for key, val in tags_paterns.items():
981 pat, replace_html = val
981 pat, replace_html = val
982 tags.extend([(key, x.group()) for x in pat.finditer(value)])
982 tags.extend([(key, x.group()) for x in pat.finditer(value)])
983 value = pat.sub('', value)
983 value = pat.sub('', value)
984
984
985 return tags, value
985 return tags, value
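An illustrative run of the extraction loop above. Note that the patterns are written against HTML-escaped text, so `=>` has to appear as `=&gt;` in the input (the sample description is hypothetical):

    desc = '[stable] [lang =&gt; python] core repository'
    tags, remainder = extract_metatags(desc)
    # tags      -> roughly [('lang', '[lang =&gt; python]'), ('state', '[stable]')]
    # remainder -> the description with the matched tags stripped out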
986
986
987
987
988 def style_metatag(tag_type, value):
988 def style_metatag(tag_type, value):
989 """
989 """
990 Converts tags from the value into their html equivalent
990 Converts tags from the value into their html equivalent
991 """
991 """
992 if not value:
992 if not value:
993 return ''
993 return ''
994
994
995 html_value = value
995 html_value = value
996 tag_data = tags_paterns.get(tag_type)
996 tag_data = tags_paterns.get(tag_type)
997 if tag_data:
997 if tag_data:
998 pat, replace_html = tag_data
998 pat, replace_html = tag_data
999 # convert to plain `unicode` instead of a markup tag to be used in
999 # convert to plain `unicode` instead of a markup tag to be used in
1000 # regex expressions. safe_unicode doesn't work here
1000 # regex expressions. safe_unicode doesn't work here
1001 html_value = pat.sub(replace_html, unicode(value))
1001 html_value = pat.sub(replace_html, unicode(value))
1002
1002
1003 return html_value
1003 return html_value
1004
1004
1005
1005
1006 def bool2icon(value):
1006 def bool2icon(value):
1007 """
1007 """
1008 Returns the boolean value of a given value, rendered as an html element with
1008 Returns the boolean value of a given value, rendered as an html element with
1009 classes that represent icons
1009 classes that represent icons
1010
1010
1011 :param value: given value to convert to html node
1011 :param value: given value to convert to html node
1012 """
1012 """
1013
1013
1014 if value: # does bool conversion
1014 if value: # does bool conversion
1015 return HTML.tag('i', class_="icon-true")
1015 return HTML.tag('i', class_="icon-true")
1016 else: # not true as bool
1016 else: # not true as bool
1017 return HTML.tag('i', class_="icon-false")
1017 return HTML.tag('i', class_="icon-false")
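A quick illustration of the truthiness handling above (markup shown approximately; the exact output comes from the webhelpers HTML builder):

    bool2icon(True)    # -> <i class="icon-true"></i>
    bool2icon([1, 2])  # any truthy value renders the 'true' icon
    bool2icon(0)       # -> <i class="icon-false"></i>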
1018
1018
1019
1019
1020 #==============================================================================
1020 #==============================================================================
1021 # PERMS
1021 # PERMS
1022 #==============================================================================
1022 #==============================================================================
1023 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1023 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1024 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1024 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1025 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1025 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1026 csrf_token_key
1026 csrf_token_key
1027
1027
1028
1028
1029 #==============================================================================
1029 #==============================================================================
1030 # GRAVATAR URL
1030 # GRAVATAR URL
1031 #==============================================================================
1031 #==============================================================================
1032 class InitialsGravatar(object):
1032 class InitialsGravatar(object):
1033 def __init__(self, email_address, first_name, last_name, size=30,
1033 def __init__(self, email_address, first_name, last_name, size=30,
1034 background=None, text_color='#fff'):
1034 background=None, text_color='#fff'):
1035 self.size = size
1035 self.size = size
1036 self.first_name = first_name
1036 self.first_name = first_name
1037 self.last_name = last_name
1037 self.last_name = last_name
1038 self.email_address = email_address
1038 self.email_address = email_address
1039 self.background = background or self.str2color(email_address)
1039 self.background = background or self.str2color(email_address)
1040 self.text_color = text_color
1040 self.text_color = text_color
1041
1041
1042 def get_color_bank(self):
1042 def get_color_bank(self):
1043 """
1043 """
1044 returns a predefined list of colors that gravatars can use.
1044 returns a predefined list of colors that gravatars can use.
1045 Those are randomized distinct colors that guarantee readability and
1045 Those are randomized distinct colors that guarantee readability and
1046 uniqueness.
1046 uniqueness.
1047
1047
1048 generated with: http://phrogz.net/css/distinct-colors.html
1048 generated with: http://phrogz.net/css/distinct-colors.html
1049 """
1049 """
1050 return [
1050 return [
1051 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1051 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1052 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1052 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1053 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1053 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1054 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1054 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1055 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1055 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1056 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1056 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1057 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1057 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1058 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1058 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1059 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1059 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1060 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1060 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1061 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1061 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1062 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1062 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1063 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1063 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1064 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1064 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1065 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1065 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1066 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1066 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1067 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1067 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1068 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1068 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1069 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1069 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1070 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1070 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1071 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1071 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1072 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1072 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1073 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1073 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1074 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1074 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1075 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1075 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1076 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1076 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1077 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1077 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1078 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1078 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1079 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1079 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1080 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1080 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1081 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1081 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1082 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1082 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1083 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1083 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1084 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1084 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1085 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1085 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1086 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1086 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1087 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1087 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1088 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1088 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1089 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1089 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1090 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1090 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1091 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1091 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1092 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1092 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1093 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1093 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1094 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1094 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1095 '#4f8c46', '#368dd9', '#5c0073'
1095 '#4f8c46', '#368dd9', '#5c0073'
1096 ]
1096 ]
1097
1097
1098 def rgb_to_hex_color(self, rgb_tuple):
1098 def rgb_to_hex_color(self, rgb_tuple):
1099 """
1099 """
1100 Converts a passed rgb_tuple to a hex color.
1100 Converts a passed rgb_tuple to a hex color.
1101
1101
1102 :param rgb_tuple: tuple with 3 ints represents rgb color space
1102 :param rgb_tuple: tuple with 3 ints represents rgb color space
1103 """
1103 """
1104 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1104 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1105
1105
1106 def email_to_int_list(self, email_str):
1106 def email_to_int_list(self, email_str):
1107 """
1107 """
1108 Get every byte of the md5 hex digest of the email and turn it into an integer.
1108 Get every byte of the md5 hex digest of the email and turn it into an integer.
1109 Each value is always between 0-255
1109 Each value is always between 0-255
1110 """
1110 """
1111 digest = md5_safe(email_str.lower())
1111 digest = md5_safe(email_str.lower())
1112 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1112 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1113
1113
1114 def pick_color_bank_index(self, email_str, color_bank):
1114 def pick_color_bank_index(self, email_str, color_bank):
1115 return self.email_to_int_list(email_str)[0] % len(color_bank)
1115 return self.email_to_int_list(email_str)[0] % len(color_bank)
1116
1116
1117 def str2color(self, email_str):
1117 def str2color(self, email_str):
1118 """
1118 """
1119 Tries to map an email to a color using a stable algorithm
1119 Tries to map an email to a color using a stable algorithm
1120
1120
1121 :param email_str:
1121 :param email_str:
1122 """
1122 """
1123 color_bank = self.get_color_bank()
1123 color_bank = self.get_color_bank()
1124 # pick position (modulo its length so we always find it in the
1124 # pick position (modulo its length so we always find it in the
1125 # bank even if it's smaller than 256 values)
1125 # bank even if it's smaller than 256 values)
1126 pos = self.pick_color_bank_index(email_str, color_bank)
1126 pos = self.pick_color_bank_index(email_str, color_bank)
1127 return color_bank[pos]
1127 return color_bank[pos]
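A worked example of the colour selection above, written as standalone Python 2 style code to mirror the module (the e-mail address is hypothetical):

    import hashlib

    email = 'user@example.com'
    digest = hashlib.md5(email.lower()).hexdigest()  # same idea as md5_safe() here
    first_byte = int(digest[0:2], 16)                # always in range 0..255
    # pick_color_bank_index() then does: first_byte % len(color_bank),
    # so the same e-mail always maps to the same colour in the bank.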
1128
1128
1129 def normalize_email(self, email_address):
1129 def normalize_email(self, email_address):
1130 import unicodedata
1130 import unicodedata
1131 # default host used to fill in the fake/missing email
1131 # default host used to fill in the fake/missing email
1132 default_host = u'localhost'
1132 default_host = u'localhost'
1133
1133
1134 if not email_address:
1134 if not email_address:
1135 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1135 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1136
1136
1137 email_address = safe_unicode(email_address)
1137 email_address = safe_unicode(email_address)
1138
1138
1139 if u'@' not in email_address:
1139 if u'@' not in email_address:
1140 email_address = u'%s@%s' % (email_address, default_host)
1140 email_address = u'%s@%s' % (email_address, default_host)
1141
1141
1142 if email_address.endswith(u'@'):
1142 if email_address.endswith(u'@'):
1143 email_address = u'%s%s' % (email_address, default_host)
1143 email_address = u'%s%s' % (email_address, default_host)
1144
1144
1145 email_address = unicodedata.normalize('NFKD', email_address)\
1145 email_address = unicodedata.normalize('NFKD', email_address)\
1146 .encode('ascii', 'ignore')
1146 .encode('ascii', 'ignore')
1147 return email_address
1147 return email_address
1148
1148
1149 def get_initials(self):
1149 def get_initials(self):
1150 """
1150 """
1151 Returns 2 letter initials calculated based on the input.
1151 Returns 2 letter initials calculated based on the input.
1152 The algorithm first uses the given email address: it takes the first letter
1152 The algorithm first uses the given email address: it takes the first letter
1153 of the part before @, and then the first letter of the server name. In case
1153 of the part before @, and then the first letter of the server name. In case
1154 the part before @ has the form `somestring.somestring2`, it replaces
1154 the part before @ has the form `somestring.somestring2`, it replaces
1155 the server letter with the first letter of somestring2
1155 the server letter with the first letter of somestring2
1156
1156
1157 In case the class was initialized with both first and last name, this
1157 In case the class was initialized with both first and last name, this
1158 overrides the extraction from email with the first letters of the first and
1158 overrides the extraction from email with the first letters of the first and
1159 last name. There is special logic for compound last names: for a full name
1159 last name. There is special logic for compound last names: for a full name
1160 like Guido Von Rossum, we use the last part of the last name
1160 like Guido Von Rossum, we use the last part of the last name
1161 (Von Rossum), picking `R`.
1161 (Von Rossum), picking `R`.
1162
1162
1163 The function also normalizes non-ascii characters to their ascii
1163 The function also normalizes non-ascii characters to their ascii
1164 representation, e.g. Ą => A
1164 representation, e.g. Ą => A
1165 """
1165 """
1166 import unicodedata
1166 import unicodedata
1167 # replace non-ascii to ascii
1167 # replace non-ascii to ascii
1168 first_name = unicodedata.normalize(
1168 first_name = unicodedata.normalize(
1169 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1169 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1170 last_name = unicodedata.normalize(
1170 last_name = unicodedata.normalize(
1171 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1171 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1172
1172
1173 # do NFKD encoding, and also make sure email has proper format
1173 # do NFKD encoding, and also make sure email has proper format
1174 email_address = self.normalize_email(self.email_address)
1174 email_address = self.normalize_email(self.email_address)
1175
1175
1176 # first push the email initials
1176 # first push the email initials
1177 prefix, server = email_address.split('@', 1)
1177 prefix, server = email_address.split('@', 1)
1178
1178
1179 # check if prefix is maybe a 'first_name.last_name' syntax
1179 # check if prefix is maybe a 'first_name.last_name' syntax
1180 _dot_split = prefix.rsplit('.', 1)
1180 _dot_split = prefix.rsplit('.', 1)
1181 if len(_dot_split) == 2 and _dot_split[1]:
1181 if len(_dot_split) == 2 and _dot_split[1]:
1182 initials = [_dot_split[0][0], _dot_split[1][0]]
1182 initials = [_dot_split[0][0], _dot_split[1][0]]
1183 else:
1183 else:
1184 initials = [prefix[0], server[0]]
1184 initials = [prefix[0], server[0]]
1185
1185
1186 # then try to replace either first_name or last_name
1186 # then try to replace either first_name or last_name
1187 fn_letter = (first_name or " ")[0].strip()
1187 fn_letter = (first_name or " ")[0].strip()
1188 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1188 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1189
1189
1190 if fn_letter:
1190 if fn_letter:
1191 initials[0] = fn_letter
1191 initials[0] = fn_letter
1192
1192
1193 if ln_letter:
1193 if ln_letter:
1194 initials[1] = ln_letter
1194 initials[1] = ln_letter
1195
1195
1196 return ''.join(initials).upper()
1196 return ''.join(initials).upper()
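Illustrative expectations for the rules described in the docstring above (addresses and names are made up):

    InitialsGravatar('john.doe@example.com', '', '').get_initials()          # 'JD'
    InitialsGravatar('jdoe@example.com', '', '').get_initials()              # 'JE' (prefix + server)
    InitialsGravatar('g@example.com', 'Guido', 'Von Rossum').get_initials()  # 'GR'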
1197
1197
1198 def get_img_data_by_type(self, font_family, img_type):
1198 def get_img_data_by_type(self, font_family, img_type):
1199 default_user = """
1199 default_user = """
1200 <svg xmlns="http://www.w3.org/2000/svg"
1200 <svg xmlns="http://www.w3.org/2000/svg"
1201 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1201 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1202 viewBox="-15 -10 439.165 429.164"
1202 viewBox="-15 -10 439.165 429.164"
1203
1203
1204 xml:space="preserve"
1204 xml:space="preserve"
1205 style="background:{background};" >
1205 style="background:{background};" >
1206
1206
1207 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1207 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1208 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1208 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1209 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1209 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1210 168.596,153.916,216.671,
1210 168.596,153.916,216.671,
1211 204.583,216.671z" fill="{text_color}"/>
1211 204.583,216.671z" fill="{text_color}"/>
1212 <path d="M407.164,374.717L360.88,
1212 <path d="M407.164,374.717L360.88,
1213 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1213 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1214 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1214 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1215 15.366-44.203,23.488-69.076,23.488c-24.877,
1215 15.366-44.203,23.488-69.076,23.488c-24.877,
1216 0-48.762-8.122-69.078-23.488
1216 0-48.762-8.122-69.078-23.488
1217 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1217 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1218 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1218 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1219 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1219 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1220 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1220 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1221 19.402-10.527 C409.699,390.129,
1221 19.402-10.527 C409.699,390.129,
1222 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1222 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1223 </svg>""".format(
1223 </svg>""".format(
1224 size=self.size,
1224 size=self.size,
1225 background='#979797', # @grey4
1225 background='#979797', # @grey4
1226 text_color=self.text_color,
1226 text_color=self.text_color,
1227 font_family=font_family)
1227 font_family=font_family)
1228
1228
1229 return {
1229 return {
1230 "default_user": default_user
1230 "default_user": default_user
1231 }[img_type]
1231 }[img_type]
1232
1232
1233 def get_img_data(self, svg_type=None):
1233 def get_img_data(self, svg_type=None):
1234 """
1234 """
1235 generates the svg metadata for image
1235 generates the svg metadata for image
1236 """
1236 """
1237
1237
1238 font_family = ','.join([
1238 font_family = ','.join([
1239 'proximanovaregular',
1239 'proximanovaregular',
1240 'Proxima Nova Regular',
1240 'Proxima Nova Regular',
1241 'Proxima Nova',
1241 'Proxima Nova',
1242 'Arial',
1242 'Arial',
1243 'Lucida Grande',
1243 'Lucida Grande',
1244 'sans-serif'
1244 'sans-serif'
1245 ])
1245 ])
1246 if svg_type:
1246 if svg_type:
1247 return self.get_img_data_by_type(font_family, svg_type)
1247 return self.get_img_data_by_type(font_family, svg_type)
1248
1248
1249 initials = self.get_initials()
1249 initials = self.get_initials()
1250 img_data = """
1250 img_data = """
1251 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1251 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1252 width="{size}" height="{size}"
1252 width="{size}" height="{size}"
1253 style="width: 100%; height: 100%; background-color: {background}"
1253 style="width: 100%; height: 100%; background-color: {background}"
1254 viewBox="0 0 {size} {size}">
1254 viewBox="0 0 {size} {size}">
1255 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1255 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1256 pointer-events="auto" fill="{text_color}"
1256 pointer-events="auto" fill="{text_color}"
1257 font-family="{font_family}"
1257 font-family="{font_family}"
1258 style="font-weight: 400; font-size: {f_size}px;">{text}
1258 style="font-weight: 400; font-size: {f_size}px;">{text}
1259 </text>
1259 </text>
1260 </svg>""".format(
1260 </svg>""".format(
1261 size=self.size,
1261 size=self.size,
1262 f_size=self.size/1.85, # scale the text inside the box nicely
1262 f_size=self.size/1.85, # scale the text inside the box nicely
1263 background=self.background,
1263 background=self.background,
1264 text_color=self.text_color,
1264 text_color=self.text_color,
1265 text=initials.upper(),
1265 text=initials.upper(),
1266 font_family=font_family)
1266 font_family=font_family)
1267
1267
1268 return img_data
1268 return img_data
1269
1269
1270 def generate_svg(self, svg_type=None):
1270 def generate_svg(self, svg_type=None):
1271 img_data = self.get_img_data(svg_type)
1271 img_data = self.get_img_data(svg_type)
1272 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1272 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1273
1273
1274
1274
1275 def initials_gravatar(email_address, first_name, last_name, size=30):
1275 def initials_gravatar(email_address, first_name, last_name, size=30):
1276 svg_type = None
1276 svg_type = None
1277 if email_address == User.DEFAULT_USER_EMAIL:
1277 if email_address == User.DEFAULT_USER_EMAIL:
1278 svg_type = 'default_user'
1278 svg_type = 'default_user'
1279 klass = InitialsGravatar(email_address, first_name, last_name, size)
1279 klass = InitialsGravatar(email_address, first_name, last_name, size)
1280 return klass.generate_svg(svg_type=svg_type)
1280 return klass.generate_svg(svg_type=svg_type)
1281
1281
1282
1282
1283 def gravatar_url(email_address, size=30, request=None):
1283 def gravatar_url(email_address, size=30, request=None):
1284 request = request or get_current_request()
1284 request = request or get_current_request()
1285 _use_gravatar = request.call_context.visual.use_gravatar
1285 _use_gravatar = request.call_context.visual.use_gravatar
1286 _gravatar_url = request.call_context.visual.gravatar_url
1286 _gravatar_url = request.call_context.visual.gravatar_url
1287
1287
1288 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1288 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1289
1289
1290 email_address = email_address or User.DEFAULT_USER_EMAIL
1290 email_address = email_address or User.DEFAULT_USER_EMAIL
1291 if isinstance(email_address, unicode):
1291 if isinstance(email_address, unicode):
1292 # hashlib crashes on unicode items
1292 # hashlib crashes on unicode items
1293 email_address = safe_str(email_address)
1293 email_address = safe_str(email_address)
1294
1294
1295 # empty email or default user
1295 # empty email or default user
1296 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1296 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1297 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1297 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1298
1298
1299 if _use_gravatar:
1299 if _use_gravatar:
1300 # TODO: Disuse pyramid thread locals. Think about another solution to
1300 # TODO: Disuse pyramid thread locals. Think about another solution to
1301 # get the host and schema here.
1301 # get the host and schema here.
1302 request = get_current_request()
1302 request = get_current_request()
1303 tmpl = safe_str(_gravatar_url)
1303 tmpl = safe_str(_gravatar_url)
1304 tmpl = tmpl.replace('{email}', email_address)\
1304 tmpl = tmpl.replace('{email}', email_address)\
1305 .replace('{md5email}', md5_safe(email_address.lower())) \
1305 .replace('{md5email}', md5_safe(email_address.lower())) \
1306 .replace('{netloc}', request.host)\
1306 .replace('{netloc}', request.host)\
1307 .replace('{scheme}', request.scheme)\
1307 .replace('{scheme}', request.scheme)\
1308 .replace('{size}', safe_str(size))
1308 .replace('{size}', safe_str(size))
1309 return tmpl
1309 return tmpl
1310 else:
1310 else:
1311 return initials_gravatar(email_address, '', '', size=size)
1311 return initials_gravatar(email_address, '', '', size=size)
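A sketch of the placeholder substitution performed above, using a hypothetical gravatar template (the real template comes from the visual settings):

    import hashlib

    tmpl = 'https://secure.gravatar.com/avatar/{md5email}?s={size}'
    email = 'user@example.com'
    url = (tmpl.replace('{md5email}', hashlib.md5(email.lower()).hexdigest())
               .replace('{size}', '30'))
    # {email}, {netloc} and {scheme} are filled in the same way when present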
1312
1312
1313
1313
1314 class Page(_Page):
1314 class Page(_Page):
1315 """
1315 """
1316 Custom pager to match rendering style with paginator
1316 Custom pager to match rendering style with paginator
1317 """
1317 """
1318
1318
1319 def _get_pos(self, cur_page, max_page, items):
1319 def _get_pos(self, cur_page, max_page, items):
1320 edge = (items / 2) + 1
1320 edge = (items / 2) + 1
1321 if (cur_page <= edge):
1321 if (cur_page <= edge):
1322 radius = max(items / 2, items - cur_page)
1322 radius = max(items / 2, items - cur_page)
1323 elif (max_page - cur_page) < edge:
1323 elif (max_page - cur_page) < edge:
1324 radius = (items - 1) - (max_page - cur_page)
1324 radius = (items - 1) - (max_page - cur_page)
1325 else:
1325 else:
1326 radius = items / 2
1326 radius = items / 2
1327
1327
1328 left = max(1, (cur_page - (radius)))
1328 left = max(1, (cur_page - (radius)))
1329 right = min(max_page, cur_page + (radius))
1329 right = min(max_page, cur_page + (radius))
1330 return left, cur_page, right
1330 return left, cur_page, right
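A worked example of the window maths above, assuming Python 2 integer division and a '~2~' format (so items = 2 * 2 + 1 = 5) over 12 pages:

    # _get_pos(cur_page, max_page=12, items=5), edge = 5 / 2 + 1 = 3
    # cur_page = 7  -> radius = 5 / 2 = 2          -> (5, 7, 9), rendered as '1 .. 5 6 [7] 8 9 .. 12'
    # cur_page = 1  -> radius = max(2, 5 - 1) = 4  -> (1, 1, 5)
    # cur_page = 12 -> radius = 4 - (12 - 12) = 4  -> (8, 12, 12)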
1331
1331
1332 def _range(self, regexp_match):
1332 def _range(self, regexp_match):
1333 """
1333 """
1334 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1334 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1335
1335
1336 Arguments:
1336 Arguments:
1337
1337
1338 regexp_match
1338 regexp_match
1339 A "re" (regular expressions) match object containing the
1339 A "re" (regular expressions) match object containing the
1340 radius of linked pages around the current page in
1340 radius of linked pages around the current page in
1341 regexp_match.group(1) as a string
1341 regexp_match.group(1) as a string
1342
1342
1343 This function is supposed to be called as a callable in
1343 This function is supposed to be called as a callable in
1344 re.sub.
1344 re.sub.
1345
1345
1346 """
1346 """
1347 radius = int(regexp_match.group(1))
1347 radius = int(regexp_match.group(1))
1348
1348
1349 # Compute the first and last page number within the radius
1349 # Compute the first and last page number within the radius
1350 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1350 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1351 # -> leftmost_page = 5
1351 # -> leftmost_page = 5
1352 # -> rightmost_page = 9
1352 # -> rightmost_page = 9
1353 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1353 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1354 self.last_page,
1354 self.last_page,
1355 (radius * 2) + 1)
1355 (radius * 2) + 1)
1356 nav_items = []
1356 nav_items = []
1357
1357
1358 # Create a link to the first page (unless we are on the first page
1358 # Create a link to the first page (unless we are on the first page
1359 # or there would be no need to insert '..' spacers)
1359 # or there would be no need to insert '..' spacers)
1360 if self.page != self.first_page and self.first_page < leftmost_page:
1360 if self.page != self.first_page and self.first_page < leftmost_page:
1361 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1361 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1362
1362
1363 # Insert dots if there are pages between the first page
1363 # Insert dots if there are pages between the first page
1364 # and the currently displayed page range
1364 # and the currently displayed page range
1365 if leftmost_page - self.first_page > 1:
1365 if leftmost_page - self.first_page > 1:
1366 # Wrap in a SPAN tag if dotdot_attr is set
1366 # Wrap in a SPAN tag if dotdot_attr is set
1367 text = '..'
1367 text = '..'
1368 if self.dotdot_attr:
1368 if self.dotdot_attr:
1369 text = HTML.span(c=text, **self.dotdot_attr)
1369 text = HTML.span(c=text, **self.dotdot_attr)
1370 nav_items.append(text)
1370 nav_items.append(text)
1371
1371
1372 for thispage in xrange(leftmost_page, rightmost_page + 1):
1372 for thispage in xrange(leftmost_page, rightmost_page + 1):
1373 # Highlight the current page number and do not use a link
1373 # Highlight the current page number and do not use a link
1374 if thispage == self.page:
1374 if thispage == self.page:
1375 text = '%s' % (thispage,)
1375 text = '%s' % (thispage,)
1376 # Wrap in a SPAN tag if curpage_attr is set
1376 # Wrap in a SPAN tag if curpage_attr is set
1377 if self.curpage_attr:
1377 if self.curpage_attr:
1378 text = HTML.span(c=text, **self.curpage_attr)
1378 text = HTML.span(c=text, **self.curpage_attr)
1379 nav_items.append(text)
1379 nav_items.append(text)
1380 # Otherwise create just a link to that page
1380 # Otherwise create just a link to that page
1381 else:
1381 else:
1382 text = '%s' % (thispage,)
1382 text = '%s' % (thispage,)
1383 nav_items.append(self._pagerlink(thispage, text))
1383 nav_items.append(self._pagerlink(thispage, text))
1384
1384
1385 # Insert dots if there are pages between the displayed
1385 # Insert dots if there are pages between the displayed
1386 # page numbers and the end of the page range
1386 # page numbers and the end of the page range
1387 if self.last_page - rightmost_page > 1:
1387 if self.last_page - rightmost_page > 1:
1388 text = '..'
1388 text = '..'
1389 # Wrap in a SPAN tag if dotdot_attr is set
1389 # Wrap in a SPAN tag if dotdot_attr is set
1390 if self.dotdot_attr:
1390 if self.dotdot_attr:
1391 text = HTML.span(c=text, **self.dotdot_attr)
1391 text = HTML.span(c=text, **self.dotdot_attr)
1392 nav_items.append(text)
1392 nav_items.append(text)
1393
1393
1394 # Create a link to the very last page (unless we are on the last
1394 # Create a link to the very last page (unless we are on the last
1395 # page or there would be no need to insert '..' spacers)
1395 # page or there would be no need to insert '..' spacers)
1396 if self.page != self.last_page and rightmost_page < self.last_page:
1396 if self.page != self.last_page and rightmost_page < self.last_page:
1397 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1397 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1398
1398
1399 ## prerender links
1399 ## prerender links
1400 #_page_link = url.current()
1400 #_page_link = url.current()
1401 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1401 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1402 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1402 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1403 return self.separator.join(nav_items)
1403 return self.separator.join(nav_items)
1404
1404
1405 def pager(self, format='~2~', page_param='page', partial_param='partial',
1405 def pager(self, format='~2~', page_param='page', partial_param='partial',
1406 show_if_single_page=False, separator=' ', onclick=None,
1406 show_if_single_page=False, separator=' ', onclick=None,
1407 symbol_first='<<', symbol_last='>>',
1407 symbol_first='<<', symbol_last='>>',
1408 symbol_previous='<', symbol_next='>',
1408 symbol_previous='<', symbol_next='>',
1409 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1409 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1410 curpage_attr={'class': 'pager_curpage'},
1410 curpage_attr={'class': 'pager_curpage'},
1411 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1411 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1412
1412
1413 self.curpage_attr = curpage_attr
1413 self.curpage_attr = curpage_attr
1414 self.separator = separator
1414 self.separator = separator
1415 self.pager_kwargs = kwargs
1415 self.pager_kwargs = kwargs
1416 self.page_param = page_param
1416 self.page_param = page_param
1417 self.partial_param = partial_param
1417 self.partial_param = partial_param
1418 self.onclick = onclick
1418 self.onclick = onclick
1419 self.link_attr = link_attr
1419 self.link_attr = link_attr
1420 self.dotdot_attr = dotdot_attr
1420 self.dotdot_attr = dotdot_attr
1421
1421
1422 # Don't show navigator if there is no more than one page
1422 # Don't show navigator if there is no more than one page
1423 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1423 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1424 return ''
1424 return ''
1425
1425
1426 from string import Template
1426 from string import Template
1427 # Replace ~...~ in token format by range of pages
1427 # Replace ~...~ in token format by range of pages
1428 result = re.sub(r'~(\d+)~', self._range, format)
1428 result = re.sub(r'~(\d+)~', self._range, format)
1429
1429
1430 # Interpolate '%' variables
1430 # Interpolate '%' variables
1431 result = Template(result).safe_substitute({
1431 result = Template(result).safe_substitute({
1432 'first_page': self.first_page,
1432 'first_page': self.first_page,
1433 'last_page': self.last_page,
1433 'last_page': self.last_page,
1434 'page': self.page,
1434 'page': self.page,
1435 'page_count': self.page_count,
1435 'page_count': self.page_count,
1436 'items_per_page': self.items_per_page,
1436 'items_per_page': self.items_per_page,
1437 'first_item': self.first_item,
1437 'first_item': self.first_item,
1438 'last_item': self.last_item,
1438 'last_item': self.last_item,
1439 'item_count': self.item_count,
1439 'item_count': self.item_count,
1440 'link_first': self.page > self.first_page and \
1440 'link_first': self.page > self.first_page and \
1441 self._pagerlink(self.first_page, symbol_first) or '',
1441 self._pagerlink(self.first_page, symbol_first) or '',
1442 'link_last': self.page < self.last_page and \
1442 'link_last': self.page < self.last_page and \
1443 self._pagerlink(self.last_page, symbol_last) or '',
1443 self._pagerlink(self.last_page, symbol_last) or '',
1444 'link_previous': self.previous_page and \
1444 'link_previous': self.previous_page and \
1445 self._pagerlink(self.previous_page, symbol_previous) \
1445 self._pagerlink(self.previous_page, symbol_previous) \
1446 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1446 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1447 'link_next': self.next_page and \
1447 'link_next': self.next_page and \
1448 self._pagerlink(self.next_page, symbol_next) \
1448 self._pagerlink(self.next_page, symbol_next) \
1449 or HTML.span(symbol_next, class_="pg-next disabled")
1449 or HTML.span(symbol_next, class_="pg-next disabled")
1450 })
1450 })
1451
1451
1452 return literal(result)
1452 return literal(result)
1453
1453
1454
1454
1455 #==============================================================================
1455 #==============================================================================
1456 # REPO PAGER, PAGER FOR REPOSITORY
1456 # REPO PAGER, PAGER FOR REPOSITORY
1457 #==============================================================================
1457 #==============================================================================
1458 class RepoPage(Page):
1458 class RepoPage(Page):
1459
1459
1460 def __init__(self, collection, page=1, items_per_page=20,
1460 def __init__(self, collection, page=1, items_per_page=20,
1461 item_count=None, url=None, **kwargs):
1461 item_count=None, url=None, **kwargs):
1462
1462
1463 """Create a "RepoPage" instance. special pager for paging
1463 """Create a "RepoPage" instance. special pager for paging
1464 repository
1464 repository
1465 """
1465 """
1466 self._url_generator = url
1466 self._url_generator = url
1467
1467
1468 # Save the kwargs class-wide so they can be used in the pager() method
1468 # Save the kwargs class-wide so they can be used in the pager() method
1469 self.kwargs = kwargs
1469 self.kwargs = kwargs
1470
1470
1471 # Save a reference to the collection
1471 # Save a reference to the collection
1472 self.original_collection = collection
1472 self.original_collection = collection
1473
1473
1474 self.collection = collection
1474 self.collection = collection
1475
1475
1476 # The self.page is the number of the current page.
1476 # The self.page is the number of the current page.
1477 # The first page has the number 1!
1477 # The first page has the number 1!
1478 try:
1478 try:
1479 self.page = int(page) # make it int() if we get it as a string
1479 self.page = int(page) # make it int() if we get it as a string
1480 except (ValueError, TypeError):
1480 except (ValueError, TypeError):
1481 self.page = 1
1481 self.page = 1
1482
1482
1483 self.items_per_page = items_per_page
1483 self.items_per_page = items_per_page
1484
1484
1485 # Unless the user tells us how many items the collection has,
1485 # Unless the user tells us how many items the collection has,
1486 # we calculate that ourselves.
1486 # we calculate that ourselves.
1487 if item_count is not None:
1487 if item_count is not None:
1488 self.item_count = item_count
1488 self.item_count = item_count
1489 else:
1489 else:
1490 self.item_count = len(self.collection)
1490 self.item_count = len(self.collection)
1491
1491
1492 # Compute the number of the first and last available page
1492 # Compute the number of the first and last available page
1493 if self.item_count > 0:
1493 if self.item_count > 0:
1494 self.first_page = 1
1494 self.first_page = 1
1495 self.page_count = int(math.ceil(float(self.item_count) /
1495 self.page_count = int(math.ceil(float(self.item_count) /
1496 self.items_per_page))
1496 self.items_per_page))
1497 self.last_page = self.first_page + self.page_count - 1
1497 self.last_page = self.first_page + self.page_count - 1
1498
1498
1499 # Make sure that the requested page number is in the range of
1499 # Make sure that the requested page number is in the range of
1500 # valid pages
1500 # valid pages
1501 if self.page > self.last_page:
1501 if self.page > self.last_page:
1502 self.page = self.last_page
1502 self.page = self.last_page
1503 elif self.page < self.first_page:
1503 elif self.page < self.first_page:
1504 self.page = self.first_page
1504 self.page = self.first_page
1505
1505
1506 # Note: the number of items on this page can be less than
1506 # Note: the number of items on this page can be less than
1507 # items_per_page if the last page is not full
1507 # items_per_page if the last page is not full
1508 self.first_item = max(0, (self.item_count) - (self.page *
1508 self.first_item = max(0, (self.item_count) - (self.page *
1509 items_per_page))
1509 items_per_page))
1510 self.last_item = ((self.item_count - 1) - items_per_page *
1510 self.last_item = ((self.item_count - 1) - items_per_page *
1511 (self.page - 1))
1511 (self.page - 1))
1512
1512
1513 self.items = list(self.collection[self.first_item:self.last_item + 1])
1513 self.items = list(self.collection[self.first_item:self.last_item + 1])
1514
1514
1515 # Links to previous and next page
1515 # Links to previous and next page
1516 if self.page > self.first_page:
1516 if self.page > self.first_page:
1517 self.previous_page = self.page - 1
1517 self.previous_page = self.page - 1
1518 else:
1518 else:
1519 self.previous_page = None
1519 self.previous_page = None
1520
1520
1521 if self.page < self.last_page:
1521 if self.page < self.last_page:
1522 self.next_page = self.page + 1
1522 self.next_page = self.page + 1
1523 else:
1523 else:
1524 self.next_page = None
1524 self.next_page = None
1525
1525
1526 # No items available
1526 # No items available
1527 else:
1527 else:
1528 self.first_page = None
1528 self.first_page = None
1529 self.page_count = 0
1529 self.page_count = 0
1530 self.last_page = None
1530 self.last_page = None
1531 self.first_item = None
1531 self.first_item = None
1532 self.last_item = None
1532 self.last_item = None
1533 self.previous_page = None
1533 self.previous_page = None
1534 self.next_page = None
1534 self.next_page = None
1535 self.items = []
1535 self.items = []
1536
1536
1537 # This is a subclass of the 'list' type. Initialise the list now.
1537 # This is a subclass of the 'list' type. Initialise the list now.
1538 list.__init__(self, reversed(self.items))
1538 list.__init__(self, reversed(self.items))
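A worked example of the reverse paging arithmetic above, for a hypothetical collection of 45 items with items_per_page = 20 (page_count = ceil(45 / 20) = 3):

    # page 1: first_item = max(0, 45 - 1 * 20) = 25, last_item = 44 -> newest 20 items
    # page 2: first_item = max(0, 45 - 2 * 20) = 5,  last_item = 24 -> next 20
    # page 3: first_item = max(0, 45 - 3 * 20) = 0,  last_item = 4  -> oldest 5
    # each slice is then reversed, so every page lists its items newest-first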
1539
1539
1540
1540
1541 def breadcrumb_repo_link(repo):
1541 def breadcrumb_repo_link(repo):
1542 """
1542 """
1543 Makes a breadcrumbs path link to repo
1543 Makes a breadcrumbs path link to repo
1544
1544
1545 ex::
1545 ex::
1546 group >> subgroup >> repo
1546 group >> subgroup >> repo
1547
1547
1548 :param repo: a Repository instance
1548 :param repo: a Repository instance
1549 """
1549 """
1550
1550
1551 path = [
1551 path = [
1552 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1552 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1553 for group in repo.groups_with_parents
1553 for group in repo.groups_with_parents
1554 ] + [
1554 ] + [
1555 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1555 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1556 ]
1556 ]
1557
1557
1558 return literal(' &raquo; '.join(path))
1558 return literal(' &raquo; '.join(path))
1559
1559
1560
1560
1561 def format_byte_size_binary(file_size):
1561 def format_byte_size_binary(file_size):
1562 """
1562 """
1563 Formats file/folder sizes into a human-readable form using binary units.
1563 Formats file/folder sizes into a human-readable form using binary units.
1564 """
1564 """
1565 if file_size is None:
1565 if file_size is None:
1566 file_size = 0
1566 file_size = 0
1567
1567
1568 formatted_size = format_byte_size(file_size, binary=True)
1568 formatted_size = format_byte_size(file_size, binary=True)
1569 return formatted_size
1569 return formatted_size
1570
1570
1571
1571
1572 def urlify_text(text_, safe=True):
1572 def urlify_text(text_, safe=True):
1573 """
1573 """
1574 Extract urls from text and make html links out of them
1574 Extract urls from text and make html links out of them
1575
1575
1576 :param text_:
1576 :param text_:
1577 """
1577 """
1578
1578
1579 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1579 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1580 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1580 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1581
1581
1582 def url_func(match_obj):
1582 def url_func(match_obj):
1583 url_full = match_obj.groups()[0]
1583 url_full = match_obj.groups()[0]
1584 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1584 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1585 _newtext = url_pat.sub(url_func, text_)
1585 _newtext = url_pat.sub(url_func, text_)
1586 if safe:
1586 if safe:
1587 return literal(_newtext)
1587 return literal(_newtext)
1588 return _newtext
1588 return _newtext
1589
1589
1590
1590
1591 def urlify_commits(text_, repository):
1591 def urlify_commits(text_, repository):
1592 """
1592 """
1593 Extract commit ids from text and make links from them
1593 Extract commit ids from text and make links from them
1594
1594
1595 :param text_:
1595 :param text_:
1596 :param repository: repo name to build the URL with
1596 :param repository: repo name to build the URL with
1597 """
1597 """
1598
1598
1599 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1599 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1600
1600
1601 def url_func(match_obj):
1601 def url_func(match_obj):
1602 commit_id = match_obj.groups()[1]
1602 commit_id = match_obj.groups()[1]
1603 pref = match_obj.groups()[0]
1603 pref = match_obj.groups()[0]
1604 suf = match_obj.groups()[2]
1604 suf = match_obj.groups()[2]
1605
1605
1606 tmpl = (
1606 tmpl = (
1607 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1607 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1608 '%(commit_id)s</a>%(suf)s'
1608 '%(commit_id)s</a>%(suf)s'
1609 )
1609 )
1610 return tmpl % {
1610 return tmpl % {
1611 'pref': pref,
1611 'pref': pref,
1612 'cls': 'revision-link',
1612 'cls': 'revision-link',
1613 'url': route_url('repo_commit', repo_name=repository,
1613 'url': route_url('repo_commit', repo_name=repository,
1614 commit_id=commit_id),
1614 commit_id=commit_id),
1615 'commit_id': commit_id,
1615 'commit_id': commit_id,
1616 'suf': suf
1616 'suf': suf
1617 }
1617 }
1618
1618
1619 newtext = URL_PAT.sub(url_func, text_)
1619 newtext = URL_PAT.sub(url_func, text_)
1620
1620
1621 return newtext
1621 return newtext
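An illustrative input/output pair for the substitution above (the repository name and commit hash are made up; the href is whatever route_url('repo_commit', ...) resolves to):

    urlify_commits('merged 1f2e3d4c5b6a into stable', 'acme/repo')
    # -> 'merged <a class="revision-link" href="...">1f2e3d4c5b6a</a> into stable'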
1622
1622
1623
1623
1624 def _process_url_func(match_obj, repo_name, uid, entry,
1624 def _process_url_func(match_obj, repo_name, uid, entry,
1625 return_raw_data=False, link_format='html'):
1625 return_raw_data=False, link_format='html'):
1626 pref = ''
1626 pref = ''
1627 if match_obj.group().startswith(' '):
1627 if match_obj.group().startswith(' '):
1628 pref = ' '
1628 pref = ' '
1629
1629
1630 issue_id = ''.join(match_obj.groups())
1630 issue_id = ''.join(match_obj.groups())
1631
1631
1632 if link_format == 'html':
1632 if link_format == 'html':
1633 tmpl = (
1633 tmpl = (
1634 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1634 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1635 '%(issue-prefix)s%(id-repr)s'
1635 '%(issue-prefix)s%(id-repr)s'
1636 '</a>')
1636 '</a>')
1637 elif link_format == 'rst':
1637 elif link_format == 'rst':
1638 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1638 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1639 elif link_format == 'markdown':
1639 elif link_format == 'markdown':
1640 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1640 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1641 else:
1641 else:
1642 raise ValueError('Bad link_format:{}'.format(link_format))
1642 raise ValueError('Bad link_format:{}'.format(link_format))
1643
1643
1644 (repo_name_cleaned,
1644 (repo_name_cleaned,
1645 parent_group_name) = RepoGroupModel().\
1645 parent_group_name) = RepoGroupModel().\
1646 _get_group_name_and_parent(repo_name)
1646 _get_group_name_and_parent(repo_name)
1647
1647
1648 # variables replacement
1648 # variables replacement
1649 named_vars = {
1649 named_vars = {
1650 'id': issue_id,
1650 'id': issue_id,
1651 'repo': repo_name,
1651 'repo': repo_name,
1652 'repo_name': repo_name_cleaned,
1652 'repo_name': repo_name_cleaned,
1653 'group_name': parent_group_name
1653 'group_name': parent_group_name
1654 }
1654 }
1655 # named regex variables
1655 # named regex variables
1656 named_vars.update(match_obj.groupdict())
1656 named_vars.update(match_obj.groupdict())
1657 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1657 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1658
1658
1659 data = {
1659 data = {
1660 'pref': pref,
1660 'pref': pref,
1661 'cls': 'issue-tracker-link',
1661 'cls': 'issue-tracker-link',
1662 'url': _url,
1662 'url': _url,
1663 'id-repr': issue_id,
1663 'id-repr': issue_id,
1664 'issue-prefix': entry['pref'],
1664 'issue-prefix': entry['pref'],
1665 'serv': entry['url'],
1665 'serv': entry['url'],
1666 }
1666 }
1667 if return_raw_data:
1667 if return_raw_data:
1668 return {
1668 return {
1669 'id': issue_id,
1669 'id': issue_id,
1670 'url': _url
1670 'url': _url
1671 }
1671 }
1672 return tmpl % data
1672 return tmpl % data
1673
1673
1674
1674
1675 def process_patterns(text_string, repo_name, link_format='html'):
1675 def process_patterns(text_string, repo_name, link_format='html'):
1676 allowed_formats = ['html', 'rst', 'markdown']
1676 allowed_formats = ['html', 'rst', 'markdown']
1677 if link_format not in allowed_formats:
1677 if link_format not in allowed_formats:
1678 raise ValueError('Link format can be only one of:{} got {}'.format(
1678 raise ValueError('Link format can be only one of:{} got {}'.format(
1679 allowed_formats, link_format))
1679 allowed_formats, link_format))
1680
1680
1681 repo = None
1681 repo = None
1682 if repo_name:
1682 if repo_name:
1683 # Retrieve the repo so an invalid repo_name doesn't explode on
1683 # Retrieve the repo so an invalid repo_name doesn't explode on
1684 # IssueTrackerSettingsModel, while still passing the invalid name further down
1684 # IssueTrackerSettingsModel, while still passing the invalid name further down
1685 repo = Repository.get_by_repo_name(repo_name, cache=True)
1685 repo = Repository.get_by_repo_name(repo_name, cache=True)
1686
1686
1687 settings_model = IssueTrackerSettingsModel(repo=repo)
1687 settings_model = IssueTrackerSettingsModel(repo=repo)
1688 active_entries = settings_model.get_settings(cache=True)
1688 active_entries = settings_model.get_settings(cache=True)
1689
1689
1690 issues_data = []
1690 issues_data = []
1691 newtext = text_string
1691 newtext = text_string
1692
1692
1693 for uid, entry in active_entries.items():
1693 for uid, entry in active_entries.items():
1694 log.debug('found issue tracker entry with uid %s' % (uid,))
1694 log.debug('found issue tracker entry with uid %s' % (uid,))
1695
1695
1696 if not (entry['pat'] and entry['url']):
1696 if not (entry['pat'] and entry['url']):
1697 log.debug('skipping due to missing data')
1697 log.debug('skipping due to missing data')
1698 continue
1698 continue
1699
1699
1700 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1700 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1701 % (uid, entry['pat'], entry['url'], entry['pref']))
1701 % (uid, entry['pat'], entry['url'], entry['pref']))
1702
1702
1703 try:
1703 try:
1704 pattern = re.compile(r'%s' % entry['pat'])
1704 pattern = re.compile(r'%s' % entry['pat'])
1705 except re.error:
1705 except re.error:
1706 log.exception(
1706 log.exception(
1707 'issue tracker pattern: `%s` failed to compile',
1707 'issue tracker pattern: `%s` failed to compile',
1708 entry['pat'])
1708 entry['pat'])
1709 continue
1709 continue
1710
1710
1711 data_func = partial(
1711 data_func = partial(
1712 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1712 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1713 return_raw_data=True)
1713 return_raw_data=True)
1714
1714
1715 for match_obj in pattern.finditer(text_string):
1715 for match_obj in pattern.finditer(text_string):
1716 issues_data.append(data_func(match_obj))
1716 issues_data.append(data_func(match_obj))
1717
1717
1718 url_func = partial(
1718 url_func = partial(
1719 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1719 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1720 link_format=link_format)
1720 link_format=link_format)
1721
1721
1722 newtext = pattern.sub(url_func, newtext)
1722 newtext = pattern.sub(url_func, newtext)
1723 log.debug('processed prefix:uid `%s`' % (uid,))
1723 log.debug('processed prefix:uid `%s`' % (uid,))
1724
1724
1725 return newtext, issues_data
1725 return newtext, issues_data
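A minimal usage sketch for process_patterns, assuming an issue-tracker pattern such as r'#(?P<issue_id>\d+)' is configured and that the repository name and tracker URL below are hypothetical:

    # returns the rewritten text plus raw data about every matched issue
    text, issues = process_patterns(
        'Fixes #42 and #43', repo_name='my-group/my-repo',
        link_format='markdown')
    # with such a pattern configured, `text` would contain markdown links
    # like [#42](https://tracker.example.com/42) and `issues` a list of
    # {'id': ..., 'url': ...} dicts collected by the raw-data pass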
1726
1726
1727
1727
1728 def urlify_commit_message(commit_text, repository=None):
1728 def urlify_commit_message(commit_text, repository=None):
1729 """
1729 """
1730 Parses the given commit message and makes proper links.
1730 Parses the given commit message and makes proper links.
1731 Issue references are linked to the configured issue tracker, the rest becomes a commit link
1731 Issue references are linked to the configured issue tracker, the rest becomes a commit link
1732
1732
1733 :param commit_text:
1733 :param commit_text:
1734 :param repository:
1734 :param repository:
1735 """
1735 """
1736 def escaper(string):
1736 def escaper(string):
1737 return string.replace('<', '&lt;').replace('>', '&gt;')
1737 return string.replace('<', '&lt;').replace('>', '&gt;')
1738
1738
1739 newtext = escaper(commit_text)
1739 newtext = escaper(commit_text)
1740
1740
1741 # extract http/https links and make them real urls
1741 # extract http/https links and make them real urls
1742 newtext = urlify_text(newtext, safe=False)
1742 newtext = urlify_text(newtext, safe=False)
1743
1743
1744 # urlify commits - extract commit ids and make link out of them, if we have
1744 # urlify commits - extract commit ids and make link out of them, if we have
1745 # the scope of repository present.
1745 # the scope of repository present.
1746 if repository:
1746 if repository:
1747 newtext = urlify_commits(newtext, repository)
1747 newtext = urlify_commits(newtext, repository)
1748
1748
1749 # process issue tracker patterns
1749 # process issue tracker patterns
1750 newtext, issues = process_patterns(newtext, repository or '')
1750 newtext, issues = process_patterns(newtext, repository or '')
1751
1751
1752 return literal(newtext)
1752 return literal(newtext)
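For illustration only, a hedged sketch of how urlify_commit_message is typically called; the repository name is hypothetical:

    html = urlify_commit_message(
        'fix <b>escaping</b> in deadbeef123, see https://example.com',
        repository='my-repo')
    # the returned literal() has the HTML escaped, the URL turned into an
    # anchor and, because a repository name was given, commit hashes and
    # configured issue patterns turned into links as well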
1753
1753
1754
1754
1755 def render_binary(repo_name, file_obj):
1755 def render_binary(repo_name, file_obj):
1756 """
1756 """
1757 Choose how to render a binary file
1757 Choose how to render a binary file
1758 """
1758 """
1759 filename = file_obj.name
1759 filename = file_obj.name
1760
1760
1761 # images
1761 # images
1762 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1762 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1763 if fnmatch.fnmatch(filename, pat=ext):
1763 if fnmatch.fnmatch(filename, pat=ext):
1764 alt = filename
1764 alt = filename
1765 src = route_path(
1765 src = route_path(
1766 'repo_file_raw', repo_name=repo_name,
1766 'repo_file_raw', repo_name=repo_name,
1767 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1767 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1768 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1768 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1769
1769
1770
1770
1771 def renderer_from_filename(filename, exclude=None):
1771 def renderer_from_filename(filename, exclude=None):
1772 """
1772 """
1773 Choose a renderer based on the filename; this works only for text-based files
1773 Choose a renderer based on the filename; this works only for text-based files
1774 """
1774 """
1775
1775
1776 # ipython
1776 # ipython
1777 for ext in ['*.ipynb']:
1777 for ext in ['*.ipynb']:
1778 if fnmatch.fnmatch(filename, pat=ext):
1778 if fnmatch.fnmatch(filename, pat=ext):
1779 return 'jupyter'
1779 return 'jupyter'
1780
1780
1781 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1781 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1782 if is_markup:
1782 if is_markup:
1783 return is_markup
1783 return is_markup
1784 return None
1784 return None
1785
1785
1786
1786
1787 def render(source, renderer='rst', mentions=False, relative_urls=None,
1787 def render(source, renderer='rst', mentions=False, relative_urls=None,
1788 repo_name=None):
1788 repo_name=None):
1789
1789
1790 def maybe_convert_relative_links(html_source):
1790 def maybe_convert_relative_links(html_source):
1791 if relative_urls:
1791 if relative_urls:
1792 return relative_links(html_source, relative_urls)
1792 return relative_links(html_source, relative_urls)
1793 return html_source
1793 return html_source
1794
1794
1795 if renderer == 'rst':
1795 if renderer == 'rst':
1796 if repo_name:
1796 if repo_name:
1797 # process patterns on comments if we pass in repo name
1797 # process patterns on comments if we pass in repo name
1798 source, issues = process_patterns(
1798 source, issues = process_patterns(
1799 source, repo_name, link_format='rst')
1799 source, repo_name, link_format='rst')
1800
1800
1801 return literal(
1801 return literal(
1802 '<div class="rst-block">%s</div>' %
1802 '<div class="rst-block">%s</div>' %
1803 maybe_convert_relative_links(
1803 maybe_convert_relative_links(
1804 MarkupRenderer.rst(source, mentions=mentions)))
1804 MarkupRenderer.rst(source, mentions=mentions)))
1805 elif renderer == 'markdown':
1805 elif renderer == 'markdown':
1806 if repo_name:
1806 if repo_name:
1807 # process patterns on comments if we pass in repo name
1807 # process patterns on comments if we pass in repo name
1808 source, issues = process_patterns(
1808 source, issues = process_patterns(
1809 source, repo_name, link_format='markdown')
1809 source, repo_name, link_format='markdown')
1810
1810
1811 return literal(
1811 return literal(
1812 '<div class="markdown-block">%s</div>' %
1812 '<div class="markdown-block">%s</div>' %
1813 maybe_convert_relative_links(
1813 maybe_convert_relative_links(
1814 MarkupRenderer.markdown(source, flavored=True,
1814 MarkupRenderer.markdown(source, flavored=True,
1815 mentions=mentions)))
1815 mentions=mentions)))
1816 elif renderer == 'jupyter':
1816 elif renderer == 'jupyter':
1817 return literal(
1817 return literal(
1818 '<div class="ipynb">%s</div>' %
1818 '<div class="ipynb">%s</div>' %
1819 maybe_convert_relative_links(
1819 maybe_convert_relative_links(
1820 MarkupRenderer.jupyter(source)))
1820 MarkupRenderer.jupyter(source)))
1821
1821
1822 # None means just show the file-source
1822 # None means just show the file-source
1823 return None
1823 return None
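A short, hedged example of the render helper; the repo_name is hypothetical and only matters for issue-tracker linking:

    html = render('See #12 for **details**', renderer='markdown',
                  repo_name='my-repo')
    # -> literal('<div class="markdown-block">...</div>'); passing
    # renderer='rst' or 'jupyter' selects the other branches above, and an
    # unknown renderer returns None so the caller shows the raw file source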
1824
1824
1825
1825
1826 def commit_status(repo, commit_id):
1826 def commit_status(repo, commit_id):
1827 return ChangesetStatusModel().get_status(repo, commit_id)
1827 return ChangesetStatusModel().get_status(repo, commit_id)
1828
1828
1829
1829
1830 def commit_status_lbl(commit_status):
1830 def commit_status_lbl(commit_status):
1831 return dict(ChangesetStatus.STATUSES).get(commit_status)
1831 return dict(ChangesetStatus.STATUSES).get(commit_status)
1832
1832
1833
1833
1834 def commit_time(repo_name, commit_id):
1834 def commit_time(repo_name, commit_id):
1835 repo = Repository.get_by_repo_name(repo_name)
1835 repo = Repository.get_by_repo_name(repo_name)
1836 commit = repo.get_commit(commit_id=commit_id)
1836 commit = repo.get_commit(commit_id=commit_id)
1837 return commit.date
1837 return commit.date
1838
1838
1839
1839
1840 def get_permission_name(key):
1840 def get_permission_name(key):
1841 return dict(Permission.PERMS).get(key)
1841 return dict(Permission.PERMS).get(key)
1842
1842
1843
1843
1844 def journal_filter_help(request):
1844 def journal_filter_help(request):
1845 _ = request.translate
1845 _ = request.translate
1846
1846
1847 return _(
1847 return _(
1848 'Example filter terms:\n' +
1848 'Example filter terms:\n' +
1849 ' repository:vcs\n' +
1849 ' repository:vcs\n' +
1850 ' username:marcin\n' +
1850 ' username:marcin\n' +
1851 ' username:(NOT marcin)\n' +
1851 ' username:(NOT marcin)\n' +
1852 ' action:*push*\n' +
1852 ' action:*push*\n' +
1853 ' ip:127.0.0.1\n' +
1853 ' ip:127.0.0.1\n' +
1854 ' date:20120101\n' +
1854 ' date:20120101\n' +
1855 ' date:[20120101100000 TO 20120102]\n' +
1855 ' date:[20120101100000 TO 20120102]\n' +
1856 '\n' +
1856 '\n' +
1857 'Generate wildcards using \'*\' character:\n' +
1857 'Generate wildcards using \'*\' character:\n' +
1858 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1858 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1859 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1859 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1860 '\n' +
1860 '\n' +
1861 'Optional AND / OR operators in queries\n' +
1861 'Optional AND / OR operators in queries\n' +
1862 ' "repository:vcs OR repository:test"\n' +
1862 ' "repository:vcs OR repository:test"\n' +
1863 ' "username:test AND repository:test*"\n'
1863 ' "username:test AND repository:test*"\n'
1864 )
1864 )
1865
1865
1866
1866
1867 def search_filter_help(searcher, request):
1867 def search_filter_help(searcher, request):
1868 _ = request.translate
1868 _ = request.translate
1869
1869
1870 terms = ''
1870 terms = ''
1871 return _(
1871 return _(
1872 'Example filter terms for `{searcher}` search:\n' +
1872 'Example filter terms for `{searcher}` search:\n' +
1873 '{terms}\n' +
1873 '{terms}\n' +
1874 'Generate wildcards using \'*\' character:\n' +
1874 'Generate wildcards using \'*\' character:\n' +
1875 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1875 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1876 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1876 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1877 '\n' +
1877 '\n' +
1878 'Optional AND / OR operators in queries\n' +
1878 'Optional AND / OR operators in queries\n' +
1879 ' "repo_name:vcs OR repo_name:test"\n' +
1879 ' "repo_name:vcs OR repo_name:test"\n' +
1880 ' "owner:test AND repo_name:test*"\n' +
1880 ' "owner:test AND repo_name:test*"\n' +
1881 'More: {search_doc}'
1881 'More: {search_doc}'
1882 ).format(searcher=searcher.name,
1882 ).format(searcher=searcher.name,
1883 terms=terms, search_doc=searcher.query_lang_doc)
1883 terms=terms, search_doc=searcher.query_lang_doc)
1884
1884
1885
1885
1886 def not_mapped_error(repo_name):
1886 def not_mapped_error(repo_name):
1887 from rhodecode.translation import _
1887 from rhodecode.translation import _
1888 flash(_('%s repository is not mapped to db, perhaps'
1888 flash(_('%s repository is not mapped to db, perhaps'
1889 ' it was created or renamed from the filesystem.'
1889 ' it was created or renamed from the filesystem.'
1890 ' Please run the application again'
1890 ' Please run the application again'
1891 ' in order to rescan repositories') % repo_name, category='error')
1891 ' in order to rescan repositories') % repo_name, category='error')
1892
1892
1893
1893
1894 def ip_range(ip_addr):
1894 def ip_range(ip_addr):
1895 from rhodecode.model.db import UserIpMap
1895 from rhodecode.model.db import UserIpMap
1896 s, e = UserIpMap._get_ip_range(ip_addr)
1896 s, e = UserIpMap._get_ip_range(ip_addr)
1897 return '%s - %s' % (s, e)
1897 return '%s - %s' % (s, e)
1898
1898
1899
1899
1900 def form(url, method='post', needs_csrf_token=True, **attrs):
1900 def form(url, method='post', needs_csrf_token=True, **attrs):
1901 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1901 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1902 if method.lower() != 'get' and needs_csrf_token:
1902 if method.lower() != 'get' and needs_csrf_token:
1903 raise Exception(
1903 raise Exception(
1904 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1904 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1905 'CSRF token. If the endpoint does not require such token you can ' +
1905 'CSRF token. If the endpoint does not require such token you can ' +
1906 'explicitly set the parameter needs_csrf_token to false.')
1906 'explicitly set the parameter needs_csrf_token to false.')
1907
1907
1908 return wh_form(url, method=method, **attrs)
1908 return wh_form(url, method=method, **attrs)
1909
1909
1910
1910
1911 def secure_form(form_url, method="POST", multipart=False, **attrs):
1911 def secure_form(form_url, method="POST", multipart=False, **attrs):
1912 """Start a form tag that points the action to a URL. This
1912 """Start a form tag that points the action to a URL. This
1913 form tag will also include the hidden field containing
1913 form tag will also include the hidden field containing
1914 the auth token.
1914 the auth token.
1915
1915
1916 The url options should be given either as a string, or as a
1916 The url options should be given either as a string, or as a
1917 ``url()`` function. The method for the form defaults to POST.
1917 ``url()`` function. The method for the form defaults to POST.
1918
1918
1919 Options:
1919 Options:
1920
1920
1921 ``multipart``
1921 ``multipart``
1922 If set to True, the enctype is set to "multipart/form-data".
1922 If set to True, the enctype is set to "multipart/form-data".
1923 ``method``
1923 ``method``
1924 The method to use when submitting the form, usually either
1924 The method to use when submitting the form, usually either
1925 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1925 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1926 hidden input with name _method is added to simulate the verb
1926 hidden input with name _method is added to simulate the verb
1927 over POST.
1927 over POST.
1928
1928
1929 """
1929 """
1930 from webhelpers.pylonslib.secure_form import insecure_form
1930 from webhelpers.pylonslib.secure_form import insecure_form
1931
1931
1932 session = None
1933
1934 # TODO(marcink): after pyramid migration require request variable ALWAYS
1935 if 'request' in attrs:
1932 if 'request' in attrs:
1936 session = attrs['request'].session
1933 session = attrs['request'].session
1937 del attrs['request']
1934 del attrs['request']
1935 else:
1936 raise ValueError(
1937 'Calling this form requires request= to be passed as argument')
1938
1938
1939 form = insecure_form(form_url, method, multipart, **attrs)
1939 form = insecure_form(form_url, method, multipart, **attrs)
1940 token = literal(
1940 token = literal(
1941 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1941 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1942 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1942 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1943
1943
1944 return literal("%s\n%s" % (form, token))
1944 return literal("%s\n%s" % (form, token))
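Since the request keyword is now mandatory, a typical call looks roughly like the sketch below, assuming `request` is the current Pyramid request; the route name is hypothetical. Omitting request= raises the ValueError above.

    form_tag = secure_form(
        route_path('my_account_password'), method='POST', request=request)
    # form_tag is the opening <form> tag plus a hidden csrf_token input taken
    # from request.session; the form is closed later in the template with the
    # usual end-form markup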
1945
1945
1946
1946
1947 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1947 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1948 select_html = select(name, selected, options, **attrs)
1948 select_html = select(name, selected, options, **attrs)
1949 select2 = """
1949 select2 = """
1950 <script>
1950 <script>
1951 $(document).ready(function() {
1951 $(document).ready(function() {
1952 $('#%s').select2({
1952 $('#%s').select2({
1953 containerCssClass: 'drop-menu',
1953 containerCssClass: 'drop-menu',
1954 dropdownCssClass: 'drop-menu-dropdown',
1954 dropdownCssClass: 'drop-menu-dropdown',
1955 dropdownAutoWidth: true%s
1955 dropdownAutoWidth: true%s
1956 });
1956 });
1957 });
1957 });
1958 </script>
1958 </script>
1959 """
1959 """
1960 filter_option = """,
1960 filter_option = """,
1961 minimumResultsForSearch: -1
1961 minimumResultsForSearch: -1
1962 """
1962 """
1963 input_id = attrs.get('id') or name
1963 input_id = attrs.get('id') or name
1964 filter_enabled = "" if enable_filter else filter_option
1964 filter_enabled = "" if enable_filter else filter_option
1965 select_script = literal(select2 % (input_id, filter_enabled))
1965 select_script = literal(select2 % (input_id, filter_enabled))
1966
1966
1967 return literal(select_html+select_script)
1967 return literal(select_html+select_script)
1968
1968
1969
1969
1970 def get_visual_attr(tmpl_context_var, attr_name):
1970 def get_visual_attr(tmpl_context_var, attr_name):
1971 """
1971 """
1972 A safe way to get a variable from visual variable of template context
1972 A safe way to get a variable from visual variable of template context
1973
1973
1974 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1974 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1975 :param attr_name: name of the attribute we fetch from the c.visual
1975 :param attr_name: name of the attribute we fetch from the c.visual
1976 """
1976 """
1977 visual = getattr(tmpl_context_var, 'visual', None)
1977 visual = getattr(tmpl_context_var, 'visual', None)
1978 if not visual:
1978 if not visual:
1979 return
1979 return
1980 else:
1980 else:
1981 return getattr(visual, attr_name, None)
1981 return getattr(visual, attr_name, None)
1982
1982
1983
1983
1984 def get_last_path_part(file_node):
1984 def get_last_path_part(file_node):
1985 if not file_node.path:
1985 if not file_node.path:
1986 return u''
1986 return u''
1987
1987
1988 path = safe_unicode(file_node.path.split('/')[-1])
1988 path = safe_unicode(file_node.path.split('/')[-1])
1989 return u'../' + path
1989 return u'../' + path
1990
1990
1991
1991
1992 def route_url(*args, **kwargs):
1992 def route_url(*args, **kwargs):
1993 """
1993 """
1994 Wrapper around Pyramid's `route_url` (fully qualified URL) function.
1994 Wrapper around Pyramid's `route_url` (fully qualified URL) function.
1995 """
1995 """
1996 req = get_current_request()
1996 req = get_current_request()
1997 return req.route_url(*args, **kwargs)
1997 return req.route_url(*args, **kwargs)
1998
1998
1999
1999
2000 def route_path(*args, **kwargs):
2000 def route_path(*args, **kwargs):
2001 """
2001 """
2002 Wrapper around Pyramid's `route_path` function.
2002 Wrapper around Pyramid's `route_path` function.
2003 """
2003 """
2004 req = get_current_request()
2004 req = get_current_request()
2005 return req.route_path(*args, **kwargs)
2005 return req.route_path(*args, **kwargs)
2006
2006
2007
2007
2008 def route_path_or_none(*args, **kwargs):
2008 def route_path_or_none(*args, **kwargs):
2009 try:
2009 try:
2010 return route_path(*args, **kwargs)
2010 return route_path(*args, **kwargs)
2011 except KeyError:
2011 except KeyError:
2012 return None
2012 return None
2013
2013
2014
2014
2015 def current_route_path(request, **kw):
2015 def current_route_path(request, **kw):
2016 new_args = request.GET.mixed()
2016 new_args = request.GET.mixed()
2017 new_args.update(kw)
2017 new_args.update(kw)
2018 return request.current_route_path(_query=new_args)
2018 return request.current_route_path(_query=new_args)
2019
2019
2020
2020
2021 def api_call_example(method, args):
2021 def api_call_example(method, args):
2022 """
2022 """
2023 Generates an API call example via cURL
2023 Generates an API call example via cURL
2024 """
2024 """
2025 args_json = json.dumps(OrderedDict([
2025 args_json = json.dumps(OrderedDict([
2026 ('id', 1),
2026 ('id', 1),
2027 ('auth_token', 'SECRET'),
2027 ('auth_token', 'SECRET'),
2028 ('method', method),
2028 ('method', method),
2029 ('args', args)
2029 ('args', args)
2030 ]))
2030 ]))
2031 return literal(
2031 return literal(
2032 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2032 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2033 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2033 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2034 "and needs to be of `api calls` role."
2034 "and needs to be of `api calls` role."
2035 .format(
2035 .format(
2036 api_url=route_url('apiv2'),
2036 api_url=route_url('apiv2'),
2037 token_url=route_url('my_account_auth_tokens'),
2037 token_url=route_url('my_account_auth_tokens'),
2038 data=args_json))
2038 data=args_json))
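A hedged example of api_call_example; 'get_repo' and its argument are only an illustration of an API method name:

    snippet = api_call_example(
        method='get_repo', args={'repoid': 'my-repo'})
    # renders a one-line curl command that POSTs the JSON payload
    # {"id": 1, "auth_token": "SECRET", "method": "get_repo", ...} to the
    # apiv2 endpoint, plus the hint about where to obtain the auth token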
2039
2039
2040
2040
2041 def notification_description(notification, request):
2041 def notification_description(notification, request):
2042 """
2042 """
2043 Generate notification human readable description based on notification type
2043 Generate notification human readable description based on notification type
2044 """
2044 """
2045 from rhodecode.model.notification import NotificationModel
2045 from rhodecode.model.notification import NotificationModel
2046 return NotificationModel().make_description(
2046 return NotificationModel().make_description(
2047 notification, translate=request.translate)
2047 notification, translate=request.translate)
2048
2048
2049
2049
2050 def go_import_header(request, db_repo=None):
2050 def go_import_header(request, db_repo=None):
2051 """
2051 """
2052 Creates a header for the go-import functionality in Go
2052 Creates a header for the go-import functionality in Go
2053 """
2053 """
2054
2054
2055 if not db_repo:
2055 if not db_repo:
2056 return
2056 return
2057 if 'go-get' not in request.GET:
2057 if 'go-get' not in request.GET:
2058 return
2058 return
2059
2059
2060 clone_url = db_repo.clone_url()
2060 clone_url = db_repo.clone_url()
2061 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2061 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2062 # we have a repo and the go-get flag, emit the go-import meta tag
2062 # we have a repo and the go-get flag, emit the go-import meta tag
2063 return literal('<meta name="go-import" content="{} {} {}">'.format(
2063 return literal('<meta name="go-import" content="{} {} {}">'.format(
2064 prefix, db_repo.repo_type, clone_url))
2064 prefix, db_repo.repo_type, clone_url))
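For context, a sketch of the meta tag this helper emits when ?go-get=1 is present; host and repository name are hypothetical, and `db_repo` is assumed to be a Repository database object:

    meta = go_import_header(request, db_repo=db_repo)
    # e.g. <meta name="go-import"
    #      content="code.example.com/my-repo git https://code.example.com/my-repo">
    # returns None when there is no db_repo or no go-get parameter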
@@ -1,281 +1,280 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Index schema for RhodeCode
22 Index schema for RhodeCode
23 """
23 """
24
24
25 from __future__ import absolute_import
25 from __future__ import absolute_import
26 import logging
27 import os
26 import os
28 import re
27 import re
28 import logging
29
29
30 from rhodecode.translation import temp_translation_factory as _
30 from whoosh import query as query_lib
31
32 from whoosh import query as query_lib, sorting
33 from whoosh.highlight import HtmlFormatter, ContextFragmenter
31 from whoosh.highlight import HtmlFormatter, ContextFragmenter
34 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
32 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
35 from whoosh.qparser import QueryParser, QueryParserError
33 from whoosh.qparser import QueryParser, QueryParserError
36
34
37 import rhodecode.lib.helpers as h
35 import rhodecode.lib.helpers as h
38 from rhodecode.lib.index import BaseSearch
36 from rhodecode.lib.index import BaseSearch
37 from rhodecode.lib.utils2 import safe_unicode
39
38
40 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
41
40
42
41
43 try:
42 try:
44 # we first try to import from rhodecode tools, fallback to copies if
43 # we first try to import from rhodecode tools, fallback to copies if
45 # we're unable to
44 # we're unable to
46 from rhodecode_tools.lib.fts_index.whoosh_schema import (
45 from rhodecode_tools.lib.fts_index.whoosh_schema import (
47 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
46 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
48 COMMIT_SCHEMA)
47 COMMIT_SCHEMA)
49 except ImportError:
48 except ImportError:
50 log.warning('rhodecode_tools schema not available, doing a fallback '
49 log.warning('rhodecode_tools schema not available, doing a fallback '
51 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
50 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
52 from rhodecode.lib.index.whoosh_fallback_schema import (
51 from rhodecode.lib.index.whoosh_fallback_schema import (
53 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
52 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
54 COMMIT_SCHEMA)
53 COMMIT_SCHEMA)
55
54
56
55
57 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
56 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
58 FRAGMENTER = ContextFragmenter(200)
57 FRAGMENTER = ContextFragmenter(200)
59
58
60 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
61
60
62
61
63 class Search(BaseSearch):
62 class Search(BaseSearch):
64 # this also shows in UI
63 # this also shows in UI
65 query_lang_doc = 'http://whoosh.readthedocs.io/en/latest/querylang.html'
64 query_lang_doc = 'http://whoosh.readthedocs.io/en/latest/querylang.html'
66 name = 'whoosh'
65 name = 'whoosh'
67
66
68 def __init__(self, config):
67 def __init__(self, config):
69 super(Search, self).__init__()
68 super(Search, self).__init__()
70 self.config = config
69 self.config = config
71 if not os.path.isdir(self.config['location']):
70 if not os.path.isdir(self.config['location']):
72 os.makedirs(self.config['location'])
71 os.makedirs(self.config['location'])
73
72
74 opener = create_in
73 opener = create_in
75 if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
74 if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
76 opener = open_dir
75 opener = open_dir
77 file_index = opener(self.config['location'], schema=FILE_SCHEMA,
76 file_index = opener(self.config['location'], schema=FILE_SCHEMA,
78 indexname=FILE_INDEX_NAME)
77 indexname=FILE_INDEX_NAME)
79
78
80 opener = create_in
79 opener = create_in
81 if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
80 if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
82 opener = open_dir
81 opener = open_dir
83 changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
82 changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
84 indexname=COMMIT_INDEX_NAME)
83 indexname=COMMIT_INDEX_NAME)
85
84
86 self.commit_schema = COMMIT_SCHEMA
85 self.commit_schema = COMMIT_SCHEMA
87 self.commit_index = changeset_index
86 self.commit_index = changeset_index
88 self.file_schema = FILE_SCHEMA
87 self.file_schema = FILE_SCHEMA
89 self.file_index = file_index
88 self.file_index = file_index
90 self.searcher = None
89 self.searcher = None
91
90
92 def cleanup(self):
91 def cleanup(self):
93 if self.searcher:
92 if self.searcher:
94 self.searcher.close()
93 self.searcher.close()
95
94
96 def _extend_query(self, query):
95 def _extend_query(self, query):
97 hashes = re.compile('([0-9a-f]{5,40})').findall(query)
96 hashes = re.compile('([0-9a-f]{5,40})').findall(query)
98 if hashes:
97 if hashes:
99 hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
98 hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
100 query = u'(%s) OR %s' % (query, hashes_or_query)
99 query = u'(%s) OR %s' % (query, hashes_or_query)
101 return query
100 return query
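The hash-extension step can be illustrated in isolation; this is a hedged sketch assuming a Search instance built against a writable (possibly empty) index directory, with a hypothetical path:

    searcher = Search({'location': '/tmp/rc-whoosh-index'})
    extended = searcher._extend_query(u'fixes deadbeef123')
    # extended == u'(fixes deadbeef123) OR commit_id:deadbeef123*'
    # plain words are left alone; anything that looks like a (partial)
    # commit hash is OR-ed in as a prefix search on the commit_id field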
102
101
103 def search(self, query, document_type, search_user,
102 def search(self, query, document_type, search_user,
104 repo_name=None, requested_page=1, page_limit=10, sort=None,
103 repo_name=None, requested_page=1, page_limit=10, sort=None,
105 raise_on_exc=True):
104 raise_on_exc=True):
106
105
107 original_query = query
106 original_query = query
108 query = self._extend_query(query)
107 query = self._extend_query(query)
109
108
110 log.debug(u'QUERY: %s on %s', query, document_type)
109 log.debug(u'QUERY: %s on %s', query, document_type)
111 result = {
110 result = {
112 'results': [],
111 'results': [],
113 'count': 0,
112 'count': 0,
114 'error': None,
113 'error': None,
115 'runtime': 0
114 'runtime': 0
116 }
115 }
117 search_type, index_name, schema_defn = self._prepare_for_search(
116 search_type, index_name, schema_defn = self._prepare_for_search(
118 document_type)
117 document_type)
119 self._init_searcher(index_name)
118 self._init_searcher(index_name)
120 try:
119 try:
121 qp = QueryParser(search_type, schema=schema_defn)
120 qp = QueryParser(search_type, schema=schema_defn)
122 allowed_repos_filter = self._get_repo_filter(
121 allowed_repos_filter = self._get_repo_filter(
123 search_user, repo_name)
122 search_user, repo_name)
124 try:
123 try:
125 query = qp.parse(unicode(query))
124 query = qp.parse(safe_unicode(query))
126 log.debug('query: %s (%s)' % (query, repr(query)))
125 log.debug('query: %s (%s)' % (query, repr(query)))
127
126
128 reverse, sortedby = False, None
127 reverse, sortedby = False, None
129 if search_type == 'message':
128 if search_type == 'message':
130 if sort == 'oldfirst':
129 if sort == 'oldfirst':
131 sortedby = 'date'
130 sortedby = 'date'
132 reverse = False
131 reverse = False
133 elif sort == 'newfirst':
132 elif sort == 'newfirst':
134 sortedby = 'date'
133 sortedby = 'date'
135 reverse = True
134 reverse = True
136
135
137 whoosh_results = self.searcher.search(
136 whoosh_results = self.searcher.search(
138 query, filter=allowed_repos_filter, limit=None,
137 query, filter=allowed_repos_filter, limit=None,
139 sortedby=sortedby, reverse=reverse)
138 sortedby=sortedby, reverse=reverse)
140
139
141 # fixes for 32k limit that whoosh uses for highlight
140 # fixes for 32k limit that whoosh uses for highlight
142 whoosh_results.fragmenter.charlimit = None
141 whoosh_results.fragmenter.charlimit = None
143 res_ln = whoosh_results.scored_length()
142 res_ln = whoosh_results.scored_length()
144 result['runtime'] = whoosh_results.runtime
143 result['runtime'] = whoosh_results.runtime
145 result['count'] = res_ln
144 result['count'] = res_ln
146 result['results'] = WhooshResultWrapper(
145 result['results'] = WhooshResultWrapper(
147 search_type, res_ln, whoosh_results)
146 search_type, res_ln, whoosh_results)
148
147
149 except QueryParserError:
148 except QueryParserError:
150 result['error'] = _('Invalid search query. Try quoting it.')
149 result['error'] = 'Invalid search query. Try quoting it.'
151 except (EmptyIndexError, IOError, OSError):
150 except (EmptyIndexError, IOError, OSError):
152 msg = _('There is no index to search in. '
151 msg = 'There is no index to search in. Please run whoosh indexer'
153 'Please run whoosh indexer')
154 log.exception(msg)
152 log.exception(msg)
155 result['error'] = msg
153 result['error'] = msg
156 except Exception:
154 except Exception:
157 msg = _('An error occurred during this search operation')
155 msg = 'An error occurred during this search operation'
158 log.exception(msg)
156 log.exception(msg)
159 result['error'] = msg
157 result['error'] = msg
160
158
161 return result
159 return result
162
160
163 def statistics(self):
161 def statistics(self, translator):
162 _ = translator
164 stats = [
163 stats = [
165 {'key': _('Index Type'), 'value': 'Whoosh'},
164 {'key': _('Index Type'), 'value': 'Whoosh'},
166 {'key': _('File Index'), 'value': str(self.file_index)},
165 {'key': _('File Index'), 'value': str(self.file_index)},
167 {'key': _('Indexed documents'),
166 {'key': _('Indexed documents'),
168 'value': self.file_index.doc_count()},
167 'value': self.file_index.doc_count()},
169 {'key': _('Last update'),
168 {'key': _('Last update'),
170 'value': h.time_to_datetime(self.file_index.last_modified())},
169 'value': h.time_to_datetime(self.file_index.last_modified())},
171 {'key': _('Commit index'), 'value': str(self.commit_index)},
170 {'key': _('Commit index'), 'value': str(self.commit_index)},
172 {'key': _('Indexed documents'),
171 {'key': _('Indexed documents'),
173 'value': str(self.commit_index.doc_count())},
172 'value': str(self.commit_index.doc_count())},
174 {'key': _('Last update'),
173 {'key': _('Last update'),
175 'value': h.time_to_datetime(self.commit_index.last_modified())}
174 'value': h.time_to_datetime(self.commit_index.last_modified())}
176 ]
175 ]
177 return stats
176 return stats
178
177
179 def _get_repo_filter(self, auth_user, repo_name):
178 def _get_repo_filter(self, auth_user, repo_name):
180
179
181 allowed_to_search = [
180 allowed_to_search = [
182 repo for repo, perm in
181 repo for repo, perm in
183 auth_user.permissions['repositories'].items()
182 auth_user.permissions['repositories'].items()
184 if perm != 'repository.none']
183 if perm != 'repository.none']
185
184
186 if repo_name:
185 if repo_name:
187 repo_filter = [query_lib.Term('repository', repo_name)]
186 repo_filter = [query_lib.Term('repository', repo_name)]
188
187
189 elif 'hg.admin' in auth_user.permissions.get('global', []):
188 elif 'hg.admin' in auth_user.permissions.get('global', []):
190 return None
189 return None
191
190
192 else:
191 else:
193 repo_filter = [query_lib.Term('repository', _rn)
192 repo_filter = [query_lib.Term('repository', _rn)
194 for _rn in allowed_to_search]
193 for _rn in allowed_to_search]
195 # in case we're not allowed to search anywhere, it's a trick
194 # in case we're not allowed to search anywhere, it's a trick
196 # to tell whoosh we're filtering on ALL results
195 # to tell whoosh we're filtering on ALL results
197 repo_filter = repo_filter or [query_lib.Term('repository', '')]
196 repo_filter = repo_filter or [query_lib.Term('repository', '')]
198
197
199 return query_lib.Or(repo_filter)
198 return query_lib.Or(repo_filter)
200
199
201 def _prepare_for_search(self, cur_type):
200 def _prepare_for_search(self, cur_type):
202 search_type = {
201 search_type = {
203 'content': 'content',
202 'content': 'content',
204 'commit': 'message',
203 'commit': 'message',
205 'path': 'path',
204 'path': 'path',
206 'repository': 'repository'
205 'repository': 'repository'
207 }.get(cur_type, 'content')
206 }.get(cur_type, 'content')
208
207
209 index_name = {
208 index_name = {
210 'content': FILE_INDEX_NAME,
209 'content': FILE_INDEX_NAME,
211 'commit': COMMIT_INDEX_NAME,
210 'commit': COMMIT_INDEX_NAME,
212 'path': FILE_INDEX_NAME
211 'path': FILE_INDEX_NAME
213 }.get(cur_type, FILE_INDEX_NAME)
212 }.get(cur_type, FILE_INDEX_NAME)
214
213
215 schema_defn = {
214 schema_defn = {
216 'content': self.file_schema,
215 'content': self.file_schema,
217 'commit': self.commit_schema,
216 'commit': self.commit_schema,
218 'path': self.file_schema
217 'path': self.file_schema
219 }.get(cur_type, self.file_schema)
218 }.get(cur_type, self.file_schema)
220
219
221 log.debug('IDX: %s' % index_name)
220 log.debug('IDX: %s' % index_name)
222 log.debug('SCHEMA: %s' % schema_defn)
221 log.debug('SCHEMA: %s' % schema_defn)
223 return search_type, index_name, schema_defn
222 return search_type, index_name, schema_defn
224
223
225 def _init_searcher(self, index_name):
224 def _init_searcher(self, index_name):
226 idx = open_dir(self.config['location'], indexname=index_name)
225 idx = open_dir(self.config['location'], indexname=index_name)
227 self.searcher = idx.searcher()
226 self.searcher = idx.searcher()
228 return self.searcher
227 return self.searcher
229
228
230
229
231 class WhooshResultWrapper(object):
230 class WhooshResultWrapper(object):
232 def __init__(self, search_type, total_hits, results):
231 def __init__(self, search_type, total_hits, results):
233 self.search_type = search_type
232 self.search_type = search_type
234 self.results = results
233 self.results = results
235 self.total_hits = total_hits
234 self.total_hits = total_hits
236
235
237 def __str__(self):
236 def __str__(self):
238 return '<%s at %s>' % (self.__class__.__name__, len(self))
237 return '<%s at %s>' % (self.__class__.__name__, len(self))
239
238
240 def __repr__(self):
239 def __repr__(self):
241 return self.__str__()
240 return self.__str__()
242
241
243 def __len__(self):
242 def __len__(self):
244 return self.total_hits
243 return self.total_hits
245
244
246 def __iter__(self):
245 def __iter__(self):
247 """
246 """
248 Allows iteration over results, and lazily generates content
247 Allows iteration over results, and lazily generates content
249
248
250 *Requires* implementation of ``__getitem__`` method.
249 *Requires* implementation of ``__getitem__`` method.
251 """
250 """
252 for hit in self.results:
251 for hit in self.results:
253 yield self.get_full_content(hit)
252 yield self.get_full_content(hit)
254
253
255 def __getitem__(self, key):
254 def __getitem__(self, key):
256 """
255 """
257 Slicing of resultWrapper
256 Slicing of resultWrapper
258 """
257 """
259 i, j = key.start, key.stop
258 i, j = key.start, key.stop
260 for hit in self.results[i:j]:
259 for hit in self.results[i:j]:
261 yield self.get_full_content(hit)
260 yield self.get_full_content(hit)
262
261
263 def get_full_content(self, hit):
262 def get_full_content(self, hit):
264 # TODO: marcink: this feels like an overkill, there's a lot of data
263 # TODO: marcink: this feels like an overkill, there's a lot of data
265 # inside hit object, and we don't need all
264 # inside hit object, and we don't need all
266 res = dict(hit)
265 res = dict(hit)
267
266
268 f_path = '' # noqa
267 f_path = '' # noqa
269 if self.search_type in ['content', 'path']:
268 if self.search_type in ['content', 'path']:
270 f_path = res['path'][len(res['repository']):]
269 f_path = res['path'][len(res['repository']):]
271 f_path = f_path.lstrip(os.sep)
270 f_path = f_path.lstrip(os.sep)
272
271
273 if self.search_type == 'content':
272 if self.search_type == 'content':
274 res.update({'content_short_hl': hit.highlights('content'),
273 res.update({'content_short_hl': hit.highlights('content'),
275 'f_path': f_path})
274 'f_path': f_path})
276 elif self.search_type == 'path':
275 elif self.search_type == 'path':
277 res.update({'f_path': f_path})
276 res.update({'f_path': f_path})
278 elif self.search_type == 'message':
277 elif self.search_type == 'message':
279 res.update({'message_hl': hit.highlights('message')})
278 res.update({'message_hl': hit.highlights('message')})
280
279
281 return res
280 return res
@@ -1,1007 +1,1007 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26
26
27 import collections
27 import collections
28 import datetime
28 import datetime
29 import dateutil.relativedelta
29 import dateutil.relativedelta
30 import hashlib
30 import hashlib
31 import logging
31 import logging
32 import re
32 import re
33 import sys
33 import sys
34 import time
34 import time
35 import urllib
35 import urllib
36 import urlobject
36 import urlobject
37 import uuid
37 import uuid
38
38
39 import pygments.lexers
39 import pygments.lexers
40 import sqlalchemy
40 import sqlalchemy
41 import sqlalchemy.engine.url
41 import sqlalchemy.engine.url
42 import sqlalchemy.exc
42 import sqlalchemy.exc
43 import sqlalchemy.sql
43 import sqlalchemy.sql
44 import webob
44 import webob
45 import routes.util
45 import routes.util
46 import pyramid.threadlocal
46 import pyramid.threadlocal
47
47
48 import rhodecode
48 import rhodecode
49 from rhodecode.translation import _, _pluralize
49 from rhodecode.translation import _, _pluralize
50
50
51
51
52 def md5(s):
52 def md5(s):
53 return hashlib.md5(s).hexdigest()
53 return hashlib.md5(s).hexdigest()
54
54
55
55
56 def md5_safe(s):
56 def md5_safe(s):
57 return md5(safe_str(s))
57 return md5(safe_str(s))
58
58
59
59
60 def __get_lem(extra_mapping=None):
60 def __get_lem(extra_mapping=None):
61 """
61 """
62 Get language extension map based on what's inside pygments lexers
62 Get language extension map based on what's inside pygments lexers
63 """
63 """
64 d = collections.defaultdict(lambda: [])
64 d = collections.defaultdict(lambda: [])
65
65
66 def __clean(s):
66 def __clean(s):
67 s = s.lstrip('*')
67 s = s.lstrip('*')
68 s = s.lstrip('.')
68 s = s.lstrip('.')
69
69
70 if s.find('[') != -1:
70 if s.find('[') != -1:
71 exts = []
71 exts = []
72 start, stop = s.find('['), s.find(']')
72 start, stop = s.find('['), s.find(']')
73
73
74 for suffix in s[start + 1:stop]:
74 for suffix in s[start + 1:stop]:
75 exts.append(s[:s.find('[')] + suffix)
75 exts.append(s[:s.find('[')] + suffix)
76 return [e.lower() for e in exts]
76 return [e.lower() for e in exts]
77 else:
77 else:
78 return [s.lower()]
78 return [s.lower()]
79
79
80 for lx, t in sorted(pygments.lexers.LEXERS.items()):
80 for lx, t in sorted(pygments.lexers.LEXERS.items()):
81 m = map(__clean, t[-2])
81 m = map(__clean, t[-2])
82 if m:
82 if m:
83 m = reduce(lambda x, y: x + y, m)
83 m = reduce(lambda x, y: x + y, m)
84 for ext in m:
84 for ext in m:
85 desc = lx.replace('Lexer', '')
85 desc = lx.replace('Lexer', '')
86 d[ext].append(desc)
86 d[ext].append(desc)
87
87
88 data = dict(d)
88 data = dict(d)
89
89
90 extra_mapping = extra_mapping or {}
90 extra_mapping = extra_mapping or {}
91 if extra_mapping:
91 if extra_mapping:
92 for k, v in extra_mapping.items():
92 for k, v in extra_mapping.items():
93 if k not in data:
93 if k not in data:
94 # register new mapping2lexer
94 # register new mapping2lexer
95 data[k] = [v]
95 data[k] = [v]
96
96
97 return data
97 return data
98
98
99
99
100 def str2bool(_str):
100 def str2bool(_str):
101 """
101 """
102 Returns a True/False value from the given string; it tries to translate the
102 Returns a True/False value from the given string; it tries to translate the
103 string into a boolean
103 string into a boolean
104
104
105 :param _str: string value to translate into boolean
105 :param _str: string value to translate into boolean
106 :rtype: boolean
106 :rtype: boolean
107 :returns: boolean from given string
107 :returns: boolean from given string
108 """
108 """
109 if _str is None:
109 if _str is None:
110 return False
110 return False
111 if _str in (True, False):
111 if _str in (True, False):
112 return _str
112 return _str
113 _str = str(_str).strip().lower()
113 _str = str(_str).strip().lower()
114 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
114 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
115
115
116
116
117 def aslist(obj, sep=None, strip=True):
117 def aslist(obj, sep=None, strip=True):
118 """
118 """
119 Returns given string separated by sep as list
119 Returns given string separated by sep as list
120
120
121 :param obj:
121 :param obj:
122 :param sep:
122 :param sep:
123 :param strip:
123 :param strip:
124 """
124 """
125 if isinstance(obj, (basestring,)):
125 if isinstance(obj, (basestring,)):
126 lst = obj.split(sep)
126 lst = obj.split(sep)
127 if strip:
127 if strip:
128 lst = [v.strip() for v in lst]
128 lst = [v.strip() for v in lst]
129 return lst
129 return lst
130 elif isinstance(obj, (list, tuple)):
130 elif isinstance(obj, (list, tuple)):
131 return obj
131 return obj
132 elif obj is None:
132 elif obj is None:
133 return []
133 return []
134 else:
134 else:
135 return [obj]
135 return [obj]
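Both helpers above are small enough to pin down with a few examples, taken directly from the behaviour of the code:

    assert str2bool('yes') is True
    assert str2bool('0') is False
    assert str2bool(None) is False
    assert aslist('hg, git, svn', sep=',') == ['hg', 'git', 'svn']
    assert aslist(None) == []
    assert aslist(42) == [42]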
136
136
137
137
138 def convert_line_endings(line, mode):
138 def convert_line_endings(line, mode):
139 """
139 """
140 Converts a given line's "line end" according to the given mode
140 Converts a given line's "line end" according to the given mode
141
141
142 Available modes are::
142 Available modes are::
143 0 - Unix
143 0 - Unix
144 1 - Mac
144 1 - Mac
145 2 - DOS
145 2 - DOS
146
146
147 :param line: given line to convert
147 :param line: given line to convert
148 :param mode: mode to convert to
148 :param mode: mode to convert to
149 :rtype: str
149 :rtype: str
150 :return: converted line according to mode
150 :return: converted line according to mode
151 """
151 """
152 if mode == 0:
152 if mode == 0:
153 line = line.replace('\r\n', '\n')
153 line = line.replace('\r\n', '\n')
154 line = line.replace('\r', '\n')
154 line = line.replace('\r', '\n')
155 elif mode == 1:
155 elif mode == 1:
156 line = line.replace('\r\n', '\r')
156 line = line.replace('\r\n', '\r')
157 line = line.replace('\n', '\r')
157 line = line.replace('\n', '\r')
158 elif mode == 2:
158 elif mode == 2:
159 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
159 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
160 return line
160 return line
161
161
162
162
163 def detect_mode(line, default):
163 def detect_mode(line, default):
164 """
164 """
165 Detects the line break for a given line; if the line break can't be found,
165 Detects the line break for a given line; if the line break can't be found,
166 the given default value is returned
166 the given default value is returned
167
167
168 :param line: str line
168 :param line: str line
169 :param default: default
169 :param default: default
170 :rtype: int
170 :rtype: int
171 :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS
171 :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS
172 """
172 """
173 if line.endswith('\r\n'):
173 if line.endswith('\r\n'):
174 return 2
174 return 2
175 elif line.endswith('\n'):
175 elif line.endswith('\n'):
176 return 0
176 return 0
177 elif line.endswith('\r'):
177 elif line.endswith('\r'):
178 return 1
178 return 1
179 else:
179 else:
180 return default
180 return default
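A couple of worked examples for the two line-ending helpers above:

    assert detect_mode('foo\r\n', default=0) == 2             # DOS ending detected
    assert detect_mode('foo', default=0) == 0                 # falls back to default
    assert convert_line_endings('a\r\nb\r', 0) == 'a\nb\n'    # normalise to Unix
    assert convert_line_endings('a\nb', 2) == 'a\r\nb'        # Unix -> DOS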
181
181
182
182
183 def safe_int(val, default=None):
183 def safe_int(val, default=None):
184 """
184 """
185 Returns int() of val; if val is not convertible to int, use default
185 Returns int() of val; if val is not convertible to int, use default
186 instead
186 instead
187
187
188 :param val:
188 :param val:
189 :param default:
189 :param default:
190 """
190 """
191
191
192 try:
192 try:
193 val = int(val)
193 val = int(val)
194 except (ValueError, TypeError):
194 except (ValueError, TypeError):
195 val = default
195 val = default
196
196
197 return val
197 return val
198
198
199
199
200 def safe_unicode(str_, from_encoding=None):
200 def safe_unicode(str_, from_encoding=None):
201 """
201 """
202 safe unicode function. Does a few tricks to turn str_ into unicode
202 safe unicode function. Does a few tricks to turn str_ into unicode
203
203
204 In case of a UnicodeDecodeError, we try to return it with the encoding detected
204 In case of a UnicodeDecodeError, we try to return it with the encoding detected
205 by the chardet library; if that fails, fall back to unicode with errors replaced
205 by the chardet library; if that fails, fall back to unicode with errors replaced
206
206
207 :param str_: string to decode
207 :param str_: string to decode
208 :rtype: unicode
208 :rtype: unicode
209 :returns: unicode object
209 :returns: unicode object
210 """
210 """
211 if isinstance(str_, unicode):
211 if isinstance(str_, unicode):
212 return str_
212 return str_
213
213
214 if not from_encoding:
214 if not from_encoding:
215 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
215 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
216 'utf8'), sep=',')
216 'utf8'), sep=',')
217 from_encoding = DEFAULT_ENCODINGS
217 from_encoding = DEFAULT_ENCODINGS
218
218
219 if not isinstance(from_encoding, (list, tuple)):
219 if not isinstance(from_encoding, (list, tuple)):
220 from_encoding = [from_encoding]
220 from_encoding = [from_encoding]
221
221
222 try:
222 try:
223 return unicode(str_)
223 return unicode(str_)
224 except UnicodeDecodeError:
224 except UnicodeDecodeError:
225 pass
225 pass
226
226
227 for enc in from_encoding:
227 for enc in from_encoding:
228 try:
228 try:
229 return unicode(str_, enc)
229 return unicode(str_, enc)
230 except UnicodeDecodeError:
230 except UnicodeDecodeError:
231 pass
231 pass
232
232
233 try:
233 try:
234 import chardet
234 import chardet
235 encoding = chardet.detect(str_)['encoding']
235 encoding = chardet.detect(str_)['encoding']
236 if encoding is None:
236 if encoding is None:
237 raise Exception()
237 raise Exception()
238 return str_.decode(encoding)
238 return str_.decode(encoding)
239 except (ImportError, UnicodeDecodeError, Exception):
239 except (ImportError, UnicodeDecodeError, Exception):
240 return unicode(str_, from_encoding[0], 'replace')
240 return unicode(str_, from_encoding[0], 'replace')
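Two illustrative calls for safe_unicode; the byte string is UTF-8 encoded 'ü', and the second case assumes the default encoding list ('utf8') is in effect:

    assert safe_unicode(u'abc') == u'abc'        # unicode passes through untouched
    assert safe_unicode('\xc3\xbc') == u'\xfc'   # utf-8 bytes decoded via the
                                                 # configured default encodings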
241
241
242
242
243 def safe_str(unicode_, to_encoding=None):
243 def safe_str(unicode_, to_encoding=None):
244 """
244 """
245 safe str function. Does a few tricks to turn unicode_ into a string
245 safe str function. Does a few tricks to turn unicode_ into a string
246
246
247 In case of a UnicodeEncodeError, we try to return it with the encoding detected
247 In case of a UnicodeEncodeError, we try to return it with the encoding detected
248 by the chardet library; if that fails, fall back to a string with errors replaced
248 by the chardet library; if that fails, fall back to a string with errors replaced
249
249
250 :param unicode_: unicode to encode
250 :param unicode_: unicode to encode
251 :rtype: str
251 :rtype: str
252 :returns: str object
252 :returns: str object
253 """
253 """
254
254
255 # if it's not basestr cast to str
255 # if it's not basestr cast to str
256 if not isinstance(unicode_, basestring):
256 if not isinstance(unicode_, basestring):
257 return str(unicode_)
257 return str(unicode_)
258
258
259 if isinstance(unicode_, str):
259 if isinstance(unicode_, str):
260 return unicode_
260 return unicode_
261
261
262 if not to_encoding:
262 if not to_encoding:
263 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
263 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
264 'utf8'), sep=',')
264 'utf8'), sep=',')
265 to_encoding = DEFAULT_ENCODINGS
265 to_encoding = DEFAULT_ENCODINGS
266
266
267 if not isinstance(to_encoding, (list, tuple)):
267 if not isinstance(to_encoding, (list, tuple)):
268 to_encoding = [to_encoding]
268 to_encoding = [to_encoding]
269
269
270 for enc in to_encoding:
270 for enc in to_encoding:
271 try:
271 try:
272 return unicode_.encode(enc)
272 return unicode_.encode(enc)
273 except UnicodeEncodeError:
273 except UnicodeEncodeError:
274 pass
274 pass
275
275
276 try:
276 try:
277 import chardet
277 import chardet
278 encoding = chardet.detect(unicode_)['encoding']
278 encoding = chardet.detect(unicode_)['encoding']
279 if encoding is None:
279 if encoding is None:
280 raise UnicodeEncodeError()
280 raise UnicodeEncodeError()
281
281
282 return unicode_.encode(encoding)
282 return unicode_.encode(encoding)
283 except (ImportError, UnicodeEncodeError):
283 except (ImportError, UnicodeEncodeError):
284 return unicode_.encode(to_encoding[0], 'replace')
284 return unicode_.encode(to_encoding[0], 'replace')
285
285
286
286
287 def remove_suffix(s, suffix):
287 def remove_suffix(s, suffix):
288 if s.endswith(suffix):
288 if s.endswith(suffix):
289 s = s[:-1 * len(suffix)]
289 s = s[:-1 * len(suffix)]
290 return s
290 return s
291
291
292
292
293 def remove_prefix(s, prefix):
293 def remove_prefix(s, prefix):
294 if s.startswith(prefix):
294 if s.startswith(prefix):
295 s = s[len(prefix):]
295 s = s[len(prefix):]
296 return s
296 return s
297
297
298
298
299 def find_calling_context(ignore_modules=None):
299 def find_calling_context(ignore_modules=None):
300 """
300 """
301 Look through the calling stack and return the frame which called
301 Look through the calling stack and return the frame which called
302 this function and is part of the core module (i.e. rhodecode.*)
302 this function and is part of the core module (i.e. rhodecode.*)
303
303
304 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
304 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
305 """
305 """
306
306
307 ignore_modules = ignore_modules or []
307 ignore_modules = ignore_modules or []
308
308
309 f = sys._getframe(2)
309 f = sys._getframe(2)
310 while f.f_back is not None:
310 while f.f_back is not None:
311 name = f.f_globals.get('__name__')
311 name = f.f_globals.get('__name__')
312 if name and name.startswith(__name__.split('.')[0]):
312 if name and name.startswith(__name__.split('.')[0]):
313 if name not in ignore_modules:
313 if name not in ignore_modules:
314 return f
314 return f
315 f = f.f_back
315 f = f.f_back
316 return None
316 return None
317
317
318
318
319 def ping_connection(connection, branch):
319 def ping_connection(connection, branch):
320 if branch:
320 if branch:
321 # "branch" refers to a sub-connection of a connection,
321 # "branch" refers to a sub-connection of a connection,
322 # we don't want to bother pinging on these.
322 # we don't want to bother pinging on these.
323 return
323 return
324
324
325 # turn off "close with result". This flag is only used with
325 # turn off "close with result". This flag is only used with
326 # "connectionless" execution, otherwise will be False in any case
326 # "connectionless" execution, otherwise will be False in any case
327 save_should_close_with_result = connection.should_close_with_result
327 save_should_close_with_result = connection.should_close_with_result
328 connection.should_close_with_result = False
328 connection.should_close_with_result = False
329
329
330 try:
330 try:
331 # run a SELECT 1. use a core select() so that
331 # run a SELECT 1. use a core select() so that
332 # the SELECT of a scalar value without a table is
332 # the SELECT of a scalar value without a table is
333 # appropriately formatted for the backend
333 # appropriately formatted for the backend
334 connection.scalar(sqlalchemy.sql.select([1]))
334 connection.scalar(sqlalchemy.sql.select([1]))
335 except sqlalchemy.exc.DBAPIError as err:
335 except sqlalchemy.exc.DBAPIError as err:
336 # catch SQLAlchemy's DBAPIError, which is a wrapper
336 # catch SQLAlchemy's DBAPIError, which is a wrapper
337 # for the DBAPI's exception. It includes a .connection_invalidated
337 # for the DBAPI's exception. It includes a .connection_invalidated
338 # attribute which specifies if this connection is a "disconnect"
338 # attribute which specifies if this connection is a "disconnect"
339 # condition, which is based on inspection of the original exception
339 # condition, which is based on inspection of the original exception
340 # by the dialect in use.
340 # by the dialect in use.
341 if err.connection_invalidated:
341 if err.connection_invalidated:
342 # run the same SELECT again - the connection will re-validate
342 # run the same SELECT again - the connection will re-validate
343 # itself and establish a new connection. The disconnect detection
343 # itself and establish a new connection. The disconnect detection
344 # here also causes the whole connection pool to be invalidated
344 # here also causes the whole connection pool to be invalidated
345 # so that all stale connections are discarded.
345 # so that all stale connections are discarded.
346 connection.scalar(sqlalchemy.sql.select([1]))
346 connection.scalar(sqlalchemy.sql.select([1]))
347 else:
347 else:
348 raise
348 raise
349 finally:
349 finally:
350 # restore "close with result"
350 # restore "close with result"
351 connection.should_close_with_result = save_should_close_with_result
351 connection.should_close_with_result = save_should_close_with_result
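For reference, this follows the classic SQLAlchemy pessimistic-disconnect recipe. A hedged sketch of wiring it to a standalone engine (the engine_from_config wrapper below does the same thing automatically; the sqlite URL is illustrative only):

import sqlalchemy

engine = sqlalchemy.create_engine('sqlite://')   # illustrative in-memory engine
# run ping_connection whenever a Connection is created from the engine
sqlalchemy.event.listen(engine, 'engine_connect', ping_connection)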
352
352
353
353
354 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
354 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
355 """Custom engine_from_config functions."""
355 """Custom engine_from_config functions."""
356 log = logging.getLogger('sqlalchemy.engine')
356 log = logging.getLogger('sqlalchemy.engine')
357 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
357 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
358
358
359 def color_sql(sql):
359 def color_sql(sql):
360 color_seq = '\033[1;33m' # This is yellow: code 33
360 color_seq = '\033[1;33m' # This is yellow: code 33
361 normal = '\x1b[0m'
361 normal = '\x1b[0m'
362 return ''.join([color_seq, sql, normal])
362 return ''.join([color_seq, sql, normal])
363
363
364 if configuration['debug']:
364 if configuration['debug']:
365 # attach events only for debug configuration
365 # attach events only for debug configuration
366
366
367 def before_cursor_execute(conn, cursor, statement,
367 def before_cursor_execute(conn, cursor, statement,
368 parameters, context, executemany):
368 parameters, context, executemany):
369 setattr(conn, 'query_start_time', time.time())
369 setattr(conn, 'query_start_time', time.time())
370 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
370 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
371 calling_context = find_calling_context(ignore_modules=[
371 calling_context = find_calling_context(ignore_modules=[
372 'rhodecode.lib.caching_query',
372 'rhodecode.lib.caching_query',
373 'rhodecode.model.settings',
373 'rhodecode.model.settings',
374 ])
374 ])
375 if calling_context:
375 if calling_context:
376 log.info(color_sql('call context %s:%s' % (
376 log.info(color_sql('call context %s:%s' % (
377 calling_context.f_code.co_filename,
377 calling_context.f_code.co_filename,
378 calling_context.f_lineno,
378 calling_context.f_lineno,
379 )))
379 )))
380
380
381 def after_cursor_execute(conn, cursor, statement,
381 def after_cursor_execute(conn, cursor, statement,
382 parameters, context, executemany):
382 parameters, context, executemany):
383 delattr(conn, 'query_start_time')
383 delattr(conn, 'query_start_time')
384
384
385 sqlalchemy.event.listen(engine, "engine_connect",
385 sqlalchemy.event.listen(engine, "engine_connect",
386 ping_connection)
386 ping_connection)
387 sqlalchemy.event.listen(engine, "before_cursor_execute",
387 sqlalchemy.event.listen(engine, "before_cursor_execute",
388 before_cursor_execute)
388 before_cursor_execute)
389 sqlalchemy.event.listen(engine, "after_cursor_execute",
389 sqlalchemy.event.listen(engine, "after_cursor_execute",
390 after_cursor_execute)
390 after_cursor_execute)
391
391
392 return engine
392 return engine
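A hedged usage sketch of this wrapper. The configuration keys are illustrative; only the 'sqlalchemy.*' keys are consumed by SQLAlchemy itself, while the extra 'debug' flag toggles the query-timing and call-context listeners attached above.

config = {
    'sqlalchemy.url': 'sqlite://',   # illustrative in-memory database
    'debug': True,                   # attach the query-timing / call-context listeners
}
engine = engine_from_config(config)
engine.execute('SELECT 1')           # logged with timing and calling context when debug is on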
393
393
394
394
395 def get_encryption_key(config):
395 def get_encryption_key(config):
396 secret = config.get('rhodecode.encrypted_values.secret')
396 secret = config.get('rhodecode.encrypted_values.secret')
397 default = config['beaker.session.secret']
397 default = config['beaker.session.secret']
398 return secret or default
398 return secret or default
399
399
400
400
401 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
401 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
402 short_format=False):
402 short_format=False):
403 """
403 """
404 Turns a datetime into an age string.
404 Turns a datetime into an age string.
405 If show_short_version is True, this generates a shorter string with
405 If show_short_version is True, this generates a shorter string with
406 an approximate age; e.g. '1 day ago' rather than '1 day and 23 hours ago'.
406 an approximate age; e.g. '1 day ago' rather than '1 day and 23 hours ago'.
407
407
408 *IMPORTANT*
408 *IMPORTANT*
409 The code of this function is written in a special way so it is easier to
409 The code of this function is written in a special way so it is easier to
410 port it to JavaScript. If you mean to update it, please also update the
410 port it to JavaScript. If you mean to update it, please also update the
411 `jquery.timeago-extension.js` file
411 `jquery.timeago-extension.js` file
412
412
413 :param prevdate: datetime object
413 :param prevdate: datetime object
414 :param now: the current time; if not defined,
414 :param now: the current time; if not defined,
415 `datetime.datetime.now()` is used
415 `datetime.datetime.now()` is used
416 :param show_short_version: whether to approximate the date and
416 :param show_short_version: whether to approximate the date and
417 return a shorter string
417 return a shorter string
418 :param show_suffix: whether to add the 'ago' / 'in' suffix
418 :param show_suffix: whether to add the 'ago' / 'in' suffix
419 :param short_format: show the short format, e.g. '2d' instead of '2 days'
419 :param short_format: show the short format, e.g. '2d' instead of '2 days'
420 :rtype: unicode
420 :rtype: unicode
421 :returns: unicode words describing age
421 :returns: unicode words describing age
422 """
422 """
423
423
424 def _get_relative_delta(now, prevdate):
424 def _get_relative_delta(now, prevdate):
425 base = dateutil.relativedelta.relativedelta(now, prevdate)
425 base = dateutil.relativedelta.relativedelta(now, prevdate)
426 return {
426 return {
427 'year': base.years,
427 'year': base.years,
428 'month': base.months,
428 'month': base.months,
429 'day': base.days,
429 'day': base.days,
430 'hour': base.hours,
430 'hour': base.hours,
431 'minute': base.minutes,
431 'minute': base.minutes,
432 'second': base.seconds,
432 'second': base.seconds,
433 }
433 }
434
434
435 def _is_leap_year(year):
435 def _is_leap_year(year):
436 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
436 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
437
437
438 def get_month(prevdate):
438 def get_month(prevdate):
439 return prevdate.month
439 return prevdate.month
440
440
441 def get_year(prevdate):
441 def get_year(prevdate):
442 return prevdate.year
442 return prevdate.year
443
443
444 now = now or datetime.datetime.now()
444 now = now or datetime.datetime.now()
445 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
445 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
446 deltas = {}
446 deltas = {}
447 future = False
447 future = False
448
448
449 if prevdate > now:
449 if prevdate > now:
450 now_old = now
450 now_old = now
451 now = prevdate
451 now = prevdate
452 prevdate = now_old
452 prevdate = now_old
453 future = True
453 future = True
454 if future:
454 if future:
455 prevdate = prevdate.replace(microsecond=0)
455 prevdate = prevdate.replace(microsecond=0)
456 # Get date parts deltas
456 # Get date parts deltas
457 for part in order:
457 for part in order:
458 rel_delta = _get_relative_delta(now, prevdate)
458 rel_delta = _get_relative_delta(now, prevdate)
459 deltas[part] = rel_delta[part]
459 deltas[part] = rel_delta[part]
460
460
461 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
461 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
462 # not 1 hour, -59 minutes and -59 seconds)
462 # not 1 hour, -59 minutes and -59 seconds)
463 offsets = [[5, 60], [4, 60], [3, 24]]
463 offsets = [[5, 60], [4, 60], [3, 24]]
464 for element in offsets: # seconds, minutes, hours
464 for element in offsets: # seconds, minutes, hours
465 num = element[0]
465 num = element[0]
466 length = element[1]
466 length = element[1]
467
467
468 part = order[num]
468 part = order[num]
469 carry_part = order[num - 1]
469 carry_part = order[num - 1]
470
470
471 if deltas[part] < 0:
471 if deltas[part] < 0:
472 deltas[part] += length
472 deltas[part] += length
473 deltas[carry_part] -= 1
473 deltas[carry_part] -= 1
474
474
475 # Same thing for days except that the increment depends on the (variable)
475 # Same thing for days except that the increment depends on the (variable)
476 # number of days in the month
476 # number of days in the month
477 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
477 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
478 if deltas['day'] < 0:
478 if deltas['day'] < 0:
479 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
479 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
480 deltas['day'] += 29
480 deltas['day'] += 29
481 else:
481 else:
482 deltas['day'] += month_lengths[get_month(prevdate) - 1]
482 deltas['day'] += month_lengths[get_month(prevdate) - 1]
483
483
484 deltas['month'] -= 1
484 deltas['month'] -= 1
485
485
486 if deltas['month'] < 0:
486 if deltas['month'] < 0:
487 deltas['month'] += 12
487 deltas['month'] += 12
488 deltas['year'] -= 1
488 deltas['year'] -= 1
489
489
490 # Format the result
490 # Format the result
491 if short_format:
491 if short_format:
492 fmt_funcs = {
492 fmt_funcs = {
493 'year': lambda d: u'%dy' % d,
493 'year': lambda d: u'%dy' % d,
494 'month': lambda d: u'%dm' % d,
494 'month': lambda d: u'%dm' % d,
495 'day': lambda d: u'%dd' % d,
495 'day': lambda d: u'%dd' % d,
496 'hour': lambda d: u'%dh' % d,
496 'hour': lambda d: u'%dh' % d,
497 'minute': lambda d: u'%dmin' % d,
497 'minute': lambda d: u'%dmin' % d,
498 'second': lambda d: u'%dsec' % d,
498 'second': lambda d: u'%dsec' % d,
499 }
499 }
500 else:
500 else:
501 fmt_funcs = {
501 fmt_funcs = {
502 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
502 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
503 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
503 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
504 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
504 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
505 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
505 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
506 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
506 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
507 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
507 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
508 }
508 }
509
509
510 i = 0
510 i = 0
511 for part in order:
511 for part in order:
512 value = deltas[part]
512 value = deltas[part]
513 if value != 0:
513 if value != 0:
514
514
515 if i < 5:
515 if i < 5:
516 sub_part = order[i + 1]
516 sub_part = order[i + 1]
517 sub_value = deltas[sub_part]
517 sub_value = deltas[sub_part]
518 else:
518 else:
519 sub_value = 0
519 sub_value = 0
520
520
521 if sub_value == 0 or show_short_version:
521 if sub_value == 0 or show_short_version:
522 _val = fmt_funcs[part](value)
522 _val = fmt_funcs[part](value)
523 if future:
523 if future:
524 if show_suffix:
524 if show_suffix:
525 return _(u'in ${ago}', mapping={'ago': _val})
525 return _(u'in ${ago}', mapping={'ago': _val})
526 else:
526 else:
527 return _(_val)
527 return _(_val)
528
528
529 else:
529 else:
530 if show_suffix:
530 if show_suffix:
531 return _(u'${ago} ago', mapping={'ago': _val})
531 return _(u'${ago} ago', mapping={'ago': _val})
532 else:
532 else:
533 return _(_val)
533 return _(_val)
534
534
535 val = fmt_funcs[part](value)
535 val = fmt_funcs[part](value)
536 val_detail = fmt_funcs[sub_part](sub_value)
536 val_detail = fmt_funcs[sub_part](sub_value)
537 mapping = {'val': val, 'detail': val_detail}
537 mapping = {'val': val, 'detail': val_detail}
538
538
539 if short_format:
539 if short_format:
540 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
540 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
541 if show_suffix:
541 if show_suffix:
542 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
542 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
543 if future:
543 if future:
544 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
544 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
545 else:
545 else:
546 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
546 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
547 if show_suffix:
547 if show_suffix:
548 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
548 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
549 if future:
549 if future:
550 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
550 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
551
551
552 return datetime_tmpl
552 return datetime_tmpl
553 i += 1
553 i += 1
554 return _(u'just now')
554 return _(u'just now')
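A hedged sketch of calling age() with fixed datetimes. The rendered text depends on the translation machinery behind _, so the results in the comments are approximate.

import datetime

ref = datetime.datetime(2017, 1, 10, 12, 0, 0)
age(datetime.datetime(2017, 1, 9, 11, 0, 0), now=ref)
# roughly: '1 day and 1 hour ago'
age(datetime.datetime(2017, 1, 9, 11, 0, 0), now=ref, short_format=True)
# roughly: '1d, 1h ago'
age(datetime.datetime(2017, 1, 11, 12, 0, 0), now=ref, show_short_version=True)
# roughly: 'in 1 day'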
555
555
556
556
557 def cleaned_uri(uri):
557 def cleaned_uri(uri):
558 """
558 """
559 Quotes '[' and ']' in the uri if there is only one of them.
559 Quotes '[' and ']' in the uri if there is only one of them.
560 According to RFC 3986 we cannot use such characters in a URI.
560 According to RFC 3986 we cannot use such characters in a URI.
561 :param uri:
561 :param uri:
562 :return: uri without these characters
562 :return: uri without these characters
563 """
563 """
564 return urllib.quote(uri, safe='@$:/')
564 return urllib.quote(uri, safe='@$:/')
565
565
566
566
567 def uri_filter(uri):
567 def uri_filter(uri):
568 """
568 """
569 Removes user:password from the given url string
569 Removes user:password from the given url string
570
570
571 :param uri:
571 :param uri:
572 :rtype: list
572 :rtype: list
573 :returns: filtered list of url parts (protocol, host, port)
573 :returns: filtered list of url parts (protocol, host, port)
574 """
574 """
575 if not uri:
575 if not uri:
576 return ''
576 return ''
577
577
578 proto = ''
578 proto = ''
579
579
580 for pat in ('https://', 'http://'):
580 for pat in ('https://', 'http://'):
581 if uri.startswith(pat):
581 if uri.startswith(pat):
582 uri = uri[len(pat):]
582 uri = uri[len(pat):]
583 proto = pat
583 proto = pat
584 break
584 break
585
585
586 # remove passwords and username
586 # remove passwords and username
587 uri = uri[uri.find('@') + 1:]
587 uri = uri[uri.find('@') + 1:]
588
588
589 # get the port
589 # get the port
590 cred_pos = uri.find(':')
590 cred_pos = uri.find(':')
591 if cred_pos == -1:
591 if cred_pos == -1:
592 host, port = uri, None
592 host, port = uri, None
593 else:
593 else:
594 host, port = uri[:cred_pos], uri[cred_pos + 1:]
594 host, port = uri[:cred_pos], uri[cred_pos + 1:]
595
595
596 return filter(None, [proto, host, port])
596 return filter(None, [proto, host, port])
597
597
598
598
599 def credentials_filter(uri):
599 def credentials_filter(uri):
600 """
600 """
601 Returns a url with the credentials removed
601 Returns a url with the credentials removed
602
602
603 :param uri:
603 :param uri:
604 """
604 """
605
605
606 uri = uri_filter(uri)
606 uri = uri_filter(uri)
607 # check if we have port
607 # check if we have port
608 if len(uri) > 2 and uri[2]:
608 if len(uri) > 2 and uri[2]:
609 uri[2] = ':' + uri[2]
609 uri[2] = ':' + uri[2]
610
610
611 return ''.join(uri)
611 return ''.join(uri)
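A quick illustrative call of the two helpers above, using a hypothetical clone URL (note that in Python 2 uri_filter's filter() call returns a plain list):

uri_filter('https://user:secret@code.example.com:8080/repo')
# -> ['https://', 'code.example.com', '8080/repo']
credentials_filter('https://user:secret@code.example.com:8080/repo')
# -> 'https://code.example.com:8080/repo'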
612
612
613
613
614 def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override):
614 def get_clone_url(request, uri_tmpl, repo_name, repo_id, **override):
615 qualifed_home_url = request.route_url('home')
615 qualifed_home_url = request.route_url('home')
616 parsed_url = urlobject.URLObject(qualifed_home_url)
616 parsed_url = urlobject.URLObject(qualifed_home_url)
617 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
617 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
618 args = {
618 args = {
619 'scheme': parsed_url.scheme,
619 'scheme': parsed_url.scheme,
620 'user': '',
620 'user': '',
621 # path if we use proxy-prefix
621 # path if we use proxy-prefix
622 'netloc': parsed_url.netloc+decoded_path,
622 'netloc': parsed_url.netloc+decoded_path,
623 'prefix': decoded_path,
623 'prefix': decoded_path,
624 'repo': repo_name,
624 'repo': repo_name,
625 'repoid': str(repo_id)
625 'repoid': str(repo_id)
626 }
626 }
627 args.update(override)
627 args.update(override)
628 args['user'] = urllib.quote(safe_str(args['user']))
628 args['user'] = urllib.quote(safe_str(args['user']))
629
629
630 for k, v in args.items():
630 for k, v in args.items():
631 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
631 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
632
632
633 # remove leading @ sign if it's present. Case of empty user
633 # remove leading @ sign if it's present. Case of empty user
634 url_obj = urlobject.URLObject(uri_tmpl)
634 url_obj = urlobject.URLObject(uri_tmpl)
635 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
635 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
636
636
637 return safe_unicode(url)
637 return safe_unicode(url)
638
638
639
639
640 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
640 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None):
641 """
641 """
642 Safe version of get_commit: if the commit doesn't exist for a
642 Safe version of get_commit: if the commit doesn't exist for a
643 repository, it returns an EmptyCommit (dummy) instead
643 repository, it returns an EmptyCommit (dummy) instead
644
644
645 :param repo: repository instance
645 :param repo: repository instance
646 :param commit_id: commit id as str
646 :param commit_id: commit id as str
647 :param pre_load: optional list of commit attributes to load
647 :param pre_load: optional list of commit attributes to load
648 """
648 """
649 # TODO(skreft): remove these circular imports
649 # TODO(skreft): remove these circular imports
650 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
650 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
651 from rhodecode.lib.vcs.exceptions import RepositoryError
651 from rhodecode.lib.vcs.exceptions import RepositoryError
652 if not isinstance(repo, BaseRepository):
652 if not isinstance(repo, BaseRepository):
653 raise Exception('You must pass a Repository '
653 raise Exception('You must pass a Repository '
654 'object as first argument, got %s' % type(repo))
654 'object as first argument, got %s' % type(repo))
655
655
656 try:
656 try:
657 commit = repo.get_commit(
657 commit = repo.get_commit(
658 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
658 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
659 except (RepositoryError, LookupError):
659 except (RepositoryError, LookupError):
660 commit = EmptyCommit()
660 commit = EmptyCommit()
661 return commit
661 return commit
662
662
663
663
664 def datetime_to_time(dt):
664 def datetime_to_time(dt):
665 if dt:
665 if dt:
666 return time.mktime(dt.timetuple())
666 return time.mktime(dt.timetuple())
667
667
668
668
669 def time_to_datetime(tm):
669 def time_to_datetime(tm):
670 if tm:
670 if tm:
671 if isinstance(tm, basestring):
671 if isinstance(tm, basestring):
672 try:
672 try:
673 tm = float(tm)
673 tm = float(tm)
674 except ValueError:
674 except ValueError:
675 return
675 return
676 return datetime.datetime.fromtimestamp(tm)
676 return datetime.datetime.fromtimestamp(tm)
677
677
678
678
679 def time_to_utcdatetime(tm):
679 def time_to_utcdatetime(tm):
680 if tm:
680 if tm:
681 if isinstance(tm, basestring):
681 if isinstance(tm, basestring):
682 try:
682 try:
683 tm = float(tm)
683 tm = float(tm)
684 except ValueError:
684 except ValueError:
685 return
685 return
686 return datetime.datetime.utcfromtimestamp(tm)
686 return datetime.datetime.utcfromtimestamp(tm)
687
687
688
688
689 MENTIONS_REGEX = re.compile(
689 MENTIONS_REGEX = re.compile(
690 # ^@ or @ without any special chars in front
690 # ^@ or @ without any special chars in front
691 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
691 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
692 # main body starts with letter, then can be . - _
692 # main body starts with letter, then can be . - _
693 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
693 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
694 re.VERBOSE | re.MULTILINE)
694 re.VERBOSE | re.MULTILINE)
695
695
696
696
697 def extract_mentioned_users(s):
697 def extract_mentioned_users(s):
698 """
698 """
699 Returns unique usernames that are @mentioned in the given string s
699 Returns unique usernames that are @mentioned in the given string s
700
700
701 :param s: string to get mentions from
701 :param s: string to get mentions from
702 """
702 """
703 usrs = set()
703 usrs = set()
704 for username in MENTIONS_REGEX.findall(s):
704 for username in MENTIONS_REGEX.findall(s):
705 usrs.add(username)
705 usrs.add(username)
706
706
707 return sorted(list(usrs), key=lambda k: k.lower())
707 return sorted(list(usrs), key=lambda k: k.lower())
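An illustrative call with a hypothetical input. A plain e-mail address does not match the mention pattern, because the character before its '@' is alphanumeric.

extract_mentioned_users(u'ping @marcin and @lisa_j, mail went to docs@example.com')
# -> [u'lisa_j', u'marcin']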
708
708
709
709
710 class StrictAttributeDict(dict):
710 class StrictAttributeDict(dict):
711 """
711 """
712 Strict version of AttributeDict which raises an AttributeError when a
712 Strict version of AttributeDict which raises an AttributeError when a
713 requested attribute is not set
713 requested attribute is not set
714 """
714 """
715 def __getattr__(self, attr):
715 def __getattr__(self, attr):
716 try:
716 try:
717 return self[attr]
717 return self[attr]
718 except KeyError:
718 except KeyError:
719 raise AttributeError('%s object has no attribute %s' % (
719 raise AttributeError('%s object has no attribute %s' % (
720 self.__class__, attr))
720 self.__class__, attr))
721 __setattr__ = dict.__setitem__
721 __setattr__ = dict.__setitem__
722 __delattr__ = dict.__delitem__
722 __delattr__ = dict.__delitem__
723
723
724
724
725 class AttributeDict(dict):
725 class AttributeDict(dict):
726 def __getattr__(self, attr):
726 def __getattr__(self, attr):
727 return self.get(attr, None)
727 return self.get(attr, None)
728 __setattr__ = dict.__setitem__
728 __setattr__ = dict.__setitem__
729 __delattr__ = dict.__delitem__
729 __delattr__ = dict.__delitem__
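The difference between the two dict flavours above, as a small sketch with illustrative values:

loose = AttributeDict(name='docs')
loose.name        # 'docs'
loose.missing     # None - unknown keys silently return None

strict = StrictAttributeDict(name='docs')
strict.name       # 'docs'
strict.missing    # raises AttributeError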
730
730
731
731
732 def fix_PATH(os_=None):
732 def fix_PATH(os_=None):
733 """
733 """
734 Get the currently active python interpreter path and prepend it to the PATH
734 Get the currently active python interpreter path and prepend it to the PATH
735 variable to fix issues with subprocess calls and different python versions
735 variable to fix issues with subprocess calls and different python versions
736 """
736 """
737 if os_ is None:
737 if os_ is None:
738 import os
738 import os
739 else:
739 else:
740 os = os_
740 os = os_
741
741
742 cur_path = os.path.split(sys.executable)[0]
742 cur_path = os.path.split(sys.executable)[0]
743 if not os.environ['PATH'].startswith(cur_path):
743 if not os.environ['PATH'].startswith(cur_path):
744 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
744 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
745
745
746
746
747 def obfuscate_url_pw(engine):
747 def obfuscate_url_pw(engine):
748 _url = engine or ''
748 _url = engine or ''
749 try:
749 try:
750 _url = sqlalchemy.engine.url.make_url(engine)
750 _url = sqlalchemy.engine.url.make_url(engine)
751 if _url.password:
751 if _url.password:
752 _url.password = 'XXXXX'
752 _url.password = 'XXXXX'
753 except Exception:
753 except Exception:
754 pass
754 pass
755 return unicode(_url)
755 return unicode(_url)
756
756
757
757
758 def get_server_url(environ):
758 def get_server_url(environ):
759 req = webob.Request(environ)
759 req = webob.Request(environ)
760 return req.host_url + req.script_name
760 return req.host_url + req.script_name
761
761
762
762
763 def unique_id(hexlen=32):
763 def unique_id(hexlen=32):
764 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
764 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
765 return suuid(truncate_to=hexlen, alphabet=alphabet)
765 return suuid(truncate_to=hexlen, alphabet=alphabet)
766
766
767
767
768 def suuid(url=None, truncate_to=22, alphabet=None):
768 def suuid(url=None, truncate_to=22, alphabet=None):
769 """
769 """
770 Generate and return a short URL safe UUID.
770 Generate and return a short URL safe UUID.
771
771
772 If the url parameter is provided, set the namespace to the provided
772 If the url parameter is provided, set the namespace to the provided
773 URL and generate a UUID.
773 URL and generate a UUID.
774
774
775 :param url: url to get the uuid for
775 :param url: url to get the uuid for
776 :param truncate_to: truncate the basic 22 character UUID to a shorter version
776 :param truncate_to: truncate the basic 22 character UUID to a shorter version
777
777
778 The IDs won't be universally unique any longer, but the probability of
778 The IDs won't be universally unique any longer, but the probability of
779 a collision will still be very low.
779 a collision will still be very low.
780 """
780 """
781 # Define our alphabet.
781 # Define our alphabet.
782 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
782 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
783
783
784 # If no URL is given, generate a random UUID.
784 # If no URL is given, generate a random UUID.
785 if url is None:
785 if url is None:
786 unique_id = uuid.uuid4().int
786 unique_id = uuid.uuid4().int
787 else:
787 else:
788 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
788 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
789
789
790 alphabet_length = len(_ALPHABET)
790 alphabet_length = len(_ALPHABET)
791 output = []
791 output = []
792 while unique_id > 0:
792 while unique_id > 0:
793 digit = unique_id % alphabet_length
793 digit = unique_id % alphabet_length
794 output.append(_ALPHABET[digit])
794 output.append(_ALPHABET[digit])
795 unique_id = int(unique_id / alphabet_length)
795 unique_id = int(unique_id / alphabet_length)
796 return "".join(output)[:truncate_to]
796 return "".join(output)[:truncate_to]
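A hedged sketch of the two id helpers above; unique_id() output is random, while suuid(url=...) is deterministic for the same URL because it is based on uuid3. The sample URL is illustrative only.

unique_id()
# e.g. 'Mh3kTqX9...' - random, drawn from an alphabet without easily confused characters
suuid(url='https://example.com', truncate_to=12)
# deterministic short id for that URL, at most 12 characters long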
797
797
798
798
799 def get_current_rhodecode_user():
799 def get_current_rhodecode_user(request=None):
800 """
800 """
801 Gets rhodecode user from request
801 Gets rhodecode user from request
802 """
802 """
803 pyramid_request = pyramid.threadlocal.get_current_request()
803 pyramid_request = request or pyramid.threadlocal.get_current_request()
804
804
805 # web case
805 # web case
806 if pyramid_request and hasattr(pyramid_request, 'user'):
806 if pyramid_request and hasattr(pyramid_request, 'user'):
807 return pyramid_request.user
807 return pyramid_request.user
808
808
809 # api case
809 # api case
810 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
810 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
811 return pyramid_request.rpc_user
811 return pyramid_request.rpc_user
812
812
813 return None
813 return None
814
814
815
815
816 def action_logger_generic(action, namespace=''):
816 def action_logger_generic(action, namespace=''):
817 """
817 """
818 A generic logger for actions useful to the system overview; it tries to find
818 A generic logger for actions useful to the system overview; it tries to find
819 an acting user for the context of the call, otherwise it reports an unknown user
819 an acting user for the context of the call, otherwise it reports an unknown user
820
820
821 :param action: logging message, e.g. 'comment 5 deleted'
821 :param action: logging message, e.g. 'comment 5 deleted'
822 :type action: string
822 :type action: string
823
823
824 :param namespace: namespace of the logging message, e.g. 'repo.comments'
824 :param namespace: namespace of the logging message, e.g. 'repo.comments'
825 :type namespace: string
825 :type namespace: string
826
826
827 """
827 """
828
828
829 logger_name = 'rhodecode.actions'
829 logger_name = 'rhodecode.actions'
830
830
831 if namespace:
831 if namespace:
832 logger_name += '.' + namespace
832 logger_name += '.' + namespace
833
833
834 log = logging.getLogger(logger_name)
834 log = logging.getLogger(logger_name)
835
835
836 # get a user if we can
836 # get a user if we can
837 user = get_current_rhodecode_user()
837 user = get_current_rhodecode_user()
838
838
839 logfunc = log.info
839 logfunc = log.info
840
840
841 if not user:
841 if not user:
842 user = '<unknown user>'
842 user = '<unknown user>'
843 logfunc = log.warning
843 logfunc = log.warning
844
844
845 logfunc('Logging action by {}: {}'.format(user, action))
845 logfunc('Logging action by {}: {}'.format(user, action))
846
846
847
847
848 def escape_split(text, sep=',', maxsplit=-1):
848 def escape_split(text, sep=',', maxsplit=-1):
849 r"""
849 r"""
850 Allows for escaping of the separator: e.g. arg='foo\, bar'
850 Allows for escaping of the separator: e.g. arg='foo\, bar'
851
851
852 Note that because of the way bash et al. do command line parsing, the
852 Note that because of the way bash et al. do command line parsing, the
853 single quotes are required.
853 single quotes are required.
854 """
854 """
855 escaped_sep = r'\%s' % sep
855 escaped_sep = r'\%s' % sep
856
856
857 if escaped_sep not in text:
857 if escaped_sep not in text:
858 return text.split(sep, maxsplit)
858 return text.split(sep, maxsplit)
859
859
860 before, _mid, after = text.partition(escaped_sep)
860 before, _mid, after = text.partition(escaped_sep)
861 startlist = before.split(sep, maxsplit) # a regular split is fine here
861 startlist = before.split(sep, maxsplit) # a regular split is fine here
862 unfinished = startlist[-1]
862 unfinished = startlist[-1]
863 startlist = startlist[:-1]
863 startlist = startlist[:-1]
864
864
865 # recurse because there may be more escaped separators
865 # recurse because there may be more escaped separators
866 endlist = escape_split(after, sep, maxsplit)
866 endlist = escape_split(after, sep, maxsplit)
867
867
868 # finish building the escaped value. we use endlist[0] because the first
868 # finish building the escaped value. we use endlist[0] because the first
869 # part of the string sent in recursion is the rest of the escaped value.
869 # part of the string sent in recursion is the rest of the escaped value.
870 unfinished += sep + endlist[0]
870 unfinished += sep + endlist[0]
871
871
872 return startlist + [unfinished] + endlist[1:] # put together all the parts
872 return startlist + [unfinished] + endlist[1:] # put together all the parts
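Illustrative calls showing the escaped-separator handling:

escape_split('foo, bar')            # -> ['foo', ' bar']
escape_split('foo\\, bar, baz')     # -> ['foo, bar', ' baz'] - the escaped comma is kept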
873
873
874
874
875 class OptionalAttr(object):
875 class OptionalAttr(object):
876 """
876 """
877 Special Optional that refers to another attribute by name. Example::
877 Special Optional that refers to another attribute by name. Example::
878
878
879 def test(apiuser, userid=Optional(OAttr('apiuser'))):
879 def test(apiuser, userid=Optional(OAttr('apiuser'))):
880 user = Optional.extract(userid)
880 user = Optional.extract(userid)
881 # calls
881 # calls
882
882
883 """
883 """
884
884
885 def __init__(self, attr_name):
885 def __init__(self, attr_name):
886 self.attr_name = attr_name
886 self.attr_name = attr_name
887
887
888 def __repr__(self):
888 def __repr__(self):
889 return '<OptionalAttr:%s>' % self.attr_name
889 return '<OptionalAttr:%s>' % self.attr_name
890
890
891 def __call__(self):
891 def __call__(self):
892 return self
892 return self
893
893
894
894
895 # alias
895 # alias
896 OAttr = OptionalAttr
896 OAttr = OptionalAttr
897
897
898
898
899 class Optional(object):
899 class Optional(object):
900 """
900 """
901 Defines an optional parameter::
901 Defines an optional parameter::
902
902
903 param = param.getval() if isinstance(param, Optional) else param
903 param = param.getval() if isinstance(param, Optional) else param
904 param = param() if isinstance(param, Optional) else param
904 param = param() if isinstance(param, Optional) else param
905
905
906 is equivalent to::
906 is equivalent to::
907
907
908 param = Optional.extract(param)
908 param = Optional.extract(param)
909
909
910 """
910 """
911
911
912 def __init__(self, type_):
912 def __init__(self, type_):
913 self.type_ = type_
913 self.type_ = type_
914
914
915 def __repr__(self):
915 def __repr__(self):
916 return '<Optional:%s>' % self.type_.__repr__()
916 return '<Optional:%s>' % self.type_.__repr__()
917
917
918 def __call__(self):
918 def __call__(self):
919 return self.getval()
919 return self.getval()
920
920
921 def getval(self):
921 def getval(self):
922 """
922 """
923 returns value from this Optional instance
923 returns value from this Optional instance
924 """
924 """
925 if isinstance(self.type_, OAttr):
925 if isinstance(self.type_, OAttr):
926 # use params name
926 # use params name
927 return self.type_.attr_name
927 return self.type_.attr_name
928 return self.type_
928 return self.type_
929
929
930 @classmethod
930 @classmethod
931 def extract(cls, val):
931 def extract(cls, val):
932 """
932 """
933 Extracts value from Optional() instance
933 Extracts value from Optional() instance
934
934
935 :param val:
935 :param val:
936 :return: the original value if it's not an Optional instance, else
936 :return: the original value if it's not an Optional instance, else
937 the value of the instance
937 the value of the instance
938 """
938 """
939 if isinstance(val, cls):
939 if isinstance(val, cls):
940 return val.getval()
940 return val.getval()
941 return val
941 return val
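A small sketch of how Optional and OAttr interact, mirroring the docstring example above:

userid = Optional(OAttr('apiuser'))
Optional.extract(userid)        # -> 'apiuser', the name of the attribute to fall back to
Optional.extract(Optional(5))   # -> 5
Optional.extract(42)            # -> 42, non-Optional values pass through unchanged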
942
942
943
943
944 def get_routes_generator_for_server_url(server_url):
944 def get_routes_generator_for_server_url(server_url):
945 parsed_url = urlobject.URLObject(server_url)
945 parsed_url = urlobject.URLObject(server_url)
946 netloc = safe_str(parsed_url.netloc)
946 netloc = safe_str(parsed_url.netloc)
947 script_name = safe_str(parsed_url.path)
947 script_name = safe_str(parsed_url.path)
948
948
949 if ':' in netloc:
949 if ':' in netloc:
950 server_name, server_port = netloc.split(':')
950 server_name, server_port = netloc.split(':')
951 else:
951 else:
952 server_name = netloc
952 server_name = netloc
953 server_port = (parsed_url.scheme == 'https' and '443' or '80')
953 server_port = (parsed_url.scheme == 'https' and '443' or '80')
954
954
955 environ = {
955 environ = {
956 'REQUEST_METHOD': 'GET',
956 'REQUEST_METHOD': 'GET',
957 'PATH_INFO': '/',
957 'PATH_INFO': '/',
958 'SERVER_NAME': server_name,
958 'SERVER_NAME': server_name,
959 'SERVER_PORT': server_port,
959 'SERVER_PORT': server_port,
960 'SCRIPT_NAME': script_name,
960 'SCRIPT_NAME': script_name,
961 }
961 }
962 if parsed_url.scheme == 'https':
962 if parsed_url.scheme == 'https':
963 environ['HTTPS'] = 'on'
963 environ['HTTPS'] = 'on'
964 environ['wsgi.url_scheme'] = 'https'
964 environ['wsgi.url_scheme'] = 'https'
965
965
966 return routes.util.URLGenerator(rhodecode.CONFIG['routes.map'], environ)
966 return routes.util.URLGenerator(rhodecode.CONFIG['routes.map'], environ)
967
967
968
968
969 def glob2re(pat):
969 def glob2re(pat):
970 """
970 """
971 Translate a shell PATTERN to a regular expression.
971 Translate a shell PATTERN to a regular expression.
972
972
973 There is no way to quote meta-characters.
973 There is no way to quote meta-characters.
974 """
974 """
975
975
976 i, n = 0, len(pat)
976 i, n = 0, len(pat)
977 res = ''
977 res = ''
978 while i < n:
978 while i < n:
979 c = pat[i]
979 c = pat[i]
980 i = i+1
980 i = i+1
981 if c == '*':
981 if c == '*':
982 #res = res + '.*'
982 #res = res + '.*'
983 res = res + '[^/]*'
983 res = res + '[^/]*'
984 elif c == '?':
984 elif c == '?':
985 #res = res + '.'
985 #res = res + '.'
986 res = res + '[^/]'
986 res = res + '[^/]'
987 elif c == '[':
987 elif c == '[':
988 j = i
988 j = i
989 if j < n and pat[j] == '!':
989 if j < n and pat[j] == '!':
990 j = j+1
990 j = j+1
991 if j < n and pat[j] == ']':
991 if j < n and pat[j] == ']':
992 j = j+1
992 j = j+1
993 while j < n and pat[j] != ']':
993 while j < n and pat[j] != ']':
994 j = j+1
994 j = j+1
995 if j >= n:
995 if j >= n:
996 res = res + '\\['
996 res = res + '\\['
997 else:
997 else:
998 stuff = pat[i:j].replace('\\','\\\\')
998 stuff = pat[i:j].replace('\\','\\\\')
999 i = j+1
999 i = j+1
1000 if stuff[0] == '!':
1000 if stuff[0] == '!':
1001 stuff = '^' + stuff[1:]
1001 stuff = '^' + stuff[1:]
1002 elif stuff[0] == '^':
1002 elif stuff[0] == '^':
1003 stuff = '\\' + stuff
1003 stuff = '\\' + stuff
1004 res = '%s[%s]' % (res, stuff)
1004 res = '%s[%s]' % (res, stuff)
1005 else:
1005 else:
1006 res = res + re.escape(c)
1006 res = res + re.escape(c)
1007 return res + '\Z(?ms)'
1007 return res + '\Z(?ms)'
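An illustrative use of the glob-to-regex translation above; note that '*' and '?' deliberately do not cross path separators. The pattern and paths are hypothetical.

import re

pattern = re.compile(glob2re('docs/*.rst'))
bool(pattern.match('docs/index.rst'))       # True
bool(pattern.match('docs/sub/index.rst'))   # False - '*' stops at '/'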
@@ -1,615 +1,614 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 this is forms validation classes
22 This module contains the forms validation classes; see
22 This module contains the forms validation classes; see
23 http://formencode.org/module-formencode.validators.html
23 http://formencode.org/module-formencode.validators.html
24 for a list of all available validators.
24 for a list of all available validators.
25
25
26 We can also create our own validators.
26 We can also create our own validators.
27
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 pre_validators [] These validators will be applied before the schema
29 pre_validators [] These validators will be applied before the schema
30 chained_validators [] These validators will be applied after the schema
30 chained_validators [] These validators will be applied after the schema
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value.
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value.
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35
35
36
36
37 <name> = formencode.validators.<name of validator>
37 <name> = formencode.validators.<name of validator>
38 <name> must equal form name
38 <name> must equal form name
39 list=[1,2,3,4,5]
39 list=[1,2,3,4,5]
40 for SELECT use formencode.All(OneOf(list), Int())
40 for SELECT use formencode.All(OneOf(list), Int())
41
41
42 """
42 """
43
43
44 import deform
44 import deform
45 import logging
45 import logging
46 import formencode
46 import formencode
47
47
48 from pkg_resources import resource_filename
48 from pkg_resources import resource_filename
49 from formencode import All, Pipe
49 from formencode import All, Pipe
50
50
51 from rhodecode.translation import temp_translation_factory as _
52 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
53
52
54 from rhodecode import BACKENDS
53 from rhodecode import BACKENDS
55 from rhodecode.lib import helpers
54 from rhodecode.lib import helpers
56 from rhodecode.model import validators as v
55 from rhodecode.model import validators as v
57
56
58 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
59
58
60
59
61 deform_templates = resource_filename('deform', 'templates')
60 deform_templates = resource_filename('deform', 'templates')
62 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
63 search_path = (rhodecode_templates, deform_templates)
62 search_path = (rhodecode_templates, deform_templates)
64
63
65
64
66 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
67 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
68 def __call__(self, template_name, **kw):
67 def __call__(self, template_name, **kw):
69 kw['h'] = helpers
68 kw['h'] = helpers
70 kw['request'] = get_current_request()
69 kw['request'] = get_current_request()
71 return self.load(template_name)(**kw)
70 return self.load(template_name)(**kw)
72
71
73
72
74 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
75 deform.Form.set_default_renderer(form_renderer)
74 deform.Form.set_default_renderer(form_renderer)
76
75
77
76
78 def LoginForm(localizer):
77 def LoginForm(localizer):
79 _ = localizer
78 _ = localizer
80
79
81 class _LoginForm(formencode.Schema):
80 class _LoginForm(formencode.Schema):
82 allow_extra_fields = True
81 allow_extra_fields = True
83 filter_extra_fields = True
82 filter_extra_fields = True
84 username = v.UnicodeString(
83 username = v.UnicodeString(
85 strip=True,
84 strip=True,
86 min=1,
85 min=1,
87 not_empty=True,
86 not_empty=True,
88 messages={
87 messages={
89 'empty': _(u'Please enter a login'),
88 'empty': _(u'Please enter a login'),
90 'tooShort': _(u'Enter a value %(min)i characters long or more')
89 'tooShort': _(u'Enter a value %(min)i characters long or more')
91 }
90 }
92 )
91 )
93
92
94 password = v.UnicodeString(
93 password = v.UnicodeString(
95 strip=False,
94 strip=False,
96 min=3,
95 min=3,
97 max=72,
96 max=72,
98 not_empty=True,
97 not_empty=True,
99 messages={
98 messages={
100 'empty': _(u'Please enter a password'),
99 'empty': _(u'Please enter a password'),
101 'tooShort': _(u'Enter %(min)i characters or more')}
100 'tooShort': _(u'Enter %(min)i characters or more')}
102 )
101 )
103
102
104 remember = v.StringBoolean(if_missing=False)
103 remember = v.StringBoolean(if_missing=False)
105
104
106 chained_validators = [v.ValidAuth(localizer)]
105 chained_validators = [v.ValidAuth(localizer)]
107 return _LoginForm
106 return _LoginForm
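A hedged sketch of how these schema factories are consumed. The localizer here stands for whatever translation callable the caller already has (e.g. request.translate in the views); the credentials are illustrative, and in a real run the chained ValidAuth validator would check them against the user database.

schema = LoginForm(localizer)()   # build the schema class, then instantiate it
try:
    clean_data = schema.to_python({'username': 'admin', 'password': 'secret123'})
except formencode.Invalid as errors:
    print errors.unpack_errors()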
108
107
109
108
110 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
111 old_data = old_data or {}
110 old_data = old_data or {}
112 available_languages = available_languages or []
111 available_languages = available_languages or []
113 _ = localizer
112 _ = localizer
114
113
115 class _UserForm(formencode.Schema):
114 class _UserForm(formencode.Schema):
116 allow_extra_fields = True
115 allow_extra_fields = True
117 filter_extra_fields = True
116 filter_extra_fields = True
118 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
119 v.ValidUsername(localizer, edit, old_data))
118 v.ValidUsername(localizer, edit, old_data))
120 if edit:
119 if edit:
121 new_password = All(
120 new_password = All(
122 v.ValidPassword(localizer),
121 v.ValidPassword(localizer),
123 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
124 )
123 )
125 password_confirmation = All(
124 password_confirmation = All(
126 v.ValidPassword(localizer),
125 v.ValidPassword(localizer),
127 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
128 )
127 )
129 admin = v.StringBoolean(if_missing=False)
128 admin = v.StringBoolean(if_missing=False)
130 else:
129 else:
131 password = All(
130 password = All(
132 v.ValidPassword(localizer),
131 v.ValidPassword(localizer),
133 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
134 )
133 )
135 password_confirmation = All(
134 password_confirmation = All(
136 v.ValidPassword(localizer),
135 v.ValidPassword(localizer),
137 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
138 )
137 )
139
138
140 password_change = v.StringBoolean(if_missing=False)
139 password_change = v.StringBoolean(if_missing=False)
141 create_repo_group = v.StringBoolean(if_missing=False)
140 create_repo_group = v.StringBoolean(if_missing=False)
142
141
143 active = v.StringBoolean(if_missing=False)
142 active = v.StringBoolean(if_missing=False)
144 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
146 email = All(v.Email(not_empty=True), v.UniqSystemEmail(localizer, old_data))
145 email = All(v.Email(not_empty=True), v.UniqSystemEmail(localizer, old_data))
147 extern_name = v.UnicodeString(strip=True)
146 extern_name = v.UnicodeString(strip=True)
148 extern_type = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
149 language = v.OneOf(available_languages, hideList=False,
148 language = v.OneOf(available_languages, hideList=False,
150 testValueList=True, if_missing=None)
149 testValueList=True, if_missing=None)
151 chained_validators = [v.ValidPasswordsMatch(localizer)]
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
152 return _UserForm
151 return _UserForm
153
152
154
153
155 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
156 old_data = old_data or {}
155 old_data = old_data or {}
157 _ = localizer
156 _ = localizer
158
157
159 class _UserGroupForm(formencode.Schema):
158 class _UserGroupForm(formencode.Schema):
160 allow_extra_fields = True
159 allow_extra_fields = True
161 filter_extra_fields = True
160 filter_extra_fields = True
162
161
163 users_group_name = All(
162 users_group_name = All(
164 v.UnicodeString(strip=True, min=1, not_empty=True),
163 v.UnicodeString(strip=True, min=1, not_empty=True),
165 v.ValidUserGroup(localizer, edit, old_data)
164 v.ValidUserGroup(localizer, edit, old_data)
166 )
165 )
167 user_group_description = v.UnicodeString(strip=True, min=1,
166 user_group_description = v.UnicodeString(strip=True, min=1,
168 not_empty=False)
167 not_empty=False)
169
168
170 users_group_active = v.StringBoolean(if_missing=False)
169 users_group_active = v.StringBoolean(if_missing=False)
171
170
172 if edit:
171 if edit:
173 # this is user group owner
172 # this is user group owner
174 user = All(
173 user = All(
175 v.UnicodeString(not_empty=True),
174 v.UnicodeString(not_empty=True),
176 v.ValidRepoUser(localizer, allow_disabled))
175 v.ValidRepoUser(localizer, allow_disabled))
177 return _UserGroupForm
176 return _UserGroupForm
178
177
179
178
180 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
181 can_create_in_root=False, allow_disabled=False):
180 can_create_in_root=False, allow_disabled=False):
182 _ = localizer
181 _ = localizer
183 old_data = old_data or {}
182 old_data = old_data or {}
184 available_groups = available_groups or []
183 available_groups = available_groups or []
185
184
186 class _RepoGroupForm(formencode.Schema):
185 class _RepoGroupForm(formencode.Schema):
187 allow_extra_fields = True
186 allow_extra_fields = True
188 filter_extra_fields = False
187 filter_extra_fields = False
189
188
190 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
191 v.SlugifyName(localizer),)
190 v.SlugifyName(localizer),)
192 group_description = v.UnicodeString(strip=True, min=1,
191 group_description = v.UnicodeString(strip=True, min=1,
193 not_empty=False)
192 not_empty=False)
194 group_copy_permissions = v.StringBoolean(if_missing=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
195
194
196 group_parent_id = v.OneOf(available_groups, hideList=False,
195 group_parent_id = v.OneOf(available_groups, hideList=False,
197 testValueList=True, not_empty=True)
196 testValueList=True, not_empty=True)
198 enable_locking = v.StringBoolean(if_missing=False)
197 enable_locking = v.StringBoolean(if_missing=False)
199 chained_validators = [
198 chained_validators = [
200 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
201
200
202 if edit:
201 if edit:
203 # this is repo group owner
202 # this is repo group owner
204 user = All(
203 user = All(
205 v.UnicodeString(not_empty=True),
204 v.UnicodeString(not_empty=True),
206 v.ValidRepoUser(localizer, allow_disabled))
205 v.ValidRepoUser(localizer, allow_disabled))
207 return _RepoGroupForm
206 return _RepoGroupForm
208
207
209
208
210 def RegisterForm(localizer, edit=False, old_data=None):
209 def RegisterForm(localizer, edit=False, old_data=None):
211 _ = localizer
210 _ = localizer
212 old_data = old_data or {}
211 old_data = old_data or {}
213
212
214 class _RegisterForm(formencode.Schema):
213 class _RegisterForm(formencode.Schema):
215 allow_extra_fields = True
214 allow_extra_fields = True
216 filter_extra_fields = True
215 filter_extra_fields = True
217 username = All(
216 username = All(
218 v.ValidUsername(localizer, edit, old_data),
217 v.ValidUsername(localizer, edit, old_data),
219 v.UnicodeString(strip=True, min=1, not_empty=True)
218 v.UnicodeString(strip=True, min=1, not_empty=True)
220 )
219 )
221 password = All(
220 password = All(
222 v.ValidPassword(localizer),
221 v.ValidPassword(localizer),
223 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
224 )
223 )
225 password_confirmation = All(
224 password_confirmation = All(
226 v.ValidPassword(localizer),
225 v.ValidPassword(localizer),
227 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
228 )
227 )
229 active = v.StringBoolean(if_missing=False)
228 active = v.StringBoolean(if_missing=False)
230 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
232 email = All(v.Email(not_empty=True), v.UniqSystemEmail(localizer, old_data))
231 email = All(v.Email(not_empty=True), v.UniqSystemEmail(localizer, old_data))
233
232
234 chained_validators = [v.ValidPasswordsMatch(localizer)]
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
235 return _RegisterForm
234 return _RegisterForm
236
235
237
236
238 def PasswordResetForm(localizer):
237 def PasswordResetForm(localizer):
239 _ = localizer
238 _ = localizer
240
239
241 class _PasswordResetForm(formencode.Schema):
240 class _PasswordResetForm(formencode.Schema):
242 allow_extra_fields = True
241 allow_extra_fields = True
243 filter_extra_fields = True
242 filter_extra_fields = True
244 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
245 return _PasswordResetForm
244 return _PasswordResetForm
246
245
247
246
248 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
249 landing_revs=None, allow_disabled=False):
248 landing_revs=None, allow_disabled=False):
250 _ = localizer
249 _ = localizer
251 old_data = old_data or {}
250 old_data = old_data or {}
252 repo_groups = repo_groups or []
251 repo_groups = repo_groups or []
253 landing_revs = landing_revs or []
252 landing_revs = landing_revs or []
254 supported_backends = BACKENDS.keys()
253 supported_backends = BACKENDS.keys()
255
254
256 class _RepoForm(formencode.Schema):
255 class _RepoForm(formencode.Schema):
257 allow_extra_fields = True
256 allow_extra_fields = True
258 filter_extra_fields = False
257 filter_extra_fields = False
259 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
260 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
261 repo_group = All(v.CanWriteGroup(localizer, old_data),
260 repo_group = All(v.CanWriteGroup(localizer, old_data),
262 v.OneOf(repo_groups, hideList=True))
261 v.OneOf(repo_groups, hideList=True))
263 repo_type = v.OneOf(supported_backends, required=False,
262 repo_type = v.OneOf(supported_backends, required=False,
264 if_missing=old_data.get('repo_type'))
263 if_missing=old_data.get('repo_type'))
265 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
266 repo_private = v.StringBoolean(if_missing=False)
265 repo_private = v.StringBoolean(if_missing=False)
267 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
268 repo_copy_permissions = v.StringBoolean(if_missing=False)
267 repo_copy_permissions = v.StringBoolean(if_missing=False)
269 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
270
269
271 repo_enable_statistics = v.StringBoolean(if_missing=False)
270 repo_enable_statistics = v.StringBoolean(if_missing=False)
272 repo_enable_downloads = v.StringBoolean(if_missing=False)
271 repo_enable_downloads = v.StringBoolean(if_missing=False)
273 repo_enable_locking = v.StringBoolean(if_missing=False)
272 repo_enable_locking = v.StringBoolean(if_missing=False)
274
273
275 if edit:
274 if edit:
276 # this is repo owner
275 # this is repo owner
277 user = All(
276 user = All(
278 v.UnicodeString(not_empty=True),
277 v.UnicodeString(not_empty=True),
279 v.ValidRepoUser(localizer, allow_disabled))
278 v.ValidRepoUser(localizer, allow_disabled))
280 clone_uri_change = v.UnicodeString(
279 clone_uri_change = v.UnicodeString(
281 not_empty=False, if_missing=v.Missing)
280 not_empty=False, if_missing=v.Missing)
282
281
283 chained_validators = [v.ValidCloneUri(localizer),
282 chained_validators = [v.ValidCloneUri(localizer),
284 v.ValidRepoName(localizer, edit, old_data)]
283 v.ValidRepoName(localizer, edit, old_data)]
285 return _RepoForm
284 return _RepoForm
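The edit flag changes the schema's shape: the owner field (`user`) and `clone_uri_change` only exist when editing. A rough sketch, assuming a `localizer` and using placeholder choice lists (the real ones are built from repo groups and landing revisions in the database):

    repo_groups = ['-1', '3']                      # placeholder group choices
    landing_revs = ['rev:tip', 'branch:default']   # placeholder landing revisions

    create_schema = RepoForm(localizer, repo_groups=repo_groups,
                             landing_revs=landing_revs)()
    edit_schema = RepoForm(localizer, edit=True, old_data={'repo_type': 'hg'},
                           repo_groups=repo_groups, landing_revs=landing_revs,
                           allow_disabled=True)()

    assert 'user' not in create_schema.fields      # owner is only validated on edit
    assert 'user' in edit_schema.fields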
286
285
287
286
288 def RepoPermsForm(localizer):
287 def RepoPermsForm(localizer):
289 _ = localizer
288 _ = localizer
290
289
291 class _RepoPermsForm(formencode.Schema):
290 class _RepoPermsForm(formencode.Schema):
292 allow_extra_fields = True
291 allow_extra_fields = True
293 filter_extra_fields = False
292 filter_extra_fields = False
294 chained_validators = [v.ValidPerms(localizer, type_='repo')]
293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
295 return _RepoPermsForm
294 return _RepoPermsForm
296
295
297
296
298 def RepoGroupPermsForm(localizer, valid_recursive_choices):
297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
299 _ = localizer
298 _ = localizer
300
299
301 class _RepoGroupPermsForm(formencode.Schema):
300 class _RepoGroupPermsForm(formencode.Schema):
302 allow_extra_fields = True
301 allow_extra_fields = True
303 filter_extra_fields = False
302 filter_extra_fields = False
304 recursive = v.OneOf(valid_recursive_choices)
303 recursive = v.OneOf(valid_recursive_choices)
305 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
306 return _RepoGroupPermsForm
305 return _RepoGroupPermsForm
307
306
308
307
309 def UserGroupPermsForm(localizer):
308 def UserGroupPermsForm(localizer):
310 _ = localizer
309 _ = localizer
311
310
312 class _UserPermsForm(formencode.Schema):
311 class _UserPermsForm(formencode.Schema):
313 allow_extra_fields = True
312 allow_extra_fields = True
314 filter_extra_fields = False
313 filter_extra_fields = False
315 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
316 return _UserPermsForm
315 return _UserPermsForm
317
316
318
317
319 def RepoFieldForm(localizer):
318 def RepoFieldForm(localizer):
320 _ = localizer
319 _ = localizer
321
320
322 class _RepoFieldForm(formencode.Schema):
321 class _RepoFieldForm(formencode.Schema):
323 filter_extra_fields = True
322 filter_extra_fields = True
324 allow_extra_fields = True
323 allow_extra_fields = True
325
324
326 new_field_key = All(v.FieldKey(localizer),
325 new_field_key = All(v.FieldKey(localizer),
327 v.UnicodeString(strip=True, min=3, not_empty=True))
326 v.UnicodeString(strip=True, min=3, not_empty=True))
328 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
329 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
330 if_missing='str')
329 if_missing='str')
331 new_field_label = v.UnicodeString(not_empty=False)
330 new_field_label = v.UnicodeString(not_empty=False)
332 new_field_desc = v.UnicodeString(not_empty=False)
331 new_field_desc = v.UnicodeString(not_empty=False)
333 return _RepoFieldForm
332 return _RepoFieldForm
334
333
335
334
336 def RepoForkForm(localizer, edit=False, old_data=None,
335 def RepoForkForm(localizer, edit=False, old_data=None,
337 supported_backends=BACKENDS.keys(), repo_groups=None,
336 supported_backends=BACKENDS.keys(), repo_groups=None,
338 landing_revs=None):
337 landing_revs=None):
339 _ = localizer
338 _ = localizer
340 old_data = old_data or {}
339 old_data = old_data or {}
341 repo_groups = repo_groups or []
340 repo_groups = repo_groups or []
342 landing_revs = landing_revs or []
341 landing_revs = landing_revs or []
343
342
344 class _RepoForkForm(formencode.Schema):
343 class _RepoForkForm(formencode.Schema):
345 allow_extra_fields = True
344 allow_extra_fields = True
346 filter_extra_fields = False
345 filter_extra_fields = False
347 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
348 v.SlugifyName(localizer))
347 v.SlugifyName(localizer))
349 repo_group = All(v.CanWriteGroup(localizer, ),
348 repo_group = All(v.CanWriteGroup(localizer, ),
350 v.OneOf(repo_groups, hideList=True))
349 v.OneOf(repo_groups, hideList=True))
351 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
352 description = v.UnicodeString(strip=True, min=1, not_empty=True)
351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
353 private = v.StringBoolean(if_missing=False)
352 private = v.StringBoolean(if_missing=False)
354 copy_permissions = v.StringBoolean(if_missing=False)
353 copy_permissions = v.StringBoolean(if_missing=False)
355 fork_parent_id = v.UnicodeString()
354 fork_parent_id = v.UnicodeString()
356 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
357 landing_rev = v.OneOf(landing_revs, hideList=True)
356 landing_rev = v.OneOf(landing_revs, hideList=True)
358 return _RepoForkForm
357 return _RepoForkForm
359
358
360
359
361 def ApplicationSettingsForm(localizer):
360 def ApplicationSettingsForm(localizer):
362 _ = localizer
361 _ = localizer
363
362
364 class _ApplicationSettingsForm(formencode.Schema):
363 class _ApplicationSettingsForm(formencode.Schema):
365 allow_extra_fields = True
364 allow_extra_fields = True
366 filter_extra_fields = False
365 filter_extra_fields = False
367 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
368 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
369 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
370 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
371 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
372 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
373 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
374 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
375 return _ApplicationSettingsForm
374 return _ApplicationSettingsForm
376
375
377
376
378 def ApplicationVisualisationForm(localizer):
377 def ApplicationVisualisationForm(localizer):
379 _ = localizer
378 _ = localizer
380
379
381 class _ApplicationVisualisationForm(formencode.Schema):
380 class _ApplicationVisualisationForm(formencode.Schema):
382 allow_extra_fields = True
381 allow_extra_fields = True
383 filter_extra_fields = False
382 filter_extra_fields = False
384 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
383 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
385 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
384 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
386 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
385 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
387
386
388 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
387 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
389 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
388 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
390 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
389 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
391 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
390 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
392 rhodecode_show_version = v.StringBoolean(if_missing=False)
391 rhodecode_show_version = v.StringBoolean(if_missing=False)
393 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
392 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
394 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
393 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
395 rhodecode_gravatar_url = v.UnicodeString(min=3)
394 rhodecode_gravatar_url = v.UnicodeString(min=3)
396 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
395 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
397 rhodecode_support_url = v.UnicodeString()
396 rhodecode_support_url = v.UnicodeString()
398 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
397 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
399 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
398 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
400 return _ApplicationVisualisationForm
399 return _ApplicationVisualisationForm
401
400
402
401
403 class _BaseVcsSettingsForm(formencode.Schema):
402 class _BaseVcsSettingsForm(formencode.Schema):
404
403
405 allow_extra_fields = True
404 allow_extra_fields = True
406 filter_extra_fields = False
405 filter_extra_fields = False
407 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
406 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
408 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
407 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
409 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
408 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
410
409
411 # PR/Code-review
410 # PR/Code-review
412 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
411 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
413 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
412 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
414
413
415 # hg
414 # hg
416 extensions_largefiles = v.StringBoolean(if_missing=False)
415 extensions_largefiles = v.StringBoolean(if_missing=False)
417 extensions_evolve = v.StringBoolean(if_missing=False)
416 extensions_evolve = v.StringBoolean(if_missing=False)
418 phases_publish = v.StringBoolean(if_missing=False)
417 phases_publish = v.StringBoolean(if_missing=False)
419
418
420 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
419 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
421 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
420 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
422
421
423 # git
422 # git
424 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
423 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
425 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
424 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
426 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
425 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
427
426
428 # svn
427 # svn
429 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
428 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
430 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
429 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
431
430
432
431
433 def ApplicationUiSettingsForm(localizer):
432 def ApplicationUiSettingsForm(localizer):
434 _ = localizer
433 _ = localizer
435
434
436 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
437 web_push_ssl = v.StringBoolean(if_missing=False)
436 web_push_ssl = v.StringBoolean(if_missing=False)
438 paths_root_path = All(
437 paths_root_path = All(
439 v.ValidPath(localizer),
438 v.ValidPath(localizer),
440 v.UnicodeString(strip=True, min=1, not_empty=True)
439 v.UnicodeString(strip=True, min=1, not_empty=True)
441 )
440 )
442 largefiles_usercache = All(
441 largefiles_usercache = All(
443 v.ValidPath(localizer),
442 v.ValidPath(localizer),
444 v.UnicodeString(strip=True, min=2, not_empty=True))
443 v.UnicodeString(strip=True, min=2, not_empty=True))
445 vcs_git_lfs_store_location = All(
444 vcs_git_lfs_store_location = All(
446 v.ValidPath(localizer),
445 v.ValidPath(localizer),
447 v.UnicodeString(strip=True, min=2, not_empty=True))
446 v.UnicodeString(strip=True, min=2, not_empty=True))
448 extensions_hgsubversion = v.StringBoolean(if_missing=False)
447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
449 extensions_hggit = v.StringBoolean(if_missing=False)
448 extensions_hggit = v.StringBoolean(if_missing=False)
450 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
449 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
451 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
450 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
452 return _ApplicationUiSettingsForm
451 return _ApplicationUiSettingsForm
453
452
454
453
455 def RepoVcsSettingsForm(localizer, repo_name):
454 def RepoVcsSettingsForm(localizer, repo_name):
456 _ = localizer
455 _ = localizer
457
456
458 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
457 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
459 inherit_global_settings = v.StringBoolean(if_missing=False)
458 inherit_global_settings = v.StringBoolean(if_missing=False)
460 new_svn_branch = v.ValidSvnPattern(localizer,
459 new_svn_branch = v.ValidSvnPattern(localizer,
461 section='vcs_svn_branch', repo_name=repo_name)
460 section='vcs_svn_branch', repo_name=repo_name)
462 new_svn_tag = v.ValidSvnPattern(localizer,
461 new_svn_tag = v.ValidSvnPattern(localizer,
463 section='vcs_svn_tag', repo_name=repo_name)
462 section='vcs_svn_tag', repo_name=repo_name)
464 return _RepoVcsSettingsForm
463 return _RepoVcsSettingsForm
465
464
466
465
467 def LabsSettingsForm(localizer):
466 def LabsSettingsForm(localizer):
468 _ = localizer
467 _ = localizer
469
468
470 class _LabSettingsForm(formencode.Schema):
469 class _LabSettingsForm(formencode.Schema):
471 allow_extra_fields = True
470 allow_extra_fields = True
472 filter_extra_fields = False
471 filter_extra_fields = False
473 return _LabSettingsForm
472 return _LabSettingsForm
474
473
475
474
476 def ApplicationPermissionsForm(
475 def ApplicationPermissionsForm(
477 localizer, register_choices, password_reset_choices,
476 localizer, register_choices, password_reset_choices,
478 extern_activate_choices):
477 extern_activate_choices):
479 _ = localizer
478 _ = localizer
480
479
481 class _DefaultPermissionsForm(formencode.Schema):
480 class _DefaultPermissionsForm(formencode.Schema):
482 allow_extra_fields = True
481 allow_extra_fields = True
483 filter_extra_fields = True
482 filter_extra_fields = True
484
483
485 anonymous = v.StringBoolean(if_missing=False)
484 anonymous = v.StringBoolean(if_missing=False)
486 default_register = v.OneOf(register_choices)
485 default_register = v.OneOf(register_choices)
487 default_register_message = v.UnicodeString()
486 default_register_message = v.UnicodeString()
488 default_password_reset = v.OneOf(password_reset_choices)
487 default_password_reset = v.OneOf(password_reset_choices)
489 default_extern_activate = v.OneOf(extern_activate_choices)
488 default_extern_activate = v.OneOf(extern_activate_choices)
490 return _DefaultPermissionsForm
489 return _DefaultPermissionsForm
491
490
492
491
493 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
492 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
494 user_group_perms_choices):
493 user_group_perms_choices):
495 _ = localizer
494 _ = localizer
496
495
497 class _ObjectPermissionsForm(formencode.Schema):
496 class _ObjectPermissionsForm(formencode.Schema):
498 allow_extra_fields = True
497 allow_extra_fields = True
499 filter_extra_fields = True
498 filter_extra_fields = True
500 overwrite_default_repo = v.StringBoolean(if_missing=False)
499 overwrite_default_repo = v.StringBoolean(if_missing=False)
501 overwrite_default_group = v.StringBoolean(if_missing=False)
500 overwrite_default_group = v.StringBoolean(if_missing=False)
502 overwrite_default_user_group = v.StringBoolean(if_missing=False)
501 overwrite_default_user_group = v.StringBoolean(if_missing=False)
503 default_repo_perm = v.OneOf(repo_perms_choices)
502 default_repo_perm = v.OneOf(repo_perms_choices)
504 default_group_perm = v.OneOf(group_perms_choices)
503 default_group_perm = v.OneOf(group_perms_choices)
505 default_user_group_perm = v.OneOf(user_group_perms_choices)
504 default_user_group_perm = v.OneOf(user_group_perms_choices)
506 return _ObjectPermissionsForm
505 return _ObjectPermissionsForm
507
506
508
507
509 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
508 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
510 repo_group_create_choices, user_group_create_choices,
509 repo_group_create_choices, user_group_create_choices,
511 fork_choices, inherit_default_permissions_choices):
510 fork_choices, inherit_default_permissions_choices):
512 _ = localizer
511 _ = localizer
513
512
514 class _DefaultPermissionsForm(formencode.Schema):
513 class _DefaultPermissionsForm(formencode.Schema):
515 allow_extra_fields = True
514 allow_extra_fields = True
516 filter_extra_fields = True
515 filter_extra_fields = True
517
516
518 anonymous = v.StringBoolean(if_missing=False)
517 anonymous = v.StringBoolean(if_missing=False)
519
518
520 default_repo_create = v.OneOf(create_choices)
519 default_repo_create = v.OneOf(create_choices)
521 default_repo_create_on_write = v.OneOf(create_on_write_choices)
520 default_repo_create_on_write = v.OneOf(create_on_write_choices)
522 default_user_group_create = v.OneOf(user_group_create_choices)
521 default_user_group_create = v.OneOf(user_group_create_choices)
523 default_repo_group_create = v.OneOf(repo_group_create_choices)
522 default_repo_group_create = v.OneOf(repo_group_create_choices)
524 default_fork_create = v.OneOf(fork_choices)
523 default_fork_create = v.OneOf(fork_choices)
525 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
524 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
526 return _DefaultPermissionsForm
525 return _DefaultPermissionsForm
527
526
528
527
529 def UserIndividualPermissionsForm(localizer):
528 def UserIndividualPermissionsForm(localizer):
530 _ = localizer
529 _ = localizer
531
530
532 class _DefaultPermissionsForm(formencode.Schema):
531 class _DefaultPermissionsForm(formencode.Schema):
533 allow_extra_fields = True
532 allow_extra_fields = True
534 filter_extra_fields = True
533 filter_extra_fields = True
535
534
536 inherit_default_permissions = v.StringBoolean(if_missing=False)
535 inherit_default_permissions = v.StringBoolean(if_missing=False)
537 return _DefaultPermissionsForm
536 return _DefaultPermissionsForm
538
537
539
538
540 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
539 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
541 _ = localizer
540 _ = localizer
542 old_data = old_data or {}
541 old_data = old_data or {}
543
542
544 class _DefaultsForm(formencode.Schema):
543 class _DefaultsForm(formencode.Schema):
545 allow_extra_fields = True
544 allow_extra_fields = True
546 filter_extra_fields = True
545 filter_extra_fields = True
547 default_repo_type = v.OneOf(supported_backends)
546 default_repo_type = v.OneOf(supported_backends)
548 default_repo_private = v.StringBoolean(if_missing=False)
547 default_repo_private = v.StringBoolean(if_missing=False)
549 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
548 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
550 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
549 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
551 default_repo_enable_locking = v.StringBoolean(if_missing=False)
550 default_repo_enable_locking = v.StringBoolean(if_missing=False)
552 return _DefaultsForm
551 return _DefaultsForm
553
552
554
553
555 def AuthSettingsForm(localizer):
554 def AuthSettingsForm(localizer):
556 _ = localizer
555 _ = localizer
557
556
558 class _AuthSettingsForm(formencode.Schema):
557 class _AuthSettingsForm(formencode.Schema):
559 allow_extra_fields = True
558 allow_extra_fields = True
560 filter_extra_fields = True
559 filter_extra_fields = True
561 auth_plugins = All(v.ValidAuthPlugins(localizer),
560 auth_plugins = All(v.ValidAuthPlugins(localizer),
562 v.UniqueListFromString(localizer)(not_empty=True))
561 v.UniqueListFromString(localizer)(not_empty=True))
563 return _AuthSettingsForm
562 return _AuthSettingsForm
564
563
565
564
566 def UserExtraEmailForm(localizer):
565 def UserExtraEmailForm(localizer):
567 _ = localizer
566 _ = localizer
568
567
569 class _UserExtraEmailForm(formencode.Schema):
568 class _UserExtraEmailForm(formencode.Schema):
570 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
569 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
571 return _UserExtraEmailForm
570 return _UserExtraEmailForm
572
571
573
572
574 def UserExtraIpForm(localizer):
573 def UserExtraIpForm(localizer):
575 _ = localizer
574 _ = localizer
576
575
577 class _UserExtraIpForm(formencode.Schema):
576 class _UserExtraIpForm(formencode.Schema):
578 ip = v.ValidIp(localizer)(not_empty=True)
577 ip = v.ValidIp(localizer)(not_empty=True)
579 return _UserExtraIpForm
578 return _UserExtraIpForm
580
579
581
580
582 def PullRequestForm(localizer, repo_id):
581 def PullRequestForm(localizer, repo_id):
583 _ = localizer
582 _ = localizer
584
583
585 class ReviewerForm(formencode.Schema):
584 class ReviewerForm(formencode.Schema):
586 user_id = v.Int(not_empty=True)
585 user_id = v.Int(not_empty=True)
587 reasons = All()
586 reasons = All()
588 mandatory = v.StringBoolean()
587 mandatory = v.StringBoolean()
589
588
590 class _PullRequestForm(formencode.Schema):
589 class _PullRequestForm(formencode.Schema):
591 allow_extra_fields = True
590 allow_extra_fields = True
592 filter_extra_fields = True
591 filter_extra_fields = True
593
592
594 common_ancestor = v.UnicodeString(strip=True, required=True)
593 common_ancestor = v.UnicodeString(strip=True, required=True)
595 source_repo = v.UnicodeString(strip=True, required=True)
594 source_repo = v.UnicodeString(strip=True, required=True)
596 source_ref = v.UnicodeString(strip=True, required=True)
595 source_ref = v.UnicodeString(strip=True, required=True)
597 target_repo = v.UnicodeString(strip=True, required=True)
596 target_repo = v.UnicodeString(strip=True, required=True)
598 target_ref = v.UnicodeString(strip=True, required=True)
597 target_ref = v.UnicodeString(strip=True, required=True)
599 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
598 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
600 v.UniqueList(localizer)(not_empty=True))
599 v.UniqueList(localizer)(not_empty=True))
601 review_members = formencode.ForEach(ReviewerForm())
600 review_members = formencode.ForEach(ReviewerForm())
602 pullrequest_title = v.UnicodeString(strip=True, required=True)
601 pullrequest_title = v.UnicodeString(strip=True, required=True)
603 pullrequest_desc = v.UnicodeString(strip=True, required=False)
602 pullrequest_desc = v.UnicodeString(strip=True, required=False)
604
603
605 return _PullRequestForm
604 return _PullRequestForm
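Since review_members is wrapped in formencode.ForEach, the form expects a list of reviewer dicts shaped like ReviewerForm. A sketch of the expected payload (every value below is made up):

    pr_data = {
        'common_ancestor': 'abc123',
        'source_repo': 'group/source-repo',
        'source_ref': 'branch:feature:deadbeef',
        'target_repo': 'group/target-repo',
        'target_ref': 'branch:default:cafebabe',
        'revisions': ['deadbeef', 'cafebabe'],
        'review_members': [
            {'user_id': 2, 'reasons': [], 'mandatory': 'true'},
            {'user_id': 5, 'reasons': ['touched these files'], 'mandatory': 'false'},
        ],
        'pullrequest_title': 'Fix login redirect',
        'pullrequest_desc': '',
    }
    form_result = PullRequestForm(localizer, repo_id=1)().to_python(pr_data)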
606
605
607
606
608 def IssueTrackerPatternsForm(localizer):
607 def IssueTrackerPatternsForm(localizer):
609 _ = localizer
608 _ = localizer
610
609
611 class _IssueTrackerPatternsForm(formencode.Schema):
610 class _IssueTrackerPatternsForm(formencode.Schema):
612 allow_extra_fields = True
611 allow_extra_fields = True
613 filter_extra_fields = False
612 filter_extra_fields = False
614 chained_validators = [v.ValidPattern(localizer)]
613 chained_validators = [v.ValidPattern(localizer)]
615 return _IssueTrackerPatternsForm
614 return _IssueTrackerPatternsForm
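All of these factories follow the same formencode pattern: field validators run first, then chained_validators see the whole converted result dict. A compact, throwaway illustration of the extra-field switches used throughout this module:

    import formencode
    from formencode import validators

    class _Demo(formencode.Schema):
        allow_extra_fields = True    # unknown keys do not raise Invalid...
        filter_extra_fields = True   # ...and are dropped from the result
        name = validators.UnicodeString(not_empty=True)

    print(_Demo().to_python({'name': 'x', 'stray': 'ignored'}))
    # -> {'name': 'x'}; the 'stray' key is filtered out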
@@ -1,909 +1,907 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import logging
29 import logging
30 import cStringIO
30 import cStringIO
31 import pkg_resources
31 import pkg_resources
32
32
33 from rhodecode.translation import temp_translation_factory as _
34 from sqlalchemy import func
33 from sqlalchemy import func
35 from zope.cachedescriptors.property import Lazy as LazyProperty
34 from zope.cachedescriptors.property import Lazy as LazyProperty
36
35
37 import rhodecode
36 import rhodecode
38 from rhodecode.lib.vcs import get_backend
37 from rhodecode.lib.vcs import get_backend
39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
40 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.lib.vcs.nodes import FileNode
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
42 from rhodecode.lib import helpers as h
41 from rhodecode.lib import helpers as h
43
44 from rhodecode.lib.auth import (
42 from rhodecode.lib.auth import (
45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
46 HasUserGroupPermissionAny)
44 HasUserGroupPermissionAny)
47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
48 from rhodecode.lib import hooks_utils, caches
46 from rhodecode.lib import hooks_utils, caches
49 from rhodecode.lib.utils import (
47 from rhodecode.lib.utils import (
50 get_filesystem_repos, make_db_config)
48 get_filesystem_repos, make_db_config)
51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
52 from rhodecode.lib.system_info import get_system_info
50 from rhodecode.lib.system_info import get_system_info
53 from rhodecode.model import BaseModel
51 from rhodecode.model import BaseModel
54 from rhodecode.model.db import (
52 from rhodecode.model.db import (
55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
56 PullRequest)
54 PullRequest)
57 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.settings import VcsSettingsModel
58
56
59 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
60
58
61
59
62 class UserTemp(object):
60 class UserTemp(object):
63 def __init__(self, user_id):
61 def __init__(self, user_id):
64 self.user_id = user_id
62 self.user_id = user_id
65
63
66 def __repr__(self):
64 def __repr__(self):
67 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
68
66
69
67
70 class RepoTemp(object):
68 class RepoTemp(object):
71 def __init__(self, repo_id):
69 def __init__(self, repo_id):
72 self.repo_id = repo_id
70 self.repo_id = repo_id
73
71
74 def __repr__(self):
72 def __repr__(self):
75 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
76
74
77
75
78 class SimpleCachedRepoList(object):
76 class SimpleCachedRepoList(object):
79 """
77 """
80 Lighter version of iteration over repos without the scm initialisation,
78 Lighter version of iteration over repos without the scm initialisation,
81 and with cache usage
79 and with cache usage
82 """
80 """
83 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
84 self.db_repo_list = db_repo_list
82 self.db_repo_list = db_repo_list
85 self.repos_path = repos_path
83 self.repos_path = repos_path
86 self.order_by = order_by
84 self.order_by = order_by
87 self.reversed = (order_by or '').startswith('-')
85 self.reversed = (order_by or '').startswith('-')
88 if not perm_set:
86 if not perm_set:
89 perm_set = ['repository.read', 'repository.write',
87 perm_set = ['repository.read', 'repository.write',
90 'repository.admin']
88 'repository.admin']
91 self.perm_set = perm_set
89 self.perm_set = perm_set
92
90
93 def __len__(self):
91 def __len__(self):
94 return len(self.db_repo_list)
92 return len(self.db_repo_list)
95
93
96 def __repr__(self):
94 def __repr__(self):
97 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
98
96
99 def __iter__(self):
97 def __iter__(self):
100 for dbr in self.db_repo_list:
98 for dbr in self.db_repo_list:
101 # check permission at this level
99 # check permission at this level
102 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 has_perm = HasRepoPermissionAny(*self.perm_set)(
103 dbr.repo_name, 'SimpleCachedRepoList check')
101 dbr.repo_name, 'SimpleCachedRepoList check')
104 if not has_perm:
102 if not has_perm:
105 continue
103 continue
106
104
107 tmp_d = {
105 tmp_d = {
108 'name': dbr.repo_name,
106 'name': dbr.repo_name,
109 'dbrepo': dbr.get_dict(),
107 'dbrepo': dbr.get_dict(),
110 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
111 }
109 }
112 yield tmp_d
110 yield tmp_d
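Iteration therefore yields plain dicts that are already filtered by repository read permissions; a small consuming sketch (the repos_path value is a placeholder):

    db_repos = Repository.query().all()
    repo_list = SimpleCachedRepoList(db_repos, repos_path='/srv/repos')
    for entry in repo_list:
        # each entry is a dict with 'name', 'dbrepo' and 'dbrepo_fork' keys;
        # repositories the user cannot read are silently skipped
        print(entry['name'])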
113
111
114
112
115 class _PermCheckIterator(object):
113 class _PermCheckIterator(object):
116
114
117 def __init__(
115 def __init__(
118 self, obj_list, obj_attr, perm_set, perm_checker,
116 self, obj_list, obj_attr, perm_set, perm_checker,
119 extra_kwargs=None):
117 extra_kwargs=None):
120 """
118 """
121 Creates an iterator from the given list of objects, additionally
119 Creates an iterator from the given list of objects, additionally
122 checking permissions for them against the perm_set var
120 checking permissions for them against the perm_set var
123
121
124 :param obj_list: list of db objects
122 :param obj_list: list of db objects
125 :param obj_attr: attribute of object to pass into perm_checker
123 :param obj_attr: attribute of object to pass into perm_checker
126 :param perm_set: list of permissions to check
124 :param perm_set: list of permissions to check
127 :param perm_checker: callable to check permissions against
125 :param perm_checker: callable to check permissions against
128 """
126 """
129 self.obj_list = obj_list
127 self.obj_list = obj_list
130 self.obj_attr = obj_attr
128 self.obj_attr = obj_attr
131 self.perm_set = perm_set
129 self.perm_set = perm_set
132 self.perm_checker = perm_checker
130 self.perm_checker = perm_checker
133 self.extra_kwargs = extra_kwargs or {}
131 self.extra_kwargs = extra_kwargs or {}
134
132
135 def __len__(self):
133 def __len__(self):
136 return len(self.obj_list)
134 return len(self.obj_list)
137
135
138 def __repr__(self):
136 def __repr__(self):
139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
140
138
141 def __iter__(self):
139 def __iter__(self):
142 checker = self.perm_checker(*self.perm_set)
140 checker = self.perm_checker(*self.perm_set)
143 for db_obj in self.obj_list:
141 for db_obj in self.obj_list:
144 # check permission at this level
142 # check permission at this level
145 name = getattr(db_obj, self.obj_attr, None)
143 name = getattr(db_obj, self.obj_attr, None)
146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
147 continue
145 continue
148
146
149 yield db_obj
147 yield db_obj
150
148
151
149
152 class RepoList(_PermCheckIterator):
150 class RepoList(_PermCheckIterator):
153
151
154 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
155 if not perm_set:
153 if not perm_set:
156 perm_set = [
154 perm_set = [
157 'repository.read', 'repository.write', 'repository.admin']
155 'repository.read', 'repository.write', 'repository.admin']
158
156
159 super(RepoList, self).__init__(
157 super(RepoList, self).__init__(
160 obj_list=db_repo_list,
158 obj_list=db_repo_list,
161 obj_attr='repo_name', perm_set=perm_set,
159 obj_attr='repo_name', perm_set=perm_set,
162 perm_checker=HasRepoPermissionAny,
160 perm_checker=HasRepoPermissionAny,
163 extra_kwargs=extra_kwargs)
161 extra_kwargs=extra_kwargs)
164
162
165
163
166 class RepoGroupList(_PermCheckIterator):
164 class RepoGroupList(_PermCheckIterator):
167
165
168 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
169 if not perm_set:
167 if not perm_set:
170 perm_set = ['group.read', 'group.write', 'group.admin']
168 perm_set = ['group.read', 'group.write', 'group.admin']
171
169
172 super(RepoGroupList, self).__init__(
170 super(RepoGroupList, self).__init__(
173 obj_list=db_repo_group_list,
171 obj_list=db_repo_group_list,
174 obj_attr='group_name', perm_set=perm_set,
172 obj_attr='group_name', perm_set=perm_set,
175 perm_checker=HasRepoGroupPermissionAny,
173 perm_checker=HasRepoGroupPermissionAny,
176 extra_kwargs=extra_kwargs)
174 extra_kwargs=extra_kwargs)
177
175
178
176
179 class UserGroupList(_PermCheckIterator):
177 class UserGroupList(_PermCheckIterator):
180
178
181 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
182 if not perm_set:
180 if not perm_set:
183 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
184
182
185 super(UserGroupList, self).__init__(
183 super(UserGroupList, self).__init__(
186 obj_list=db_user_group_list,
184 obj_list=db_user_group_list,
187 obj_attr='users_group_name', perm_set=perm_set,
185 obj_attr='users_group_name', perm_set=perm_set,
188 perm_checker=HasUserGroupPermissionAny,
186 perm_checker=HasUserGroupPermissionAny,
189 extra_kwargs=extra_kwargs)
187 extra_kwargs=extra_kwargs)
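The three wrappers above differ only in the attribute checked and the permission checker used. A sketch of narrowing a query result down to repositories the current user administers (perm_set deliberately restricted):

    admin_repos = RepoList(
        Repository.query().all(),            # any iterable of Repository rows
        perm_set=['repository.admin'])
    admin_repo_names = [repo.repo_name for repo in admin_repos]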
190
188
191
189
192 class ScmModel(BaseModel):
190 class ScmModel(BaseModel):
193 """
191 """
194 Generic Scm Model
192 Generic Scm Model
195 """
193 """
196
194
197 @LazyProperty
195 @LazyProperty
198 def repos_path(self):
196 def repos_path(self):
199 """
197 """
200 Gets the repositories root path from database
198 Gets the repositories root path from database
201 """
199 """
202
200
203 settings_model = VcsSettingsModel(sa=self.sa)
201 settings_model = VcsSettingsModel(sa=self.sa)
204 return settings_model.get_repos_location()
202 return settings_model.get_repos_location()
205
203
206 def repo_scan(self, repos_path=None):
204 def repo_scan(self, repos_path=None):
207 """
205 """
208 Listing of repositories in the given path. This path should not be a
206 Listing of repositories in the given path. This path should not be a
209 repository itself. Returns a dictionary of repository objects
207 repository itself. Returns a dictionary of repository objects
210
208
211 :param repos_path: path to directory containing repositories
209 :param repos_path: path to directory containing repositories
212 """
210 """
213
211
214 if repos_path is None:
212 if repos_path is None:
215 repos_path = self.repos_path
213 repos_path = self.repos_path
216
214
217 log.info('scanning for repositories in %s', repos_path)
215 log.info('scanning for repositories in %s', repos_path)
218
216
219 config = make_db_config()
217 config = make_db_config()
220 config.set('extensions', 'largefiles', '')
218 config.set('extensions', 'largefiles', '')
221 repos = {}
219 repos = {}
222
220
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 # name needs to be decomposed and put back together using the /
222 # name needs to be decomposed and put back together using the /
225 # since this is the internal storage separator for rhodecode
223 # since this is the internal storage separator for rhodecode
226 name = Repository.normalize_repo_name(name)
224 name = Repository.normalize_repo_name(name)
227
225
228 try:
226 try:
229 if name in repos:
227 if name in repos:
230 raise RepositoryError('Duplicate repository name %s '
228 raise RepositoryError('Duplicate repository name %s '
231 'found in %s' % (name, path))
229 'found in %s' % (name, path))
232 elif path[0] in rhodecode.BACKENDS:
230 elif path[0] in rhodecode.BACKENDS:
233 klass = get_backend(path[0])
231 klass = get_backend(path[0])
234 repos[name] = klass(path[1], config=config)
232 repos[name] = klass(path[1], config=config)
235 except OSError:
233 except OSError:
236 continue
234 continue
237 log.debug('found %s paths with repositories', len(repos))
235 log.debug('found %s paths with repositories', len(repos))
238 return repos
236 return repos
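The return value maps normalized repository names to instantiated vcs backend objects; a small sketch with a placeholder path:

    scm = ScmModel()
    found = scm.repo_scan('/srv/repositories')
    for repo_name, backend in sorted(found.items()):
        # backend is a Mercurial/Git/Subversion repository from rhodecode.lib.vcs
        print(repo_name, backend.alias)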
239
237
240 def get_repos(self, all_repos=None, sort_key=None):
238 def get_repos(self, all_repos=None, sort_key=None):
241 """
239 """
242 Get all repositories from the db and for each repo create its
240 Get all repositories from the db and for each repo create its
243 backend instance and fill that backend with information from the database
241 backend instance and fill that backend with information from the database
244
242
245 :param all_repos: list of repository names as strings
243 :param all_repos: list of repository names as strings
246 give specific repositories list, good for filtering
244 give specific repositories list, good for filtering
247
245
248 :param sort_key: initial sorting of repositories
246 :param sort_key: initial sorting of repositories
249 """
247 """
250 if all_repos is None:
248 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
249 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == None)\
250 .filter(Repository.group_id == None)\
253 .order_by(func.lower(Repository.repo_name)).all()
251 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
252 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
254 return repo_iter
257
255
258 def get_repo_groups(self, all_groups=None):
256 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
257 if all_groups is None:
260 all_groups = RepoGroup.query()\
258 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == None).all()
259 .filter(RepoGroup.group_parent_id == None).all()
262 return [x for x in RepoGroupList(all_groups)]
260 return [x for x in RepoGroupList(all_groups)]
263
261
264 def mark_for_invalidation(self, repo_name, delete=False):
262 def mark_for_invalidation(self, repo_name, delete=False):
265 """
263 """
266 Mark caches of this repo invalid in the database. `delete` flag
264 Mark caches of this repo invalid in the database. `delete` flag
267 removes the cache entries
265 removes the cache entries
268
266
269 :param repo_name: the repo_name for which caches should be marked
267 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
268 invalid, or deleted
271 :param delete: delete the entry keys instead of setting bool
269 :param delete: delete the entry keys instead of setting bool
272 flag on them
270 flag on them
273 """
271 """
274 CacheKey.set_invalidate(repo_name, delete=delete)
272 CacheKey.set_invalidate(repo_name, delete=delete)
275 repo = Repository.get_by_repo_name(repo_name)
273 repo = Repository.get_by_repo_name(repo_name)
276
274
277 if repo:
275 if repo:
278 config = repo._config
276 config = repo._config
279 config.set('extensions', 'largefiles', '')
277 config.set('extensions', 'largefiles', '')
280 repo.update_commit_cache(config=config, cs_cache=None)
278 repo.update_commit_cache(config=config, cs_cache=None)
281 caches.clear_repo_caches(repo_name)
279 caches.clear_repo_caches(repo_name)
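A typical call site simply names the repository after a write operation; delete=True removes the cache keys instead of flagging them (the repository name is made up):

    ScmModel().mark_for_invalidation('projects/my-repo')
    # or drop the cache entries entirely:
    ScmModel().mark_for_invalidation('projects/my-repo', delete=True)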
282
280
283 def toggle_following_repo(self, follow_repo_id, user_id):
281 def toggle_following_repo(self, follow_repo_id, user_id):
284
282
285 f = self.sa.query(UserFollowing)\
283 f = self.sa.query(UserFollowing)\
286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
287 .filter(UserFollowing.user_id == user_id).scalar()
285 .filter(UserFollowing.user_id == user_id).scalar()
288
286
289 if f is not None:
287 if f is not None:
290 try:
288 try:
291 self.sa.delete(f)
289 self.sa.delete(f)
292 return
290 return
293 except Exception:
291 except Exception:
294 log.error(traceback.format_exc())
292 log.error(traceback.format_exc())
295 raise
293 raise
296
294
297 try:
295 try:
298 f = UserFollowing()
296 f = UserFollowing()
299 f.user_id = user_id
297 f.user_id = user_id
300 f.follows_repo_id = follow_repo_id
298 f.follows_repo_id = follow_repo_id
301 self.sa.add(f)
299 self.sa.add(f)
302 except Exception:
300 except Exception:
303 log.error(traceback.format_exc())
301 log.error(traceback.format_exc())
304 raise
302 raise
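The method is a genuine toggle: an existing UserFollowing row is deleted, otherwise a new one is added, so calling it twice restores the initial state (ids are made up, and committing the session is assumed to happen in the caller):

    scm = ScmModel()
    scm.toggle_following_repo(follow_repo_id=42, user_id=2)   # start following
    scm.toggle_following_repo(follow_repo_id=42, user_id=2)   # ...and unfollow again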
305
303
306 def toggle_following_user(self, follow_user_id, user_id):
304 def toggle_following_user(self, follow_user_id, user_id):
307 f = self.sa.query(UserFollowing)\
305 f = self.sa.query(UserFollowing)\
308 .filter(UserFollowing.follows_user_id == follow_user_id)\
306 .filter(UserFollowing.follows_user_id == follow_user_id)\
309 .filter(UserFollowing.user_id == user_id).scalar()
307 .filter(UserFollowing.user_id == user_id).scalar()
310
308
311 if f is not None:
309 if f is not None:
312 try:
310 try:
313 self.sa.delete(f)
311 self.sa.delete(f)
314 return
312 return
315 except Exception:
313 except Exception:
316 log.error(traceback.format_exc())
314 log.error(traceback.format_exc())
317 raise
315 raise
318
316
319 try:
317 try:
320 f = UserFollowing()
318 f = UserFollowing()
321 f.user_id = user_id
319 f.user_id = user_id
322 f.follows_user_id = follow_user_id
320 f.follows_user_id = follow_user_id
323 self.sa.add(f)
321 self.sa.add(f)
324 except Exception:
322 except Exception:
325 log.error(traceback.format_exc())
323 log.error(traceback.format_exc())
326 raise
324 raise
327
325
328 def is_following_repo(self, repo_name, user_id, cache=False):
326 def is_following_repo(self, repo_name, user_id, cache=False):
329 r = self.sa.query(Repository)\
327 r = self.sa.query(Repository)\
330 .filter(Repository.repo_name == repo_name).scalar()
328 .filter(Repository.repo_name == repo_name).scalar()
331
329
332 f = self.sa.query(UserFollowing)\
330 f = self.sa.query(UserFollowing)\
333 .filter(UserFollowing.follows_repository == r)\
331 .filter(UserFollowing.follows_repository == r)\
334 .filter(UserFollowing.user_id == user_id).scalar()
332 .filter(UserFollowing.user_id == user_id).scalar()
335
333
336 return f is not None
334 return f is not None
337
335
338 def is_following_user(self, username, user_id, cache=False):
336 def is_following_user(self, username, user_id, cache=False):
339 u = User.get_by_username(username)
337 u = User.get_by_username(username)
340
338
341 f = self.sa.query(UserFollowing)\
339 f = self.sa.query(UserFollowing)\
342 .filter(UserFollowing.follows_user == u)\
340 .filter(UserFollowing.follows_user == u)\
343 .filter(UserFollowing.user_id == user_id).scalar()
341 .filter(UserFollowing.user_id == user_id).scalar()
344
342
345 return f is not None
343 return f is not None
346
344
347 def get_followers(self, repo):
345 def get_followers(self, repo):
348 repo = self._get_repo(repo)
346 repo = self._get_repo(repo)
349
347
350 return self.sa.query(UserFollowing)\
348 return self.sa.query(UserFollowing)\
351 .filter(UserFollowing.follows_repository == repo).count()
349 .filter(UserFollowing.follows_repository == repo).count()
352
350
353 def get_forks(self, repo):
351 def get_forks(self, repo):
354 repo = self._get_repo(repo)
352 repo = self._get_repo(repo)
355 return self.sa.query(Repository)\
353 return self.sa.query(Repository)\
356 .filter(Repository.fork == repo).count()
354 .filter(Repository.fork == repo).count()
357
355
358 def get_pull_requests(self, repo):
356 def get_pull_requests(self, repo):
359 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
360 return self.sa.query(PullRequest)\
358 return self.sa.query(PullRequest)\
361 .filter(PullRequest.target_repo == repo)\
359 .filter(PullRequest.target_repo == repo)\
362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
363
361
364 def mark_as_fork(self, repo, fork, user):
362 def mark_as_fork(self, repo, fork, user):
365 repo = self._get_repo(repo)
363 repo = self._get_repo(repo)
366 fork = self._get_repo(fork)
364 fork = self._get_repo(fork)
367 if fork and repo.repo_id == fork.repo_id:
365 if fork and repo.repo_id == fork.repo_id:
368 raise Exception("Cannot set repository as fork of itself")
366 raise Exception("Cannot set repository as fork of itself")
369
367
370 if fork and repo.repo_type != fork.repo_type:
368 if fork and repo.repo_type != fork.repo_type:
371 raise RepositoryError(
369 raise RepositoryError(
372 "Cannot set repository as fork of repository with other type")
370 "Cannot set repository as fork of repository with other type")
373
371
374 repo.fork = fork
372 repo.fork = fork
375 self.sa.add(repo)
373 self.sa.add(repo)
376 return repo
374 return repo
377
375
378 def pull_changes(self, repo, username):
376 def pull_changes(self, repo, username):
379 dbrepo = self._get_repo(repo)
377 dbrepo = self._get_repo(repo)
380 clone_uri = dbrepo.clone_uri
378 clone_uri = dbrepo.clone_uri
381 if not clone_uri:
379 if not clone_uri:
382 raise Exception("This repository doesn't have a clone uri")
380 raise Exception("This repository doesn't have a clone uri")
383
381
384 repo = dbrepo.scm_instance(cache=False)
382 repo = dbrepo.scm_instance(cache=False)
385 # TODO: marcink fix this and re-enable since we need common logic
383 # TODO: marcink fix this and re-enable since we need common logic
386 # for hg/git remove hooks so we don't trigger them on fetching
384 # for hg/git remove hooks so we don't trigger them on fetching
387 # commits from remote
385 # commits from remote
388 repo.config.clear_section('hooks')
386 repo.config.clear_section('hooks')
389
387
390 repo_name = dbrepo.repo_name
388 repo_name = dbrepo.repo_name
391 try:
389 try:
392 # TODO: we need to make sure those operations call proper hooks !
390 # TODO: we need to make sure those operations call proper hooks !
393 repo.pull(clone_uri)
391 repo.pull(clone_uri)
394
392
395 self.mark_for_invalidation(repo_name)
393 self.mark_for_invalidation(repo_name)
396 except Exception:
394 except Exception:
397 log.error(traceback.format_exc())
395 log.error(traceback.format_exc())
398 raise
396 raise
399
397
400 def commit_change(self, repo, repo_name, commit, user, author, message,
398 def commit_change(self, repo, repo_name, commit, user, author, message,
401 content, f_path):
399 content, f_path):
402 """
400 """
403 Commits changes
401 Commits changes
404
402
405 :param repo: SCM instance
403 :param repo: SCM instance
406
404
407 """
405 """
408 user = self._get_user(user)
406 user = self._get_user(user)
409
407
410 # decoding here will ensure that we have properly encoded values
408 # decoding here will ensure that we have properly encoded values
411 # in any other case this will throw exceptions and deny the commit
409 # in any other case this will throw exceptions and deny the commit
412 content = safe_str(content)
410 content = safe_str(content)
413 path = safe_str(f_path)
411 path = safe_str(f_path)
414 # message and author need to be unicode
412 # message and author need to be unicode
415 # proper backend should then translate that into required type
413 # proper backend should then translate that into required type
416 message = safe_unicode(message)
414 message = safe_unicode(message)
417 author = safe_unicode(author)
415 author = safe_unicode(author)
418 imc = repo.in_memory_commit
416 imc = repo.in_memory_commit
419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
417 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
420 try:
418 try:
421 # TODO: handle pre-push action !
419 # TODO: handle pre-push action !
422 tip = imc.commit(
420 tip = imc.commit(
423 message=message, author=author, parents=[commit],
421 message=message, author=author, parents=[commit],
424 branch=commit.branch)
422 branch=commit.branch)
425 except Exception as e:
423 except Exception as e:
426 log.error(traceback.format_exc())
424 log.error(traceback.format_exc())
427 raise IMCCommitError(str(e))
425 raise IMCCommitError(str(e))
428 finally:
426 finally:
429 # always clear caches; if the commit fails we want a fresh object as well
427 # always clear caches; if the commit fails we want a fresh object as well
430 self.mark_for_invalidation(repo_name)
428 self.mark_for_invalidation(repo_name)
431
429
432 # We trigger the post-push action
430 # We trigger the post-push action
433 hooks_utils.trigger_post_push_hook(
431 hooks_utils.trigger_post_push_hook(
434 username=user.username, action='push_local', repo_name=repo_name,
432 username=user.username, action='push_local', repo_name=repo_name,
435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
433 repo_alias=repo.alias, commit_ids=[tip.raw_id])
436 return tip
434 return tip
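A sketch of editing a single file through this API; loading the repository, its scm instance and the head commit is shown with assumed helpers that appear elsewhere in this module:

    db_repo = Repository.get_by_repo_name('projects/my-repo')   # name is made up
    scm_repo = db_repo.scm_instance()
    head = scm_repo.get_commit()        # assumed: no arguments returns the tip commit
    tip = ScmModel().commit_change(
        repo=scm_repo, repo_name=db_repo.repo_name, commit=head,
        user=2,                         # resolved by _get_user: id, username or User
        author='Jane Doe <jane@example.com>',
        message='Update README',
        content='new file body\n',
        f_path='README.rst')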
437
435
438 def _sanitize_path(self, f_path):
436 def _sanitize_path(self, f_path):
439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
437 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
440 raise NonRelativePathError('%s is not a relative path' % f_path)
438 raise NonRelativePathError('%s is not a relative path' % f_path)
441 if f_path:
439 if f_path:
442 f_path = os.path.normpath(f_path)
440 f_path = os.path.normpath(f_path)
443 return f_path
441 return f_path
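The guard rejects anything that could escape the repository root before the path is normalized; a tiny illustration:

    sanitize = ScmModel()._sanitize_path
    sanitize('docs/index.rst')       # -> 'docs/index.rst'
    sanitize('docs//./index.rst')    # -> 'docs/index.rst' after os.path.normpath
    sanitize('../etc/passwd')        # raises NonRelativePathError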
444
442
445 def get_dirnode_metadata(self, request, commit, dir_node):
443 def get_dirnode_metadata(self, request, commit, dir_node):
446 if not dir_node.is_dir():
444 if not dir_node.is_dir():
447 return []
445 return []
448
446
449 data = []
447 data = []
450 for node in dir_node:
448 for node in dir_node:
451 if not node.is_file():
449 if not node.is_file():
452 # we skip file-nodes
450 # we skip file-nodes
453 continue
451 continue
454
452
455 last_commit = node.last_commit
453 last_commit = node.last_commit
456 last_commit_date = last_commit.date
454 last_commit_date = last_commit.date
457 data.append({
455 data.append({
458 'name': node.name,
456 'name': node.name,
459 'size': h.format_byte_size_binary(node.size),
457 'size': h.format_byte_size_binary(node.size),
460 'modified_at': h.format_date(last_commit_date),
458 'modified_at': h.format_date(last_commit_date),
461 'modified_ts': last_commit_date.isoformat(),
459 'modified_ts': last_commit_date.isoformat(),
462 'revision': last_commit.revision,
460 'revision': last_commit.revision,
463 'short_id': last_commit.short_id,
461 'short_id': last_commit.short_id,
464 'message': h.escape(last_commit.message),
462 'message': h.escape(last_commit.message),
465 'author': h.escape(last_commit.author),
463 'author': h.escape(last_commit.author),
466 'user_profile': h.gravatar_with_user(
464 'user_profile': h.gravatar_with_user(
467 request, last_commit.author),
465 request, last_commit.author),
468 })
466 })
469
467
470 return data
468 return data
471
469
472 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
470 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
473 extended_info=False, content=False, max_file_bytes=None):
471 extended_info=False, content=False, max_file_bytes=None):
474 """
472 """
475 recursive walk in the root dir and return a set of all paths in that dir,
473 recursive walk in the root dir and return a set of all paths in that dir,
476 based on the repository walk function
474 based on the repository walk function
477
475
478 :param repo_name: name of repository
476 :param repo_name: name of repository
479 :param commit_id: commit id for which to list nodes
477 :param commit_id: commit id for which to list nodes
480 :param root_path: root path to list
478 :param root_path: root path to list
481 :param flat: return as a list, if False returns a dict with description
479 :param flat: return as a list, if False returns a dict with description
482 :param max_file_bytes: will not return file contents over this limit
480 :param max_file_bytes: will not return file contents over this limit
483
481
484 """
482 """
485 _files = list()
483 _files = list()
486 _dirs = list()
484 _dirs = list()
487 try:
485 try:
488 _repo = self._get_repo(repo_name)
486 _repo = self._get_repo(repo_name)
489 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
487 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
490 root_path = root_path.lstrip('/')
488 root_path = root_path.lstrip('/')
491 for __, dirs, files in commit.walk(root_path):
489 for __, dirs, files in commit.walk(root_path):
492 for f in files:
490 for f in files:
493 _content = None
491 _content = None
494 _data = f.unicode_path
492 _data = f.unicode_path
495 over_size_limit = (max_file_bytes is not None
493 over_size_limit = (max_file_bytes is not None
496 and f.size > max_file_bytes)
494 and f.size > max_file_bytes)
497
495
498 if not flat:
496 if not flat:
499 _data = {
497 _data = {
500 "name": h.escape(f.unicode_path),
498 "name": h.escape(f.unicode_path),
501 "type": "file",
499 "type": "file",
502 }
500 }
503 if extended_info:
501 if extended_info:
504 _data.update({
502 _data.update({
505 "md5": f.md5,
503 "md5": f.md5,
506 "binary": f.is_binary,
504 "binary": f.is_binary,
507 "size": f.size,
505 "size": f.size,
508 "extension": f.extension,
506 "extension": f.extension,
509 "mimetype": f.mimetype,
507 "mimetype": f.mimetype,
510 "lines": f.lines()[0]
508 "lines": f.lines()[0]
511 })
509 })
512
510
513 if content:
511 if content:
514 full_content = None
512 full_content = None
515 if not f.is_binary and not over_size_limit:
513 if not f.is_binary and not over_size_limit:
516 full_content = safe_str(f.content)
514 full_content = safe_str(f.content)
517
515
518 _data.update({
516 _data.update({
519 "content": full_content,
517 "content": full_content,
520 })
518 })
521 _files.append(_data)
519 _files.append(_data)
522 for d in dirs:
520 for d in dirs:
523 _data = d.unicode_path
521 _data = d.unicode_path
524 if not flat:
522 if not flat:
525 _data = {
523 _data = {
526 "name": h.escape(d.unicode_path),
524 "name": h.escape(d.unicode_path),
527 "type": "dir",
525 "type": "dir",
528 }
526 }
529 if extended_info:
527 if extended_info:
530 _data.update({
528 _data.update({
531 "md5": None,
529 "md5": None,
532 "binary": None,
530 "binary": None,
533 "size": None,
531 "size": None,
534 "extension": None,
532 "extension": None,
535 })
533 })
536 if content:
534 if content:
537 _data.update({
535 _data.update({
538 "content": None
536 "content": None
539 })
537 })
540 _dirs.append(_data)
538 _dirs.append(_data)
541 except RepositoryError:
539 except RepositoryError:
542 log.debug("Exception in get_nodes", exc_info=True)
540 log.debug("Exception in get_nodes", exc_info=True)
543 raise
541 raise
544
542
545 return _dirs, _files
543 return _dirs, _files
546
544
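A hedged usage sketch for get_nodes: with flat=True it returns plain unicode paths, with flat=False it returns dicts shaped as above (plus the optional extended_info and content keys). The model instance, repository name and commit id below are placeholders, not values from this changeset:

    # `scm_model` stands for an instance of the enclosing model class
    dirs, files = scm_model.get_nodes(
        'some/repo', commit_id='tip', root_path='docs',
        flat=False, extended_info=True, content=False,
        max_file_bytes=1024 * 1024)

    for entry in files:
        # each entry: {'name': ..., 'type': 'file', 'md5': ..., 'size': ...,
        #              'extension': ..., 'mimetype': ..., 'lines': ...}
        print('%s %s' % (entry['name'], entry.get('size')))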
547 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
545 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
548 author=None, trigger_push_hook=True):
546 author=None, trigger_push_hook=True):
549 """
547 """
550 Commits multiple given nodes into the repo
548 Commits multiple given nodes into the repo
551
549
552 :param user: RhodeCode User object or user_id, the committer
550 :param user: RhodeCode User object or user_id, the committer
553 :param repo: RhodeCode Repository object
551 :param repo: RhodeCode Repository object
554 :param message: commit message
552 :param message: commit message
555 :param nodes: mapping {filename:{'content':content},...}
553 :param nodes: mapping {filename:{'content':content},...}
556 :param parent_commit: parent commit; if empty, this is treated as
554 :param parent_commit: parent commit; if empty, this is treated as
557 the initial commit
555 the initial commit
558 :param author: author of the commit; can be different than the committer,
556 :param author: author of the commit; can be different than the committer,
559 but only for git
557 but only for git
560 :param trigger_push_hook: trigger push hooks
558 :param trigger_push_hook: trigger push hooks
561
559
562 :returns: the newly committed commit
560 :returns: the newly committed commit
563 """
561 """
564
562
565 user = self._get_user(user)
563 user = self._get_user(user)
566 scm_instance = repo.scm_instance(cache=False)
564 scm_instance = repo.scm_instance(cache=False)
567
565
568 processed_nodes = []
566 processed_nodes = []
569 for f_path in nodes:
567 for f_path in nodes:
570 f_path = self._sanitize_path(f_path)
568 f_path = self._sanitize_path(f_path)
571 content = nodes[f_path]['content']
569 content = nodes[f_path]['content']
572 f_path = safe_str(f_path)
570 f_path = safe_str(f_path)
573 # decoding here ensures we end up with properly encoded values;
571 # decoding here ensures we end up with properly encoded values;
574 # in any other case this will throw exceptions and deny the commit
572 # in any other case this will throw exceptions and deny the commit
575 if isinstance(content, (basestring,)):
573 if isinstance(content, (basestring,)):
576 content = safe_str(content)
574 content = safe_str(content)
577 elif isinstance(content, (file, cStringIO.OutputType,)):
575 elif isinstance(content, (file, cStringIO.OutputType,)):
578 content = content.read()
576 content = content.read()
579 else:
577 else:
580 raise Exception('Content is of unrecognized type %s' % (
578 raise Exception('Content is of unrecognized type %s' % (
581 type(content)
579 type(content)
582 ))
580 ))
583 processed_nodes.append((f_path, content))
581 processed_nodes.append((f_path, content))
584
582
585 message = safe_unicode(message)
583 message = safe_unicode(message)
586 commiter = user.full_contact
584 commiter = user.full_contact
587 author = safe_unicode(author) if author else commiter
585 author = safe_unicode(author) if author else commiter
588
586
589 imc = scm_instance.in_memory_commit
587 imc = scm_instance.in_memory_commit
590
588
591 if not parent_commit:
589 if not parent_commit:
592 parent_commit = EmptyCommit(alias=scm_instance.alias)
590 parent_commit = EmptyCommit(alias=scm_instance.alias)
593
591
594 if isinstance(parent_commit, EmptyCommit):
592 if isinstance(parent_commit, EmptyCommit):
595 # EmptyCommit means we're editing an empty repository
593 # EmptyCommit means we're editing an empty repository
596 parents = None
594 parents = None
597 else:
595 else:
598 parents = [parent_commit]
596 parents = [parent_commit]
599 # add multiple nodes
597 # add multiple nodes
600 for path, content in processed_nodes:
598 for path, content in processed_nodes:
601 imc.add(FileNode(path, content=content))
599 imc.add(FileNode(path, content=content))
602 # TODO: handle pre push scenario
600 # TODO: handle pre push scenario
603 tip = imc.commit(message=message,
601 tip = imc.commit(message=message,
604 author=author,
602 author=author,
605 parents=parents,
603 parents=parents,
606 branch=parent_commit.branch)
604 branch=parent_commit.branch)
607
605
608 self.mark_for_invalidation(repo.repo_name)
606 self.mark_for_invalidation(repo.repo_name)
609 if trigger_push_hook:
607 if trigger_push_hook:
610 hooks_utils.trigger_post_push_hook(
608 hooks_utils.trigger_post_push_hook(
611 username=user.username, action='push_local',
609 username=user.username, action='push_local',
612 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
610 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
613 commit_ids=[tip.raw_id])
611 commit_ids=[tip.raw_id])
614 return tip
612 return tip
615
613
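A minimal usage sketch for create_nodes, assuming `scm_model`, `db_user` and `db_repo` are an instance of this model, a User and a Repository respectively (all placeholders):

    nodes = {
        'docs/index.rst': {'content': 'Hello'},
        'setup.cfg': {'content': '[metadata]\nname = example\n'},
    }
    # parent_commit=None falls back to EmptyCommit, i.e. the initial commit
    tip = scm_model.create_nodes(
        user=db_user, repo=db_repo, message=u'add docs and config',
        nodes=nodes, parent_commit=None, author=None, trigger_push_hook=True)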
616 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
614 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
617 author=None, trigger_push_hook=True):
615 author=None, trigger_push_hook=True):
618 user = self._get_user(user)
616 user = self._get_user(user)
619 scm_instance = repo.scm_instance(cache=False)
617 scm_instance = repo.scm_instance(cache=False)
620
618
621 message = safe_unicode(message)
619 message = safe_unicode(message)
622 commiter = user.full_contact
620 commiter = user.full_contact
623 author = safe_unicode(author) if author else commiter
621 author = safe_unicode(author) if author else commiter
624
622
625 imc = scm_instance.in_memory_commit
623 imc = scm_instance.in_memory_commit
626
624
627 if not parent_commit:
625 if not parent_commit:
628 parent_commit = EmptyCommit(alias=scm_instance.alias)
626 parent_commit = EmptyCommit(alias=scm_instance.alias)
629
627
630 if isinstance(parent_commit, EmptyCommit):
628 if isinstance(parent_commit, EmptyCommit):
631 # EmptyCommit means we're editing an empty repository
629 # EmptyCommit means we're editing an empty repository
632 parents = None
630 parents = None
633 else:
631 else:
634 parents = [parent_commit]
632 parents = [parent_commit]
635
633
636 # add multiple nodes
634 # add multiple nodes
637 for _filename, data in nodes.items():
635 for _filename, data in nodes.items():
638 # new filename, can be renamed from the old one, also sanitize
636 # new filename, can be renamed from the old one, also sanitize
639 # the path for any hack around relative paths like ../../ etc.
637 # the path for any hack around relative paths like ../../ etc.
640 filename = self._sanitize_path(data['filename'])
638 filename = self._sanitize_path(data['filename'])
641 old_filename = self._sanitize_path(_filename)
639 old_filename = self._sanitize_path(_filename)
642 content = data['content']
640 content = data['content']
643
641
644 filenode = FileNode(old_filename, content=content)
642 filenode = FileNode(old_filename, content=content)
645 op = data['op']
643 op = data['op']
646 if op == 'add':
644 if op == 'add':
647 imc.add(filenode)
645 imc.add(filenode)
648 elif op == 'del':
646 elif op == 'del':
649 imc.remove(filenode)
647 imc.remove(filenode)
650 elif op == 'mod':
648 elif op == 'mod':
651 if filename != old_filename:
649 if filename != old_filename:
652 # TODO: handle renames more efficiently, needs vcs lib
650 # TODO: handle renames more efficiently, needs vcs lib
653 # changes
651 # changes
654 imc.remove(filenode)
652 imc.remove(filenode)
655 imc.add(FileNode(filename, content=content))
653 imc.add(FileNode(filename, content=content))
656 else:
654 else:
657 imc.change(filenode)
655 imc.change(filenode)
658
656
659 try:
657 try:
660 # TODO: handle pre push scenario
658 # TODO: handle pre push scenario
661 # commit changes
659 # commit changes
662 tip = imc.commit(message=message,
660 tip = imc.commit(message=message,
663 author=author,
661 author=author,
664 parents=parents,
662 parents=parents,
665 branch=parent_commit.branch)
663 branch=parent_commit.branch)
666 except NodeNotChangedError:
664 except NodeNotChangedError:
667 raise
665 raise
668 except Exception as e:
666 except Exception as e:
669 log.exception("Unexpected exception during call to imc.commit")
667 log.exception("Unexpected exception during call to imc.commit")
670 raise IMCCommitError(str(e))
668 raise IMCCommitError(str(e))
671 finally:
669 finally:
672 # always clear caches; if the commit fails we also want a fresh object
670 # always clear caches; if the commit fails we also want a fresh object
673 self.mark_for_invalidation(repo.repo_name)
671 self.mark_for_invalidation(repo.repo_name)
674
672
675 if trigger_push_hook:
673 if trigger_push_hook:
676 hooks_utils.trigger_post_push_hook(
674 hooks_utils.trigger_post_push_hook(
677 username=user.username, action='push_local',
675 username=user.username, action='push_local',
678 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
676 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
679 commit_ids=[tip.raw_id])
677 commit_ids=[tip.raw_id])
680
678
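The loop above expects each entry of `nodes` to carry 'op', 'filename' and 'content'; the dict key is the current filename, and a rename is expressed as remove + add. A sketch of that mapping (names and values are placeholders):

    nodes = {
        'README.rst': {
            'op': 'mod',                    # one of 'add', 'del', 'mod'
            'filename': 'docs/README.rst',  # differs from the key -> rename
            'content': 'updated text',
        },
        'obsolete.txt': {'op': 'del', 'filename': 'obsolete.txt', 'content': ''},
    }
    scm_model.update_nodes(db_user, db_repo, u'reorganise docs', nodes)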
681 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
679 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
682 author=None, trigger_push_hook=True):
680 author=None, trigger_push_hook=True):
683 """
681 """
684 Deletes multiple given nodes from `repo`
682 Deletes multiple given nodes from `repo`
685
683
686 :param user: RhodeCode User object or user_id, the committer
684 :param user: RhodeCode User object or user_id, the committer
687 :param repo: RhodeCode Repository object
685 :param repo: RhodeCode Repository object
688 :param message: commit message
686 :param message: commit message
689 :param nodes: mapping {filename:{'content':content},...}
687 :param nodes: mapping {filename:{'content':content},...}
690 :param parent_commit: parent commit; if empty, this is treated as the
688 :param parent_commit: parent commit; if empty, this is treated as the
691 initial commit
689 initial commit
692 :param author: author of the commit; can be different than the committer,
690 :param author: author of the commit; can be different than the committer,
693 but only for git
691 but only for git
694 :param trigger_push_hook: trigger push hooks
692 :param trigger_push_hook: trigger push hooks
695
693
696 :returns: new commit after deletion
694 :returns: new commit after deletion
697 """
695 """
698
696
699 user = self._get_user(user)
697 user = self._get_user(user)
700 scm_instance = repo.scm_instance(cache=False)
698 scm_instance = repo.scm_instance(cache=False)
701
699
702 processed_nodes = []
700 processed_nodes = []
703 for f_path in nodes:
701 for f_path in nodes:
704 f_path = self._sanitize_path(f_path)
702 f_path = self._sanitize_path(f_path)
705 # content can be empty, but for compatibility it accepts the same dict
703 # content can be empty, but for compatibility it accepts the same dict
706 # structure as add_nodes
704 # structure as add_nodes
707 content = nodes[f_path].get('content')
705 content = nodes[f_path].get('content')
708 processed_nodes.append((f_path, content))
706 processed_nodes.append((f_path, content))
709
707
710 message = safe_unicode(message)
708 message = safe_unicode(message)
711 commiter = user.full_contact
709 commiter = user.full_contact
712 author = safe_unicode(author) if author else commiter
710 author = safe_unicode(author) if author else commiter
713
711
714 imc = scm_instance.in_memory_commit
712 imc = scm_instance.in_memory_commit
715
713
716 if not parent_commit:
714 if not parent_commit:
717 parent_commit = EmptyCommit(alias=scm_instance.alias)
715 parent_commit = EmptyCommit(alias=scm_instance.alias)
718
716
719 if isinstance(parent_commit, EmptyCommit):
717 if isinstance(parent_commit, EmptyCommit):
720 # EmptyCommit means we're editing an empty repository
718 # EmptyCommit means we're editing an empty repository
721 parents = None
719 parents = None
722 else:
720 else:
723 parents = [parent_commit]
721 parents = [parent_commit]
724 # add multiple nodes
722 # add multiple nodes
725 for path, content in processed_nodes:
723 for path, content in processed_nodes:
726 imc.remove(FileNode(path, content=content))
724 imc.remove(FileNode(path, content=content))
727
725
728 # TODO: handle pre push scenario
726 # TODO: handle pre push scenario
729 tip = imc.commit(message=message,
727 tip = imc.commit(message=message,
730 author=author,
728 author=author,
731 parents=parents,
729 parents=parents,
732 branch=parent_commit.branch)
730 branch=parent_commit.branch)
733
731
734 self.mark_for_invalidation(repo.repo_name)
732 self.mark_for_invalidation(repo.repo_name)
735 if trigger_push_hook:
733 if trigger_push_hook:
736 hooks_utils.trigger_post_push_hook(
734 hooks_utils.trigger_post_push_hook(
737 username=user.username, action='push_local',
735 username=user.username, action='push_local',
738 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
736 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
739 commit_ids=[tip.raw_id])
737 commit_ids=[tip.raw_id])
740 return tip
738 return tip
741
739
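A brief sketch for delete_nodes: the 'content' entry may be omitted, since the code only reads it via .get('content') and the mapping shape is kept for compatibility with create_nodes (names below are placeholders):

    tip = scm_model.delete_nodes(
        db_user, db_repo, u'remove generated files',
        nodes={'build/output.bin': {}}, trigger_push_hook=False)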
742 def strip(self, repo, commit_id, branch):
740 def strip(self, repo, commit_id, branch):
743 scm_instance = repo.scm_instance(cache=False)
741 scm_instance = repo.scm_instance(cache=False)
744 scm_instance.config.clear_section('hooks')
742 scm_instance.config.clear_section('hooks')
745 scm_instance.strip(commit_id, branch)
743 scm_instance.strip(commit_id, branch)
746 self.mark_for_invalidation(repo.repo_name)
744 self.mark_for_invalidation(repo.repo_name)
747
745
748 def get_unread_journal(self):
746 def get_unread_journal(self):
749 return self.sa.query(UserLog).count()
747 return self.sa.query(UserLog).count()
750
748
751 def get_repo_landing_revs(self, repo=None):
749 def get_repo_landing_revs(self, translator, repo=None):
752 """
750 """
753 Generates select options with tags, branches and bookmarks (bookmarks for hg only)
751 Generates select options with tags, branches and bookmarks (bookmarks for hg only)
754 grouped by type
752 grouped by type
755
753
756 :param repo:
754 :param repo:
757 """
755 """
758
756 _ = translator
759 repo = self._get_repo(repo)
757 repo = self._get_repo(repo)
760
758
761 hist_l = [
759 hist_l = [
762 ['rev:tip', _('latest tip')]
760 ['rev:tip', _('latest tip')]
763 ]
761 ]
764 choices = [
762 choices = [
765 'rev:tip'
763 'rev:tip'
766 ]
764 ]
767
765
768 if not repo:
766 if not repo:
769 return choices, hist_l
767 return choices, hist_l
770
768
771 repo = repo.scm_instance()
769 repo = repo.scm_instance()
772
770
773 branches_group = (
771 branches_group = (
774 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
772 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
775 for b in repo.branches],
773 for b in repo.branches],
776 _("Branches"))
774 _("Branches"))
777 hist_l.append(branches_group)
775 hist_l.append(branches_group)
778 choices.extend([x[0] for x in branches_group[0]])
776 choices.extend([x[0] for x in branches_group[0]])
779
777
780 if repo.alias == 'hg':
778 if repo.alias == 'hg':
781 bookmarks_group = (
779 bookmarks_group = (
782 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
780 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
783 for b in repo.bookmarks],
781 for b in repo.bookmarks],
784 _("Bookmarks"))
782 _("Bookmarks"))
785 hist_l.append(bookmarks_group)
783 hist_l.append(bookmarks_group)
786 choices.extend([x[0] for x in bookmarks_group[0]])
784 choices.extend([x[0] for x in bookmarks_group[0]])
787
785
788 tags_group = (
786 tags_group = (
789 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
787 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
790 for t in repo.tags],
788 for t in repo.tags],
791 _("Tags"))
789 _("Tags"))
792 hist_l.append(tags_group)
790 hist_l.append(tags_group)
793 choices.extend([x[0] for x in tags_group[0]])
791 choices.extend([x[0] for x in tags_group[0]])
794
792
795 return choices, hist_l
793 return choices, hist_l
796
794
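Illustrative shapes of the two values returned by get_repo_landing_revs; `request.translate` is an assumption for the new translator argument, and the refs shown are placeholders:

    choices, hist_l = scm_model.get_repo_landing_revs(
        request.translate, repo=db_repo)
    # choices -> ['rev:tip', 'branch:default', 'book:stable', 'tag:v1.0', ...]
    # hist_l  -> [['rev:tip', 'latest tip'],
    #             ([('branch:default', 'default'), ...], 'Branches'),
    #             ([('book:stable', 'stable'), ...], 'Bookmarks'),  # hg only
    #             ([('tag:v1.0', 'v1.0'), ...], 'Tags')]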
797 def install_git_hook(self, repo, force_create=False):
795 def install_git_hook(self, repo, force_create=False):
798 """
796 """
799 Creates a rhodecode hook inside a git repository
797 Creates a rhodecode hook inside a git repository
800
798
801 :param repo: Instance of VCS repo
799 :param repo: Instance of VCS repo
802 :param force_create: Create even if same name hook exists
800 :param force_create: Create even if same name hook exists
803 """
801 """
804
802
805 loc = os.path.join(repo.path, 'hooks')
803 loc = os.path.join(repo.path, 'hooks')
806 if not repo.bare:
804 if not repo.bare:
807 loc = os.path.join(repo.path, '.git', 'hooks')
805 loc = os.path.join(repo.path, '.git', 'hooks')
808 if not os.path.isdir(loc):
806 if not os.path.isdir(loc):
809 os.makedirs(loc, mode=0777)
807 os.makedirs(loc, mode=0777)
810
808
811 tmpl_post = pkg_resources.resource_string(
809 tmpl_post = pkg_resources.resource_string(
812 'rhodecode', '/'.join(
810 'rhodecode', '/'.join(
813 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
811 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
814 tmpl_pre = pkg_resources.resource_string(
812 tmpl_pre = pkg_resources.resource_string(
815 'rhodecode', '/'.join(
813 'rhodecode', '/'.join(
816 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
814 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
817
815
818 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
816 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
819 _hook_file = os.path.join(loc, '%s-receive' % h_type)
817 _hook_file = os.path.join(loc, '%s-receive' % h_type)
820 log.debug('Installing git hook in repo %s', repo)
818 log.debug('Installing git hook in repo %s', repo)
821 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
819 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
822
820
823 if _rhodecode_hook or force_create:
821 if _rhodecode_hook or force_create:
824 log.debug('writing %s hook file !', h_type)
822 log.debug('writing %s hook file !', h_type)
825 try:
823 try:
826 with open(_hook_file, 'wb') as f:
824 with open(_hook_file, 'wb') as f:
827 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
825 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
828 tmpl = tmpl.replace('_ENV_', sys.executable)
826 tmpl = tmpl.replace('_ENV_', sys.executable)
829 f.write(tmpl)
827 f.write(tmpl)
830 os.chmod(_hook_file, 0755)
828 os.chmod(_hook_file, 0755)
831 except IOError:
829 except IOError:
832 log.exception('error writing hook file %s', _hook_file)
830 log.exception('error writing hook file %s', _hook_file)
833 else:
831 else:
834 log.debug('skipping writing hook file')
832 log.debug('skipping writing hook file')
835
833
836 def install_svn_hooks(self, repo, force_create=False):
834 def install_svn_hooks(self, repo, force_create=False):
837 """
835 """
838 Creates rhodecode hooks inside an svn repository
836 Creates rhodecode hooks inside an svn repository
839
837
840 :param repo: Instance of VCS repo
838 :param repo: Instance of VCS repo
841 :param force_create: Create even if same name hook exists
839 :param force_create: Create even if same name hook exists
842 """
840 """
843 hooks_path = os.path.join(repo.path, 'hooks')
841 hooks_path = os.path.join(repo.path, 'hooks')
844 if not os.path.isdir(hooks_path):
842 if not os.path.isdir(hooks_path):
845 os.makedirs(hooks_path)
843 os.makedirs(hooks_path)
846 post_commit_tmpl = pkg_resources.resource_string(
844 post_commit_tmpl = pkg_resources.resource_string(
847 'rhodecode', '/'.join(
845 'rhodecode', '/'.join(
848 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
846 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
849 pre_commit_template = pkg_resources.resource_string(
847 pre_commit_template = pkg_resources.resource_string(
850 'rhodecode', '/'.join(
848 'rhodecode', '/'.join(
851 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
849 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
852 templates = {
850 templates = {
853 'post-commit': post_commit_tmpl,
851 'post-commit': post_commit_tmpl,
854 'pre-commit': pre_commit_template
852 'pre-commit': pre_commit_template
855 }
853 }
856 for filename in templates:
854 for filename in templates:
857 _hook_file = os.path.join(hooks_path, filename)
855 _hook_file = os.path.join(hooks_path, filename)
858 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
856 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
859 if _rhodecode_hook or force_create:
857 if _rhodecode_hook or force_create:
860 log.debug('writing %s hook file !', filename)
858 log.debug('writing %s hook file !', filename)
861 template = templates[filename]
859 template = templates[filename]
862 try:
860 try:
863 with open(_hook_file, 'wb') as f:
861 with open(_hook_file, 'wb') as f:
864 template = template.replace(
862 template = template.replace(
865 '_TMPL_', rhodecode.__version__)
863 '_TMPL_', rhodecode.__version__)
866 template = template.replace('_ENV_', sys.executable)
864 template = template.replace('_ENV_', sys.executable)
867 f.write(template)
865 f.write(template)
868 os.chmod(_hook_file, 0755)
866 os.chmod(_hook_file, 0755)
869 except IOError:
867 except IOError:
870 log.exception('error writing hook file %s', filename)
868 log.exception('error writing hook file %s', filename)
871 else:
869 else:
872 log.debug('skipping writing hook file')
870 log.debug('skipping writing hook file')
873
871
874 def install_hooks(self, repo, repo_type):
872 def install_hooks(self, repo, repo_type):
875 if repo_type == 'git':
873 if repo_type == 'git':
876 self.install_git_hook(repo)
874 self.install_git_hook(repo)
877 elif repo_type == 'svn':
875 elif repo_type == 'svn':
878 self.install_svn_hooks(repo)
876 self.install_svn_hooks(repo)
879
877
880 def get_server_info(self, environ=None):
878 def get_server_info(self, environ=None):
881 server_info = get_system_info(environ)
879 server_info = get_system_info(environ)
882 return server_info
880 return server_info
883
881
884
882
885 def _check_rhodecode_hook(hook_path):
883 def _check_rhodecode_hook(hook_path):
886 """
884 """
887 Check if the hook was created by RhodeCode
885 Check if the hook was created by RhodeCode
888 """
886 """
889 if not os.path.exists(hook_path):
887 if not os.path.exists(hook_path):
890 return True
888 return True
891
889
892 log.debug('hook exists, checking if it is from rhodecode')
890 log.debug('hook exists, checking if it is from rhodecode')
893 hook_content = _read_hook(hook_path)
891 hook_content = _read_hook(hook_path)
894 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
892 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
895 if matches:
893 if matches:
896 try:
894 try:
897 version = matches.groups()[0]
895 version = matches.groups()[0]
898 log.debug('got %s, it is rhodecode', version)
896 log.debug('got %s, it is rhodecode', version)
899 return True
897 return True
900 except Exception:
898 except Exception:
901 log.exception("Exception while reading the hook version.")
899 log.exception("Exception while reading the hook version.")
902
900
903 return False
901 return False
904
902
905
903
906 def _read_hook(hook_path):
904 def _read_hook(hook_path):
907 with open(hook_path, 'rb') as f:
905 with open(hook_path, 'rb') as f:
908 content = f.read()
906 content = f.read()
909 return content
907 return content
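The hook installers above only overwrite files that _check_rhodecode_hook recognises as RhodeCode-managed, i.e. files containing an RC_HOOK_VER assignment (presumably filled in through the _TMPL_ placeholder in the templates). A minimal sketch of that marker check, with a made-up hook body:

    import re

    hook_content = "#!/usr/bin/env python\nRC_HOOK_VER = '4.11.0'\n"
    match = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if match:
        # group(1) includes the quotes, e.g. "'4.11.0'"
        print('managed hook, version %s' % match.group(1))
    else:
        print('custom hook, leaving it alone')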
@@ -1,906 +1,911 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 users model for RhodeCode
22 users model for RhodeCode
23 """
23 """
24
24
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
28 import ipaddress
27
29
28 import datetime
30 from pyramid.threadlocal import get_current_request
29 from rhodecode.translation import temp_translation_factory as _
30
31 import ipaddress
32 from sqlalchemy.exc import DatabaseError
31 from sqlalchemy.exc import DatabaseError
33
32
34 from rhodecode import events
33 from rhodecode import events
35 from rhodecode.lib.user_log_filter import user_log_filter
34 from rhodecode.lib.user_log_filter import user_log_filter
36 from rhodecode.lib.utils2 import (
35 from rhodecode.lib.utils2 import (
37 safe_unicode, get_current_rhodecode_user, action_logger_generic,
36 safe_unicode, get_current_rhodecode_user, action_logger_generic,
38 AttributeDict, str2bool)
37 AttributeDict, str2bool)
39 from rhodecode.lib.exceptions import (
38 from rhodecode.lib.exceptions import (
40 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
41 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
40 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
42 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.caching_query import FromCache
43 from rhodecode.model import BaseModel
42 from rhodecode.model import BaseModel
44 from rhodecode.model.auth_token import AuthTokenModel
43 from rhodecode.model.auth_token import AuthTokenModel
45 from rhodecode.model.db import (
44 from rhodecode.model.db import (
46 _hash_key, true, false, or_, joinedload, User, UserToPerm,
45 _hash_key, true, false, or_, joinedload, User, UserToPerm,
47 UserEmailMap, UserIpMap, UserLog)
46 UserEmailMap, UserIpMap, UserLog)
48 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
49 from rhodecode.model.repo_group import RepoGroupModel
48 from rhodecode.model.repo_group import RepoGroupModel
50
49
51
50
52 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
53
52
54
53
55 class UserModel(BaseModel):
54 class UserModel(BaseModel):
56 cls = User
55 cls = User
57
56
58 def get(self, user_id, cache=False):
57 def get(self, user_id, cache=False):
59 user = self.sa.query(User)
58 user = self.sa.query(User)
60 if cache:
59 if cache:
61 user = user.options(
60 user = user.options(
62 FromCache("sql_cache_short", "get_user_%s" % user_id))
61 FromCache("sql_cache_short", "get_user_%s" % user_id))
63 return user.get(user_id)
62 return user.get(user_id)
64
63
65 def get_user(self, user):
64 def get_user(self, user):
66 return self._get_user(user)
65 return self._get_user(user)
67
66
68 def _serialize_user(self, user):
67 def _serialize_user(self, user):
69 import rhodecode.lib.helpers as h
68 import rhodecode.lib.helpers as h
70
69
71 return {
70 return {
72 'id': user.user_id,
71 'id': user.user_id,
73 'first_name': user.first_name,
72 'first_name': user.first_name,
74 'last_name': user.last_name,
73 'last_name': user.last_name,
75 'username': user.username,
74 'username': user.username,
76 'email': user.email,
75 'email': user.email,
77 'icon_link': h.gravatar_url(user.email, 30),
76 'icon_link': h.gravatar_url(user.email, 30),
78 'value_display': h.escape(h.person(user)),
77 'value_display': h.escape(h.person(user)),
79 'value': user.username,
78 'value': user.username,
80 'value_type': 'user',
79 'value_type': 'user',
81 'active': user.active,
80 'active': user.active,
82 }
81 }
83
82
84 def get_users(self, name_contains=None, limit=20, only_active=True):
83 def get_users(self, name_contains=None, limit=20, only_active=True):
85
84
86 query = self.sa.query(User)
85 query = self.sa.query(User)
87 if only_active:
86 if only_active:
88 query = query.filter(User.active == true())
87 query = query.filter(User.active == true())
89
88
90 if name_contains:
89 if name_contains:
91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
90 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
92 query = query.filter(
91 query = query.filter(
93 or_(
92 or_(
94 User.name.ilike(ilike_expression),
93 User.name.ilike(ilike_expression),
95 User.lastname.ilike(ilike_expression),
94 User.lastname.ilike(ilike_expression),
96 User.username.ilike(ilike_expression)
95 User.username.ilike(ilike_expression)
97 )
96 )
98 )
97 )
99 query = query.limit(limit)
98 query = query.limit(limit)
100 users = query.all()
99 users = query.all()
101
100
102 _users = [
101 _users = [
103 self._serialize_user(user) for user in users
102 self._serialize_user(user) for user in users
104 ]
103 ]
105 return _users
104 return _users
106
105
107 def get_by_username(self, username, cache=False, case_insensitive=False):
106 def get_by_username(self, username, cache=False, case_insensitive=False):
108
107
109 if case_insensitive:
108 if case_insensitive:
110 user = self.sa.query(User).filter(User.username.ilike(username))
109 user = self.sa.query(User).filter(User.username.ilike(username))
111 else:
110 else:
112 user = self.sa.query(User)\
111 user = self.sa.query(User)\
113 .filter(User.username == username)
112 .filter(User.username == username)
114 if cache:
113 if cache:
115 name_key = _hash_key(username)
114 name_key = _hash_key(username)
116 user = user.options(
115 user = user.options(
117 FromCache("sql_cache_short", "get_user_%s" % name_key))
116 FromCache("sql_cache_short", "get_user_%s" % name_key))
118 return user.scalar()
117 return user.scalar()
119
118
120 def get_by_email(self, email, cache=False, case_insensitive=False):
119 def get_by_email(self, email, cache=False, case_insensitive=False):
121 return User.get_by_email(email, case_insensitive, cache)
120 return User.get_by_email(email, case_insensitive, cache)
122
121
123 def get_by_auth_token(self, auth_token, cache=False):
122 def get_by_auth_token(self, auth_token, cache=False):
124 return User.get_by_auth_token(auth_token, cache)
123 return User.get_by_auth_token(auth_token, cache)
125
124
126 def get_active_user_count(self, cache=False):
125 def get_active_user_count(self, cache=False):
127 return User.query().filter(
126 return User.query().filter(
128 User.active == True).filter(
127 User.active == True).filter(
129 User.username != User.DEFAULT_USER).count()
128 User.username != User.DEFAULT_USER).count()
130
129
131 def create(self, form_data, cur_user=None):
130 def create(self, form_data, cur_user=None):
132 if not cur_user:
131 if not cur_user:
133 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
132 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
134
133
135 user_data = {
134 user_data = {
136 'username': form_data['username'],
135 'username': form_data['username'],
137 'password': form_data['password'],
136 'password': form_data['password'],
138 'email': form_data['email'],
137 'email': form_data['email'],
139 'firstname': form_data['firstname'],
138 'firstname': form_data['firstname'],
140 'lastname': form_data['lastname'],
139 'lastname': form_data['lastname'],
141 'active': form_data['active'],
140 'active': form_data['active'],
142 'extern_type': form_data['extern_type'],
141 'extern_type': form_data['extern_type'],
143 'extern_name': form_data['extern_name'],
142 'extern_name': form_data['extern_name'],
144 'admin': False,
143 'admin': False,
145 'cur_user': cur_user
144 'cur_user': cur_user
146 }
145 }
147
146
148 if 'create_repo_group' in form_data:
147 if 'create_repo_group' in form_data:
149 user_data['create_repo_group'] = str2bool(
148 user_data['create_repo_group'] = str2bool(
150 form_data.get('create_repo_group'))
149 form_data.get('create_repo_group'))
151
150
152 try:
151 try:
153 if form_data.get('password_change'):
152 if form_data.get('password_change'):
154 user_data['force_password_change'] = True
153 user_data['force_password_change'] = True
155 return UserModel().create_or_update(**user_data)
154 return UserModel().create_or_update(**user_data)
156 except Exception:
155 except Exception:
157 log.error(traceback.format_exc())
156 log.error(traceback.format_exc())
158 raise
157 raise
159
158
160 def update_user(self, user, skip_attrs=None, **kwargs):
159 def update_user(self, user, skip_attrs=None, **kwargs):
161 from rhodecode.lib.auth import get_crypt_password
160 from rhodecode.lib.auth import get_crypt_password
162
161
163 user = self._get_user(user)
162 user = self._get_user(user)
164 if user.username == User.DEFAULT_USER:
163 if user.username == User.DEFAULT_USER:
165 raise DefaultUserException(
164 raise DefaultUserException(
166 _("You can't Edit this user since it's"
165 "You can't edit this user (`%(username)s`) since it's "
167 " crucial for entire application"))
166 "crucial for entire application" % {
167 'username': user.username})
168
168
169 # first store only defaults
169 # first store only defaults
170 user_attrs = {
170 user_attrs = {
171 'updating_user_id': user.user_id,
171 'updating_user_id': user.user_id,
172 'username': user.username,
172 'username': user.username,
173 'password': user.password,
173 'password': user.password,
174 'email': user.email,
174 'email': user.email,
175 'firstname': user.name,
175 'firstname': user.name,
176 'lastname': user.lastname,
176 'lastname': user.lastname,
177 'active': user.active,
177 'active': user.active,
178 'admin': user.admin,
178 'admin': user.admin,
179 'extern_name': user.extern_name,
179 'extern_name': user.extern_name,
180 'extern_type': user.extern_type,
180 'extern_type': user.extern_type,
181 'language': user.user_data.get('language')
181 'language': user.user_data.get('language')
182 }
182 }
183
183
184 # in case there's new_password, that comes from form, use it to
184 # in case there's new_password, that comes from form, use it to
185 # store password
185 # store password
186 if kwargs.get('new_password'):
186 if kwargs.get('new_password'):
187 kwargs['password'] = kwargs['new_password']
187 kwargs['password'] = kwargs['new_password']
188
188
189 # cleanups, my_account password change form
189 # cleanups, my_account password change form
190 kwargs.pop('current_password', None)
190 kwargs.pop('current_password', None)
191 kwargs.pop('new_password', None)
191 kwargs.pop('new_password', None)
192
192
193 # cleanups, user edit password change form
193 # cleanups, user edit password change form
194 kwargs.pop('password_confirmation', None)
194 kwargs.pop('password_confirmation', None)
195 kwargs.pop('password_change', None)
195 kwargs.pop('password_change', None)
196
196
197 # create repo group on user creation
197 # create repo group on user creation
198 kwargs.pop('create_repo_group', None)
198 kwargs.pop('create_repo_group', None)
199
199
200 # legacy forms send name, which is the firstname
200 # legacy forms send name, which is the firstname
201 firstname = kwargs.pop('name', None)
201 firstname = kwargs.pop('name', None)
202 if firstname:
202 if firstname:
203 kwargs['firstname'] = firstname
203 kwargs['firstname'] = firstname
204
204
205 for k, v in kwargs.items():
205 for k, v in kwargs.items():
206 # skip if we don't want to update this
206 # skip if we don't want to update this
207 if skip_attrs and k in skip_attrs:
207 if skip_attrs and k in skip_attrs:
208 continue
208 continue
209
209
210 user_attrs[k] = v
210 user_attrs[k] = v
211
211
212 try:
212 try:
213 return self.create_or_update(**user_attrs)
213 return self.create_or_update(**user_attrs)
214 except Exception:
214 except Exception:
215 log.error(traceback.format_exc())
215 log.error(traceback.format_exc())
216 raise
216 raise
217
217
218 def create_or_update(
218 def create_or_update(
219 self, username, password, email, firstname='', lastname='',
219 self, username, password, email, firstname='', lastname='',
220 active=True, admin=False, extern_type=None, extern_name=None,
220 active=True, admin=False, extern_type=None, extern_name=None,
221 cur_user=None, plugin=None, force_password_change=False,
221 cur_user=None, plugin=None, force_password_change=False,
222 allow_to_create_user=True, create_repo_group=None,
222 allow_to_create_user=True, create_repo_group=None,
223 updating_user_id=None, language=None, strict_creation_check=True):
223 updating_user_id=None, language=None, strict_creation_check=True):
224 """
224 """
225 Creates a new instance if not found, or updates current one
225 Creates a new instance if not found, or updates current one
226
226
227 :param username:
227 :param username:
228 :param password:
228 :param password:
229 :param email:
229 :param email:
230 :param firstname:
230 :param firstname:
231 :param lastname:
231 :param lastname:
232 :param active:
232 :param active:
233 :param admin:
233 :param admin:
234 :param extern_type:
234 :param extern_type:
235 :param extern_name:
235 :param extern_name:
236 :param cur_user:
236 :param cur_user:
237 :param plugin: optional plugin this method was called from
237 :param plugin: optional plugin this method was called from
238 :param force_password_change: toggles new or existing user flag
238 :param force_password_change: toggles new or existing user flag
239 for password change
239 for password change
240 :param allow_to_create_user: Defines if the method can actually create
240 :param allow_to_create_user: Defines if the method can actually create
241 new users
241 new users
242 :param create_repo_group: Defines if the method should also
242 :param create_repo_group: Defines if the method should also
243 create a repo group named after the user, owned by that user
243 create a repo group named after the user, owned by that user
244 :param updating_user_id: if set, this is the user we want to
244 :param updating_user_id: if set, this is the user we want to
245 update; this allows editing the username.
245 update; this allows editing the username.
246 :param language: language of user from interface.
246 :param language: language of user from interface.
247
247
248 :returns: new User object with injected `is_new_user` attribute.
248 :returns: new User object with injected `is_new_user` attribute.
249 """
249 """
250
250 if not cur_user:
251 if not cur_user:
251 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
252 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
252
253
253 from rhodecode.lib.auth import (
254 from rhodecode.lib.auth import (
254 get_crypt_password, check_password, generate_auth_token)
255 get_crypt_password, check_password, generate_auth_token)
255 from rhodecode.lib.hooks_base import (
256 from rhodecode.lib.hooks_base import (
256 log_create_user, check_allowed_create_user)
257 log_create_user, check_allowed_create_user)
257
258
258 def _password_change(new_user, password):
259 def _password_change(new_user, password):
259 old_password = new_user.password or ''
260 old_password = new_user.password or ''
260 # empty password
261 # empty password
261 if not old_password:
262 if not old_password:
262 return False
263 return False
263
264
264 # password check is only needed for RhodeCode internal auth calls
265 # password check is only needed for RhodeCode internal auth calls
265 # in case it's a plugin we don't care
266 # in case it's a plugin we don't care
266 if not plugin:
267 if not plugin:
267
268
268 # first check if we were given the crypted password back; if it
269 # first check if we were given the crypted password back; if it
269 # matches, it's not a password change
270 # matches, it's not a password change
270 if new_user.password == password:
271 if new_user.password == password:
271 return False
272 return False
272
273
273 password_match = check_password(password, old_password)
274 password_match = check_password(password, old_password)
274 if not password_match:
275 if not password_match:
275 return True
276 return True
276
277
277 return False
278 return False
278
279
279 # read settings on default personal repo group creation
280 # read settings on default personal repo group creation
280 if create_repo_group is None:
281 if create_repo_group is None:
281 default_create_repo_group = RepoGroupModel()\
282 default_create_repo_group = RepoGroupModel()\
282 .get_default_create_personal_repo_group()
283 .get_default_create_personal_repo_group()
283 create_repo_group = default_create_repo_group
284 create_repo_group = default_create_repo_group
284
285
285 user_data = {
286 user_data = {
286 'username': username,
287 'username': username,
287 'password': password,
288 'password': password,
288 'email': email,
289 'email': email,
289 'firstname': firstname,
290 'firstname': firstname,
290 'lastname': lastname,
291 'lastname': lastname,
291 'active': active,
292 'active': active,
292 'admin': admin
293 'admin': admin
293 }
294 }
294
295
295 if updating_user_id:
296 if updating_user_id:
296 log.debug('Checking for existing account in RhodeCode '
297 log.debug('Checking for existing account in RhodeCode '
297 'database with user_id `%s` ' % (updating_user_id,))
298 'database with user_id `%s` ' % (updating_user_id,))
298 user = User.get(updating_user_id)
299 user = User.get(updating_user_id)
299 else:
300 else:
300 log.debug('Checking for existing account in RhodeCode '
301 log.debug('Checking for existing account in RhodeCode '
301 'database with username `%s` ' % (username,))
302 'database with username `%s` ' % (username,))
302 user = User.get_by_username(username, case_insensitive=True)
303 user = User.get_by_username(username, case_insensitive=True)
303
304
304 if user is None:
305 if user is None:
305 # we check internal flag if this method is actually allowed to
306 # we check internal flag if this method is actually allowed to
306 # create new user
307 # create new user
307 if not allow_to_create_user:
308 if not allow_to_create_user:
308 msg = ('Method wants to create new user, but it is not '
309 msg = ('Method wants to create new user, but it is not '
309 'allowed to do so')
310 'allowed to do so')
310 log.warning(msg)
311 log.warning(msg)
311 raise NotAllowedToCreateUserError(msg)
312 raise NotAllowedToCreateUserError(msg)
312
313
313 log.debug('Creating new user %s', username)
314 log.debug('Creating new user %s', username)
314
315
315 # only if we create user that is active
316 # only if we create user that is active
316 new_active_user = active
317 new_active_user = active
317 if new_active_user and strict_creation_check:
318 if new_active_user and strict_creation_check:
318 # raises UserCreationError if it's not allowed for any reason to
319 # raises UserCreationError if it's not allowed for any reason to
319 # create new active user, this also executes pre-create hooks
320 # create new active user, this also executes pre-create hooks
320 check_allowed_create_user(user_data, cur_user, strict_check=True)
321 check_allowed_create_user(user_data, cur_user, strict_check=True)
321 events.trigger(events.UserPreCreate(user_data))
322 events.trigger(events.UserPreCreate(user_data))
322 new_user = User()
323 new_user = User()
323 edit = False
324 edit = False
324 else:
325 else:
325 log.debug('updating user %s', username)
326 log.debug('updating user %s', username)
326 events.trigger(events.UserPreUpdate(user, user_data))
327 events.trigger(events.UserPreUpdate(user, user_data))
327 new_user = user
328 new_user = user
328 edit = True
329 edit = True
329
330
330 # we're not allowed to edit default user
331 # we're not allowed to edit default user
331 if user.username == User.DEFAULT_USER:
332 if user.username == User.DEFAULT_USER:
332 raise DefaultUserException(
333 raise DefaultUserException(
333 _("You can't edit this user (`%(username)s`) since it's "
334 "You can't edit this user (`%(username)s`) since it's "
334 "crucial for entire application") % {'username': user.username})
335 "crucial for entire application"
336 % {'username': user.username})
335
337
336 # inject special attribute that will tell us if User is new or old
338 # inject special attribute that will tell us if User is new or old
337 new_user.is_new_user = not edit
339 new_user.is_new_user = not edit
338 # for users that didn't specify an auth type, we use the RhodeCode built-in
340 # for users that didn't specify an auth type, we use the RhodeCode built-in
339 from rhodecode.authentication.plugins import auth_rhodecode
341 from rhodecode.authentication.plugins import auth_rhodecode
340 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
342 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
341 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
343 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
342
344
343 try:
345 try:
344 new_user.username = username
346 new_user.username = username
345 new_user.admin = admin
347 new_user.admin = admin
346 new_user.email = email
348 new_user.email = email
347 new_user.active = active
349 new_user.active = active
348 new_user.extern_name = safe_unicode(extern_name)
350 new_user.extern_name = safe_unicode(extern_name)
349 new_user.extern_type = safe_unicode(extern_type)
351 new_user.extern_type = safe_unicode(extern_type)
350 new_user.name = firstname
352 new_user.name = firstname
351 new_user.lastname = lastname
353 new_user.lastname = lastname
352
354
353 # set password only if creating a user or the password is changed
355 # set password only if creating a user or the password is changed
354 if not edit or _password_change(new_user, password):
356 if not edit or _password_change(new_user, password):
355 reason = 'new password' if edit else 'new user'
357 reason = 'new password' if edit else 'new user'
356 log.debug('Updating password reason=>%s', reason)
358 log.debug('Updating password reason=>%s', reason)
357 new_user.password = get_crypt_password(password) if password else None
359 new_user.password = get_crypt_password(password) if password else None
358
360
359 if force_password_change:
361 if force_password_change:
360 new_user.update_userdata(force_password_change=True)
362 new_user.update_userdata(force_password_change=True)
361 if language:
363 if language:
362 new_user.update_userdata(language=language)
364 new_user.update_userdata(language=language)
363 new_user.update_userdata(notification_status=True)
365 new_user.update_userdata(notification_status=True)
364
366
365 self.sa.add(new_user)
367 self.sa.add(new_user)
366
368
367 if not edit and create_repo_group:
369 if not edit and create_repo_group:
368 RepoGroupModel().create_personal_repo_group(
370 RepoGroupModel().create_personal_repo_group(
369 new_user, commit_early=False)
371 new_user, commit_early=False)
370
372
371 if not edit:
373 if not edit:
372 # add the RSS token
374 # add the RSS token
373 AuthTokenModel().create(username,
375 AuthTokenModel().create(username,
374 description=u'Generated feed token',
376 description=u'Generated feed token',
375 role=AuthTokenModel.cls.ROLE_FEED)
377 role=AuthTokenModel.cls.ROLE_FEED)
376 kwargs = new_user.get_dict()
378 kwargs = new_user.get_dict()
377 # backward compat, require api_keys present
379 # backward compat, require api_keys present
378 kwargs['api_keys'] = kwargs['auth_tokens']
380 kwargs['api_keys'] = kwargs['auth_tokens']
379 log_create_user(created_by=cur_user, **kwargs)
381 log_create_user(created_by=cur_user, **kwargs)
380 events.trigger(events.UserPostCreate(user_data))
382 events.trigger(events.UserPostCreate(user_data))
381 return new_user
383 return new_user
382 except (DatabaseError,):
384 except (DatabaseError,):
383 log.error(traceback.format_exc())
385 log.error(traceback.format_exc())
384 raise
386 raise
385
387
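A hedged call sketch for create_or_update, mirroring the signature documented above; all values are placeholders and the session commit handled by callers is omitted:

    new_user = UserModel().create_or_update(
        username='jdoe', password='secret', email='jdoe@example.com',
        firstname='John', lastname='Doe', active=True, admin=False,
        extern_type=None, extern_name=None,  # falls back to the rhodecode plugin
        cur_user='admin', force_password_change=False,
        allow_to_create_user=True, language='en')
    # the injected `is_new_user` attribute tells callers whether the account
    # was created or merely updated
    assert hasattr(new_user, 'is_new_user')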
386 def create_registration(self, form_data):
388 def create_registration(self, form_data):
387 from rhodecode.model.notification import NotificationModel
389 from rhodecode.model.notification import NotificationModel
388 from rhodecode.model.notification import EmailNotificationModel
390 from rhodecode.model.notification import EmailNotificationModel
389
391
390 try:
392 try:
391 form_data['admin'] = False
393 form_data['admin'] = False
392 form_data['extern_name'] = 'rhodecode'
394 form_data['extern_name'] = 'rhodecode'
393 form_data['extern_type'] = 'rhodecode'
395 form_data['extern_type'] = 'rhodecode'
394 new_user = self.create(form_data)
396 new_user = self.create(form_data)
395
397
396 self.sa.add(new_user)
398 self.sa.add(new_user)
397 self.sa.flush()
399 self.sa.flush()
398
400
399 user_data = new_user.get_dict()
401 user_data = new_user.get_dict()
400 kwargs = {
402 kwargs = {
401 # use a SQLAlchemy-safe dump of user data
403 # use a SQLAlchemy-safe dump of user data
402 'user': AttributeDict(user_data),
404 'user': AttributeDict(user_data),
403 'date': datetime.datetime.now()
405 'date': datetime.datetime.now()
404 }
406 }
405 notification_type = EmailNotificationModel.TYPE_REGISTRATION
407 notification_type = EmailNotificationModel.TYPE_REGISTRATION
406 # pre-generate the subject for notification itself
408 # pre-generate the subject for notification itself
407 (subject,
409 (subject,
408 _h, _e, # we don't care about those
410 _h, _e, # we don't care about those
409 body_plaintext) = EmailNotificationModel().render_email(
411 body_plaintext) = EmailNotificationModel().render_email(
410 notification_type, **kwargs)
412 notification_type, **kwargs)
411
413
412 # create notification objects, and emails
414 # create notification objects, and emails
413 NotificationModel().create(
415 NotificationModel().create(
414 created_by=new_user,
416 created_by=new_user,
415 notification_subject=subject,
417 notification_subject=subject,
416 notification_body=body_plaintext,
418 notification_body=body_plaintext,
417 notification_type=notification_type,
419 notification_type=notification_type,
418 recipients=None, # all admins
420 recipients=None, # all admins
419 email_kwargs=kwargs,
421 email_kwargs=kwargs,
420 )
422 )
421
423
422 return new_user
424 return new_user
423 except Exception:
425 except Exception:
424 log.error(traceback.format_exc())
426 log.error(traceback.format_exc())
425 raise
427 raise
426
428
427 def _handle_user_repos(self, username, repositories, handle_mode=None):
429 def _handle_user_repos(self, username, repositories, handle_mode=None):
428 _superadmin = self.cls.get_first_super_admin()
430 _superadmin = self.cls.get_first_super_admin()
429 left_overs = True
431 left_overs = True
430
432
431 from rhodecode.model.repo import RepoModel
433 from rhodecode.model.repo import RepoModel
432
434
433 if handle_mode == 'detach':
435 if handle_mode == 'detach':
434 for obj in repositories:
436 for obj in repositories:
435 obj.user = _superadmin
437 obj.user = _superadmin
436 # set a description so we know why the super admin now owns
438 # set a description so we know why the super admin now owns
437 # additional repositories that were orphaned
439 # additional repositories that were orphaned
438 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
440 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
439 self.sa.add(obj)
441 self.sa.add(obj)
440 left_overs = False
442 left_overs = False
441 elif handle_mode == 'delete':
443 elif handle_mode == 'delete':
442 for obj in repositories:
444 for obj in repositories:
443 RepoModel().delete(obj, forks='detach')
445 RepoModel().delete(obj, forks='detach')
444 left_overs = False
446 left_overs = False
445
447
446 # if nothing was done we still have leftovers
448 # if nothing was done we still have leftovers
447 return left_overs
449 return left_overs
448
450
449 def _handle_user_repo_groups(self, username, repository_groups,
451 def _handle_user_repo_groups(self, username, repository_groups,
450 handle_mode=None):
452 handle_mode=None):
451 _superadmin = self.cls.get_first_super_admin()
453 _superadmin = self.cls.get_first_super_admin()
452 left_overs = True
454 left_overs = True
453
455
454 from rhodecode.model.repo_group import RepoGroupModel
456 from rhodecode.model.repo_group import RepoGroupModel
455
457
456 if handle_mode == 'detach':
458 if handle_mode == 'detach':
457 for r in repository_groups:
459 for r in repository_groups:
458 r.user = _superadmin
460 r.user = _superadmin
459 # set a description so we know why the super admin now owns
461 # set a description so we know why the super admin now owns
460 # additional repository groups that were orphaned
462 # additional repository groups that were orphaned
461 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
463 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
462 self.sa.add(r)
464 self.sa.add(r)
463 left_overs = False
465 left_overs = False
464 elif handle_mode == 'delete':
466 elif handle_mode == 'delete':
465 for r in repository_groups:
467 for r in repository_groups:
466 RepoGroupModel().delete(r)
468 RepoGroupModel().delete(r)
467 left_overs = False
469 left_overs = False
468
470
469 # if nothing was done we still have leftovers
471 # if nothing was done we still have leftovers
470 return left_overs
472 return left_overs
471
473
472 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
474 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
473 _superadmin = self.cls.get_first_super_admin()
475 _superadmin = self.cls.get_first_super_admin()
474 left_overs = True
476 left_overs = True
475
477
476 from rhodecode.model.user_group import UserGroupModel
478 from rhodecode.model.user_group import UserGroupModel
477
479
478 if handle_mode == 'detach':
480 if handle_mode == 'detach':
479 for r in user_groups:
481 for r in user_groups:
480 for user_user_group_to_perm in r.user_user_group_to_perm:
482 for user_user_group_to_perm in r.user_user_group_to_perm:
481 if user_user_group_to_perm.user.username == username:
483 if user_user_group_to_perm.user.username == username:
482 user_user_group_to_perm.user = _superadmin
484 user_user_group_to_perm.user = _superadmin
483 r.user = _superadmin
485 r.user = _superadmin
484 # set a description so we know why the super admin now owns
486 # set a description so we know why the super admin now owns
485 # the additional user groups that were orphaned!
487 # the additional user groups that were orphaned!
486 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
488 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
487 self.sa.add(r)
489 self.sa.add(r)
488 left_overs = False
490 left_overs = False
489 elif handle_mode == 'delete':
491 elif handle_mode == 'delete':
490 for r in user_groups:
492 for r in user_groups:
491 UserGroupModel().delete(r)
493 UserGroupModel().delete(r)
492 left_overs = False
494 left_overs = False
493
495
494 # if nothing was done we still have leftovers
496 # if nothing was done we still have leftovers
495 return left_overs
497 return left_overs
496
498
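The three `_handle_user_*` helpers above share one contract: with handle_mode 'detach' the owned objects are re-assigned to the first super admin, with 'delete' they are removed, and with no mode they are left untouched so that `delete()` below can refuse to remove the user. A minimal standalone sketch of that contract; the names handle_owned_objects and SUPER_ADMIN are illustrative stand-ins, not the real model API.

    SUPER_ADMIN = 'first-super-admin'

    def handle_owned_objects(username, objects, handle_mode=None):
        left_overs = True
        if handle_mode == 'detach':
            for obj in objects:
                # re-own the object and record why the super admin has it now
                obj['owner'] = SUPER_ADMIN
                obj['description'] += '\n::detached from deleted user: %s' % username
            left_overs = False
        elif handle_mode == 'delete':
            # stand-in for RepoModel().delete(obj) on every owned object
            del objects[:]
            left_overs = False
        # with no handle_mode the objects stay owned by the user and the
        # caller refuses the deletion (UserOwnsReposException and friends)
        return left_overs

    repos = [{'owner': 'bob', 'description': 'my repo'}]
    assert handle_owned_objects('bob', repos, handle_mode='detach') is False
    assert repos[0]['owner'] == SUPER_ADMIN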
497 def delete(self, user, cur_user=None, handle_repos=None,
499 def delete(self, user, cur_user=None, handle_repos=None,
498 handle_repo_groups=None, handle_user_groups=None):
500 handle_repo_groups=None, handle_user_groups=None):
499 if not cur_user:
501 if not cur_user:
500 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
502 cur_user = getattr(
503 get_current_rhodecode_user(), 'username', None)
501 user = self._get_user(user)
504 user = self._get_user(user)
502
505
503 try:
506 try:
504 if user.username == User.DEFAULT_USER:
507 if user.username == User.DEFAULT_USER:
505 raise DefaultUserException(
508 raise DefaultUserException(
506 _(u"You can't remove this user since it's"
509 u"You can't remove this user since it's"
507 u" crucial for entire application"))
510 u" crucial for entire application")
508
511
509 left_overs = self._handle_user_repos(
512 left_overs = self._handle_user_repos(
510 user.username, user.repositories, handle_repos)
513 user.username, user.repositories, handle_repos)
511 if left_overs and user.repositories:
514 if left_overs and user.repositories:
512 repos = [x.repo_name for x in user.repositories]
515 repos = [x.repo_name for x in user.repositories]
513 raise UserOwnsReposException(
516 raise UserOwnsReposException(
514 _(u'user "%s" still owns %s repositories and cannot be '
517 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
515 u'removed. Switch owners or remove those repositories:%s')
518 u'removed. Switch owners or remove those repositories:%(list_repos)s'
516 % (user.username, len(repos), ', '.join(repos)))
519 % {'username': user.username, 'len_repos': len(repos),
520 'list_repos': ', '.join(repos)})
517
521
518 left_overs = self._handle_user_repo_groups(
522 left_overs = self._handle_user_repo_groups(
519 user.username, user.repository_groups, handle_repo_groups)
523 user.username, user.repository_groups, handle_repo_groups)
520 if left_overs and user.repository_groups:
524 if left_overs and user.repository_groups:
521 repo_groups = [x.group_name for x in user.repository_groups]
525 repo_groups = [x.group_name for x in user.repository_groups]
522 raise UserOwnsRepoGroupsException(
526 raise UserOwnsRepoGroupsException(
523 _(u'user "%s" still owns %s repository groups and cannot be '
527 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
524 u'removed. Switch owners or remove those repository groups:%s')
528 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
525 % (user.username, len(repo_groups), ', '.join(repo_groups)))
529 % {'username': user.username, 'len_repo_groups': len(repo_groups),
530 'list_repo_groups': ', '.join(repo_groups)})
526
531
527 left_overs = self._handle_user_user_groups(
532 left_overs = self._handle_user_user_groups(
528 user.username, user.user_groups, handle_user_groups)
533 user.username, user.user_groups, handle_user_groups)
529 if left_overs and user.user_groups:
534 if left_overs and user.user_groups:
530 user_groups = [x.users_group_name for x in user.user_groups]
535 user_groups = [x.users_group_name for x in user.user_groups]
531 raise UserOwnsUserGroupsException(
536 raise UserOwnsUserGroupsException(
532 _(u'user "%s" still owns %s user groups and cannot be '
537 u'user "%s" still owns %s user groups and cannot be '
533 u'removed. Switch owners or remove those user groups:%s')
538 u'removed. Switch owners or remove those user groups:%s'
534 % (user.username, len(user_groups), ', '.join(user_groups)))
539 % (user.username, len(user_groups), ', '.join(user_groups)))
535
540
536 # we might change the user data with detach/delete, make sure
541 # we might change the user data with detach/delete, make sure
537 # the object is marked as expired before actually deleting !
542 # the object is marked as expired before actually deleting !
538 self.sa.expire(user)
543 self.sa.expire(user)
539 self.sa.delete(user)
544 self.sa.delete(user)
540 from rhodecode.lib.hooks_base import log_delete_user
545 from rhodecode.lib.hooks_base import log_delete_user
541 log_delete_user(deleted_by=cur_user, **user.get_dict())
546 log_delete_user(deleted_by=cur_user, **user.get_dict())
542 except Exception:
547 except Exception:
543 log.error(traceback.format_exc())
548 log.error(traceback.format_exc())
544 raise
549 raise
545
550
546 def reset_password_link(self, data, pwd_reset_url):
551 def reset_password_link(self, data, pwd_reset_url):
547 from rhodecode.lib.celerylib import tasks, run_task
552 from rhodecode.lib.celerylib import tasks, run_task
548 from rhodecode.model.notification import EmailNotificationModel
553 from rhodecode.model.notification import EmailNotificationModel
549 user_email = data['email']
554 user_email = data['email']
550 try:
555 try:
551 user = User.get_by_email(user_email)
556 user = User.get_by_email(user_email)
552 if user:
557 if user:
553 log.debug('password reset user found %s', user)
558 log.debug('password reset user found %s', user)
554
559
555 email_kwargs = {
560 email_kwargs = {
556 'password_reset_url': pwd_reset_url,
561 'password_reset_url': pwd_reset_url,
557 'user': user,
562 'user': user,
558 'email': user_email,
563 'email': user_email,
559 'date': datetime.datetime.now()
564 'date': datetime.datetime.now()
560 }
565 }
561
566
562 (subject, headers, email_body,
567 (subject, headers, email_body,
563 email_body_plaintext) = EmailNotificationModel().render_email(
568 email_body_plaintext) = EmailNotificationModel().render_email(
564 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
569 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
565
570
566 recipients = [user_email]
571 recipients = [user_email]
567
572
568 action_logger_generic(
573 action_logger_generic(
569 'sending password reset email to user: {}'.format(
574 'sending password reset email to user: {}'.format(
570 user), namespace='security.password_reset')
575 user), namespace='security.password_reset')
571
576
572 run_task(tasks.send_email, recipients, subject,
577 run_task(tasks.send_email, recipients, subject,
573 email_body_plaintext, email_body)
578 email_body_plaintext, email_body)
574
579
575 else:
580 else:
576 log.debug("password reset email %s not found", user_email)
581 log.debug("password reset email %s not found", user_email)
577 except Exception:
582 except Exception:
578 log.error(traceback.format_exc())
583 log.error(traceback.format_exc())
579 return False
584 return False
580
585
581 return True
586 return True
582
587
583 def reset_password(self, data):
588 def reset_password(self, data):
584 from rhodecode.lib.celerylib import tasks, run_task
589 from rhodecode.lib.celerylib import tasks, run_task
585 from rhodecode.model.notification import EmailNotificationModel
590 from rhodecode.model.notification import EmailNotificationModel
586 from rhodecode.lib import auth
591 from rhodecode.lib import auth
587 user_email = data['email']
592 user_email = data['email']
588 pre_db = True
593 pre_db = True
589 try:
594 try:
590 user = User.get_by_email(user_email)
595 user = User.get_by_email(user_email)
591 new_passwd = auth.PasswordGenerator().gen_password(
596 new_passwd = auth.PasswordGenerator().gen_password(
592 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
597 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
593 if user:
598 if user:
594 user.password = auth.get_crypt_password(new_passwd)
599 user.password = auth.get_crypt_password(new_passwd)
595 # also force this user to reset their password!
600 # also force this user to reset their password!
596 user.update_userdata(force_password_change=True)
601 user.update_userdata(force_password_change=True)
597
602
598 Session().add(user)
603 Session().add(user)
599
604
600 # now delete the token in question
605 # now delete the token in question
601 UserApiKeys = AuthTokenModel.cls
606 UserApiKeys = AuthTokenModel.cls
602 UserApiKeys().query().filter(
607 UserApiKeys().query().filter(
603 UserApiKeys.api_key == data['token']).delete()
608 UserApiKeys.api_key == data['token']).delete()
604
609
605 Session().commit()
610 Session().commit()
606 log.info('successfully reset password for `%s`', user_email)
611 log.info('successfully reset password for `%s`', user_email)
607
612
608 if new_passwd is None:
613 if new_passwd is None:
609 raise Exception('unable to generate new password')
614 raise Exception('unable to generate new password')
610
615
611 pre_db = False
616 pre_db = False
612
617
613 email_kwargs = {
618 email_kwargs = {
614 'new_password': new_passwd,
619 'new_password': new_passwd,
615 'user': user,
620 'user': user,
616 'email': user_email,
621 'email': user_email,
617 'date': datetime.datetime.now()
622 'date': datetime.datetime.now()
618 }
623 }
619
624
620 (subject, headers, email_body,
625 (subject, headers, email_body,
621 email_body_plaintext) = EmailNotificationModel().render_email(
626 email_body_plaintext) = EmailNotificationModel().render_email(
622 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
627 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
623 **email_kwargs)
628 **email_kwargs)
624
629
625 recipients = [user_email]
630 recipients = [user_email]
626
631
627 action_logger_generic(
632 action_logger_generic(
628 'sent new password to user: {} with email: {}'.format(
633 'sent new password to user: {} with email: {}'.format(
629 user, user_email), namespace='security.password_reset')
634 user, user_email), namespace='security.password_reset')
630
635
631 run_task(tasks.send_email, recipients, subject,
636 run_task(tasks.send_email, recipients, subject,
632 email_body_plaintext, email_body)
637 email_body_plaintext, email_body)
633
638
634 except Exception:
639 except Exception:
635 log.error('Failed to update user password')
640 log.error('Failed to update user password')
636 log.error(traceback.format_exc())
641 log.error(traceback.format_exc())
637 if pre_db:
642 if pre_db:
638 # we roll back only if the local db work fails. Once it reaches
643 # we roll back only if the local db work fails. Once it reaches
639 # run_task we're past the rollback point and a rollback wouldn't work
644 # run_task we're past the rollback point and a rollback wouldn't work
640 Session().rollback()
645 Session().rollback()
641
646
642 return True
647 return True
643
648
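The `pre_db` flag above encodes one rule: roll the session back only when the failure happened before the work was handed to the async email task, because once the commit has run a rollback can no longer undo anything. A rough sketch of the same pattern; session, do_db_work, enqueue_email and log are hypothetical placeholders rather than the real objects.

    def reset_with_safe_rollback(session, do_db_work, enqueue_email, log):
        pre_db = True          # still inside the local DB phase
        try:
            do_db_work()
            session.commit()
            pre_db = False     # committed; past the rollback point
            enqueue_email()    # a failure here must not undo the commit
        except Exception as exc:
            log('password reset failed: %s' % exc)
            if pre_db:
                # only local DB work failed, so undoing it is still safe
                session.rollback()
        return True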
644 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
649 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
645 """
650 """
646 Fetches auth_user by user_id, or api_key if present.
651 Fetches auth_user by user_id, or api_key if present.
647 Fills auth_user attributes with those taken from the database.
652 Fills auth_user attributes with those taken from the database.
648 Additionally sets is_authenticated to False if the lookup fails
653 Additionally sets is_authenticated to False if the lookup fails
649 or the user is not present in the database
654 or the user is not present in the database
650
655
651 :param auth_user: instance of user to set attributes
656 :param auth_user: instance of user to set attributes
652 :param user_id: user id to fetch by
657 :param user_id: user id to fetch by
653 :param api_key: api key to fetch by
658 :param api_key: api key to fetch by
654 :param username: username to fetch by
659 :param username: username to fetch by
655 """
660 """
656 if user_id is None and api_key is None and username is None:
661 if user_id is None and api_key is None and username is None:
657 raise Exception('You need to pass user_id, api_key or username')
662 raise Exception('You need to pass user_id, api_key or username')
658
663
659 log.debug(
664 log.debug(
660 'AuthUser: fill data execution based on: '
665 'AuthUser: fill data execution based on: '
661 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
666 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
662 try:
667 try:
663 dbuser = None
668 dbuser = None
664 if user_id:
669 if user_id:
665 dbuser = self.get(user_id)
670 dbuser = self.get(user_id)
666 elif api_key:
671 elif api_key:
667 dbuser = self.get_by_auth_token(api_key)
672 dbuser = self.get_by_auth_token(api_key)
668 elif username:
673 elif username:
669 dbuser = self.get_by_username(username)
674 dbuser = self.get_by_username(username)
670
675
671 if not dbuser:
676 if not dbuser:
672 log.warning(
677 log.warning(
673 'Unable to lookup user by id:%s api_key:%s username:%s',
678 'Unable to lookup user by id:%s api_key:%s username:%s',
674 user_id, api_key, username)
679 user_id, api_key, username)
675 return False
680 return False
676 if not dbuser.active:
681 if not dbuser.active:
677 log.debug('User `%s:%s` is inactive, skipping fill data',
682 log.debug('User `%s:%s` is inactive, skipping fill data',
678 username, user_id)
683 username, user_id)
679 return False
684 return False
680
685
681 log.debug('AuthUser: filling found user:%s data', dbuser)
686 log.debug('AuthUser: filling found user:%s data', dbuser)
682 user_data = dbuser.get_dict()
687 user_data = dbuser.get_dict()
683
688
684 user_data.update({
689 user_data.update({
685 # explicitly set the safe, escaped values
690 # explicitly set the safe, escaped values
686 'first_name': dbuser.first_name,
691 'first_name': dbuser.first_name,
687 'last_name': dbuser.last_name,
692 'last_name': dbuser.last_name,
688 })
693 })
689
694
690 for k, v in user_data.items():
695 for k, v in user_data.items():
691 # properties of auth user we don't update
696 # properties of auth user we don't update
692 if k not in ['auth_tokens', 'permissions']:
697 if k not in ['auth_tokens', 'permissions']:
693 setattr(auth_user, k, v)
698 setattr(auth_user, k, v)
694
699
695 # few extras
700 # few extras
696 setattr(auth_user, 'feed_token', dbuser.feed_token)
701 setattr(auth_user, 'feed_token', dbuser.feed_token)
697 except Exception:
702 except Exception:
698 log.error(traceback.format_exc())
703 log.error(traceback.format_exc())
699 auth_user.is_authenticated = False
704 auth_user.is_authenticated = False
700 return False
705 return False
701
706
702 return True
707 return True
703
708
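`fill_data` resolves the account by whichever identifier was passed, in the fixed order user_id, then api_key, then username, and gives up when nothing was passed, nothing matches, or the account is inactive. A compact sketch of that precedence, with a hypothetical `lookup` dict of callables standing in for the real queries:

    def resolve_user(lookup, user_id=None, api_key=None, username=None):
        if user_id is None and api_key is None and username is None:
            raise Exception('You need to pass user_id, api_key or username')
        if user_id:
            dbuser = lookup['by_id'](user_id)
        elif api_key:
            dbuser = lookup['by_token'](api_key)
        else:
            dbuser = lookup['by_username'](username)
        if not dbuser or not dbuser.get('active'):
            return None  # the caller treats this as "not authenticated"
        return dbuser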
704 def has_perm(self, user, perm):
709 def has_perm(self, user, perm):
705 perm = self._get_perm(perm)
710 perm = self._get_perm(perm)
706 user = self._get_user(user)
711 user = self._get_user(user)
707
712
708 return UserToPerm.query().filter(UserToPerm.user == user)\
713 return UserToPerm.query().filter(UserToPerm.user == user)\
709 .filter(UserToPerm.permission == perm).scalar() is not None
714 .filter(UserToPerm.permission == perm).scalar() is not None
710
715
711 def grant_perm(self, user, perm):
716 def grant_perm(self, user, perm):
712 """
717 """
713 Grant user global permissions
718 Grant user global permissions
714
719
715 :param user:
720 :param user:
716 :param perm:
721 :param perm:
717 """
722 """
718 user = self._get_user(user)
723 user = self._get_user(user)
719 perm = self._get_perm(perm)
724 perm = self._get_perm(perm)
720 # if this permission is already granted skip it
725 # if this permission is already granted skip it
721 _perm = UserToPerm.query()\
726 _perm = UserToPerm.query()\
722 .filter(UserToPerm.user == user)\
727 .filter(UserToPerm.user == user)\
723 .filter(UserToPerm.permission == perm)\
728 .filter(UserToPerm.permission == perm)\
724 .scalar()
729 .scalar()
725 if _perm:
730 if _perm:
726 return
731 return
727 new = UserToPerm()
732 new = UserToPerm()
728 new.user = user
733 new.user = user
729 new.permission = perm
734 new.permission = perm
730 self.sa.add(new)
735 self.sa.add(new)
731 return new
736 return new
732
737
733 def revoke_perm(self, user, perm):
738 def revoke_perm(self, user, perm):
734 """
739 """
735 Revoke users global permissions
740 Revoke users global permissions
736
741
737 :param user:
742 :param user:
738 :param perm:
743 :param perm:
739 """
744 """
740 user = self._get_user(user)
745 user = self._get_user(user)
741 perm = self._get_perm(perm)
746 perm = self._get_perm(perm)
742
747
743 obj = UserToPerm.query()\
748 obj = UserToPerm.query()\
744 .filter(UserToPerm.user == user)\
749 .filter(UserToPerm.user == user)\
745 .filter(UserToPerm.permission == perm)\
750 .filter(UserToPerm.permission == perm)\
746 .scalar()
751 .scalar()
747 if obj:
752 if obj:
748 self.sa.delete(obj)
753 self.sa.delete(obj)
749
754
750 def add_extra_email(self, user, email):
755 def add_extra_email(self, user, email):
751 """
756 """
752 Adds email address to UserEmailMap
757 Adds email address to UserEmailMap
753
758
754 :param user:
759 :param user:
755 :param email:
760 :param email:
756 """
761 """
757
762
758 user = self._get_user(user)
763 user = self._get_user(user)
759
764
760 obj = UserEmailMap()
765 obj = UserEmailMap()
761 obj.user = user
766 obj.user = user
762 obj.email = email
767 obj.email = email
763 self.sa.add(obj)
768 self.sa.add(obj)
764 return obj
769 return obj
765
770
766 def delete_extra_email(self, user, email_id):
771 def delete_extra_email(self, user, email_id):
767 """
772 """
768 Removes email address from UserEmailMap
773 Removes email address from UserEmailMap
769
774
770 :param user:
775 :param user:
771 :param email_id:
776 :param email_id:
772 """
777 """
773 user = self._get_user(user)
778 user = self._get_user(user)
774 obj = UserEmailMap.query().get(email_id)
779 obj = UserEmailMap.query().get(email_id)
775 if obj and obj.user_id == user.user_id:
780 if obj and obj.user_id == user.user_id:
776 self.sa.delete(obj)
781 self.sa.delete(obj)
777
782
778 def parse_ip_range(self, ip_range):
783 def parse_ip_range(self, ip_range):
779 ip_list = []
784 ip_list = []
780
785
781 def make_unique(value):
786 def make_unique(value):
782 seen = []
787 seen = []
783 return [c for c in value if not (c in seen or seen.append(c))]
788 return [c for c in value if not (c in seen or seen.append(c))]
784
789
785 # first split by commas
790 # first split by commas
786 for ip_range in ip_range.split(','):
791 for ip_range in ip_range.split(','):
787 if not ip_range:
792 if not ip_range:
788 continue
793 continue
789 ip_range = ip_range.strip()
794 ip_range = ip_range.strip()
790 if '-' in ip_range:
795 if '-' in ip_range:
791 start_ip, end_ip = ip_range.split('-', 1)
796 start_ip, end_ip = ip_range.split('-', 1)
792 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
797 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
793 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
798 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
794 parsed_ip_range = []
799 parsed_ip_range = []
795
800
796 for index in xrange(int(start_ip), int(end_ip) + 1):
801 for index in xrange(int(start_ip), int(end_ip) + 1):
797 new_ip = ipaddress.ip_address(index)
802 new_ip = ipaddress.ip_address(index)
798 parsed_ip_range.append(str(new_ip))
803 parsed_ip_range.append(str(new_ip))
799 ip_list.extend(parsed_ip_range)
804 ip_list.extend(parsed_ip_range)
800 else:
805 else:
801 ip_list.append(ip_range)
806 ip_list.append(ip_range)
802
807
803 return make_unique(ip_list)
808 return make_unique(ip_list)
804
809
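`parse_ip_range` accepts a comma separated string where each entry is either a single address/CIDR or a `start-end` range that is expanded address by address, with duplicates dropped while the original order is kept. A rough standalone equivalent of that expansion, assuming only the same `ipaddress` module this file already imports:

    import ipaddress

    def expand_ip_ranges(ip_range_string):
        seen, result = set(), []
        for entry in ip_range_string.split(','):
            entry = entry.strip()
            if not entry:
                continue
            if '-' in entry:
                start, end = [ipaddress.ip_address(u'' + part.strip())
                              for part in entry.split('-', 1)]
                expanded = [str(ipaddress.ip_address(number))
                            for number in range(int(start), int(end) + 1)]
            else:
                expanded = [entry]
            for ip in expanded:
                if ip not in seen:
                    seen.add(ip)
                    result.append(ip)
        return result

    # expand_ip_ranges('127.0.0.1,192.168.0.1-192.168.0.3')
    # -> ['127.0.0.1', '192.168.0.1', '192.168.0.2', '192.168.0.3']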
805 def add_extra_ip(self, user, ip, description=None):
810 def add_extra_ip(self, user, ip, description=None):
806 """
811 """
807 Adds ip address to UserIpMap
812 Adds ip address to UserIpMap
808
813
809 :param user:
814 :param user:
810 :param ip:
815 :param ip:
811 """
816 """
812
817
813 user = self._get_user(user)
818 user = self._get_user(user)
814 obj = UserIpMap()
819 obj = UserIpMap()
815 obj.user = user
820 obj.user = user
816 obj.ip_addr = ip
821 obj.ip_addr = ip
817 obj.description = description
822 obj.description = description
818 self.sa.add(obj)
823 self.sa.add(obj)
819 return obj
824 return obj
820
825
821 def delete_extra_ip(self, user, ip_id):
826 def delete_extra_ip(self, user, ip_id):
822 """
827 """
823 Removes ip address from UserIpMap
828 Removes ip address from UserIpMap
824
829
825 :param user:
830 :param user:
826 :param ip_id:
831 :param ip_id:
827 """
832 """
828 user = self._get_user(user)
833 user = self._get_user(user)
829 obj = UserIpMap.query().get(ip_id)
834 obj = UserIpMap.query().get(ip_id)
830 if obj and obj.user_id == user.user_id:
835 if obj and obj.user_id == user.user_id:
831 self.sa.delete(obj)
836 self.sa.delete(obj)
832
837
833 def get_accounts_in_creation_order(self, current_user=None):
838 def get_accounts_in_creation_order(self, current_user=None):
834 """
839 """
835 Get accounts in creation order, for deactivation under license limits
840 Get accounts in creation order, for deactivation under license limits
836
841
837 pick the currently logged in user and put it at position 0 of the list,
842 pick the currently logged in user and put it at position 0 of the list,
838 pick all super-admins in order of creation date and add them to the list,
843 pick all super-admins in order of creation date and add them to the list,
839 pick all other accounts in order of creation and add them to the list.
844 pick all other accounts in order of creation and add them to the list.
840
845
841 Based on that list, the last accounts can be disabled, as they were
846 Based on that list, the last accounts can be disabled, as they were
842 created most recently and the list tail never includes the super admins
847 created most recently and the list tail never includes the super admins
843 or the current user.
848 or the current user.
844
849
845 :param current_user: optionally current user running this operation
850 :param current_user: optionally current user running this operation
846 """
851 """
847
852
848 if not current_user:
853 if not current_user:
849 current_user = get_current_rhodecode_user()
854 current_user = get_current_rhodecode_user()
850 active_super_admins = [
855 active_super_admins = [
851 x.user_id for x in User.query()
856 x.user_id for x in User.query()
852 .filter(User.user_id != current_user.user_id)
857 .filter(User.user_id != current_user.user_id)
853 .filter(User.active == true())
858 .filter(User.active == true())
854 .filter(User.admin == true())
859 .filter(User.admin == true())
855 .order_by(User.created_on.asc())]
860 .order_by(User.created_on.asc())]
856
861
857 active_regular_users = [
862 active_regular_users = [
858 x.user_id for x in User.query()
863 x.user_id for x in User.query()
859 .filter(User.user_id != current_user.user_id)
864 .filter(User.user_id != current_user.user_id)
860 .filter(User.active == true())
865 .filter(User.active == true())
861 .filter(User.admin == false())
866 .filter(User.admin == false())
862 .order_by(User.created_on.asc())]
867 .order_by(User.created_on.asc())]
863
868
864 list_of_accounts = [current_user.user_id]
869 list_of_accounts = [current_user.user_id]
865 list_of_accounts += active_super_admins
870 list_of_accounts += active_super_admins
866 list_of_accounts += active_regular_users
871 list_of_accounts += active_regular_users
867
872
868 return list_of_accounts
873 return list_of_accounts
869
874
870 def deactivate_last_users(self, expected_users, current_user=None):
875 def deactivate_last_users(self, expected_users, current_user=None):
871 """
876 """
872 Deactivate accounts that are over the license limits.
877 Deactivate accounts that are over the license limits.
873 The algorithm for choosing which accounts to disable is:
878 The algorithm for choosing which accounts to disable is:
874
879
875 Get current user, then super admins in creation order, then regular
880 Get current user, then super admins in creation order, then regular
876 active users in creation order.
881 active users in creation order.
877
882
878 Using that list we mark all accounts from the end of it as inactive.
883 Using that list we mark all accounts from the end of it as inactive.
879 This way we block only the most recently created accounts.
884 This way we block only the most recently created accounts.
880
885
881 :param expected_users: expected number of users; we deactivate the
886 :param expected_users: expected number of users; we deactivate the
882 accounts at the end of the list beyond that amount
887 accounts at the end of the list beyond that amount
883 """
888 """
884
889
885 list_of_accounts = self.get_accounts_in_creation_order(
890 list_of_accounts = self.get_accounts_in_creation_order(
886 current_user=current_user)
891 current_user=current_user)
887
892
888 for acc_id in list_of_accounts[expected_users + 1:]:
893 for acc_id in list_of_accounts[expected_users + 1:]:
889 user = User.get(acc_id)
894 user = User.get(acc_id)
890 log.info('Deactivating account %s for license unlock', user)
895 log.info('Deactivating account %s for license unlock', user)
891 user.active = False
896 user.active = False
892 Session().add(user)
897 Session().add(user)
893 Session().commit()
898 Session().commit()
894
899
895 return
900 return
896
901
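Taken together, the two methods above build an ordered list (the current user first, then active super admins by creation date, then active regular users by creation date) and deactivate everything past the licensed head of that list. A toy version of the slicing step, with plain ids instead of User rows:

    def accounts_to_deactivate(current_user_id, super_admin_ids,
                               regular_user_ids, expected_users):
        ordered = ([current_user_id]
                   + list(super_admin_ids)
                   + list(regular_user_ids))
        # everything past the allowed head of the list gets disabled,
        # mirroring list_of_accounts[expected_users + 1:] above
        return ordered[expected_users + 1:]

    # accounts_to_deactivate(7, [1, 2], [3, 4, 5], expected_users=3) -> [4, 5]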
897 def get_user_log(self, user, filter_term):
902 def get_user_log(self, user, filter_term):
898 user_log = UserLog.query()\
903 user_log = UserLog.query()\
899 .filter(or_(UserLog.user_id == user.user_id,
904 .filter(or_(UserLog.user_id == user.user_id,
900 UserLog.username == user.username))\
905 UserLog.username == user.username))\
901 .options(joinedload(UserLog.user))\
906 .options(joinedload(UserLog.user))\
902 .options(joinedload(UserLog.repository))\
907 .options(joinedload(UserLog.repository))\
903 .order_by(UserLog.action_date.desc())
908 .order_by(UserLog.action_date.desc())
904
909
905 user_log = user_log_filter(user_log, filter_term)
910 user_log = user_log_filter(user_log, filter_term)
906 return user_log
911 return user_log
@@ -1,1129 +1,1113 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Set of generic validators
22 Set of generic validators
23 """
23 """
24
24
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import collections
29 import collections
30
30
31 import formencode
31 import formencode
32 import ipaddress
32 import ipaddress
33 from formencode.validators import (
33 from formencode.validators import (
34 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
34 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
35 NotEmpty, IPAddress, CIDR, String, FancyValidator
35 NotEmpty, IPAddress, CIDR, String, FancyValidator
36 )
36 )
37
37
38 from sqlalchemy.sql.expression import true
38 from sqlalchemy.sql.expression import true
39 from sqlalchemy.util import OrderedSet
39 from sqlalchemy.util import OrderedSet
40 from webhelpers.pylonslib.secure_form import authentication_token
41
40
42 from rhodecode.authentication import (
41 from rhodecode.authentication import (
43 legacy_plugin_prefix, _import_legacy_plugin)
42 legacy_plugin_prefix, _import_legacy_plugin)
44 from rhodecode.authentication.base import loadplugin
43 from rhodecode.authentication.base import loadplugin
45 from rhodecode.apps._base import ADMIN_PREFIX
44 from rhodecode.apps._base import ADMIN_PREFIX
46 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
45 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
47 from rhodecode.lib.utils import repo_name_slug, make_db_config
46 from rhodecode.lib.utils import repo_name_slug, make_db_config
48 from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5, safe_unicode
47 from rhodecode.lib.utils2 import safe_int, str2bool, aslist, md5, safe_unicode
49 from rhodecode.lib.vcs.backends.git.repository import GitRepository
48 from rhodecode.lib.vcs.backends.git.repository import GitRepository
50 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
49 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
51 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
50 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
52 from rhodecode.model.db import (
51 from rhodecode.model.db import (
53 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
52 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
54 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
55
54
56 # silence warnings and pylint
55 # silence warnings and pylint
57 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
56 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
58 NotEmpty, IPAddress, CIDR, String, FancyValidator
57 NotEmpty, IPAddress, CIDR, String, FancyValidator
59
58
60 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
61
60
62
61
63 class _Missing(object):
62 class _Missing(object):
64 pass
63 pass
65
64
66
65
67 Missing = _Missing()
66 Missing = _Missing()
68
67
69
68
70 def M(self, key, state, **kwargs):
69 def M(self, key, state, **kwargs):
71 """
70 """
72 returns a string from self.message based on the given key;
71 returns a string from self.message based on the given key;
73 passed kw params are used to substitute %(named)s params inside
72 passed kw params are used to substitute %(named)s params inside
74 translated strings
73 translated strings
75
74
76 :param key:
75 :param key:
77 :param state:
76 :param state:
78 """
77 """
79
78
80 #state._ = staticmethod(_)
79 #state._ = staticmethod(_)
81 # inject validator into state object
80 # inject validator into state object
82 return self.message(key, state, **kwargs)
81 return self.message(key, state, **kwargs)
83
82
84
83
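The `M` helper above is a thin wrapper around formencode's per-validator `messages` lookup, and every validator factory below follows the same shape: a closure over the localizer, a `messages` dict, and a `validate_python` that raises `formencode.Invalid` with a message pulled through `M`. A minimal sketch of that shape; the `EvenNumber` validator is invented purely for illustration and relies on `M` and the formencode imports already present in this module.

    def EvenNumber(localizer):
        _ = localizer

        class _validator(formencode.validators.FancyValidator):
            messages = {
                'not_even': _(u'Value %(num)s is not an even number'),
            }

            def validate_python(self, value, state):
                if int(value) % 2:
                    # M() formats messages['not_even'] with num=value
                    msg = M(self, 'not_even', state, num=value)
                    raise formencode.Invalid(msg, value, state)
        return _validator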
85 def UniqueList(localizer, convert=None):
84 def UniqueList(localizer, convert=None):
86 _ = localizer
85 _ = localizer
87
86
88 class _validator(formencode.FancyValidator):
87 class _validator(formencode.FancyValidator):
89 """
88 """
90 Unique List !
89 Unique List !
91 """
90 """
92 messages = {
91 messages = {
93 'empty': _(u'Value cannot be an empty list'),
92 'empty': _(u'Value cannot be an empty list'),
94 'missing_value': _(u'Value cannot be an empty list'),
93 'missing_value': _(u'Value cannot be an empty list'),
95 }
94 }
96
95
97 def _to_python(self, value, state):
96 def _to_python(self, value, state):
98 ret_val = []
97 ret_val = []
99
98
100 def make_unique(value):
99 def make_unique(value):
101 seen = []
100 seen = []
102 return [c for c in value if not (c in seen or seen.append(c))]
101 return [c for c in value if not (c in seen or seen.append(c))]
103
102
104 if isinstance(value, list):
103 if isinstance(value, list):
105 ret_val = make_unique(value)
104 ret_val = make_unique(value)
106 elif isinstance(value, set):
105 elif isinstance(value, set):
107 ret_val = make_unique(list(value))
106 ret_val = make_unique(list(value))
108 elif isinstance(value, tuple):
107 elif isinstance(value, tuple):
109 ret_val = make_unique(list(value))
108 ret_val = make_unique(list(value))
110 elif value is None:
109 elif value is None:
111 ret_val = []
110 ret_val = []
112 else:
111 else:
113 ret_val = [value]
112 ret_val = [value]
114
113
115 if convert:
114 if convert:
116 ret_val = map(convert, ret_val)
115 ret_val = map(convert, ret_val)
117 return ret_val
116 return ret_val
118
117
119 def empty_value(self, value):
118 def empty_value(self, value):
120 return []
119 return []
121 return _validator
120 return _validator
122
121
123
122
124 def UniqueListFromString(localizer):
123 def UniqueListFromString(localizer):
125 _ = localizer
124 _ = localizer
126
125
127 class _validator(UniqueList(localizer)):
126 class _validator(UniqueList(localizer)):
128 def _to_python(self, value, state):
127 def _to_python(self, value, state):
129 if isinstance(value, basestring):
128 if isinstance(value, basestring):
130 value = aslist(value, ',')
129 value = aslist(value, ',')
131 return super(_validator, self)._to_python(value, state)
130 return super(_validator, self)._to_python(value, state)
132 return _validator
131 return _validator
133
132
134
133
135 def ValidSvnPattern(localizer, section, repo_name=None):
134 def ValidSvnPattern(localizer, section, repo_name=None):
136 _ = localizer
135 _ = localizer
137
136
138 class _validator(formencode.validators.FancyValidator):
137 class _validator(formencode.validators.FancyValidator):
139 messages = {
138 messages = {
140 'pattern_exists': _(u'Pattern already exists'),
139 'pattern_exists': _(u'Pattern already exists'),
141 }
140 }
142
141
143 def validate_python(self, value, state):
142 def validate_python(self, value, state):
144 if not value:
143 if not value:
145 return
144 return
146 model = VcsSettingsModel(repo=repo_name)
145 model = VcsSettingsModel(repo=repo_name)
147 ui_settings = model.get_svn_patterns(section=section)
146 ui_settings = model.get_svn_patterns(section=section)
148 for entry in ui_settings:
147 for entry in ui_settings:
149 if value == entry.value:
148 if value == entry.value:
150 msg = M(self, 'pattern_exists', state)
149 msg = M(self, 'pattern_exists', state)
151 raise formencode.Invalid(msg, value, state)
150 raise formencode.Invalid(msg, value, state)
152 return _validator
151 return _validator
153
152
154
153
155 def ValidUsername(localizer, edit=False, old_data=None):
154 def ValidUsername(localizer, edit=False, old_data=None):
156 _ = localizer
155 _ = localizer
157 old_data = old_data or {}
156 old_data = old_data or {}
158
157
159 class _validator(formencode.validators.FancyValidator):
158 class _validator(formencode.validators.FancyValidator):
160 messages = {
159 messages = {
161 'username_exists': _(u'Username "%(username)s" already exists'),
160 'username_exists': _(u'Username "%(username)s" already exists'),
162 'system_invalid_username':
161 'system_invalid_username':
163 _(u'Username "%(username)s" is forbidden'),
162 _(u'Username "%(username)s" is forbidden'),
164 'invalid_username':
163 'invalid_username':
165 _(u'Username may only contain alphanumeric characters '
164 _(u'Username may only contain alphanumeric characters '
166 u'underscores, periods or dashes and must begin with '
165 u'underscores, periods or dashes and must begin with '
167 u'alphanumeric character or underscore')
166 u'alphanumeric character or underscore')
168 }
167 }
169
168
170 def validate_python(self, value, state):
169 def validate_python(self, value, state):
171 if value in ['default', 'new_user']:
170 if value in ['default', 'new_user']:
172 msg = M(self, 'system_invalid_username', state, username=value)
171 msg = M(self, 'system_invalid_username', state, username=value)
173 raise formencode.Invalid(msg, value, state)
172 raise formencode.Invalid(msg, value, state)
174 # check if user is unique
173 # check if user is unique
175 old_un = None
174 old_un = None
176 if edit:
175 if edit:
177 old_un = User.get(old_data.get('user_id')).username
176 old_un = User.get(old_data.get('user_id')).username
178
177
179 if old_un != value or not edit:
178 if old_un != value or not edit:
180 if User.get_by_username(value, case_insensitive=True):
179 if User.get_by_username(value, case_insensitive=True):
181 msg = M(self, 'username_exists', state, username=value)
180 msg = M(self, 'username_exists', state, username=value)
182 raise formencode.Invalid(msg, value, state)
181 raise formencode.Invalid(msg, value, state)
183
182
184 if (re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value)
183 if (re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value)
185 is None):
184 is None):
186 msg = M(self, 'invalid_username', state)
185 msg = M(self, 'invalid_username', state)
187 raise formencode.Invalid(msg, value, state)
186 raise formencode.Invalid(msg, value, state)
188 return _validator
187 return _validator
189
188
190
189
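The username rule above reduces to the regex r'^[\w]{1}[\w\-\.]{0,254}$': the first character must be a word character (alphanumeric or underscore), followed by at most 254 more word characters, dashes or periods. A quick illustration of what it accepts and rejects:

    import re

    USERNAME_RE = re.compile(r'^[\w]{1}[\w\-\.]{0,254}$')

    assert USERNAME_RE.match('john.doe')        # word chars, periods, dashes ok
    assert USERNAME_RE.match('_svc-bot')        # may start with an underscore
    assert not USERNAME_RE.match('.hidden')     # must start with a word character
    assert not USERNAME_RE.match('bad name')    # spaces are rejected
    assert not USERNAME_RE.match('a' * 256)     # at most 255 characters total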
191 def ValidRepoUser(localizer, allow_disabled=False):
190 def ValidRepoUser(localizer, allow_disabled=False):
192 _ = localizer
191 _ = localizer
193
192
194 class _validator(formencode.validators.FancyValidator):
193 class _validator(formencode.validators.FancyValidator):
195 messages = {
194 messages = {
196 'invalid_username': _(u'Username %(username)s is not valid'),
195 'invalid_username': _(u'Username %(username)s is not valid'),
197 'disabled_username': _(u'Username %(username)s is disabled')
196 'disabled_username': _(u'Username %(username)s is disabled')
198 }
197 }
199
198
200 def validate_python(self, value, state):
199 def validate_python(self, value, state):
201 try:
200 try:
202 user = User.query().filter(User.username == value).one()
201 user = User.query().filter(User.username == value).one()
203 except Exception:
202 except Exception:
204 msg = M(self, 'invalid_username', state, username=value)
203 msg = M(self, 'invalid_username', state, username=value)
205 raise formencode.Invalid(
204 raise formencode.Invalid(
206 msg, value, state, error_dict={'username': msg}
205 msg, value, state, error_dict={'username': msg}
207 )
206 )
208 if user and (not allow_disabled and not user.active):
207 if user and (not allow_disabled and not user.active):
209 msg = M(self, 'disabled_username', state, username=value)
208 msg = M(self, 'disabled_username', state, username=value)
210 raise formencode.Invalid(
209 raise formencode.Invalid(
211 msg, value, state, error_dict={'username': msg}
210 msg, value, state, error_dict={'username': msg}
212 )
211 )
213 return _validator
212 return _validator
214
213
215
214
216 def ValidUserGroup(localizer, edit=False, old_data=None):
215 def ValidUserGroup(localizer, edit=False, old_data=None):
217 _ = localizer
216 _ = localizer
218 old_data = old_data or {}
217 old_data = old_data or {}
219
218
220 class _validator(formencode.validators.FancyValidator):
219 class _validator(formencode.validators.FancyValidator):
221 messages = {
220 messages = {
222 'invalid_group': _(u'Invalid user group name'),
221 'invalid_group': _(u'Invalid user group name'),
223 'group_exist': _(u'User group `%(usergroup)s` already exists'),
222 'group_exist': _(u'User group `%(usergroup)s` already exists'),
224 'invalid_usergroup_name':
223 'invalid_usergroup_name':
225 _(u'user group name may only contain alphanumeric '
224 _(u'user group name may only contain alphanumeric '
226 u'characters underscores, periods or dashes and must begin '
225 u'characters underscores, periods or dashes and must begin '
227 u'with alphanumeric character')
226 u'with alphanumeric character')
228 }
227 }
229
228
230 def validate_python(self, value, state):
229 def validate_python(self, value, state):
231 if value in ['default']:
230 if value in ['default']:
232 msg = M(self, 'invalid_group', state)
231 msg = M(self, 'invalid_group', state)
233 raise formencode.Invalid(
232 raise formencode.Invalid(
234 msg, value, state, error_dict={'users_group_name': msg}
233 msg, value, state, error_dict={'users_group_name': msg}
235 )
234 )
236 # check if group is unique
235 # check if group is unique
237 old_ugname = None
236 old_ugname = None
238 if edit:
237 if edit:
239 old_id = old_data.get('users_group_id')
238 old_id = old_data.get('users_group_id')
240 old_ugname = UserGroup.get(old_id).users_group_name
239 old_ugname = UserGroup.get(old_id).users_group_name
241
240
242 if old_ugname != value or not edit:
241 if old_ugname != value or not edit:
243 is_existing_group = UserGroup.get_by_group_name(
242 is_existing_group = UserGroup.get_by_group_name(
244 value, case_insensitive=True)
243 value, case_insensitive=True)
245 if is_existing_group:
244 if is_existing_group:
246 msg = M(self, 'group_exist', state, usergroup=value)
245 msg = M(self, 'group_exist', state, usergroup=value)
247 raise formencode.Invalid(
246 raise formencode.Invalid(
248 msg, value, state, error_dict={'users_group_name': msg}
247 msg, value, state, error_dict={'users_group_name': msg}
249 )
248 )
250
249
251 if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
250 if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
252 msg = M(self, 'invalid_usergroup_name', state)
251 msg = M(self, 'invalid_usergroup_name', state)
253 raise formencode.Invalid(
252 raise formencode.Invalid(
254 msg, value, state, error_dict={'users_group_name': msg}
253 msg, value, state, error_dict={'users_group_name': msg}
255 )
254 )
256 return _validator
255 return _validator
257
256
258
257
259 def ValidRepoGroup(localizer, edit=False, old_data=None, can_create_in_root=False):
258 def ValidRepoGroup(localizer, edit=False, old_data=None, can_create_in_root=False):
260 _ = localizer
259 _ = localizer
261 old_data = old_data or {}
260 old_data = old_data or {}
262
261
263 class _validator(formencode.validators.FancyValidator):
262 class _validator(formencode.validators.FancyValidator):
264 messages = {
263 messages = {
265 'group_parent_id': _(u'Cannot assign this group as parent'),
264 'group_parent_id': _(u'Cannot assign this group as parent'),
266 'group_exists': _(u'Group "%(group_name)s" already exists'),
265 'group_exists': _(u'Group "%(group_name)s" already exists'),
267 'repo_exists': _(u'Repository with name "%(group_name)s" '
266 'repo_exists': _(u'Repository with name "%(group_name)s" '
268 u'already exists'),
267 u'already exists'),
269 'permission_denied': _(u"no permission to store repository group "
268 'permission_denied': _(u"no permission to store repository group "
270 u"in this location"),
269 u"in this location"),
271 'permission_denied_root': _(
270 'permission_denied_root': _(
272 u"no permission to store repository group "
271 u"no permission to store repository group "
273 u"in root location")
272 u"in root location")
274 }
273 }
275
274
276 def _to_python(self, value, state):
275 def _to_python(self, value, state):
277 group_name = repo_name_slug(value.get('group_name', ''))
276 group_name = repo_name_slug(value.get('group_name', ''))
278 group_parent_id = safe_int(value.get('group_parent_id'))
277 group_parent_id = safe_int(value.get('group_parent_id'))
279 gr = RepoGroup.get(group_parent_id)
278 gr = RepoGroup.get(group_parent_id)
280 if gr:
279 if gr:
281 parent_group_path = gr.full_path
280 parent_group_path = gr.full_path
282 # value needs to be aware of group name in order to check
281 # value needs to be aware of group name in order to check
283 # db key. This is actually just the name to store in the
282 # db key. This is actually just the name to store in the
284 # database
283 # database
285 group_name_full = (
284 group_name_full = (
286 parent_group_path + RepoGroup.url_sep() + group_name)
285 parent_group_path + RepoGroup.url_sep() + group_name)
287 else:
286 else:
288 group_name_full = group_name
287 group_name_full = group_name
289
288
290 value['group_name'] = group_name
289 value['group_name'] = group_name
291 value['group_name_full'] = group_name_full
290 value['group_name_full'] = group_name_full
292 value['group_parent_id'] = group_parent_id
291 value['group_parent_id'] = group_parent_id
293 return value
292 return value
294
293
295 def validate_python(self, value, state):
294 def validate_python(self, value, state):
296
295
297 old_group_name = None
296 old_group_name = None
298 group_name = value.get('group_name')
297 group_name = value.get('group_name')
299 group_name_full = value.get('group_name_full')
298 group_name_full = value.get('group_name_full')
300 group_parent_id = safe_int(value.get('group_parent_id'))
299 group_parent_id = safe_int(value.get('group_parent_id'))
301 if group_parent_id == -1:
300 if group_parent_id == -1:
302 group_parent_id = None
301 group_parent_id = None
303
302
304 group_obj = RepoGroup.get(old_data.get('group_id'))
303 group_obj = RepoGroup.get(old_data.get('group_id'))
305 parent_group_changed = False
304 parent_group_changed = False
306 if edit:
305 if edit:
307 old_group_name = group_obj.group_name
306 old_group_name = group_obj.group_name
308 old_group_parent_id = group_obj.group_parent_id
307 old_group_parent_id = group_obj.group_parent_id
309
308
310 if group_parent_id != old_group_parent_id:
309 if group_parent_id != old_group_parent_id:
311 parent_group_changed = True
310 parent_group_changed = True
312
311
313 # TODO: mikhail: the following if statement is not reached
312 # TODO: mikhail: the following if statement is not reached
314 # since group_parent_id's OneOf validation fails before.
313 # since group_parent_id's OneOf validation fails before.
315 # Can be removed.
314 # Can be removed.
316
315
317 # check against setting a parent of self
316 # check against setting a parent of self
318 parent_of_self = (
317 parent_of_self = (
319 old_data['group_id'] == group_parent_id
318 old_data['group_id'] == group_parent_id
320 if group_parent_id else False
319 if group_parent_id else False
321 )
320 )
322 if parent_of_self:
321 if parent_of_self:
323 msg = M(self, 'group_parent_id', state)
322 msg = M(self, 'group_parent_id', state)
324 raise formencode.Invalid(
323 raise formencode.Invalid(
325 msg, value, state, error_dict={'group_parent_id': msg}
324 msg, value, state, error_dict={'group_parent_id': msg}
326 )
325 )
327
326
328 # group we're moving current group inside
327 # group we're moving current group inside
329 child_group = None
328 child_group = None
330 if group_parent_id:
329 if group_parent_id:
331 child_group = RepoGroup.query().filter(
330 child_group = RepoGroup.query().filter(
332 RepoGroup.group_id == group_parent_id).scalar()
331 RepoGroup.group_id == group_parent_id).scalar()
333
332
334 # do a special check that we cannot move a group to one of
333 # do a special check that we cannot move a group to one of
335 # its children
334 # its children
336 if edit and child_group:
335 if edit and child_group:
337 parents = [x.group_id for x in child_group.parents]
336 parents = [x.group_id for x in child_group.parents]
338 move_to_children = old_data['group_id'] in parents
337 move_to_children = old_data['group_id'] in parents
339 if move_to_children:
338 if move_to_children:
340 msg = M(self, 'group_parent_id', state)
339 msg = M(self, 'group_parent_id', state)
341 raise formencode.Invalid(
340 raise formencode.Invalid(
342 msg, value, state, error_dict={'group_parent_id': msg})
341 msg, value, state, error_dict={'group_parent_id': msg})
343
342
344 # Check if we have permission to store in the parent.
343 # Check if we have permission to store in the parent.
345 # Only check if the parent group changed.
344 # Only check if the parent group changed.
346 if parent_group_changed:
345 if parent_group_changed:
347 if child_group is None:
346 if child_group is None:
348 if not can_create_in_root:
347 if not can_create_in_root:
349 msg = M(self, 'permission_denied_root', state)
348 msg = M(self, 'permission_denied_root', state)
350 raise formencode.Invalid(
349 raise formencode.Invalid(
351 msg, value, state,
350 msg, value, state,
352 error_dict={'group_parent_id': msg})
351 error_dict={'group_parent_id': msg})
353 else:
352 else:
354 valid = HasRepoGroupPermissionAny('group.admin')
353 valid = HasRepoGroupPermissionAny('group.admin')
355 forbidden = not valid(
354 forbidden = not valid(
356 child_group.group_name, 'can create group validator')
355 child_group.group_name, 'can create group validator')
357 if forbidden:
356 if forbidden:
358 msg = M(self, 'permission_denied', state)
357 msg = M(self, 'permission_denied', state)
359 raise formencode.Invalid(
358 raise formencode.Invalid(
360 msg, value, state,
359 msg, value, state,
361 error_dict={'group_parent_id': msg})
360 error_dict={'group_parent_id': msg})
362
361
363 # if we change the name or it's a new group, check for existing names
362 # if we change the name or it's a new group, check for existing names
364 # or repositories with the same name
363 # or repositories with the same name
365 if old_group_name != group_name_full or not edit:
364 if old_group_name != group_name_full or not edit:
366 # check group
365 # check group
367 gr = RepoGroup.get_by_group_name(group_name_full)
366 gr = RepoGroup.get_by_group_name(group_name_full)
368 if gr:
367 if gr:
369 msg = M(self, 'group_exists', state, group_name=group_name)
368 msg = M(self, 'group_exists', state, group_name=group_name)
370 raise formencode.Invalid(
369 raise formencode.Invalid(
371 msg, value, state, error_dict={'group_name': msg})
370 msg, value, state, error_dict={'group_name': msg})
372
371
373 # check for same repo
372 # check for same repo
374 repo = Repository.get_by_repo_name(group_name_full)
373 repo = Repository.get_by_repo_name(group_name_full)
375 if repo:
374 if repo:
376 msg = M(self, 'repo_exists', state, group_name=group_name)
375 msg = M(self, 'repo_exists', state, group_name=group_name)
377 raise formencode.Invalid(
376 raise formencode.Invalid(
378 msg, value, state, error_dict={'group_name': msg})
377 msg, value, state, error_dict={'group_name': msg})
379 return _validator
378 return _validator
380
379
381
380
382 def ValidPassword(localizer):
381 def ValidPassword(localizer):
383 _ = localizer
382 _ = localizer
384
383
385 class _validator(formencode.validators.FancyValidator):
384 class _validator(formencode.validators.FancyValidator):
386 messages = {
385 messages = {
387 'invalid_password':
386 'invalid_password':
388 _(u'Invalid characters (non-ascii) in password')
387 _(u'Invalid characters (non-ascii) in password')
389 }
388 }
390
389
391 def validate_python(self, value, state):
390 def validate_python(self, value, state):
392 try:
391 try:
393 (value or '').decode('ascii')
392 (value or '').decode('ascii')
394 except UnicodeError:
393 except UnicodeError:
395 msg = M(self, 'invalid_password', state)
394 msg = M(self, 'invalid_password', state)
396 raise formencode.Invalid(msg, value, state,)
395 raise formencode.Invalid(msg, value, state,)
397 return _validator
396 return _validator
398
397
399
398
400 def ValidPasswordsMatch(
399 def ValidPasswordsMatch(
401 localizer, passwd='new_password',
400 localizer, passwd='new_password',
402 passwd_confirmation='password_confirmation'):
401 passwd_confirmation='password_confirmation'):
403 _ = localizer
402 _ = localizer
404
403
405 class _validator(formencode.validators.FancyValidator):
404 class _validator(formencode.validators.FancyValidator):
406 messages = {
405 messages = {
407 'password_mismatch': _(u'Passwords do not match'),
406 'password_mismatch': _(u'Passwords do not match'),
408 }
407 }
409
408
410 def validate_python(self, value, state):
409 def validate_python(self, value, state):
411
410
412 pass_val = value.get('password') or value.get(passwd)
411 pass_val = value.get('password') or value.get(passwd)
413 if pass_val != value[passwd_confirmation]:
412 if pass_val != value[passwd_confirmation]:
414 msg = M(self, 'password_mismatch', state)
413 msg = M(self, 'password_mismatch', state)
415 raise formencode.Invalid(
414 raise formencode.Invalid(
416 msg, value, state,
415 msg, value, state,
417 error_dict={passwd: msg, passwd_confirmation: msg}
416 error_dict={passwd: msg, passwd_confirmation: msg}
418 )
417 )
419 return _validator
418 return _validator
420
419
421
420
422 def ValidAuth(localizer):
421 def ValidAuth(localizer):
423 _ = localizer
422 _ = localizer
424
423
425 class _validator(formencode.validators.FancyValidator):
424 class _validator(formencode.validators.FancyValidator):
426 messages = {
425 messages = {
427 'invalid_password': _(u'invalid password'),
426 'invalid_password': _(u'invalid password'),
428 'invalid_username': _(u'invalid user name'),
427 'invalid_username': _(u'invalid user name'),
429 'disabled_account': _(u'Your account is disabled')
428 'disabled_account': _(u'Your account is disabled')
430 }
429 }
431
430
432 def validate_python(self, value, state):
431 def validate_python(self, value, state):
433 from rhodecode.authentication.base import authenticate, HTTP_TYPE
432 from rhodecode.authentication.base import authenticate, HTTP_TYPE
434
433
435 password = value['password']
434 password = value['password']
436 username = value['username']
435 username = value['username']
437
436
438 if not authenticate(username, password, '', HTTP_TYPE,
437 if not authenticate(username, password, '', HTTP_TYPE,
439 skip_missing=True):
438 skip_missing=True):
440 user = User.get_by_username(username)
439 user = User.get_by_username(username)
441 if user and not user.active:
440 if user and not user.active:
442 log.warning('user %s is disabled', username)
441 log.warning('user %s is disabled', username)
443 msg = M(self, 'disabled_account', state)
442 msg = M(self, 'disabled_account', state)
444 raise formencode.Invalid(
443 raise formencode.Invalid(
445 msg, value, state, error_dict={'username': msg}
444 msg, value, state, error_dict={'username': msg}
446 )
445 )
447 else:
446 else:
448 log.warning('user `%s` failed to authenticate', username)
447 log.warning('user `%s` failed to authenticate', username)
449 msg = M(self, 'invalid_username', state)
448 msg = M(self, 'invalid_username', state)
450 msg2 = M(self, 'invalid_password', state)
449 msg2 = M(self, 'invalid_password', state)
451 raise formencode.Invalid(
450 raise formencode.Invalid(
452 msg, value, state,
451 msg, value, state,
453 error_dict={'username': msg, 'password': msg2}
452 error_dict={'username': msg, 'password': msg2}
454 )
453 )
455 return _validator
454 return _validator
456
455
457
456
458 def ValidAuthToken(localizer):
459 _ = localizer
460
461 class _validator(formencode.validators.FancyValidator):
462 messages = {
463 'invalid_token': _(u'Token mismatch')
464 }
465
466 def validate_python(self, value, state):
467 if value != authentication_token():
468 msg = M(self, 'invalid_token', state)
469 raise formencode.Invalid(msg, value, state)
470 return _validator
471
472
473 def ValidRepoName(localizer, edit=False, old_data=None):
457 def ValidRepoName(localizer, edit=False, old_data=None):
474 old_data = old_data or {}
458 old_data = old_data or {}
475 _ = localizer
459 _ = localizer
476
460
477 class _validator(formencode.validators.FancyValidator):
461 class _validator(formencode.validators.FancyValidator):
478 messages = {
462 messages = {
479 'invalid_repo_name':
463 'invalid_repo_name':
480 _(u'Repository name %(repo)s is disallowed'),
464 _(u'Repository name %(repo)s is disallowed'),
481 # top level
465 # top level
482 'repository_exists': _(u'Repository with name %(repo)s '
466 'repository_exists': _(u'Repository with name %(repo)s '
483 u'already exists'),
467 u'already exists'),
484 'group_exists': _(u'Repository group with name "%(repo)s" '
468 'group_exists': _(u'Repository group with name "%(repo)s" '
485 u'already exists'),
469 u'already exists'),
486 # inside a group
470 # inside a group
487 'repository_in_group_exists': _(u'Repository with name %(repo)s '
471 'repository_in_group_exists': _(u'Repository with name %(repo)s '
488 u'exists in group "%(group)s"'),
472 u'exists in group "%(group)s"'),
489 'group_in_group_exists': _(
473 'group_in_group_exists': _(
490 u'Repository group with name "%(repo)s" '
474 u'Repository group with name "%(repo)s" '
491 u'exists in group "%(group)s"'),
475 u'exists in group "%(group)s"'),
492 }
476 }
493
477
494 def _to_python(self, value, state):
478 def _to_python(self, value, state):
495 repo_name = repo_name_slug(value.get('repo_name', ''))
479 repo_name = repo_name_slug(value.get('repo_name', ''))
496 repo_group = value.get('repo_group')
480 repo_group = value.get('repo_group')
497 if repo_group:
481 if repo_group:
498 gr = RepoGroup.get(repo_group)
482 gr = RepoGroup.get(repo_group)
499 group_path = gr.full_path
483 group_path = gr.full_path
500 group_name = gr.group_name
484 group_name = gr.group_name
501 # value needs to be aware of group name in order to check
485 # value needs to be aware of group name in order to check
502 # the db key; this is just the actual name to store in the
486 # the db key; this is just the actual name to store in the
503 # database
487 # database
504 repo_name_full = group_path + RepoGroup.url_sep() + repo_name
488 repo_name_full = group_path + RepoGroup.url_sep() + repo_name
505 else:
489 else:
506 group_name = group_path = ''
490 group_name = group_path = ''
507 repo_name_full = repo_name
491 repo_name_full = repo_name
508
492
509 value['repo_name'] = repo_name
493 value['repo_name'] = repo_name
510 value['repo_name_full'] = repo_name_full
494 value['repo_name_full'] = repo_name_full
511 value['group_path'] = group_path
495 value['group_path'] = group_path
512 value['group_name'] = group_name
496 value['group_name'] = group_name
513 return value
497 return value
514
498
515 def validate_python(self, value, state):
499 def validate_python(self, value, state):
516
500
517 repo_name = value.get('repo_name')
501 repo_name = value.get('repo_name')
518 repo_name_full = value.get('repo_name_full')
502 repo_name_full = value.get('repo_name_full')
519 group_path = value.get('group_path')
503 group_path = value.get('group_path')
520 group_name = value.get('group_name')
504 group_name = value.get('group_name')
521
505
522 if repo_name in [ADMIN_PREFIX, '']:
506 if repo_name in [ADMIN_PREFIX, '']:
523 msg = M(self, 'invalid_repo_name', state, repo=repo_name)
507 msg = M(self, 'invalid_repo_name', state, repo=repo_name)
524 raise formencode.Invalid(
508 raise formencode.Invalid(
525 msg, value, state, error_dict={'repo_name': msg})
509 msg, value, state, error_dict={'repo_name': msg})
526
510
527 rename = old_data.get('repo_name') != repo_name_full
511 rename = old_data.get('repo_name') != repo_name_full
528 create = not edit
512 create = not edit
529 if rename or create:
513 if rename or create:
530
514
531 if group_path:
515 if group_path:
532 if Repository.get_by_repo_name(repo_name_full):
516 if Repository.get_by_repo_name(repo_name_full):
533 msg = M(self, 'repository_in_group_exists', state,
517 msg = M(self, 'repository_in_group_exists', state,
534 repo=repo_name, group=group_name)
518 repo=repo_name, group=group_name)
535 raise formencode.Invalid(
519 raise formencode.Invalid(
536 msg, value, state, error_dict={'repo_name': msg})
520 msg, value, state, error_dict={'repo_name': msg})
537 if RepoGroup.get_by_group_name(repo_name_full):
521 if RepoGroup.get_by_group_name(repo_name_full):
538 msg = M(self, 'group_in_group_exists', state,
522 msg = M(self, 'group_in_group_exists', state,
539 repo=repo_name, group=group_name)
523 repo=repo_name, group=group_name)
540 raise formencode.Invalid(
524 raise formencode.Invalid(
541 msg, value, state, error_dict={'repo_name': msg})
525 msg, value, state, error_dict={'repo_name': msg})
542 else:
526 else:
543 if RepoGroup.get_by_group_name(repo_name_full):
527 if RepoGroup.get_by_group_name(repo_name_full):
544 msg = M(self, 'group_exists', state, repo=repo_name)
528 msg = M(self, 'group_exists', state, repo=repo_name)
545 raise formencode.Invalid(
529 raise formencode.Invalid(
546 msg, value, state, error_dict={'repo_name': msg})
530 msg, value, state, error_dict={'repo_name': msg})
547
531
548 if Repository.get_by_repo_name(repo_name_full):
532 if Repository.get_by_repo_name(repo_name_full):
549 msg = M(
533 msg = M(
550 self, 'repository_exists', state, repo=repo_name)
534 self, 'repository_exists', state, repo=repo_name)
551 raise formencode.Invalid(
535 raise formencode.Invalid(
552 msg, value, state, error_dict={'repo_name': msg})
536 msg, value, state, error_dict={'repo_name': msg})
553 return value
537 return value
554 return _validator
538 return _validator
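# --- Editor's illustrative sketch (not part of this changeset) ---
# The essence of ValidRepoName._to_python in isolation: the submitted name is
# slugged and, when a parent group is selected, joined with the group path to
# form the full database key. `simple_slug` is a crude stand-in for
# rhodecode's repo_name_slug and '/' stands in for RepoGroup.url_sep().
import re


def simple_slug(value):
    # rough approximation only: collapse anything that is not a word
    # character, dot or dash into a single dash
    return re.sub(r'[^\w.-]+', '-', value.strip()).strip('-')


def full_repo_name(repo_name, group_path=''):
    repo_name = simple_slug(repo_name)
    return group_path + '/' + repo_name if group_path else repo_name


assert full_repo_name('my repo', 'team/backend') == 'team/backend/my-repo'
assert full_repo_name('standalone') == 'standalone'
# --- end sketch ---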
555
539
556
540
557 def ValidForkName(localizer, *args, **kwargs):
541 def ValidForkName(localizer, *args, **kwargs):
558 _ = localizer
542 _ = localizer
559
543
560 return ValidRepoName(localizer, *args, **kwargs)
544 return ValidRepoName(localizer, *args, **kwargs)
561
545
562
546
563 def SlugifyName(localizer):
547 def SlugifyName(localizer):
564 _ = localizer
548 _ = localizer
565
549
566 class _validator(formencode.validators.FancyValidator):
550 class _validator(formencode.validators.FancyValidator):
567
551
568 def _to_python(self, value, state):
552 def _to_python(self, value, state):
569 return repo_name_slug(value)
553 return repo_name_slug(value)
570
554
571 def validate_python(self, value, state):
555 def validate_python(self, value, state):
572 pass
556 pass
573 return _validator
557 return _validator
574
558
575
559
576 def CannotHaveGitSuffix(localizer):
560 def CannotHaveGitSuffix(localizer):
577 _ = localizer
561 _ = localizer
578
562
579 class _validator(formencode.validators.FancyValidator):
563 class _validator(formencode.validators.FancyValidator):
580 messages = {
564 messages = {
581 'has_git_suffix':
565 'has_git_suffix':
582 _(u'Repository name cannot end with .git'),
566 _(u'Repository name cannot end with .git'),
583 }
567 }
584
568
585 def _to_python(self, value, state):
569 def _to_python(self, value, state):
586 return value
570 return value
587
571
588 def validate_python(self, value, state):
572 def validate_python(self, value, state):
589 if value and value.endswith('.git'):
573 if value and value.endswith('.git'):
590 msg = M(
574 msg = M(
591 self, 'has_git_suffix', state)
575 self, 'has_git_suffix', state)
592 raise formencode.Invalid(
576 raise formencode.Invalid(
593 msg, value, state, error_dict={'repo_name': msg})
577 msg, value, state, error_dict={'repo_name': msg})
594 return _validator
578 return _validator
595
579
596
580
597 def ValidCloneUri(localizer):
581 def ValidCloneUri(localizer):
598 _ = localizer
582 _ = localizer
599
583
600 class InvalidCloneUrl(Exception):
584 class InvalidCloneUrl(Exception):
601 allowed_prefixes = ()
585 allowed_prefixes = ()
602
586
603 def url_handler(repo_type, url):
587 def url_handler(repo_type, url):
604 config = make_db_config(clear_session=False)
588 config = make_db_config(clear_session=False)
605 if repo_type == 'hg':
589 if repo_type == 'hg':
606 allowed_prefixes = ('http', 'svn+http', 'git+http')
590 allowed_prefixes = ('http', 'svn+http', 'git+http')
607
591
608 if 'http' in url[:4]:
592 if 'http' in url[:4]:
609 # initially check if it's at least a proper URL
593 # initially check if it's at least a proper URL
610 # and whether it passes basic auth
594 # and whether it passes basic auth
611 MercurialRepository.check_url(url, config)
595 MercurialRepository.check_url(url, config)
612 elif 'svn+http' in url[:8]: # svn->hg import
596 elif 'svn+http' in url[:8]: # svn->hg import
613 SubversionRepository.check_url(url, config)
597 SubversionRepository.check_url(url, config)
614 elif 'git+http' in url[:8]: # git->hg import
598 elif 'git+http' in url[:8]: # git->hg import
615 raise NotImplementedError()
599 raise NotImplementedError()
616 else:
600 else:
617 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
601 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
618 'Allowed URLs must start with one of %s'
602 'Allowed URLs must start with one of %s'
619 % (url, ','.join(allowed_prefixes)))
603 % (url, ','.join(allowed_prefixes)))
620 exc.allowed_prefixes = allowed_prefixes
604 exc.allowed_prefixes = allowed_prefixes
621 raise exc
605 raise exc
622
606
623 elif repo_type == 'git':
607 elif repo_type == 'git':
624 allowed_prefixes = ('http', 'svn+http', 'hg+http')
608 allowed_prefixes = ('http', 'svn+http', 'hg+http')
625 if 'http' in url[:4]:
609 if 'http' in url[:4]:
626 # initially check if it's at least a proper URL
610 # initially check if it's at least a proper URL
627 # and whether it passes basic auth
611 # and whether it passes basic auth
628 GitRepository.check_url(url, config)
612 GitRepository.check_url(url, config)
629 elif 'svn+http' in url[:8]: # svn->git import
613 elif 'svn+http' in url[:8]: # svn->git import
630 raise NotImplementedError()
614 raise NotImplementedError()
631 elif 'hg+http' in url[:8]: # hg->git import
615 elif 'hg+http' in url[:8]: # hg->git import
632 raise NotImplementedError()
616 raise NotImplementedError()
633 else:
617 else:
634 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
618 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
635 'Allowed URLs must start with one of %s'
619 'Allowed URLs must start with one of %s'
636 % (url, ','.join(allowed_prefixes)))
620 % (url, ','.join(allowed_prefixes)))
637 exc.allowed_prefixes = allowed_prefixes
621 exc.allowed_prefixes = allowed_prefixes
638 raise exc
622 raise exc
639
623
640 class _validator(formencode.validators.FancyValidator):
624 class _validator(formencode.validators.FancyValidator):
641 messages = {
625 messages = {
642 'clone_uri': _(u'invalid clone url for %(rtype)s repository'),
626 'clone_uri': _(u'invalid clone url for %(rtype)s repository'),
643 'invalid_clone_uri': _(
627 'invalid_clone_uri': _(
644 u'Invalid clone url, provide a valid clone '
628 u'Invalid clone url, provide a valid clone '
645 u'url starting with one of %(allowed_prefixes)s')
629 u'url starting with one of %(allowed_prefixes)s')
646 }
630 }
647
631
648 def validate_python(self, value, state):
632 def validate_python(self, value, state):
649 repo_type = value.get('repo_type')
633 repo_type = value.get('repo_type')
650 url = value.get('clone_uri')
634 url = value.get('clone_uri')
651
635
652 if url:
636 if url:
653 try:
637 try:
654 url_handler(repo_type, url)
638 url_handler(repo_type, url)
655 except InvalidCloneUrl as e:
639 except InvalidCloneUrl as e:
656 log.warning(e)
640 log.warning(e)
657 msg = M(self, 'invalid_clone_uri', state, rtype=repo_type,
641 msg = M(self, 'invalid_clone_uri', state, rtype=repo_type,
658 allowed_prefixes=','.join(e.allowed_prefixes))
642 allowed_prefixes=','.join(e.allowed_prefixes))
659 raise formencode.Invalid(msg, value, state,
643 raise formencode.Invalid(msg, value, state,
660 error_dict={'clone_uri': msg})
644 error_dict={'clone_uri': msg})
661 except Exception:
645 except Exception:
662 log.exception('Url validation failed')
646 log.exception('Url validation failed')
663 msg = M(self, 'clone_uri', state, rtype=repo_type)
647 msg = M(self, 'clone_uri', state, rtype=repo_type)
664 raise formencode.Invalid(msg, value, state,
648 raise formencode.Invalid(msg, value, state,
665 error_dict={'clone_uri': msg})
649 error_dict={'clone_uri': msg})
666 return _validator
650 return _validator
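# --- Editor's illustrative sketch (not part of this changeset) ---
# A simplified sketch of the prefix dispatch performed by url_handler above.
# The backend check_url() calls are replaced by a comment so the snippet is
# self-contained; the allowed prefixes are copied from the code above, and
# the name `check_clone_uri` is invented.
ALLOWED_CLONE_PREFIXES = {
    'hg': ('http', 'svn+http', 'git+http'),
    'git': ('http', 'svn+http', 'hg+http'),
}


class InvalidCloneUrlSketch(Exception):
    allowed_prefixes = ()


def check_clone_uri(repo_type, url):
    prefixes = ALLOWED_CLONE_PREFIXES[repo_type]
    if not url.startswith(prefixes):
        exc = InvalidCloneUrlSketch(
            'Clone from URI %s not allowed. Allowed URLs must start '
            'with one of %s' % (url, ','.join(prefixes)))
        exc.allowed_prefixes = prefixes
        raise exc
    # the real implementation would now delegate to the backend, e.g.
    # MercurialRepository.check_url(url, config) for a plain http URL on 'hg'
    return True
# --- end sketch ---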
667
651
668
652
669 def ValidForkType(localizer, old_data=None):
653 def ValidForkType(localizer, old_data=None):
670 _ = localizer
654 _ = localizer
671 old_data = old_data or {}
655 old_data = old_data or {}
672
656
673 class _validator(formencode.validators.FancyValidator):
657 class _validator(formencode.validators.FancyValidator):
674 messages = {
658 messages = {
675 'invalid_fork_type': _(u'Fork has to be the same type as the parent')
659 'invalid_fork_type': _(u'Fork has to be the same type as the parent')
676 }
660 }
677
661
678 def validate_python(self, value, state):
662 def validate_python(self, value, state):
679 if old_data['repo_type'] != value:
663 if old_data['repo_type'] != value:
680 msg = M(self, 'invalid_fork_type', state)
664 msg = M(self, 'invalid_fork_type', state)
681 raise formencode.Invalid(
665 raise formencode.Invalid(
682 msg, value, state, error_dict={'repo_type': msg}
666 msg, value, state, error_dict={'repo_type': msg}
683 )
667 )
684 return _validator
668 return _validator
685
669
686
670
687 def CanWriteGroup(localizer, old_data=None):
671 def CanWriteGroup(localizer, old_data=None):
688 _ = localizer
672 _ = localizer
689
673
690 class _validator(formencode.validators.FancyValidator):
674 class _validator(formencode.validators.FancyValidator):
691 messages = {
675 messages = {
692 'permission_denied': _(
676 'permission_denied': _(
693 u"You do not have the permission "
677 u"You do not have the permission "
694 u"to create repositories in this group."),
678 u"to create repositories in this group."),
695 'permission_denied_root': _(
679 'permission_denied_root': _(
696 u"You do not have the permission to store repositories in "
680 u"You do not have the permission to store repositories in "
697 u"the root location.")
681 u"the root location.")
698 }
682 }
699
683
700 def _to_python(self, value, state):
684 def _to_python(self, value, state):
701 # root location
685 # root location
702 if value in [-1, "-1"]:
686 if value in [-1, "-1"]:
703 return None
687 return None
704 return value
688 return value
705
689
706 def validate_python(self, value, state):
690 def validate_python(self, value, state):
707 gr = RepoGroup.get(value)
691 gr = RepoGroup.get(value)
708 gr_name = gr.group_name if gr else None # None means ROOT location
692 gr_name = gr.group_name if gr else None # None means ROOT location
709 # 'create repositories with write permission on group' is set to true
693 # 'create repositories with write permission on group' is set to true
710 create_on_write = HasPermissionAny(
694 create_on_write = HasPermissionAny(
711 'hg.create.write_on_repogroup.true')()
695 'hg.create.write_on_repogroup.true')()
712 group_admin = HasRepoGroupPermissionAny('group.admin')(
696 group_admin = HasRepoGroupPermissionAny('group.admin')(
713 gr_name, 'can write into group validator')
697 gr_name, 'can write into group validator')
714 group_write = HasRepoGroupPermissionAny('group.write')(
698 group_write = HasRepoGroupPermissionAny('group.write')(
715 gr_name, 'can write into group validator')
699 gr_name, 'can write into group validator')
716 forbidden = not (group_admin or (group_write and create_on_write))
700 forbidden = not (group_admin or (group_write and create_on_write))
717 can_create_repos = HasPermissionAny(
701 can_create_repos = HasPermissionAny(
718 'hg.admin', 'hg.create.repository')
702 'hg.admin', 'hg.create.repository')
719 gid = (old_data['repo_group'].get('group_id')
703 gid = (old_data['repo_group'].get('group_id')
720 if (old_data and 'repo_group' in old_data) else None)
704 if (old_data and 'repo_group' in old_data) else None)
721 value_changed = gid != safe_int(value)
705 value_changed = gid != safe_int(value)
722 new = not old_data
706 new = not old_data
723 # only check if the value changed; there's a case where someone had
707 # only check if the value changed; there's a case where someone had
724 # their write permission to a repository revoked after creating it; we
708 # their write permission to a repository revoked after creating it; we
725 # don't need to check permissions if they didn't change the value of
709 # don't need to check permissions if they didn't change the value of
726 # the groups field in the form
710 # the groups field in the form
727 if value_changed or new:
711 if value_changed or new:
728 # the parent group needs to exist
712 # the parent group needs to exist
729 if gr and forbidden:
713 if gr and forbidden:
730 msg = M(self, 'permission_denied', state)
714 msg = M(self, 'permission_denied', state)
731 raise formencode.Invalid(
715 raise formencode.Invalid(
732 msg, value, state, error_dict={'repo_type': msg}
716 msg, value, state, error_dict={'repo_type': msg}
733 )
717 )
734 # check if we can write to the root location
718 # check if we can write to the root location
735 elif gr is None and not can_create_repos():
719 elif gr is None and not can_create_repos():
736 msg = M(self, 'permission_denied_root', state)
720 msg = M(self, 'permission_denied_root', state)
737 raise formencode.Invalid(
721 raise formencode.Invalid(
738 msg, value, state, error_dict={'repo_type': msg}
722 msg, value, state, error_dict={'repo_type': msg}
739 )
723 )
740 return _validator
724 return _validator
741
725
742
726
743 def ValidPerms(localizer, type_='repo'):
727 def ValidPerms(localizer, type_='repo'):
744 _ = localizer
728 _ = localizer
745 if type_ == 'repo_group':
729 if type_ == 'repo_group':
746 EMPTY_PERM = 'group.none'
730 EMPTY_PERM = 'group.none'
747 elif type_ == 'repo':
731 elif type_ == 'repo':
748 EMPTY_PERM = 'repository.none'
732 EMPTY_PERM = 'repository.none'
749 elif type_ == 'user_group':
733 elif type_ == 'user_group':
750 EMPTY_PERM = 'usergroup.none'
734 EMPTY_PERM = 'usergroup.none'
751
735
752 class _validator(formencode.validators.FancyValidator):
736 class _validator(formencode.validators.FancyValidator):
753 messages = {
737 messages = {
754 'perm_new_member_name':
738 'perm_new_member_name':
755 _(u'This username or user group name is not valid')
739 _(u'This username or user group name is not valid')
756 }
740 }
757
741
758 def _to_python(self, value, state):
742 def _to_python(self, value, state):
759 perm_updates = OrderedSet()
743 perm_updates = OrderedSet()
760 perm_additions = OrderedSet()
744 perm_additions = OrderedSet()
761 perm_deletions = OrderedSet()
745 perm_deletions = OrderedSet()
762 # build lists of permissions to update/delete and new permissions
746 # build lists of permissions to update/delete and new permissions
763
747
764 # Read the perm_new_member/perm_del_member attributes and group
748 # Read the perm_new_member/perm_del_member attributes and group
765 # them by their IDs
749 # them by their IDs
766 new_perms_group = collections.defaultdict(dict)
750 new_perms_group = collections.defaultdict(dict)
767 del_perms_group = collections.defaultdict(dict)
751 del_perms_group = collections.defaultdict(dict)
768 for k, v in value.copy().iteritems():
752 for k, v in value.copy().iteritems():
769 if k.startswith('perm_del_member'):
753 if k.startswith('perm_del_member'):
770 # delete from the original storage so we don't process it later
754 # delete from the original storage so we don't process it later
771 del value[k]
755 del value[k]
772 # part is `id`, `type`
756 # part is `id`, `type`
773 _type, part = k.split('perm_del_member_')
757 _type, part = k.split('perm_del_member_')
774 args = part.split('_')
758 args = part.split('_')
775 if len(args) == 2:
759 if len(args) == 2:
776 _key, pos = args
760 _key, pos = args
777 del_perms_group[pos][_key] = v
761 del_perms_group[pos][_key] = v
778 if k.startswith('perm_new_member'):
762 if k.startswith('perm_new_member'):
779 # delete from the original storage so we don't process it later
763 # delete from the original storage so we don't process it later
780 del value[k]
764 del value[k]
781 # part is `id`, `type`, `perm`
765 # part is `id`, `type`, `perm`
782 _type, part = k.split('perm_new_member_')
766 _type, part = k.split('perm_new_member_')
783 args = part.split('_')
767 args = part.split('_')
784 if len(args) == 2:
768 if len(args) == 2:
785 _key, pos = args
769 _key, pos = args
786 new_perms_group[pos][_key] = v
770 new_perms_group[pos][_key] = v
787
771
788 # store the deletes
772 # store the deletes
789 for k in sorted(del_perms_group.keys()):
773 for k in sorted(del_perms_group.keys()):
790 perm_dict = del_perms_group[k]
774 perm_dict = del_perms_group[k]
791 del_member = perm_dict.get('id')
775 del_member = perm_dict.get('id')
792 del_type = perm_dict.get('type')
776 del_type = perm_dict.get('type')
793 if del_member and del_type:
777 if del_member and del_type:
794 perm_deletions.add(
778 perm_deletions.add(
795 (del_member, None, del_type))
779 (del_member, None, del_type))
796
780
797 # store additions in the order they were added in the web form
781 # store additions in the order they were added in the web form
798 for k in sorted(new_perms_group.keys()):
782 for k in sorted(new_perms_group.keys()):
799 perm_dict = new_perms_group[k]
783 perm_dict = new_perms_group[k]
800 new_member = perm_dict.get('id')
784 new_member = perm_dict.get('id')
801 new_type = perm_dict.get('type')
785 new_type = perm_dict.get('type')
802 new_perm = perm_dict.get('perm')
786 new_perm = perm_dict.get('perm')
803 if new_member and new_perm and new_type:
787 if new_member and new_perm and new_type:
804 perm_additions.add(
788 perm_additions.add(
805 (new_member, new_perm, new_type))
789 (new_member, new_perm, new_type))
806
790
807 # get updates of permissions
791 # get updates of permissions
808 # (read the existing radio button states)
792 # (read the existing radio button states)
809 default_user_id = User.get_default_user().user_id
793 default_user_id = User.get_default_user().user_id
810 for k, update_value in value.iteritems():
794 for k, update_value in value.iteritems():
811 if k.startswith('u_perm_') or k.startswith('g_perm_'):
795 if k.startswith('u_perm_') or k.startswith('g_perm_'):
812 member = k[7:]
796 member = k[7:]
813 update_type = {'u': 'user',
797 update_type = {'u': 'user',
814 'g': 'users_group'}[k[0]]
798 'g': 'users_group'}[k[0]]
815
799
816 if safe_int(member) == default_user_id:
800 if safe_int(member) == default_user_id:
817 if str2bool(value.get('repo_private')):
801 if str2bool(value.get('repo_private')):
818 # prevent updating the default user's permissions
802 # prevent updating the default user's permissions
819 # when this repository is marked as private
803 # when this repository is marked as private
820 update_value = EMPTY_PERM
804 update_value = EMPTY_PERM
821
805
822 perm_updates.add(
806 perm_updates.add(
823 (member, update_value, update_type))
807 (member, update_value, update_type))
824
808
825 value['perm_additions'] = [] # propagated later
809 value['perm_additions'] = [] # propagated later
826 value['perm_updates'] = list(perm_updates)
810 value['perm_updates'] = list(perm_updates)
827 value['perm_deletions'] = list(perm_deletions)
811 value['perm_deletions'] = list(perm_deletions)
828
812
829 updates_map = dict(
813 updates_map = dict(
830 (x[0], (x[1], x[2])) for x in value['perm_updates'])
814 (x[0], (x[1], x[2])) for x in value['perm_updates'])
831 # make sure Additions don't override updates.
815 # make sure Additions don't override updates.
832 for member_id, perm, member_type in list(perm_additions):
816 for member_id, perm, member_type in list(perm_additions):
833 if member_id in updates_map:
817 if member_id in updates_map:
834 perm = updates_map[member_id][0]
818 perm = updates_map[member_id][0]
835 value['perm_additions'].append((member_id, perm, member_type))
819 value['perm_additions'].append((member_id, perm, member_type))
836
820
837 # on new entries, validate that the users exist and are active;
821 # on new entries, validate that the users exist and are active;
838 # this provides feedback to the form
822 # this provides feedback to the form
839 try:
823 try:
840 if member_type == 'user':
824 if member_type == 'user':
841 User.query()\
825 User.query()\
842 .filter(User.active == true())\
826 .filter(User.active == true())\
843 .filter(User.user_id == member_id).one()
827 .filter(User.user_id == member_id).one()
844 if member_type == 'users_group':
828 if member_type == 'users_group':
845 UserGroup.query()\
829 UserGroup.query()\
846 .filter(UserGroup.users_group_active == true())\
830 .filter(UserGroup.users_group_active == true())\
847 .filter(UserGroup.users_group_id == member_id)\
831 .filter(UserGroup.users_group_id == member_id)\
848 .one()
832 .one()
849
833
850 except Exception:
834 except Exception:
851 log.exception('Updated permission failed: org_exc:')
835 log.exception('Updated permission failed: org_exc:')
852 msg = M(self, 'perm_new_member_name', state)
836 msg = M(self, 'perm_new_member_name', state)
853 raise formencode.Invalid(
837 raise formencode.Invalid(
854 msg, value, state, error_dict={
838 msg, value, state, error_dict={
855 'perm_new_member_name': msg}
839 'perm_new_member_name': msg}
856 )
840 )
857 return value
841 return value
858 return _validator
842 return _validator
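# --- Editor's illustrative sketch (not part of this changeset) ---
# How the perm_new_member_*/perm_del_member_* form fields are grouped by
# their trailing position index before being turned into additions and
# deletions. The form data below is invented; the field names follow the
# pattern parsed by ValidPerms above.
import collections


def group_member_fields(form, prefix):
    grouped = collections.defaultdict(dict)
    for name, val in form.items():
        if name.startswith(prefix):
            rest = name[len(prefix):]        # e.g. 'id_1' or 'type_1'
            key, _sep, pos = rest.rpartition('_')
            if key and pos:
                grouped[pos][key] = val
    return grouped


form = {
    'perm_new_member_id_1': '5',
    'perm_new_member_type_1': 'user',
    'perm_new_member_perm_1': 'repository.read',
}
assert group_member_fields(form, 'perm_new_member_')['1'] == {
    'id': '5', 'type': 'user', 'perm': 'repository.read'}
# --- end sketch ---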
859
843
860
844
861 def ValidPath(localizer):
845 def ValidPath(localizer):
862 _ = localizer
846 _ = localizer
863
847
864 class _validator(formencode.validators.FancyValidator):
848 class _validator(formencode.validators.FancyValidator):
865 messages = {
849 messages = {
866 'invalid_path': _(u'This is not a valid path')
850 'invalid_path': _(u'This is not a valid path')
867 }
851 }
868
852
869 def validate_python(self, value, state):
853 def validate_python(self, value, state):
870 if not os.path.isdir(value):
854 if not os.path.isdir(value):
871 msg = M(self, 'invalid_path', state)
855 msg = M(self, 'invalid_path', state)
872 raise formencode.Invalid(
856 raise formencode.Invalid(
873 msg, value, state, error_dict={'paths_root_path': msg}
857 msg, value, state, error_dict={'paths_root_path': msg}
874 )
858 )
875 return _validator
859 return _validator
876
860
877
861
878 def UniqSystemEmail(localizer, old_data=None):
862 def UniqSystemEmail(localizer, old_data=None):
879 _ = localizer
863 _ = localizer
880 old_data = old_data or {}
864 old_data = old_data or {}
881
865
882 class _validator(formencode.validators.FancyValidator):
866 class _validator(formencode.validators.FancyValidator):
883 messages = {
867 messages = {
884 'email_taken': _(u'This e-mail address is already taken')
868 'email_taken': _(u'This e-mail address is already taken')
885 }
869 }
886
870
887 def _to_python(self, value, state):
871 def _to_python(self, value, state):
888 return value.lower()
872 return value.lower()
889
873
890 def validate_python(self, value, state):
874 def validate_python(self, value, state):
891 if (old_data.get('email') or '').lower() != value:
875 if (old_data.get('email') or '').lower() != value:
892 user = User.get_by_email(value, case_insensitive=True)
876 user = User.get_by_email(value, case_insensitive=True)
893 if user:
877 if user:
894 msg = M(self, 'email_taken', state)
878 msg = M(self, 'email_taken', state)
895 raise formencode.Invalid(
879 raise formencode.Invalid(
896 msg, value, state, error_dict={'email': msg}
880 msg, value, state, error_dict={'email': msg}
897 )
881 )
898 return _validator
882 return _validator
899
883
900
884
901 def ValidSystemEmail(localizer):
885 def ValidSystemEmail(localizer):
902 _ = localizer
886 _ = localizer
903
887
904 class _validator(formencode.validators.FancyValidator):
888 class _validator(formencode.validators.FancyValidator):
905 messages = {
889 messages = {
906 'non_existing_email': _(u'e-mail "%(email)s" does not exist.')
890 'non_existing_email': _(u'e-mail "%(email)s" does not exist.')
907 }
891 }
908
892
909 def _to_python(self, value, state):
893 def _to_python(self, value, state):
910 return value.lower()
894 return value.lower()
911
895
912 def validate_python(self, value, state):
896 def validate_python(self, value, state):
913 user = User.get_by_email(value, case_insensitive=True)
897 user = User.get_by_email(value, case_insensitive=True)
914 if user is None:
898 if user is None:
915 msg = M(self, 'non_existing_email', state, email=value)
899 msg = M(self, 'non_existing_email', state, email=value)
916 raise formencode.Invalid(
900 raise formencode.Invalid(
917 msg, value, state, error_dict={'email': msg}
901 msg, value, state, error_dict={'email': msg}
918 )
902 )
919 return _validator
903 return _validator
920
904
921
905
922 def NotReviewedRevisions(localizer, repo_id):
906 def NotReviewedRevisions(localizer, repo_id):
923 _ = localizer
907 _ = localizer
924 class _validator(formencode.validators.FancyValidator):
908 class _validator(formencode.validators.FancyValidator):
925 messages = {
909 messages = {
926 'rev_already_reviewed':
910 'rev_already_reviewed':
927 _(u'Revisions %(revs)s are already part of a pull request '
911 _(u'Revisions %(revs)s are already part of a pull request '
928 u'or have a status set'),
912 u'or have a status set'),
929 }
913 }
930
914
931 def validate_python(self, value, state):
915 def validate_python(self, value, state):
932 # check revisions if they are not reviewed, or a part of another
916 # check revisions if they are not reviewed, or a part of another
933 # pull request
917 # pull request
934 statuses = ChangesetStatus.query()\
918 statuses = ChangesetStatus.query()\
935 .filter(ChangesetStatus.revision.in_(value))\
919 .filter(ChangesetStatus.revision.in_(value))\
936 .filter(ChangesetStatus.repo_id == repo_id)\
920 .filter(ChangesetStatus.repo_id == repo_id)\
937 .all()
921 .all()
938
922
939 errors = []
923 errors = []
940 for status in statuses:
924 for status in statuses:
941 if status.pull_request_id:
925 if status.pull_request_id:
942 errors.append(['pull_req', status.revision[:12]])
926 errors.append(['pull_req', status.revision[:12]])
943 elif status.status:
927 elif status.status:
944 errors.append(['status', status.revision[:12]])
928 errors.append(['status', status.revision[:12]])
945
929
946 if errors:
930 if errors:
947 revs = ','.join([x[1] for x in errors])
931 revs = ','.join([x[1] for x in errors])
948 msg = M(self, 'rev_already_reviewed', state, revs=revs)
932 msg = M(self, 'rev_already_reviewed', state, revs=revs)
949 raise formencode.Invalid(
933 raise formencode.Invalid(
950 msg, value, state, error_dict={'revisions': revs})
934 msg, value, state, error_dict={'revisions': revs})
951
935
952 return _validator
936 return _validator
953
937
954
938
955 def ValidIp(localizer):
939 def ValidIp(localizer):
956 _ = localizer
940 _ = localizer
957
941
958 class _validator(CIDR):
942 class _validator(CIDR):
959 messages = {
943 messages = {
960 'badFormat': _(u'Please enter a valid IPv4 or IPv6 address'),
944 'badFormat': _(u'Please enter a valid IPv4 or IPv6 address'),
961 'illegalBits': _(
945 'illegalBits': _(
962 u'The network size (bits) must be within the range '
946 u'The network size (bits) must be within the range '
963 u'of 0-32 (not %(bits)r)'),
947 u'of 0-32 (not %(bits)r)'),
964 }
948 }
965
949
966 # we override the default to_python() call
950 # we override the default to_python() call
967 def to_python(self, value, state):
951 def to_python(self, value, state):
968 v = super(_validator, self).to_python(value, state)
952 v = super(_validator, self).to_python(value, state)
969 v = safe_unicode(v.strip())
953 v = safe_unicode(v.strip())
970 net = ipaddress.ip_network(address=v, strict=False)
954 net = ipaddress.ip_network(address=v, strict=False)
971 return str(net)
955 return str(net)
972
956
973 def validate_python(self, value, state):
957 def validate_python(self, value, state):
974 try:
958 try:
975 addr = safe_unicode(value.strip())
959 addr = safe_unicode(value.strip())
976 # this raises a ValueError if the address is not IPv4 or IPv6
960 # this raises a ValueError if the address is not IPv4 or IPv6
977 ipaddress.ip_network(addr, strict=False)
961 ipaddress.ip_network(addr, strict=False)
978 except ValueError:
962 except ValueError:
979 raise formencode.Invalid(self.message('badFormat', state),
963 raise formencode.Invalid(self.message('badFormat', state),
980 value, state)
964 value, state)
981 return _validator
965 return _validator
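# --- Editor's illustrative sketch (not part of this changeset) ---
# The normalisation ValidIp.to_python performs, in isolation: a bare address
# or a CIDR/netmask range is parsed with ipaddress.ip_network(strict=False)
# and stored in its canonical string form; a ValueError signals bad input.
# `normalize_ip` is an invented name for illustration only.
import ipaddress


def normalize_ip(value):
    try:
        return str(ipaddress.ip_network(value.strip(), strict=False))
    except ValueError:
        return None


assert normalize_ip(u'192.168.0.1') == '192.168.0.1/32'
assert normalize_ip(u'10.0.0.0/255.255.255.0') == '10.0.0.0/24'
assert normalize_ip(u'not-an-ip') is None
# --- end sketch ---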
982
966
983
967
984 def FieldKey(localizer):
968 def FieldKey(localizer):
985 _ = localizer
969 _ = localizer
986
970
987 class _validator(formencode.validators.FancyValidator):
971 class _validator(formencode.validators.FancyValidator):
988 messages = {
972 messages = {
989 'badFormat': _(
973 'badFormat': _(
990 u'Key name can only consist of letters, '
974 u'Key name can only consist of letters, '
991 u'underscore, dash or numbers'),
975 u'underscore, dash or numbers'),
992 }
976 }
993
977
994 def validate_python(self, value, state):
978 def validate_python(self, value, state):
995 if not re.match('[a-zA-Z0-9_-]+$', value):
979 if not re.match('[a-zA-Z0-9_-]+$', value):
996 raise formencode.Invalid(self.message('badFormat', state),
980 raise formencode.Invalid(self.message('badFormat', state),
997 value, state)
981 value, state)
998 return _validator
982 return _validator
999
983
1000
984
1001 def ValidAuthPlugins(localizer):
985 def ValidAuthPlugins(localizer):
1002 _ = localizer
986 _ = localizer
1003
987
1004 class _validator(formencode.validators.FancyValidator):
988 class _validator(formencode.validators.FancyValidator):
1005 messages = {
989 messages = {
1006 'import_duplicate': _(
990 'import_duplicate': _(
1007 u'Plugins %(loaded)s and %(next_to_load)s '
991 u'Plugins %(loaded)s and %(next_to_load)s '
1008 u'both export the same name'),
992 u'both export the same name'),
1009 'missing_includeme': _(
993 'missing_includeme': _(
1010 u'The plugin "%(plugin_id)s" is missing an includeme '
994 u'The plugin "%(plugin_id)s" is missing an includeme '
1011 u'function.'),
995 u'function.'),
1012 'import_error': _(
996 'import_error': _(
1013 u'Can not load plugin "%(plugin_id)s"'),
997 u'Can not load plugin "%(plugin_id)s"'),
1014 'no_plugin': _(
998 'no_plugin': _(
1015 u'No plugin available with ID "%(plugin_id)s"'),
999 u'No plugin available with ID "%(plugin_id)s"'),
1016 }
1000 }
1017
1001
1018 def _to_python(self, value, state):
1002 def _to_python(self, value, state):
1019 # filter empty values
1003 # filter empty values
1020 return filter(lambda s: s not in [None, ''], value)
1004 return filter(lambda s: s not in [None, ''], value)
1021
1005
1022 def _validate_legacy_plugin_id(self, plugin_id, value, state):
1006 def _validate_legacy_plugin_id(self, plugin_id, value, state):
1023 """
1007 """
1024 Validates that the plugin import works. It also checks that the
1008 Validates that the plugin import works. It also checks that the
1025 plugin has an includeme attribute.
1009 plugin has an includeme attribute.
1026 """
1010 """
1027 try:
1011 try:
1028 plugin = _import_legacy_plugin(plugin_id)
1012 plugin = _import_legacy_plugin(plugin_id)
1029 except Exception as e:
1013 except Exception as e:
1030 log.exception(
1014 log.exception(
1031 'Exception during import of auth legacy plugin "{}"'
1015 'Exception during import of auth legacy plugin "{}"'
1032 .format(plugin_id))
1016 .format(plugin_id))
1033 msg = M(self, 'import_error', state, plugin_id=plugin_id)
1017 msg = M(self, 'import_error', state, plugin_id=plugin_id)
1034 raise formencode.Invalid(msg, value, state)
1018 raise formencode.Invalid(msg, value, state)
1035
1019
1036 if not hasattr(plugin, 'includeme'):
1020 if not hasattr(plugin, 'includeme'):
1037 msg = M(self, 'missing_includeme', state, plugin_id=plugin_id)
1021 msg = M(self, 'missing_includeme', state, plugin_id=plugin_id)
1038 raise formencode.Invalid(msg, value, state)
1022 raise formencode.Invalid(msg, value, state)
1039
1023
1040 return plugin
1024 return plugin
1041
1025
1042 def _validate_plugin_id(self, plugin_id, value, state):
1026 def _validate_plugin_id(self, plugin_id, value, state):
1043 """
1027 """
1044 Plugins are already imported during app start up. Therefore this
1028 Plugins are already imported during app start up. Therefore this
1045 validation only retrieves the plugin from the plugin registry and
1029 validation only retrieves the plugin from the plugin registry and
1046 if it returns something not None everything is OK.
1030 if it returns something not None everything is OK.
1047 """
1031 """
1048 plugin = loadplugin(plugin_id)
1032 plugin = loadplugin(plugin_id)
1049
1033
1050 if plugin is None:
1034 if plugin is None:
1051 msg = M(self, 'no_plugin', state, plugin_id=plugin_id)
1035 msg = M(self, 'no_plugin', state, plugin_id=plugin_id)
1052 raise formencode.Invalid(msg, value, state)
1036 raise formencode.Invalid(msg, value, state)
1053
1037
1054 return plugin
1038 return plugin
1055
1039
1056 def validate_python(self, value, state):
1040 def validate_python(self, value, state):
1057 unique_names = {}
1041 unique_names = {}
1058 for plugin_id in value:
1042 for plugin_id in value:
1059
1043
1060 # Validate legacy or normal plugin.
1044 # Validate legacy or normal plugin.
1061 if plugin_id.startswith(legacy_plugin_prefix):
1045 if plugin_id.startswith(legacy_plugin_prefix):
1062 plugin = self._validate_legacy_plugin_id(
1046 plugin = self._validate_legacy_plugin_id(
1063 plugin_id, value, state)
1047 plugin_id, value, state)
1064 else:
1048 else:
1065 plugin = self._validate_plugin_id(plugin_id, value, state)
1049 plugin = self._validate_plugin_id(plugin_id, value, state)
1066
1050
1067 # Only allow unique plugin names.
1051 # Only allow unique plugin names.
1068 if plugin.name in unique_names:
1052 if plugin.name in unique_names:
1069 msg = M(self, 'import_duplicate', state,
1053 msg = M(self, 'import_duplicate', state,
1070 loaded=unique_names[plugin.name],
1054 loaded=unique_names[plugin.name],
1071 next_to_load=plugin)
1055 next_to_load=plugin)
1072 raise formencode.Invalid(msg, value, state)
1056 raise formencode.Invalid(msg, value, state)
1073 unique_names[plugin.name] = plugin
1057 unique_names[plugin.name] = plugin
1074 return _validator
1058 return _validator
1075
1059
1076
1060
1077 def ValidPattern(localizer):
1061 def ValidPattern(localizer):
1078 _ = localizer
1062 _ = localizer
1079
1063
1080 class _validator(formencode.validators.FancyValidator):
1064 class _validator(formencode.validators.FancyValidator):
1081 messages = {
1065 messages = {
1082 'bad_format': _(u'URL must start with http or /'),
1066 'bad_format': _(u'URL must start with http or /'),
1083 }
1067 }
1084
1068
1085 def _to_python(self, value, state):
1069 def _to_python(self, value, state):
1086 patterns = []
1070 patterns = []
1087
1071
1088 prefix = 'new_pattern'
1072 prefix = 'new_pattern'
1089 for name, v in value.iteritems():
1073 for name, v in value.iteritems():
1090 pattern_name = '_'.join((prefix, 'pattern'))
1074 pattern_name = '_'.join((prefix, 'pattern'))
1091 if name.startswith(pattern_name):
1075 if name.startswith(pattern_name):
1092 new_item_id = name[len(pattern_name)+1:]
1076 new_item_id = name[len(pattern_name)+1:]
1093
1077
1094 def _field(name):
1078 def _field(name):
1095 return '%s_%s_%s' % (prefix, name, new_item_id)
1079 return '%s_%s_%s' % (prefix, name, new_item_id)
1096
1080
1097 values = {
1081 values = {
1098 'issuetracker_pat': value.get(_field('pattern')),
1082 'issuetracker_pat': value.get(_field('pattern')),
1099 'issuetracker_url': value.get(_field('url')),
1083 'issuetracker_url': value.get(_field('url')),
1100 'issuetracker_pref': value.get(_field('prefix')),
1084 'issuetracker_pref': value.get(_field('prefix')),
1101 'issuetracker_desc': value.get(_field('description'))
1085 'issuetracker_desc': value.get(_field('description'))
1102 }
1086 }
1103 new_uid = md5(values['issuetracker_pat'])
1087 new_uid = md5(values['issuetracker_pat'])
1104
1088
1105 has_required_fields = (
1089 has_required_fields = (
1106 values['issuetracker_pat']
1090 values['issuetracker_pat']
1107 and values['issuetracker_url'])
1091 and values['issuetracker_url'])
1108
1092
1109 if has_required_fields:
1093 if has_required_fields:
1110 # validate that the url starts with http or /,
1094 # validate that the url starts with http or /,
1111 # otherwise it can lead to JS injection,
1095 # otherwise it can lead to JS injection,
1112 # e.g. specifying javascript:<malicious code>
1096 # e.g. specifying javascript:<malicious code>
1113 if not values['issuetracker_url'].startswith(('http', '/')):
1097 if not values['issuetracker_url'].startswith(('http', '/')):
1114 raise formencode.Invalid(
1098 raise formencode.Invalid(
1115 self.message('bad_format', state),
1099 self.message('bad_format', state),
1116 value, state)
1100 value, state)
1117
1101
1118 settings = [
1102 settings = [
1119 ('_'.join((key, new_uid)), values[key], 'unicode')
1103 ('_'.join((key, new_uid)), values[key], 'unicode')
1120 for key in values]
1104 for key in values]
1121 patterns.append(settings)
1105 patterns.append(settings)
1122
1106
1123 value['patterns'] = patterns
1107 value['patterns'] = patterns
1124 delete_patterns = value.get('uid') or []
1108 delete_patterns = value.get('uid') or []
1125 if not isinstance(delete_patterns, (list, tuple)):
1109 if not isinstance(delete_patterns, (list, tuple)):
1126 delete_patterns = [delete_patterns]
1110 delete_patterns = [delete_patterns]
1127 value['delete_patterns'] = delete_patterns
1111 value['delete_patterns'] = delete_patterns
1128 return value
1112 return value
1129 return _validator
1113 return _validator
@@ -1,296 +1,290 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import io
20 import io
21 import re
21 import re
22 import datetime
22 import datetime
23 import logging
23 import logging
24 import Queue
24 import Queue
25 import subprocess32
25 import subprocess32
26 import os
26 import os
27
27
28 from pyramid.i18n import get_localizer
28 from pyramid.i18n import get_localizer
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from pyramid.interfaces import IRoutesMapper
30 from pyramid.interfaces import IRoutesMapper
31 from pyramid.settings import asbool
31 from pyramid.settings import asbool
32 from pyramid.path import AssetResolver
32 from pyramid.path import AssetResolver
33 from threading import Thread
33 from threading import Thread
34
34
35 from rhodecode.translation import _ as tsf
35 from rhodecode.translation import _ as tsf
36 from rhodecode.config.jsroutes import generate_jsroutes_content
36 from rhodecode.config.jsroutes import generate_jsroutes_content
37 from rhodecode.lib import auth
37 from rhodecode.lib import auth
38 from rhodecode.lib.base import get_auth_user
38 from rhodecode.lib.base import get_auth_user
39
39
40
40
41 import rhodecode
41 import rhodecode
42
42
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 def add_renderer_globals(event):
47 def add_renderer_globals(event):
48 from rhodecode.lib import helpers
48 from rhodecode.lib import helpers
49
49
50 # TODO: When executed in pyramid view context the request is not available
50 # TODO: When executed in pyramid view context the request is not available
51 # in the event. Find a better solution to get the request.
51 # in the event. Find a better solution to get the request.
52 request = event['request'] or get_current_request()
52 request = event['request'] or get_current_request()
53
53
54 # Add Pyramid translation as '_' to context
54 # Add Pyramid translation as '_' to context
55 event['_'] = request.translate
55 event['_'] = request.translate
56 event['_ungettext'] = request.plularize
56 event['_ungettext'] = request.plularize
57 event['h'] = helpers
57 event['h'] = helpers
58
58
59
59
60 def add_localizer(event):
60 def add_localizer(event):
61 request = event.request
61 request = event.request
62 localizer = request.localizer
62 localizer = request.localizer
63
63
64 def auto_translate(*args, **kwargs):
64 def auto_translate(*args, **kwargs):
65 return localizer.translate(tsf(*args, **kwargs))
65 return localizer.translate(tsf(*args, **kwargs))
66
66
67 request.translate = auto_translate
67 request.translate = auto_translate
68 request.plularize = localizer.pluralize
68 request.plularize = localizer.pluralize
69
69
70
70
71 def set_user_lang(event):
71 def set_user_lang(event):
72 request = event.request
72 request = event.request
73 cur_user = getattr(request, 'user', None)
73 cur_user = getattr(request, 'user', None)
74
74
75 if cur_user:
75 if cur_user:
76 user_lang = cur_user.get_instance().user_data.get('language')
76 user_lang = cur_user.get_instance().user_data.get('language')
77 if user_lang:
77 if user_lang:
78 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
78 log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
79 event.request._LOCALE_ = user_lang
79 event.request._LOCALE_ = user_lang
80
80
81
81
82 def add_request_user_context(event):
82 def add_request_user_context(event):
83 """
83 """
84 Adds auth user into request context
84 Adds auth user into request context
85 """
85 """
86 request = event.request
86 request = event.request
87
87
88 if hasattr(request, 'vcs_call'):
88 if hasattr(request, 'vcs_call'):
89 # skip vcs calls
89 # skip vcs calls
90 return
90 return
91
91
92 if hasattr(request, 'rpc_method'):
92 if hasattr(request, 'rpc_method'):
93 # skip api calls
93 # skip api calls
94 return
94 return
95
95
96 auth_user = get_auth_user(request)
96 auth_user = get_auth_user(request)
97 request.user = auth_user
97 request.user = auth_user
98 request.environ['rc_auth_user'] = auth_user
98 request.environ['rc_auth_user'] = auth_user
99
99
100
100
101 def inject_app_settings(event):
101 def inject_app_settings(event):
102 settings = event.app.registry.settings
102 settings = event.app.registry.settings
103 # inject info about available permissions
103 # inject info about available permissions
104 auth.set_available_permissions(settings)
104 auth.set_available_permissions(settings)
105
105
106
106
107 def scan_repositories_if_enabled(event):
107 def scan_repositories_if_enabled(event):
108 """
108 """
109 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
109 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
110 does a repository scan if enabled in the settings.
110 does a repository scan if enabled in the settings.
111 """
111 """
112 settings = event.app.registry.settings
112 settings = event.app.registry.settings
113 vcs_server_enabled = settings['vcs.server.enable']
113 vcs_server_enabled = settings['vcs.server.enable']
114 import_on_startup = settings['startup.import_repos']
114 import_on_startup = settings['startup.import_repos']
115 if vcs_server_enabled and import_on_startup:
115 if vcs_server_enabled and import_on_startup:
116 from rhodecode.model.scm import ScmModel
116 from rhodecode.model.scm import ScmModel
117 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
117 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
118 repositories = ScmModel().repo_scan(get_rhodecode_base_path())
118 repositories = ScmModel().repo_scan(get_rhodecode_base_path())
119 repo2db_mapper(repositories, remove_obsolete=False)
119 repo2db_mapper(repositories, remove_obsolete=False)
120
120
121
121
122 def write_metadata_if_needed(event):
122 def write_metadata_if_needed(event):
123 """
123 """
124 Writes upgrade metadata
124 Writes upgrade metadata
125 """
125 """
126 import rhodecode
126 import rhodecode
127 from rhodecode.lib import system_info
127 from rhodecode.lib import system_info
128 from rhodecode.lib import ext_json
128 from rhodecode.lib import ext_json
129
129
130 def write():
130 def write():
131 fname = '.rcmetadata.json'
131 fname = '.rcmetadata.json'
132 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
132 ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
133 metadata_destination = os.path.join(ini_loc, fname)
133 metadata_destination = os.path.join(ini_loc, fname)
134
134
135 configuration = system_info.SysInfo(
135 configuration = system_info.SysInfo(
136 system_info.rhodecode_config)()['value']
136 system_info.rhodecode_config)()['value']
137 license_token = configuration['config']['license_token']
137 license_token = configuration['config']['license_token']
138 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
138 dbinfo = system_info.SysInfo(system_info.database_info)()['value']
139 del dbinfo['url']
139 del dbinfo['url']
140 metadata = dict(
140 metadata = dict(
141 desc='upgrade metadata info',
141 desc='upgrade metadata info',
142 license_token=license_token,
142 license_token=license_token,
143 created_on=datetime.datetime.utcnow().isoformat(),
143 created_on=datetime.datetime.utcnow().isoformat(),
144 usage=system_info.SysInfo(system_info.usage_info)()['value'],
144 usage=system_info.SysInfo(system_info.usage_info)()['value'],
145 platform=system_info.SysInfo(system_info.platform_type)()['value'],
145 platform=system_info.SysInfo(system_info.platform_type)()['value'],
146 database=dbinfo,
146 database=dbinfo,
147 cpu=system_info.SysInfo(system_info.cpu)()['value'],
147 cpu=system_info.SysInfo(system_info.cpu)()['value'],
148 memory=system_info.SysInfo(system_info.memory)()['value'],
148 memory=system_info.SysInfo(system_info.memory)()['value'],
149 )
149 )
150
150
151 with open(metadata_destination, 'wb') as f:
151 with open(metadata_destination, 'wb') as f:
152 f.write(ext_json.json.dumps(metadata))
152 f.write(ext_json.json.dumps(metadata))
153
153
154 settings = event.app.registry.settings
154 settings = event.app.registry.settings
155 if settings.get('metadata.skip'):
155 if settings.get('metadata.skip'):
156 return
156 return
157
157
158 try:
158 try:
159 write()
159 write()
160 except Exception:
160 except Exception:
161 pass
161 pass
162
162
163
163
164 def write_js_routes_if_enabled(event):
164 def write_js_routes_if_enabled(event):
165 registry = event.app.registry
165 registry = event.app.registry
166
166
167 mapper = registry.queryUtility(IRoutesMapper)
167 mapper = registry.queryUtility(IRoutesMapper)
168 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
168 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
169
169
170 def _extract_route_information(route):
170 def _extract_route_information(route):
171 """
171 """
172 Convert a route into tuple(name, path, args), eg:
172 Convert a route into tuple(name, path, args), eg:
173 ('show_user', '/profile/%(username)s', ['username'])
173 ('show_user', '/profile/%(username)s', ['username'])
174 """
174 """
175
175
176 routepath = route.pattern
176 routepath = route.pattern
177 pattern = route.pattern
177 pattern = route.pattern
178
178
179 def replace(matchobj):
179 def replace(matchobj):
180 if matchobj.group(1):
180 if matchobj.group(1):
181 return "%%(%s)s" % matchobj.group(1).split(':')[0]
181 return "%%(%s)s" % matchobj.group(1).split(':')[0]
182 else:
182 else:
183 return "%%(%s)s" % matchobj.group(2)
183 return "%%(%s)s" % matchobj.group(2)
184
184
185 routepath = _argument_prog.sub(replace, routepath)
185 routepath = _argument_prog.sub(replace, routepath)
186
186
187 if not routepath.startswith('/'):
187 if not routepath.startswith('/'):
188 routepath = '/'+routepath
188 routepath = '/'+routepath
189
189
190 return (
190 return (
191 route.name,
191 route.name,
192 routepath,
192 routepath,
193 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
193 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
194 for arg in _argument_prog.findall(pattern)]
194 for arg in _argument_prog.findall(pattern)]
195 )
195 )
196
196
197 def get_routes():
197 def get_routes():
198 # pylons routes
199 # TODO(marcink): remove when pyramid migration is finished
200 if 'routes.map' in rhodecode.CONFIG:
201 for route in rhodecode.CONFIG['routes.map'].jsroutes():
202 yield route
203
204 # pyramid routes
198 # pyramid routes
205 for route in mapper.get_routes():
199 for route in mapper.get_routes():
206 if not route.name.startswith('__'):
200 if not route.name.startswith('__'):
207 yield _extract_route_information(route)
201 yield _extract_route_information(route)
208
202
209 if asbool(registry.settings.get('generate_js_files', 'false')):
203 if asbool(registry.settings.get('generate_js_files', 'false')):
210 static_path = AssetResolver().resolve('rhodecode:public').abspath()
204 static_path = AssetResolver().resolve('rhodecode:public').abspath()
211 jsroutes = get_routes()
205 jsroutes = get_routes()
212 jsroutes_file_content = generate_jsroutes_content(jsroutes)
206 jsroutes_file_content = generate_jsroutes_content(jsroutes)
213 jsroutes_file_path = os.path.join(
207 jsroutes_file_path = os.path.join(
214 static_path, 'js', 'rhodecode', 'routes.js')
208 static_path, 'js', 'rhodecode', 'routes.js')
215
209
216 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
210 with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
217 f.write(jsroutes_file_content)
211 f.write(jsroutes_file_content)
218
212
219
213
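# --- Editor's illustrative sketch (not part of this changeset) ---
# What _extract_route_information's substitution does to a Pyramid route
# pattern: '{name}' placeholders (optionally carrying a regex after ':')
# become '%(name)s' format slots for the generated JS routes. The example
# route patterns below are invented.
import re

_arg = re.compile(r'\{(.*?)\}|:\((.*)\)')


def _replace(matchobj):
    if matchobj.group(1):
        return "%%(%s)s" % matchobj.group(1).split(':')[0]
    return "%%(%s)s" % matchobj.group(2)


assert _arg.sub(_replace, '/{repo_name}/changeset/{revision}') == \
    '/%(repo_name)s/changeset/%(revision)s'
assert _arg.sub(_replace, '/profile/{username:[^/]+}') == \
    '/profile/%(username)s'
# --- end sketch ---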
220 class Subscriber(object):
214 class Subscriber(object):
221 """
215 """
222 Base class for subscribers to the pyramid event system.
216 Base class for subscribers to the pyramid event system.
223 """
217 """
224 def __call__(self, event):
218 def __call__(self, event):
225 self.run(event)
219 self.run(event)
226
220
227 def run(self, event):
221 def run(self, event):
228 raise NotImplementedError('Subclass has to implement this.')
222 raise NotImplementedError('Subclass has to implement this.')
229
223
230
224
231 class AsyncSubscriber(Subscriber):
225 class AsyncSubscriber(Subscriber):
232 """
226 """
233 Subscriber that handles the execution of events in a separate task to not
227 Subscriber that handles the execution of events in a separate task to not
234 block the execution of the code which triggers the event. It puts the
228 block the execution of the code which triggers the event. It puts the
235 received events into a queue from which the worker process takes them in
229 received events into a queue from which the worker process takes them in
236 order.
230 order.
237 """
231 """
238 def __init__(self):
232 def __init__(self):
239 self._stop = False
233 self._stop = False
240 self._eventq = Queue.Queue()
234 self._eventq = Queue.Queue()
241 self._worker = self.create_worker()
235 self._worker = self.create_worker()
242 self._worker.start()
236 self._worker.start()
243
237
244 def __call__(self, event):
238 def __call__(self, event):
245 self._eventq.put(event)
239 self._eventq.put(event)
246
240
247 def create_worker(self):
241 def create_worker(self):
248 worker = Thread(target=self.do_work)
242 worker = Thread(target=self.do_work)
249 worker.daemon = True
243 worker.daemon = True
250 return worker
244 return worker
251
245
252 def stop_worker(self):
246 def stop_worker(self):
253 self._stop = True
247 self._stop = True
254 self._eventq.put(None)
248 self._eventq.put(None)
255 self._worker.join()
249 self._worker.join()
256
250
257 def do_work(self):
251 def do_work(self):
258 while not self._stop:
252 while not self._stop:
259 event = self._eventq.get()
253 event = self._eventq.get()
260 if event is not None:
254 if event is not None:
261 self.run(event)
255 self.run(event)
262
256
263
257
264 class AsyncSubprocessSubscriber(AsyncSubscriber):
258 class AsyncSubprocessSubscriber(AsyncSubscriber):
265 """
259 """
266 Subscriber that uses the subprocess32 module to execute a command if an
260 Subscriber that uses the subprocess32 module to execute a command if an
267 event is received. Events are handled asynchronously.
261 event is received. Events are handled asynchronously.
268 """
262 """
269
263
270 def __init__(self, cmd, timeout=None):
264 def __init__(self, cmd, timeout=None):
271 super(AsyncSubprocessSubscriber, self).__init__()
265 super(AsyncSubprocessSubscriber, self).__init__()
272 self._cmd = cmd
266 self._cmd = cmd
273 self._timeout = timeout
267 self._timeout = timeout
274
268
275 def run(self, event):
269 def run(self, event):
276 cmd = self._cmd
270 cmd = self._cmd
277 timeout = self._timeout
271 timeout = self._timeout
278 log.debug('Executing command %s.', cmd)
272 log.debug('Executing command %s.', cmd)
279
273
280 try:
274 try:
281 output = subprocess32.check_output(
275 output = subprocess32.check_output(
282 cmd, timeout=timeout, stderr=subprocess32.STDOUT)
276 cmd, timeout=timeout, stderr=subprocess32.STDOUT)
283 log.debug('Command finished %s', cmd)
277 log.debug('Command finished %s', cmd)
284 if output:
278 if output:
285 log.debug('Command output: %s', output)
279 log.debug('Command output: %s', output)
286 except subprocess32.TimeoutExpired as e:
280 except subprocess32.TimeoutExpired as e:
287 log.exception('Timeout while executing command.')
281 log.exception('Timeout while executing command.')
288 if e.output:
282 if e.output:
289 log.error('Command output: %s', e.output)
283 log.error('Command output: %s', e.output)
290 except subprocess32.CalledProcessError as e:
284 except subprocess32.CalledProcessError as e:
291 log.exception('Error while executing command.')
285 log.exception('Error while executing command.')
292 if e.output:
286 if e.output:
293 log.error('Command output: %s', e.output)
287 log.error('Command output: %s', e.output)
294 except:
288 except:
295 log.exception(
289 log.exception(
296 'Exception while executing command %s.', cmd)
290 'Exception while executing command %s.', cmd)
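Reviewer note: a quick illustration of how the subscriber classes above are meant to be used. AsyncSubprocessSubscriber is a callable, so it can be registered directly with Pyramid's config.add_subscriber. This is a minimal sketch only; the event name and command are hypothetical examples, not the wiring used by this changeset.

    def includeme(config):
        # run an external command on a background thread whenever the
        # (hypothetical) event fires; the call goes through AsyncSubscriber's
        # internal queue, so the request that emits the event is not blocked
        notify = AsyncSubprocessSubscriber(
            cmd=['/usr/local/bin/notify-push.sh'], timeout=30)
        config.add_subscriber(notify, 'rhodecode.events.RepoPushEvent')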
@@ -1,287 +1,288 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from subprocess32 import Popen, PIPE
21 from subprocess32 import Popen, PIPE
22 import os
22 import os
23 import shutil
23 import shutil
24 import sys
24 import sys
25 import tempfile
25 import tempfile
26
26
27 import pytest
27 import pytest
28 from sqlalchemy.engine import url
28 from sqlalchemy.engine import url
29
29
30 from rhodecode.tests.fixture import TestINI
30 from rhodecode.tests.fixture import TestINI
31
31
32
32
33 def _get_dbs_from_metafunc(metafunc):
33 def _get_dbs_from_metafunc(metafunc):
34 if hasattr(metafunc.function, 'dbs'):
34 if hasattr(metafunc.function, 'dbs'):
35 # Supported backends by this test function, created from
35 # Supported backends by this test function, created from
36 # pytest.mark.dbs
36 # pytest.mark.dbs
37 backends = metafunc.function.dbs.args
37 backends = metafunc.function.dbs.args
38 else:
38 else:
39 backends = metafunc.config.getoption('--dbs')
39 backends = metafunc.config.getoption('--dbs')
40 return backends
40 return backends
41
41
42
42
43 def pytest_generate_tests(metafunc):
43 def pytest_generate_tests(metafunc):
44 # Support test generation based on --dbs parameter
44 # Support test generation based on --dbs parameter
45 if 'db_backend' in metafunc.fixturenames:
45 if 'db_backend' in metafunc.fixturenames:
46 requested_backends = set(metafunc.config.getoption('--dbs'))
46 requested_backends = set(metafunc.config.getoption('--dbs'))
47 backends = _get_dbs_from_metafunc(metafunc)
47 backends = _get_dbs_from_metafunc(metafunc)
48 backends = requested_backends.intersection(backends)
48 backends = requested_backends.intersection(backends)
49 # TODO: johbo: Disabling a backend did not work out with
49 # TODO: johbo: Disabling a backend did not work out with
50 # parametrization, find better way to achieve this.
50 # parametrization, find better way to achieve this.
51 if not backends:
51 if not backends:
52 metafunc.function._skip = True
52 metafunc.function._skip = True
53 metafunc.parametrize('db_backend_name', backends)
53 metafunc.parametrize('db_backend_name', backends)
54
54
55
55
56 def pytest_collection_modifyitems(session, config, items):
56 def pytest_collection_modifyitems(session, config, items):
57 remaining = [
57 remaining = [
58 i for i in items if not getattr(i.obj, '_skip', False)]
58 i for i in items if not getattr(i.obj, '_skip', False)]
59 items[:] = remaining
59 items[:] = remaining
60
60
61
61
62 @pytest.fixture
62 @pytest.fixture
63 def db_backend(
63 def db_backend(
64 request, db_backend_name, ini_config, tmpdir_factory):
64 request, db_backend_name, ini_config, tmpdir_factory):
65 basetemp = tmpdir_factory.getbasetemp().strpath
65 basetemp = tmpdir_factory.getbasetemp().strpath
66 klass = _get_backend(db_backend_name)
66 klass = _get_backend(db_backend_name)
67
67
68 option_name = '--{}-connection-string'.format(db_backend_name)
68 option_name = '--{}-connection-string'.format(db_backend_name)
69 connection_string = request.config.getoption(option_name) or None
69 connection_string = request.config.getoption(option_name) or None
70
70
71 return klass(
71 return klass(
72 config_file=ini_config, basetemp=basetemp,
72 config_file=ini_config, basetemp=basetemp,
73 connection_string=connection_string)
73 connection_string=connection_string)
74
74
75
75
76 def _get_backend(backend_type):
76 def _get_backend(backend_type):
77 return {
77 return {
78 'sqlite': SQLiteDBBackend,
78 'sqlite': SQLiteDBBackend,
79 'postgres': PostgresDBBackend,
79 'postgres': PostgresDBBackend,
80 'mysql': MySQLDBBackend,
80 'mysql': MySQLDBBackend,
81 '': EmptyDBBackend
81 '': EmptyDBBackend
82 }[backend_type]
82 }[backend_type]
83
83
84
84
85 class DBBackend(object):
85 class DBBackend(object):
86 _store = os.path.dirname(os.path.abspath(__file__))
86 _store = os.path.dirname(os.path.abspath(__file__))
87 _type = None
87 _type = None
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false'}}]
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
89 'startup.import_repos': 'false'}}]
89 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
90 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
90 _base_db_name = 'rhodecode_test_db_backend'
91 _base_db_name = 'rhodecode_test_db_backend'
91
92
92 def __init__(
93 def __init__(
93 self, config_file, db_name=None, basetemp=None,
94 self, config_file, db_name=None, basetemp=None,
94 connection_string=None):
95 connection_string=None):
95
96
96 from rhodecode.lib.vcs.backends.hg import largefiles_store
97 from rhodecode.lib.vcs.backends.hg import largefiles_store
97 from rhodecode.lib.vcs.backends.git import lfs_store
98 from rhodecode.lib.vcs.backends.git import lfs_store
98
99
99 self.fixture_store = os.path.join(self._store, self._type)
100 self.fixture_store = os.path.join(self._store, self._type)
100 self.db_name = db_name or self._base_db_name
101 self.db_name = db_name or self._base_db_name
101 self._base_ini_file = config_file
102 self._base_ini_file = config_file
102 self.stderr = ''
103 self.stderr = ''
103 self.stdout = ''
104 self.stdout = ''
104 self._basetemp = basetemp or tempfile.gettempdir()
105 self._basetemp = basetemp or tempfile.gettempdir()
105 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
106 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
106 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
107 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
107 self._repos_git_lfs_store = lfs_store(self._basetemp)
108 self._repos_git_lfs_store = lfs_store(self._basetemp)
108 self.connection_string = connection_string
109 self.connection_string = connection_string
109
110
110 @property
111 @property
111 def connection_string(self):
112 def connection_string(self):
112 return self._connection_string
113 return self._connection_string
113
114
114 @connection_string.setter
115 @connection_string.setter
115 def connection_string(self, new_connection_string):
116 def connection_string(self, new_connection_string):
116 if not new_connection_string:
117 if not new_connection_string:
117 new_connection_string = self.get_default_connection_string()
118 new_connection_string = self.get_default_connection_string()
118 else:
119 else:
119 new_connection_string = new_connection_string.format(
120 new_connection_string = new_connection_string.format(
120 db_name=self.db_name)
121 db_name=self.db_name)
121 url_parts = url.make_url(new_connection_string)
122 url_parts = url.make_url(new_connection_string)
122 self._connection_string = new_connection_string
123 self._connection_string = new_connection_string
123 self.user = url_parts.username
124 self.user = url_parts.username
124 self.password = url_parts.password
125 self.password = url_parts.password
125 self.host = url_parts.host
126 self.host = url_parts.host
126
127
127 def get_default_connection_string(self):
128 def get_default_connection_string(self):
128 raise NotImplementedError('default connection_string is required.')
129 raise NotImplementedError('default connection_string is required.')
129
130
130 def execute(self, cmd, env=None, *args):
131 def execute(self, cmd, env=None, *args):
131 """
132 """
132 Runs command on the system with given ``args``.
133 Runs command on the system with given ``args``.
133 """
134 """
134
135
135 command = cmd + ' ' + ' '.join(args)
136 command = cmd + ' ' + ' '.join(args)
136 sys.stdout.write(command)
137 sys.stdout.write(command)
137
138
138 # Tell Python to use UTF-8 encoding for stdout
139 # Tell Python to use UTF-8 encoding for stdout
139 _env = os.environ.copy()
140 _env = os.environ.copy()
140 _env['PYTHONIOENCODING'] = 'UTF-8'
141 _env['PYTHONIOENCODING'] = 'UTF-8'
141 if env:
142 if env:
142 _env.update(env)
143 _env.update(env)
143 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
144 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
144 self.stdout, self.stderr = self.p.communicate()
145 self.stdout, self.stderr = self.p.communicate()
145 sys.stdout.write('COMMAND:'+command+'\n')
146 sys.stdout.write('COMMAND:'+command+'\n')
146 sys.stdout.write(self.stdout)
147 sys.stdout.write(self.stdout)
147 return self.stdout, self.stderr
148 return self.stdout, self.stderr
148
149
149 def assert_returncode_success(self):
150 def assert_returncode_success(self):
150 if not self.p.returncode == 0:
151 if not self.p.returncode == 0:
151 print(self.stderr)
152 print(self.stderr)
152 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
153 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
153
154
154 def setup_rhodecode_db(self, ini_params=None, env=None):
155 def setup_rhodecode_db(self, ini_params=None, env=None):
155 if not ini_params:
156 if not ini_params:
156 ini_params = self._base_ini_config
157 ini_params = self._base_ini_config
157
158
158 ini_params.extend(self._db_url)
159 ini_params.extend(self._db_url)
159 with TestINI(self._base_ini_file, ini_params,
160 with TestINI(self._base_ini_file, ini_params,
160 self._type, destroy=True) as _ini_file:
161 self._type, destroy=True) as _ini_file:
161
162
162 if not os.path.isdir(self._repos_location):
163 if not os.path.isdir(self._repos_location):
163 os.makedirs(self._repos_location)
164 os.makedirs(self._repos_location)
164 if not os.path.isdir(self._repos_hg_largefiles_store):
165 if not os.path.isdir(self._repos_hg_largefiles_store):
165 os.makedirs(self._repos_hg_largefiles_store)
166 os.makedirs(self._repos_hg_largefiles_store)
166 if not os.path.isdir(self._repos_git_lfs_store):
167 if not os.path.isdir(self._repos_git_lfs_store):
167 os.makedirs(self._repos_git_lfs_store)
168 os.makedirs(self._repos_git_lfs_store)
168
169
169 self.execute(
170 self.execute(
170 "rc-setup-app {0} --user=marcink "
171 "rc-setup-app {0} --user=marcink "
171 "--email=marcin@rhodeocode.com --password={1} "
172 "--email=marcin@rhodeocode.com --password={1} "
172 "--repos={2} --force-yes".format(
173 "--repos={2} --force-yes".format(
173 _ini_file, 'qweqwe', self._repos_location), env=env)
174 _ini_file, 'qweqwe', self._repos_location), env=env)
174
175
175 def upgrade_database(self, ini_params=None):
176 def upgrade_database(self, ini_params=None):
176 if not ini_params:
177 if not ini_params:
177 ini_params = self._base_ini_config
178 ini_params = self._base_ini_config
178 ini_params.extend(self._db_url)
179 ini_params.extend(self._db_url)
179
180
180 test_ini = TestINI(
181 test_ini = TestINI(
181 self._base_ini_file, ini_params, self._type, destroy=True)
182 self._base_ini_file, ini_params, self._type, destroy=True)
182 with test_ini as ini_file:
183 with test_ini as ini_file:
183 if not os.path.isdir(self._repos_location):
184 if not os.path.isdir(self._repos_location):
184 os.makedirs(self._repos_location)
185 os.makedirs(self._repos_location)
185 self.execute(
186 self.execute(
186 "paster upgrade-db {} --force-yes".format(ini_file))
187 "rc-upgrade-db {0} --force-yes".format(ini_file))
187
188
188 def setup_db(self):
189 def setup_db(self):
189 raise NotImplementedError
190 raise NotImplementedError
190
191
191 def teardown_db(self):
192 def teardown_db(self):
192 raise NotImplementedError
193 raise NotImplementedError
193
194
194 def import_dump(self, dumpname):
195 def import_dump(self, dumpname):
195 raise NotImplementedError
196 raise NotImplementedError
196
197
197
198
198 class EmptyDBBackend(DBBackend):
199 class EmptyDBBackend(DBBackend):
199 _type = ''
200 _type = ''
200
201
201 def setup_db(self):
202 def setup_db(self):
202 pass
203 pass
203
204
204 def teardown_db(self):
205 def teardown_db(self):
205 pass
206 pass
206
207
207 def import_dump(self, dumpname):
208 def import_dump(self, dumpname):
208 pass
209 pass
209
210
210 def assert_returncode_success(self):
211 def assert_returncode_success(self):
211 assert True
212 assert True
212
213
213
214
214 class SQLiteDBBackend(DBBackend):
215 class SQLiteDBBackend(DBBackend):
215 _type = 'sqlite'
216 _type = 'sqlite'
216
217
217 def get_default_connection_string(self):
218 def get_default_connection_string(self):
218 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
219 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
219
220
220 def setup_db(self):
221 def setup_db(self):
221 # dump schema for tests
222 # dump schema for tests
222 # cp -v $TEST_DB_NAME
223 # cp -v $TEST_DB_NAME
223 self._db_url = [{'app:main': {
224 self._db_url = [{'app:main': {
224 'sqlalchemy.db1.url': self.connection_string}}]
225 'sqlalchemy.db1.url': self.connection_string}}]
225
226
226 def import_dump(self, dumpname):
227 def import_dump(self, dumpname):
227 dump = os.path.join(self.fixture_store, dumpname)
228 dump = os.path.join(self.fixture_store, dumpname)
228 shutil.copy(
229 shutil.copy(
229 dump,
230 dump,
230 os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self)))
231 os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self)))
231
232
232 def teardown_db(self):
233 def teardown_db(self):
233 self.execute("rm -rf {}.sqlite".format(
234 self.execute("rm -rf {}.sqlite".format(
234 os.path.join(self._basetemp, self.db_name)))
235 os.path.join(self._basetemp, self.db_name)))
235
236
236
237
237 class MySQLDBBackend(DBBackend):
238 class MySQLDBBackend(DBBackend):
238 _type = 'mysql'
239 _type = 'mysql'
239
240
240 def get_default_connection_string(self):
241 def get_default_connection_string(self):
241 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
242 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
242
243
243 def setup_db(self):
244 def setup_db(self):
244 # dump schema for tests
245 # dump schema for tests
245 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
246 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
246 self._db_url = [{'app:main': {
247 self._db_url = [{'app:main': {
247 'sqlalchemy.db1.url': self.connection_string}}]
248 'sqlalchemy.db1.url': self.connection_string}}]
248 self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
249 self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
249 self.user, self.password, self.db_name))
250 self.user, self.password, self.db_name))
250
251
251 def import_dump(self, dumpname):
252 def import_dump(self, dumpname):
252 dump = os.path.join(self.fixture_store, dumpname)
253 dump = os.path.join(self.fixture_store, dumpname)
253 self.execute("mysql -u{} -p{} {} < {}".format(
254 self.execute("mysql -u{} -p{} {} < {}".format(
254 self.user, self.password, self.db_name, dump))
255 self.user, self.password, self.db_name, dump))
255
256
256 def teardown_db(self):
257 def teardown_db(self):
257 self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
258 self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
258 self.user, self.password, self.db_name))
259 self.user, self.password, self.db_name))
259
260
260
261
261 class PostgresDBBackend(DBBackend):
262 class PostgresDBBackend(DBBackend):
262 _type = 'postgres'
263 _type = 'postgres'
263
264
264 def get_default_connection_string(self):
265 def get_default_connection_string(self):
265 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
266 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
266
267
267 def setup_db(self):
268 def setup_db(self):
268 # dump schema for tests
269 # dump schema for tests
269 # pg_dump -U postgres -h localhost $TEST_DB_NAME
270 # pg_dump -U postgres -h localhost $TEST_DB_NAME
270 self._db_url = [{'app:main': {
271 self._db_url = [{'app:main': {
271 'sqlalchemy.db1.url':
272 'sqlalchemy.db1.url':
272 self.connection_string}}]
273 self.connection_string}}]
273 self.execute("PGPASSWORD={} psql -U {} -h localhost "
274 self.execute("PGPASSWORD={} psql -U {} -h localhost "
274 "-c 'create database '{}';'".format(
275 "-c 'create database '{}';'".format(
275 self.password, self.user, self.db_name))
276 self.password, self.user, self.db_name))
276
277
277 def teardown_db(self):
278 def teardown_db(self):
278 self.execute("PGPASSWORD={} psql -U {} -h localhost "
279 self.execute("PGPASSWORD={} psql -U {} -h localhost "
279 "-c 'drop database if exists '{}';'".format(
280 "-c 'drop database if exists '{}';'".format(
280 self.password, self.user, self.db_name))
281 self.password, self.user, self.db_name))
281
282
282 def import_dump(self, dumpname):
283 def import_dump(self, dumpname):
283 dump = os.path.join(self.fixture_store, dumpname)
284 dump = os.path.join(self.fixture_store, dumpname)
284 self.execute(
285 self.execute(
285 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
286 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
286 "-f {}".format(
287 "-f {}".format(
287 self.password, self.user, self.db_name, dump))
288 self.password, self.user, self.db_name, dump))
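Reviewer note: for orientation, a rough sketch of how these DB backend fixtures appear to be consumed, inferred from _get_dbs_from_metafunc and the db_backend fixture above. The marker arguments and the dump file name are illustrative assumptions, not part of this changeset.

    import pytest

    @pytest.mark.dbs('sqlite', 'postgres')   # limit the test to supported backends
    def test_upgrade_from_old_dump(db_backend):
        # db_backend is parametrized from --dbs intersected with the marker above
        db_backend.teardown_db()
        db_backend.setup_db()
        db_backend.import_dump('rhodecode_dump_example.sql')  # hypothetical fixture name
        db_backend.upgrade_database()
        db_backend.assert_returncode_success()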
@@ -1,66 +1,77 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
24 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
25
25
26
26
27 def route_path(name, **kwargs):
27 def route_path(name, **kwargs):
28 return {
28 return {
29 'home': '/',
29 'home': '/',
30 }[name].format(**kwargs)
30 }[name].format(**kwargs)
31
31
32
32
33 class TestSessionBehaviorOnPasswordChange(object):
33 class TestSessionBehaviorOnPasswordChange(object):
34 @pytest.fixture(autouse=True)
34 @pytest.fixture(autouse=True)
35 def patch_password_changed(self, request):
35 def patch_password_changed(self, request):
36 password_changed_patcher = mock.patch(
36 password_changed_patcher = mock.patch(
37 'rhodecode.lib.base.password_changed')
37 'rhodecode.lib.base.password_changed')
38 self.password_changed_mock = password_changed_patcher.start()
38 self.password_changed_mock = password_changed_patcher.start()
39 self.password_changed_mock.return_value = False
39 self.password_changed_mock.return_value = False
40
40
41 @request.addfinalizer
41 @request.addfinalizer
42 def cleanup():
42 def cleanup():
43 password_changed_patcher.stop()
43 password_changed_patcher.stop()
44
44
45 def test_sessions_are_ok_when_password_is_not_changed(
45 def test_sessions_are_ok_when_password_is_not_changed(
46 self, app, autologin_user):
46 self, app, autologin_user):
47 response = app.get(route_path('home'))
47 response = app.get(route_path('home'))
48 assert_response = response.assert_response()
48 assert_response = response.assert_response()
49 assert_response.element_contains(
49 assert_response.element_contains(
50 '#quick_login_link .menu_link_user', TEST_USER_ADMIN_LOGIN)
50 '#quick_login_link .menu_link_user', TEST_USER_ADMIN_LOGIN)
51
51
52 session = response.get_session_from_response()
52 session = response.get_session_from_response()
53
53
54 assert 'rhodecode_user' in session
54 assert 'rhodecode_user' in session
55 assert session.was_invalidated is False
55 assert session.was_invalidated is False
56
56
57 def test_sessions_invalidated_when_password_is_changed(
57 def test_sessions_invalidated_when_password_is_changed(
58 self, app, autologin_user):
58 self, app, autologin_user):
59 response = app.get(route_path('home'), status=200)
60 session = response.get_session_from_response()
61
62 # now mark the password as changed
59 self.password_changed_mock.return_value = True
63 self.password_changed_mock.return_value = True
64
65 # flushes session first
66 app.get(route_path('home'))
67
68 # the second call now gets a flushed, empty session
60 response = app.get(route_path('home'))
69 response = app.get(route_path('home'))
70 session = response.get_session_from_response()
71
72 assert 'rhodecode_user' not in session
73
61 assert_response = response.assert_response()
74 assert_response = response.assert_response()
62 assert_response.element_contains('#quick_login_link .user', 'Sign in')
75 assert_response.element_contains('#quick_login_link .user', 'Sign in')
63
76
64 session = response.get_session_from_response()
77
65 assert 'rhodecode_user' not in session
66 assert session.was_invalidated is True
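Reviewer note: the route_path helper at the top of this test module only maps 'home', but the .format(**kwargs) call keeps it compatible with parameterized routes added in the same style. The extra entry below is purely illustrative and not part of this test suite.

    def route_path(name, **kwargs):
        return {
            'home': '/',
            'user_profile': '/_profiles/{username}',   # hypothetical parameterized route
        }[name].format(**kwargs)

    # route_path('user_profile', username='marcink') -> '/_profiles/marcink'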
@@ -1,142 +1,139 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22 import urlparse
22 import urlparse
23 import mock
23 import mock
24 import simplejson as json
24 import simplejson as json
25
25
26 from rhodecode.lib.vcs.backends.base import Config
26 from rhodecode.lib.vcs.backends.base import Config
27 from rhodecode.tests.lib.middleware import mock_scm_app
27 from rhodecode.tests.lib.middleware import mock_scm_app
28 import rhodecode.lib.middleware.simplegit as simplegit
28 import rhodecode.lib.middleware.simplegit as simplegit
29
29
30
30
31 def get_environ(url, request_method):
31 def get_environ(url, request_method):
32 """Construct a minimum WSGI environ based on the URL."""
32 """Construct a minimum WSGI environ based on the URL."""
33 parsed_url = urlparse.urlparse(url)
33 parsed_url = urlparse.urlparse(url)
34 environ = {
34 environ = {
35 'PATH_INFO': parsed_url.path,
35 'PATH_INFO': parsed_url.path,
36 'QUERY_STRING': parsed_url.query,
36 'QUERY_STRING': parsed_url.query,
37 'REQUEST_METHOD': request_method,
37 'REQUEST_METHOD': request_method,
38 }
38 }
39
39
40 return environ
40 return environ
41
41
42
42
43 @pytest.mark.parametrize(
43 @pytest.mark.parametrize(
44 'url, expected_action, request_method',
44 'url, expected_action, request_method',
45 [
45 [
46 ('/foo/bar/info/refs?service=git-upload-pack', 'pull', 'GET'),
46 ('/foo/bar/info/refs?service=git-upload-pack', 'pull', 'GET'),
47 ('/foo/bar/info/refs?service=git-receive-pack', 'push', 'GET'),
47 ('/foo/bar/info/refs?service=git-receive-pack', 'push', 'GET'),
48 ('/foo/bar/git-upload-pack', 'pull', 'GET'),
48 ('/foo/bar/git-upload-pack', 'pull', 'GET'),
49 ('/foo/bar/git-receive-pack', 'push', 'GET'),
49 ('/foo/bar/git-receive-pack', 'push', 'GET'),
50 # Edge case: missing data for info/refs
50 # Edge case: missing data for info/refs
51 ('/foo/info/refs?service=', 'pull', 'GET'),
51 ('/foo/info/refs?service=', 'pull', 'GET'),
52 ('/foo/info/refs', 'pull', 'GET'),
52 ('/foo/info/refs', 'pull', 'GET'),
53 # Edge case: git command comes with service argument
53 # Edge case: git command comes with service argument
54 ('/foo/git-upload-pack?service=git-receive-pack', 'pull', 'GET'),
54 ('/foo/git-upload-pack?service=git-receive-pack', 'pull', 'GET'),
55 ('/foo/git-receive-pack?service=git-upload-pack', 'push', 'GET'),
55 ('/foo/git-receive-pack?service=git-upload-pack', 'push', 'GET'),
56 # Edge case: repo name conflicts with git commands
56 # Edge case: repo name conflicts with git commands
57 ('/git-receive-pack/git-upload-pack', 'pull', 'GET'),
57 ('/git-receive-pack/git-upload-pack', 'pull', 'GET'),
58 ('/git-receive-pack/git-receive-pack', 'push', 'GET'),
58 ('/git-receive-pack/git-receive-pack', 'push', 'GET'),
59 ('/git-upload-pack/git-upload-pack', 'pull', 'GET'),
59 ('/git-upload-pack/git-upload-pack', 'pull', 'GET'),
60 ('/git-upload-pack/git-receive-pack', 'push', 'GET'),
60 ('/git-upload-pack/git-receive-pack', 'push', 'GET'),
61 ('/foo/git-receive-pack', 'push', 'GET'),
61 ('/foo/git-receive-pack', 'push', 'GET'),
62 # Edge case: not a smart protocol url
62 # Edge case: not a smart protocol url
63 ('/foo/bar', 'pull', 'GET'),
63 ('/foo/bar', 'pull', 'GET'),
64 # GIT LFS cases, batch
64 # GIT LFS cases, batch
65 ('/foo/bar/info/lfs/objects/batch', 'push', 'GET'),
65 ('/foo/bar/info/lfs/objects/batch', 'push', 'GET'),
66 ('/foo/bar/info/lfs/objects/batch', 'pull', 'POST'),
66 ('/foo/bar/info/lfs/objects/batch', 'pull', 'POST'),
67 # GIT LFS oid, dl/upl
67 # GIT LFS oid, dl/upl
68 ('/foo/bar/info/lfs/abcdeabcde', 'pull', 'GET'),
68 ('/foo/bar/info/lfs/abcdeabcde', 'pull', 'GET'),
69 ('/foo/bar/info/lfs/abcdeabcde', 'push', 'PUT'),
69 ('/foo/bar/info/lfs/abcdeabcde', 'push', 'PUT'),
70 ('/foo/bar/info/lfs/abcdeabcde', 'push', 'POST'),
70 ('/foo/bar/info/lfs/abcdeabcde', 'push', 'POST'),
71 # Edge case: repo name conflicts with git commands
71 # Edge case: repo name conflicts with git commands
72 ('/info/lfs/info/lfs/objects/batch', 'push', 'GET'),
72 ('/info/lfs/info/lfs/objects/batch', 'push', 'GET'),
73 ('/info/lfs/info/lfs/objects/batch', 'pull', 'POST'),
73 ('/info/lfs/info/lfs/objects/batch', 'pull', 'POST'),
74
74
75 ])
75 ])
76 def test_get_action(url, expected_action, request_method, baseapp, request_stub):
76 def test_get_action(url, expected_action, request_method, baseapp, request_stub):
77 app = simplegit.SimpleGit(application=None,
77 app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''},
78 config={'auth_ret_code': '', 'base_path': ''},
79 registry=request_stub.registry)
78 registry=request_stub.registry)
80 assert expected_action == app._get_action(get_environ(url, request_method))
79 assert expected_action == app._get_action(get_environ(url, request_method))
81
80
82
81
83 @pytest.mark.parametrize(
82 @pytest.mark.parametrize(
84 'url, expected_repo_name, request_method',
83 'url, expected_repo_name, request_method',
85 [
84 [
86 ('/foo/info/refs?service=git-upload-pack', 'foo', 'GET'),
85 ('/foo/info/refs?service=git-upload-pack', 'foo', 'GET'),
87 ('/foo/bar/info/refs?service=git-receive-pack', 'foo/bar', 'GET'),
86 ('/foo/bar/info/refs?service=git-receive-pack', 'foo/bar', 'GET'),
88 ('/foo/git-upload-pack', 'foo', 'GET'),
87 ('/foo/git-upload-pack', 'foo', 'GET'),
89 ('/foo/git-receive-pack', 'foo', 'GET'),
88 ('/foo/git-receive-pack', 'foo', 'GET'),
90 ('/foo/bar/git-upload-pack', 'foo/bar', 'GET'),
89 ('/foo/bar/git-upload-pack', 'foo/bar', 'GET'),
91 ('/foo/bar/git-receive-pack', 'foo/bar', 'GET'),
90 ('/foo/bar/git-receive-pack', 'foo/bar', 'GET'),
92
91
93 # GIT LFS cases, batch
92 # GIT LFS cases, batch
94 ('/foo/bar/info/lfs/objects/batch', 'foo/bar', 'GET'),
93 ('/foo/bar/info/lfs/objects/batch', 'foo/bar', 'GET'),
95 ('/example-git/info/lfs/objects/batch', 'example-git', 'POST'),
94 ('/example-git/info/lfs/objects/batch', 'example-git', 'POST'),
96 # GIT LFS oid, dl/upl
95 # GIT LFS oid, dl/upl
97 ('/foo/info/lfs/abcdeabcde', 'foo', 'GET'),
96 ('/foo/info/lfs/abcdeabcde', 'foo', 'GET'),
98 ('/foo/bar/info/lfs/abcdeabcde', 'foo/bar', 'PUT'),
97 ('/foo/bar/info/lfs/abcdeabcde', 'foo/bar', 'PUT'),
99 ('/my-git-repo/info/lfs/abcdeabcde', 'my-git-repo', 'POST'),
98 ('/my-git-repo/info/lfs/abcdeabcde', 'my-git-repo', 'POST'),
100 # Edge case: repo name conflicts with git commands
99 # Edge case: repo name conflicts with git commands
101 ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'GET'),
100 ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'GET'),
102 ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'POST'),
101 ('/info/lfs/info/lfs/objects/batch', 'info/lfs', 'POST'),
103
102
104 ])
103 ])
105 def test_get_repository_name(url, expected_repo_name, request_method, baseapp, request_stub):
104 def test_get_repository_name(url, expected_repo_name, request_method, baseapp, request_stub):
106 app = simplegit.SimpleGit(application=None,
105 app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''},
107 config={'auth_ret_code': '', 'base_path': ''},
108 registry=request_stub.registry)
106 registry=request_stub.registry)
109 assert expected_repo_name == app._get_repository_name(
107 assert expected_repo_name == app._get_repository_name(
110 get_environ(url, request_method))
108 get_environ(url, request_method))
111
109
112
110
113 def test_get_config(user_util, baseapp, request_stub):
111 def test_get_config(user_util, baseapp, request_stub):
114 repo = user_util.create_repo(repo_type='git')
112 repo = user_util.create_repo(repo_type='git')
115 app = simplegit.SimpleGit(application=None,
113 app = simplegit.SimpleGit(config={'auth_ret_code': '', 'base_path': ''},
116 config={'auth_ret_code': '', 'base_path': ''},
117 registry=request_stub.registry)
114 registry=request_stub.registry)
118 extras = {'foo': 'FOO', 'bar': 'BAR'}
115 extras = {'foo': 'FOO', 'bar': 'BAR'}
119
116
120 # We copy the extras as the method below will change the contents.
117 # We copy the extras as the method below will change the contents.
121 git_config = app._create_config(dict(extras), repo_name=repo.repo_name)
118 git_config = app._create_config(dict(extras), repo_name=repo.repo_name)
122
119
123 expected_config = dict(extras)
120 expected_config = dict(extras)
124 expected_config.update({
121 expected_config.update({
125 'git_update_server_info': False,
122 'git_update_server_info': False,
126 'git_lfs_enabled': False,
123 'git_lfs_enabled': False,
127 'git_lfs_store_path': git_config['git_lfs_store_path']
124 'git_lfs_store_path': git_config['git_lfs_store_path']
128 })
125 })
129
126
130 assert git_config == expected_config
127 assert git_config == expected_config
131
128
132
129
133 def test_create_wsgi_app_uses_scm_app_from_simplevcs(baseapp, request_stub):
130 def test_create_wsgi_app_uses_scm_app_from_simplevcs(baseapp, request_stub):
134 config = {
131 config = {
135 'auth_ret_code': '',
132 'auth_ret_code': '',
136 'base_path': '',
133 'base_path': '',
137 'vcs.scm_app_implementation':
134 'vcs.scm_app_implementation':
138 'rhodecode.tests.lib.middleware.mock_scm_app',
135 'rhodecode.tests.lib.middleware.mock_scm_app',
139 }
136 }
140 app = simplegit.SimpleGit(application=None, config=config, registry=request_stub.registry)
137 app = simplegit.SimpleGit(config=config, registry=request_stub.registry)
141 wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {})
138 wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {})
142 assert wsgi_app is mock_scm_app.mock_git_wsgi
139 assert wsgi_app is mock_scm_app.mock_git_wsgi
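Reviewer note: the recurring change in this test module (and in the simplehg and simplesvn tests that follow) is that the application argument has been dropped from the simplevcs middleware constructors; only config and registry remain. Side by side, based on the hunks above:

    # before (pylons-era call, as removed above)
    app = simplegit.SimpleGit(
        application=None,
        config={'auth_ret_code': '', 'base_path': ''},
        registry=request_stub.registry)

    # after (call used throughout the updated tests)
    app = simplegit.SimpleGit(
        config={'auth_ret_code': '', 'base_path': ''},
        registry=request_stub.registry)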
@@ -1,129 +1,125 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import urlparse
21 import urlparse
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25 import simplejson as json
25 import simplejson as json
26
26
27 from rhodecode.lib.vcs.backends.base import Config
27 from rhodecode.lib.vcs.backends.base import Config
28 from rhodecode.tests.lib.middleware import mock_scm_app
28 from rhodecode.tests.lib.middleware import mock_scm_app
29 import rhodecode.lib.middleware.simplehg as simplehg
29 import rhodecode.lib.middleware.simplehg as simplehg
30
30
31
31
32 def get_environ(url):
32 def get_environ(url):
33 """Construct a minimum WSGI environ based on the URL."""
33 """Construct a minimum WSGI environ based on the URL."""
34 parsed_url = urlparse.urlparse(url)
34 parsed_url = urlparse.urlparse(url)
35 environ = {
35 environ = {
36 'PATH_INFO': parsed_url.path,
36 'PATH_INFO': parsed_url.path,
37 'QUERY_STRING': parsed_url.query,
37 'QUERY_STRING': parsed_url.query,
38 }
38 }
39
39
40 return environ
40 return environ
41
41
42
42
43 @pytest.mark.parametrize(
43 @pytest.mark.parametrize(
44 'url, expected_action',
44 'url, expected_action',
45 [
45 [
46 ('/foo/bar?cmd=unbundle&key=tip', 'push'),
46 ('/foo/bar?cmd=unbundle&key=tip', 'push'),
47 ('/foo/bar?cmd=pushkey&key=tip', 'push'),
47 ('/foo/bar?cmd=pushkey&key=tip', 'push'),
48 ('/foo/bar?cmd=listkeys&key=tip', 'pull'),
48 ('/foo/bar?cmd=listkeys&key=tip', 'pull'),
49 ('/foo/bar?cmd=changegroup&key=tip', 'pull'),
49 ('/foo/bar?cmd=changegroup&key=tip', 'pull'),
50 # Edge case: unknown argument: assume pull
50 # Edge case: unknown argument: assume pull
51 ('/foo/bar?cmd=unknown&key=tip', 'pull'),
51 ('/foo/bar?cmd=unknown&key=tip', 'pull'),
52 ('/foo/bar?cmd=&key=tip', 'pull'),
52 ('/foo/bar?cmd=&key=tip', 'pull'),
53 # Edge case: no cmd argument
53 # Edge case: no cmd argument
54 ('/foo/bar?key=tip', 'pull'),
54 ('/foo/bar?key=tip', 'pull'),
55 ])
55 ])
56 def test_get_action(url, expected_action, request_stub):
56 def test_get_action(url, expected_action, request_stub):
57 app = simplehg.SimpleHg(application=None,
57 app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''},
58 config={'auth_ret_code': '', 'base_path': ''},
59 registry=request_stub.registry)
58 registry=request_stub.registry)
60 assert expected_action == app._get_action(get_environ(url))
59 assert expected_action == app._get_action(get_environ(url))
61
60
62
61
63 @pytest.mark.parametrize(
62 @pytest.mark.parametrize(
64 'url, expected_repo_name',
63 'url, expected_repo_name',
65 [
64 [
66 ('/foo?cmd=unbundle&key=tip', 'foo'),
65 ('/foo?cmd=unbundle&key=tip', 'foo'),
67 ('/foo/bar?cmd=pushkey&key=tip', 'foo/bar'),
66 ('/foo/bar?cmd=pushkey&key=tip', 'foo/bar'),
68 ('/foo/bar/baz?cmd=listkeys&key=tip', 'foo/bar/baz'),
67 ('/foo/bar/baz?cmd=listkeys&key=tip', 'foo/bar/baz'),
69 # Repos with trailing slashes.
68 # Repos with trailing slashes.
70 ('/foo/?cmd=unbundle&key=tip', 'foo'),
69 ('/foo/?cmd=unbundle&key=tip', 'foo'),
71 ('/foo/bar/?cmd=pushkey&key=tip', 'foo/bar'),
70 ('/foo/bar/?cmd=pushkey&key=tip', 'foo/bar'),
72 ('/foo/bar/baz/?cmd=listkeys&key=tip', 'foo/bar/baz'),
71 ('/foo/bar/baz/?cmd=listkeys&key=tip', 'foo/bar/baz'),
73 ])
72 ])
74 def test_get_repository_name(url, expected_repo_name, request_stub):
73 def test_get_repository_name(url, expected_repo_name, request_stub):
75 app = simplehg.SimpleHg(application=None,
74 app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''},
76 config={'auth_ret_code': '', 'base_path': ''},
77 registry=request_stub.registry)
75 registry=request_stub.registry)
78 assert expected_repo_name == app._get_repository_name(get_environ(url))
76 assert expected_repo_name == app._get_repository_name(get_environ(url))
79
77
80
78
81 def test_get_config(user_util, baseapp, request_stub):
79 def test_get_config(user_util, baseapp, request_stub):
82 repo = user_util.create_repo(repo_type='git')
80 repo = user_util.create_repo(repo_type='git')
83 app = simplehg.SimpleHg(application=None,
81 app = simplehg.SimpleHg(config={'auth_ret_code': '', 'base_path': ''},
84 config={'auth_ret_code': '', 'base_path': ''},
85 registry=request_stub.registry)
82 registry=request_stub.registry)
86 extras = [('foo', 'FOO', 'bar', 'BAR')]
83 extras = [('foo', 'FOO', 'bar', 'BAR')]
87
84
88 hg_config = app._create_config(extras, repo_name=repo.repo_name)
85 hg_config = app._create_config(extras, repo_name=repo.repo_name)
89
86
90 config = simplehg.utils.make_db_config(repo=repo.repo_name)
87 config = simplehg.utils.make_db_config(repo=repo.repo_name)
91 config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras))
88 config.set('rhodecode', 'RC_SCM_DATA', json.dumps(extras))
92 hg_config_org = config
89 hg_config_org = config
93
90
94 expected_config = [
91 expected_config = [
95 ('vcs_svn_tag', 'ff89f8c714d135d865f44b90e5413b88de19a55f', '/tags/*'),
92 ('vcs_svn_tag', 'ff89f8c714d135d865f44b90e5413b88de19a55f', '/tags/*'),
96 ('web', 'push_ssl', 'False'),
93 ('web', 'push_ssl', 'False'),
97 ('web', 'allow_push', '*'),
94 ('web', 'allow_push', '*'),
98 ('web', 'allow_archive', 'gz zip bz2'),
95 ('web', 'allow_archive', 'gz zip bz2'),
99 ('web', 'baseurl', '/'),
96 ('web', 'baseurl', '/'),
100 ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')),
97 ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')),
101 ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'),
98 ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'),
102 ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'),
99 ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'),
103 ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')),
100 ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')),
104 ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'),
101 ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'),
105 ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
102 ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
106 ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'),
103 ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'),
107 ('hooks', 'pretxnchangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
104 ('hooks', 'pretxnchangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
108 ('hooks', 'changegroup.push_logger', 'python:vcsserver.hooks.log_push_action'),
105 ('hooks', 'changegroup.push_logger', 'python:vcsserver.hooks.log_push_action'),
109 ('hooks', 'changegroup.repo_size', 'python:vcsserver.hooks.repo_size'),
106 ('hooks', 'changegroup.repo_size', 'python:vcsserver.hooks.repo_size'),
110 ('phases', 'publish', 'True'),
107 ('phases', 'publish', 'True'),
111 ('extensions', 'largefiles', ''),
108 ('extensions', 'largefiles', ''),
112 ('paths', '/', hg_config_org.get('paths', '/')),
109 ('paths', '/', hg_config_org.get('paths', '/')),
113 ('rhodecode', 'RC_SCM_DATA', '[["foo", "FOO", "bar", "BAR"]]')
110 ('rhodecode', 'RC_SCM_DATA', '[["foo", "FOO", "bar", "BAR"]]')
114 ]
111 ]
115 for entry in expected_config:
112 for entry in expected_config:
116 assert entry in hg_config
113 assert entry in hg_config
117
114
118
115
119 def test_create_wsgi_app_uses_scm_app_from_simplevcs(request_stub):
116 def test_create_wsgi_app_uses_scm_app_from_simplevcs(request_stub):
120 config = {
117 config = {
121 'auth_ret_code': '',
118 'auth_ret_code': '',
122 'base_path': '',
119 'base_path': '',
123 'vcs.scm_app_implementation':
120 'vcs.scm_app_implementation':
124 'rhodecode.tests.lib.middleware.mock_scm_app',
121 'rhodecode.tests.lib.middleware.mock_scm_app',
125 }
122 }
126 app = simplehg.SimpleHg(
123 app = simplehg.SimpleHg(config=config, registry=request_stub.registry)
127 application=None, config=config, registry=request_stub.registry)
128 wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {})
124 wsgi_app = app._create_wsgi_app('/tmp/test', 'test_repo', {})
129 assert wsgi_app is mock_scm_app.mock_hg_wsgi
125 assert wsgi_app is mock_scm_app.mock_hg_wsgi
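Reviewer note: on the RC_SCM_DATA entry asserted in test_get_config above — the extras list is serialized with json.dumps before being stored in the generated hg config, which is why the expected value is the JSON string '[["foo", "FOO", "bar", "BAR"]]'. A minimal round-trip check (values match the test; the snippet itself is only illustrative):

    import simplejson as json

    extras = [('foo', 'FOO', 'bar', 'BAR')]
    serialized = json.dumps(extras)          # '[["foo", "FOO", "bar", "BAR"]]'
    assert json.loads(serialized) == [['foo', 'FOO', 'bar', 'BAR']]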
@@ -1,201 +1,201 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from StringIO import StringIO
21 from StringIO import StringIO
22
22
23 import pytest
23 import pytest
24 from mock import patch, Mock
24 from mock import patch, Mock
25
25
26 import rhodecode
27 from rhodecode.lib.middleware.simplesvn import SimpleSvn, SimpleSvnApp
26 from rhodecode.lib.middleware.simplesvn import SimpleSvn, SimpleSvnApp
27 from rhodecode.lib.utils import get_rhodecode_base_path
28
28
29
29
30 class TestSimpleSvn(object):
30 class TestSimpleSvn(object):
31 @pytest.fixture(autouse=True)
31 @pytest.fixture(autouse=True)
32 def simple_svn(self, baseapp, request_stub):
32 def simple_svn(self, baseapp, request_stub):
33 base_path = get_rhodecode_base_path()
33 self.app = SimpleSvn(
34 self.app = SimpleSvn(
34 application='None',
35 config={'auth_ret_code': '', 'base_path': base_path},
35 config={'auth_ret_code': '',
36 'base_path': rhodecode.CONFIG['base_path']},
37 registry=request_stub.registry)
36 registry=request_stub.registry)
38
37
39 def test_get_config(self):
38 def test_get_config(self):
40 extras = {'foo': 'FOO', 'bar': 'BAR'}
39 extras = {'foo': 'FOO', 'bar': 'BAR'}
41 config = self.app._create_config(extras, repo_name='test-repo')
40 config = self.app._create_config(extras, repo_name='test-repo')
42 assert config == extras
41 assert config == extras
43
42
44 @pytest.mark.parametrize(
43 @pytest.mark.parametrize(
45 'method', ['OPTIONS', 'PROPFIND', 'GET', 'REPORT'])
44 'method', ['OPTIONS', 'PROPFIND', 'GET', 'REPORT'])
46 def test_get_action_returns_pull(self, method):
45 def test_get_action_returns_pull(self, method):
47 environment = {'REQUEST_METHOD': method}
46 environment = {'REQUEST_METHOD': method}
48 action = self.app._get_action(environment)
47 action = self.app._get_action(environment)
49 assert action == 'pull'
48 assert action == 'pull'
50
49
51 @pytest.mark.parametrize(
50 @pytest.mark.parametrize(
52 'method', [
51 'method', [
53 'MKACTIVITY', 'PROPPATCH', 'PUT', 'CHECKOUT', 'MKCOL', 'MOVE',
52 'MKACTIVITY', 'PROPPATCH', 'PUT', 'CHECKOUT', 'MKCOL', 'MOVE',
54 'COPY', 'DELETE', 'LOCK', 'UNLOCK', 'MERGE'
53 'COPY', 'DELETE', 'LOCK', 'UNLOCK', 'MERGE'
55 ])
54 ])
56 def test_get_action_returns_push(self, method):
55 def test_get_action_returns_push(self, method):
57 environment = {'REQUEST_METHOD': method}
56 environment = {'REQUEST_METHOD': method}
58 action = self.app._get_action(environment)
57 action = self.app._get_action(environment)
59 assert action == 'push'
58 assert action == 'push'
60
59
61 @pytest.mark.parametrize(
60 @pytest.mark.parametrize(
62 'path, expected_name', [
61 'path, expected_name', [
63 ('/hello-svn', 'hello-svn'),
62 ('/hello-svn', 'hello-svn'),
64 ('/hello-svn/', 'hello-svn'),
63 ('/hello-svn/', 'hello-svn'),
65 ('/group/hello-svn/', 'group/hello-svn'),
64 ('/group/hello-svn/', 'group/hello-svn'),
66 ('/group/hello-svn/!svn/vcc/default', 'group/hello-svn'),
65 ('/group/hello-svn/!svn/vcc/default', 'group/hello-svn'),
67 ])
66 ])
68 def test_get_repository_name(self, path, expected_name):
67 def test_get_repository_name(self, path, expected_name):
69 environment = {'PATH_INFO': path}
68 environment = {'PATH_INFO': path}
70 name = self.app._get_repository_name(environment)
69 name = self.app._get_repository_name(environment)
71 assert name == expected_name
70 assert name == expected_name
72
71
73 def test_get_repository_name_subfolder(self, backend_svn):
72 def test_get_repository_name_subfolder(self, backend_svn):
74 repo = backend_svn.repo
73 repo = backend_svn.repo
75 environment = {
74 environment = {
76 'PATH_INFO': '/{}/path/with/subfolders'.format(repo.repo_name)}
75 'PATH_INFO': '/{}/path/with/subfolders'.format(repo.repo_name)}
77 name = self.app._get_repository_name(environment)
76 name = self.app._get_repository_name(environment)
78 assert name == repo.repo_name
77 assert name == repo.repo_name
79
78
80 def test_create_wsgi_app(self):
79 def test_create_wsgi_app(self):
81 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
80 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
82 mock_method.return_value = False
81 mock_method.return_value = False
83 with patch('rhodecode.lib.middleware.simplesvn.DisabledSimpleSvnApp') as (
82 with patch('rhodecode.lib.middleware.simplesvn.DisabledSimpleSvnApp') as (
84 wsgi_app_mock):
83 wsgi_app_mock):
85 config = Mock()
84 config = Mock()
86 wsgi_app = self.app._create_wsgi_app(
85 wsgi_app = self.app._create_wsgi_app(
87 repo_path='', repo_name='', config=config)
86 repo_path='', repo_name='', config=config)
88
87
89 wsgi_app_mock.assert_called_once_with(config)
88 wsgi_app_mock.assert_called_once_with(config)
90 assert wsgi_app == wsgi_app_mock()
89 assert wsgi_app == wsgi_app_mock()
91
90
92 def test_create_wsgi_app_when_enabled(self):
91 def test_create_wsgi_app_when_enabled(self):
93 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
92 with patch.object(SimpleSvn, '_is_svn_enabled') as mock_method:
94 mock_method.return_value = True
93 mock_method.return_value = True
95 with patch('rhodecode.lib.middleware.simplesvn.SimpleSvnApp') as (
94 with patch('rhodecode.lib.middleware.simplesvn.SimpleSvnApp') as (
96 wsgi_app_mock):
95 wsgi_app_mock):
97 config = Mock()
96 config = Mock()
98 wsgi_app = self.app._create_wsgi_app(
97 wsgi_app = self.app._create_wsgi_app(
99 repo_path='', repo_name='', config=config)
98 repo_path='', repo_name='', config=config)
100
99
101 wsgi_app_mock.assert_called_once_with(config)
100 wsgi_app_mock.assert_called_once_with(config)
102 assert wsgi_app == wsgi_app_mock()
101 assert wsgi_app == wsgi_app_mock()
103
102
104
103
105
106 class TestSimpleSvnApp(object):
104 class TestSimpleSvnApp(object):
107 data = '<xml></xml>'
105 data = '<xml></xml>'
108 path = '/group/my-repo'
106 path = '/group/my-repo'
109 wsgi_input = StringIO(data)
107 wsgi_input = StringIO(data)
110 environment = {
108 environment = {
111 'HTTP_DAV': (
109 'HTTP_DAV': (
112 'http://subversion.tigris.org/xmlns/dav/svn/depth,'
110 'http://subversion.tigris.org/xmlns/dav/svn/depth,'
113 ' http://subversion.tigris.org/xmlns/dav/svn/mergeinfo'),
111 ' http://subversion.tigris.org/xmlns/dav/svn/mergeinfo'),
114 'HTTP_USER_AGENT': 'SVN/1.8.11 (x86_64-linux) serf/1.3.8',
112 'HTTP_USER_AGENT': 'SVN/1.8.11 (x86_64-linux) serf/1.3.8',
115 'REQUEST_METHOD': 'OPTIONS',
113 'REQUEST_METHOD': 'OPTIONS',
116 'PATH_INFO': path,
114 'PATH_INFO': path,
117 'wsgi.input': wsgi_input,
115 'wsgi.input': wsgi_input,
118 'CONTENT_TYPE': 'text/xml',
116 'CONTENT_TYPE': 'text/xml',
119 'CONTENT_LENGTH': '130'
117 'CONTENT_LENGTH': '130'
120 }
118 }
121
119
122 def setup_method(self, method):
120 def setup_method(self, method):
123 self.host = 'http://localhost/'
121 self.host = 'http://localhost/'
122 base_path = get_rhodecode_base_path()
124 self.app = SimpleSvnApp(
123 self.app = SimpleSvnApp(
125 config={'subversion_http_server_url': self.host})
124 config={'subversion_http_server_url': self.host,
125 'base_path': base_path})
126
126
127 def test_get_request_headers_with_content_type(self):
127 def test_get_request_headers_with_content_type(self):
128 expected_headers = {
128 expected_headers = {
129 'Dav': self.environment['HTTP_DAV'],
129 'Dav': self.environment['HTTP_DAV'],
130 'User-Agent': self.environment['HTTP_USER_AGENT'],
130 'User-Agent': self.environment['HTTP_USER_AGENT'],
131 'Content-Type': self.environment['CONTENT_TYPE'],
131 'Content-Type': self.environment['CONTENT_TYPE'],
132 'Content-Length': self.environment['CONTENT_LENGTH']
132 'Content-Length': self.environment['CONTENT_LENGTH']
133 }
133 }
134 headers = self.app._get_request_headers(self.environment)
134 headers = self.app._get_request_headers(self.environment)
135 assert headers == expected_headers
135 assert headers == expected_headers
136
136
137 def test_get_request_headers_without_content_type(self):
137 def test_get_request_headers_without_content_type(self):
138 environment = self.environment.copy()
138 environment = self.environment.copy()
139 environment.pop('CONTENT_TYPE')
139 environment.pop('CONTENT_TYPE')
140 expected_headers = {
140 expected_headers = {
141 'Dav': environment['HTTP_DAV'],
141 'Dav': environment['HTTP_DAV'],
142 'Content-Length': self.environment['CONTENT_LENGTH'],
142 'Content-Length': self.environment['CONTENT_LENGTH'],
143 'User-Agent': environment['HTTP_USER_AGENT'],
143 'User-Agent': environment['HTTP_USER_AGENT'],
144 }
144 }
145 request_headers = self.app._get_request_headers(environment)
145 request_headers = self.app._get_request_headers(environment)
146 assert request_headers == expected_headers
146 assert request_headers == expected_headers
147
147
148 def test_get_response_headers(self):
148 def test_get_response_headers(self):
149 headers = {
149 headers = {
150 'Connection': 'keep-alive',
150 'Connection': 'keep-alive',
151 'Keep-Alive': 'timeout=5, max=100',
151 'Keep-Alive': 'timeout=5, max=100',
152 'Transfer-Encoding': 'chunked',
152 'Transfer-Encoding': 'chunked',
153 'Content-Encoding': 'gzip',
153 'Content-Encoding': 'gzip',
154 'MS-Author-Via': 'DAV',
154 'MS-Author-Via': 'DAV',
155 'SVN-Supported-Posts': 'create-txn-with-props'
155 'SVN-Supported-Posts': 'create-txn-with-props'
156 }
156 }
157 expected_headers = [
157 expected_headers = [
158 ('MS-Author-Via', 'DAV'),
158 ('MS-Author-Via', 'DAV'),
159 ('SVN-Supported-Posts', 'create-txn-with-props'),
159 ('SVN-Supported-Posts', 'create-txn-with-props'),
160 ]
160 ]
161 response_headers = self.app._get_response_headers(headers)
161 response_headers = self.app._get_response_headers(headers)
162 assert sorted(response_headers) == sorted(expected_headers)
162 assert sorted(response_headers) == sorted(expected_headers)
163
163
164 def test_get_url(self):
164 def test_get_url(self):
165 url = self.app._get_url(self.path)
165 url = self.app._get_url(self.path)
166 expected_url = '{}{}'.format(self.host.strip('/'), self.path)
166 expected_url = '{}{}'.format(self.host.strip('/'), self.path)
167 assert url == expected_url
167 assert url == expected_url
168
168
169 def test_call(self):
169 def test_call(self):
170 start_response = Mock()
170 start_response = Mock()
171 response_mock = Mock()
171 response_mock = Mock()
172 response_mock.headers = {
172 response_mock.headers = {
173 'Content-Encoding': 'gzip',
173 'Content-Encoding': 'gzip',
174 'MS-Author-Via': 'DAV',
174 'MS-Author-Via': 'DAV',
175 'SVN-Supported-Posts': 'create-txn-with-props'
175 'SVN-Supported-Posts': 'create-txn-with-props'
176 }
176 }
177 response_mock.status_code = 200
177 response_mock.status_code = 200
178 response_mock.reason = 'OK'
178 response_mock.reason = 'OK'
179 with patch('rhodecode.lib.middleware.simplesvn.requests.request') as (
179 with patch('rhodecode.lib.middleware.simplesvn.requests.request') as (
180 request_mock):
180 request_mock):
181 request_mock.return_value = response_mock
181 request_mock.return_value = response_mock
182 self.app(self.environment, start_response)
182 self.app(self.environment, start_response)
183
183
184 expected_url = '{}{}'.format(self.host.strip('/'), self.path)
184 expected_url = '{}{}'.format(self.host.strip('/'), self.path)
185 expected_request_headers = {
185 expected_request_headers = {
186 'Dav': self.environment['HTTP_DAV'],
186 'Dav': self.environment['HTTP_DAV'],
187 'User-Agent': self.environment['HTTP_USER_AGENT'],
187 'User-Agent': self.environment['HTTP_USER_AGENT'],
188 'Content-Type': self.environment['CONTENT_TYPE'],
188 'Content-Type': self.environment['CONTENT_TYPE'],
189 'Content-Length': self.environment['CONTENT_LENGTH']
189 'Content-Length': self.environment['CONTENT_LENGTH']
190 }
190 }
191 expected_response_headers = [
191 expected_response_headers = [
192 ('SVN-Supported-Posts', 'create-txn-with-props'),
192 ('SVN-Supported-Posts', 'create-txn-with-props'),
193 ('MS-Author-Via', 'DAV'),
193 ('MS-Author-Via', 'DAV'),
194 ]
194 ]
195 request_mock.assert_called_once_with(
195 request_mock.assert_called_once_with(
196 self.environment['REQUEST_METHOD'], expected_url,
196 self.environment['REQUEST_METHOD'], expected_url,
197 data=self.data, headers=expected_request_headers)
197 data=self.data, headers=expected_request_headers)
198 response_mock.iter_content.assert_called_once_with(chunk_size=1024)
198 response_mock.iter_content.assert_called_once_with(chunk_size=1024)
199 args, _ = start_response.call_args
199 args, _ = start_response.call_args
200 assert args[0] == '200 OK'
200 assert args[0] == '200 OK'
201 assert sorted(args[1]) == sorted(expected_response_headers)
201 assert sorted(args[1]) == sorted(expected_response_headers)
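
For orientation on the hunk above: after the pylons removal, SimpleSvnApp is built from a settings dict that now carries 'base_path' (obtained via get_rhodecode_base_path()) next to 'subversion_http_server_url'. A minimal sketch of the new setup, assuming get_rhodecode_base_path is importable from rhodecode.lib.base (the hunk does not show the import):

    # illustrative sketch only, not part of the changeset
    from rhodecode.lib.base import get_rhodecode_base_path  # assumed import path
    from rhodecode.lib.middleware.simplesvn import SimpleSvnApp

    config = {
        'subversion_http_server_url': 'http://localhost/',  # proxy target
        'base_path': get_rhodecode_base_path(),              # repository storage root
    }
    app = SimpleSvnApp(config=config)
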
@@ -1,498 +1,496 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27 from rhodecode.tests.utils import CustomTestApp
27 from rhodecode.tests.utils import CustomTestApp
28
28
29 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.caching_query import FromCache
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware import simplevcs
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.lib.middleware.utils import scm_app_http
34 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.db import User, _hash_key
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36 from rhodecode.tests import (
36 from rhodecode.tests import (
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 from rhodecode.tests.lib.middleware import mock_scm_app
38 from rhodecode.tests.lib.middleware import mock_scm_app
39
39
40
40
41 class StubVCSController(simplevcs.SimpleVCS):
41 class StubVCSController(simplevcs.SimpleVCS):
42
42
43 SCM = 'hg'
43 SCM = 'hg'
44 stub_response_body = tuple()
44 stub_response_body = tuple()
45
45
46 def __init__(self, *args, **kwargs):
46 def __init__(self, *args, **kwargs):
47 super(StubVCSController, self).__init__(*args, **kwargs)
47 super(StubVCSController, self).__init__(*args, **kwargs)
48 self._action = 'pull'
48 self._action = 'pull'
49 self._is_shadow_repo_dir = True
49 self._is_shadow_repo_dir = True
50 self._name = HG_REPO
50 self._name = HG_REPO
51 self.set_repo_names(None)
51 self.set_repo_names(None)
52
52
53 @property
53 @property
54 def is_shadow_repo_dir(self):
54 def is_shadow_repo_dir(self):
55 return self._is_shadow_repo_dir
55 return self._is_shadow_repo_dir
56
56
57 def _get_repository_name(self, environ):
57 def _get_repository_name(self, environ):
58 return self._name
58 return self._name
59
59
60 def _get_action(self, environ):
60 def _get_action(self, environ):
61 return self._action
61 return self._action
62
62
63 def _create_wsgi_app(self, repo_path, repo_name, config):
63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 def fake_app(environ, start_response):
64 def fake_app(environ, start_response):
65 headers = [
65 headers = [
66 ('Http-Accept', 'application/mercurial')
66 ('Http-Accept', 'application/mercurial')
67 ]
67 ]
68 start_response('200 OK', headers)
68 start_response('200 OK', headers)
69 return self.stub_response_body
69 return self.stub_response_body
70 return fake_app
70 return fake_app
71
71
72 def _create_config(self, extras, repo_name):
72 def _create_config(self, extras, repo_name):
73 return None
73 return None
74
74
75
75
76 @pytest.fixture
76 @pytest.fixture
77 def vcscontroller(baseapp, config_stub, request_stub):
77 def vcscontroller(baseapp, config_stub, request_stub):
78 config_stub.testing_securitypolicy()
78 config_stub.testing_securitypolicy()
79 config_stub.include('rhodecode.authentication')
79 config_stub.include('rhodecode.authentication')
80
80
81 controller = StubVCSController(
81 controller = StubVCSController(
82 baseapp, baseapp.config, request_stub.registry)
82 baseapp.config.get_settings(), request_stub.registry)
83 app = HttpsFixup(controller, baseapp.config)
83 app = HttpsFixup(controller, baseapp.config.get_settings())
84 app = CustomTestApp(app)
84 app = CustomTestApp(app)
85
85
86 _remove_default_user_from_query_cache()
86 _remove_default_user_from_query_cache()
87
87
88 # Sanity checks that things are set up correctly
88 # Sanity checks that things are set up correctly
89 app.get('/' + HG_REPO, status=200)
89 app.get('/' + HG_REPO, status=200)
90
90
91 app.controller = controller
91 app.controller = controller
92 return app
92 return app
93
93
94
94
95 def _remove_default_user_from_query_cache():
95 def _remove_default_user_from_query_cache():
96 user = User.get_default_user(cache=True)
96 user = User.get_default_user(cache=True)
97 query = Session().query(User).filter(User.username == user.username)
97 query = Session().query(User).filter(User.username == user.username)
98 query = query.options(
98 query = query.options(
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
100 query.invalidate()
100 query.invalidate()
101 Session().expire(user)
101 Session().expire(user)
102
102
103
103
104 def test_handles_exceptions_during_permissions_checks(
104 def test_handles_exceptions_during_permissions_checks(
105 vcscontroller, disable_anonymous_user):
105 vcscontroller, disable_anonymous_user):
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 auth_password = base64.encodestring(user_and_pass).strip()
107 auth_password = base64.encodestring(user_and_pass).strip()
108 extra_environ = {
108 extra_environ = {
109 'AUTH_TYPE': 'Basic',
109 'AUTH_TYPE': 'Basic',
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 }
112 }
113
113
114 # Verify that things are hooked up correctly
114 # Verify that things are hooked up correctly
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116
116
117 # Simulate trouble during permission checks
117 # Simulate trouble during permission checks
118 with mock.patch('rhodecode.model.db.User.get_by_username',
118 with mock.patch('rhodecode.model.db.User.get_by_username',
119 side_effect=Exception) as get_user:
119 side_effect=Exception) as get_user:
120 # Verify that a correct 500 is returned and check that the expected
120 # Verify that a correct 500 is returned and check that the expected
121 # code path was hit.
121 # code path was hit.
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 assert get_user.called
123 assert get_user.called
124
124
125
125
126 def test_returns_forbidden_if_no_anonymous_access(
126 def test_returns_forbidden_if_no_anonymous_access(
127 vcscontroller, disable_anonymous_user):
127 vcscontroller, disable_anonymous_user):
128 vcscontroller.get('/', status=401)
128 vcscontroller.get('/', status=401)
129
129
130
130
131 class StubFailVCSController(simplevcs.SimpleVCS):
131 class StubFailVCSController(simplevcs.SimpleVCS):
132 def _handle_request(self, environ, start_response):
132 def _handle_request(self, environ, start_response):
133 raise Exception("BOOM")
133 raise Exception("BOOM")
134
134
135
135
136 @pytest.fixture(scope='module')
136 @pytest.fixture(scope='module')
137 def fail_controller(baseapp):
137 def fail_controller(baseapp):
138 controller = StubFailVCSController(
138 controller = StubFailVCSController(
139 baseapp, baseapp.config, baseapp.config)
139 baseapp.config.get_settings(), baseapp.config)
140 controller = HttpsFixup(controller, baseapp.config)
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
141 controller = CustomTestApp(controller)
141 controller = CustomTestApp(controller)
142 return controller
142 return controller
143
143
144
144
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 fail_controller.get('/', status=500)
146 fail_controller.get('/', status=500)
147
147
148
148
149 def test_provides_traceback_for_appenlight(fail_controller):
149 def test_provides_traceback_for_appenlight(fail_controller):
150 response = fail_controller.get(
150 response = fail_controller.get(
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 assert 'appenlight.__traceback' in response.request.environ
152 assert 'appenlight.__traceback' in response.request.environ
153
153
154
154
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
156 controller = StubVCSController(
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
157 baseapp, baseapp.config, request_stub.registry)
158 assert controller.scm_app is scm_app_http
157 assert controller.scm_app is scm_app_http
159
158
160
159
161 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
162 config = baseapp.config.copy()
161 config = baseapp.config.get_settings().copy()
163 config['vcs.scm_app_implementation'] = (
162 config['vcs.scm_app_implementation'] = (
164 'rhodecode.tests.lib.middleware.mock_scm_app')
163 'rhodecode.tests.lib.middleware.mock_scm_app')
165 controller = StubVCSController(
164 controller = StubVCSController(config, request_stub.registry)
166 baseapp, config, request_stub.registry)
167 assert controller.scm_app is mock_scm_app
165 assert controller.scm_app is mock_scm_app
168
166
169
167
170 @pytest.mark.parametrize('query_string, expected', [
168 @pytest.mark.parametrize('query_string, expected', [
171 ('cmd=stub_command', True),
169 ('cmd=stub_command', True),
172 ('cmd=listkeys', False),
170 ('cmd=listkeys', False),
173 ])
171 ])
174 def test_should_check_locking(query_string, expected):
172 def test_should_check_locking(query_string, expected):
175 result = simplevcs._should_check_locking(query_string)
173 result = simplevcs._should_check_locking(query_string)
176 assert result == expected
174 assert result == expected
177
175
178
176
179 class TestShadowRepoRegularExpression(object):
177 class TestShadowRepoRegularExpression(object):
180 pr_segment = 'pull-request'
178 pr_segment = 'pull-request'
181 shadow_segment = 'repository'
179 shadow_segment = 'repository'
182
180
183 @pytest.mark.parametrize('url, expected', [
181 @pytest.mark.parametrize('url, expected', [
184 # repo with/without groups
182 # repo with/without groups
185 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
186 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
187 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
188 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
189
187
190 # pull request ID
188 # pull request ID
191 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
192 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
193 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
194 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
195
193
196 # unicode
194 # unicode
197 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
198 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
199
197
200 # trailing/leading slash
198 # trailing/leading slash
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
202 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
203 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
204
202
205 # misc
203 # misc
206 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
207 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
208 ])
206 ])
209 def test_shadow_repo_regular_expression(self, url, expected):
207 def test_shadow_repo_regular_expression(self, url, expected):
210 from rhodecode.lib.middleware.simplevcs import SimpleVCS
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
211 url = url.format(
209 url = url.format(
212 pr_segment=self.pr_segment,
210 pr_segment=self.pr_segment,
213 shadow_segment=self.shadow_segment)
211 shadow_segment=self.shadow_segment)
214 match_obj = SimpleVCS.shadow_repo_re.match(url)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
215 assert (match_obj is not None) == expected
213 assert (match_obj is not None) == expected
216
214
217
215
218 @pytest.mark.backends('git', 'hg')
216 @pytest.mark.backends('git', 'hg')
219 class TestShadowRepoExposure(object):
217 class TestShadowRepoExposure(object):
220
218
221 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
222 self, baseapp, request_stub):
220 self, baseapp, request_stub):
223 """
221 """
224 Check that a pull action to a shadow repo is propagated to the
222 Check that a pull action to a shadow repo is propagated to the
225 underlying wsgi app.
223 underlying wsgi app.
226 """
224 """
227 controller = StubVCSController(
225 controller = StubVCSController(
228 baseapp, baseapp.config, request_stub.registry)
226 baseapp.config.get_settings(), request_stub.registry)
229 controller._check_ssl = mock.Mock()
227 controller._check_ssl = mock.Mock()
230 controller.is_shadow_repo = True
228 controller.is_shadow_repo = True
231 controller._action = 'pull'
229 controller._action = 'pull'
232 controller._is_shadow_repo_dir = True
230 controller._is_shadow_repo_dir = True
233 controller.stub_response_body = 'dummy body value'
231 controller.stub_response_body = 'dummy body value'
234 environ_stub = {
232 environ_stub = {
235 'HTTP_HOST': 'test.example.com',
233 'HTTP_HOST': 'test.example.com',
236 'HTTP_ACCEPT': 'application/mercurial',
234 'HTTP_ACCEPT': 'application/mercurial',
237 'REQUEST_METHOD': 'GET',
235 'REQUEST_METHOD': 'GET',
238 'wsgi.url_scheme': 'http',
236 'wsgi.url_scheme': 'http',
239 }
237 }
240
238
241 response = controller(environ_stub, mock.Mock())
239 response = controller(environ_stub, mock.Mock())
242 response_body = ''.join(response)
240 response_body = ''.join(response)
243
241
244 # Assert that we got the response from the wsgi app.
242 # Assert that we got the response from the wsgi app.
245 assert response_body == controller.stub_response_body
243 assert response_body == controller.stub_response_body
246
244
247 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
245 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
248 """
246 """
249 Check that a pull action to a shadow repo whose shadow directory is
247 Check that a pull action to a shadow repo whose shadow directory is
250 missing results in a 404 Not Found response.
248 missing results in a 404 Not Found response.
251 """
249 """
252 controller = StubVCSController(
250 controller = StubVCSController(
253 baseapp, baseapp.config, request_stub.registry)
251 baseapp.config.get_settings(), request_stub.registry)
254 controller._check_ssl = mock.Mock()
252 controller._check_ssl = mock.Mock()
255 controller.is_shadow_repo = True
253 controller.is_shadow_repo = True
256 controller._action = 'pull'
254 controller._action = 'pull'
257 controller._is_shadow_repo_dir = False
255 controller._is_shadow_repo_dir = False
258 controller.stub_response_body = 'dummy body value'
256 controller.stub_response_body = 'dummy body value'
259 environ_stub = {
257 environ_stub = {
260 'HTTP_HOST': 'test.example.com',
258 'HTTP_HOST': 'test.example.com',
261 'HTTP_ACCEPT': 'application/mercurial',
259 'HTTP_ACCEPT': 'application/mercurial',
262 'REQUEST_METHOD': 'GET',
260 'REQUEST_METHOD': 'GET',
263 'wsgi.url_scheme': 'http',
261 'wsgi.url_scheme': 'http',
264 }
262 }
265
263
266 response = controller(environ_stub, mock.Mock())
264 response = controller(environ_stub, mock.Mock())
267 response_body = ''.join(response)
265 response_body = ''.join(response)
268
266
269 # Assert that the missing shadow repo results in a 404 response.
267 # Assert that the missing shadow repo results in a 404 response.
270 assert '404 Not Found' in response_body
268 assert '404 Not Found' in response_body
271
269
272 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
270 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
273 """
271 """
274 Check that a push action to a shadow repo is aborted.
272 Check that a push action to a shadow repo is aborted.
275 """
273 """
276 controller = StubVCSController(
274 controller = StubVCSController(
277 baseapp, baseapp.config, request_stub.registry)
275 baseapp.config.get_settings(), request_stub.registry)
278 controller._check_ssl = mock.Mock()
276 controller._check_ssl = mock.Mock()
279 controller.is_shadow_repo = True
277 controller.is_shadow_repo = True
280 controller._action = 'push'
278 controller._action = 'push'
281 controller.stub_response_body = 'dummy body value'
279 controller.stub_response_body = 'dummy body value'
282 environ_stub = {
280 environ_stub = {
283 'HTTP_HOST': 'test.example.com',
281 'HTTP_HOST': 'test.example.com',
284 'HTTP_ACCEPT': 'application/mercurial',
282 'HTTP_ACCEPT': 'application/mercurial',
285 'REQUEST_METHOD': 'GET',
283 'REQUEST_METHOD': 'GET',
286 'wsgi.url_scheme': 'http',
284 'wsgi.url_scheme': 'http',
287 }
285 }
288
286
289 response = controller(environ_stub, mock.Mock())
287 response = controller(environ_stub, mock.Mock())
290 response_body = ''.join(response)
288 response_body = ''.join(response)
291
289
292 assert response_body != controller.stub_response_body
290 assert response_body != controller.stub_response_body
293 # Assert that a 406 error is returned.
291 # Assert that a 406 error is returned.
294 assert '406 Not Acceptable' in response_body
292 assert '406 Not Acceptable' in response_body
295
293
296 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
294 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
297 """
295 """
298 Check that the set_repo_names method sets all names to the one returned
296 Check that the set_repo_names method sets all names to the one returned
299 by the _get_repository_name method on a request to a non shadow repo.
297 by the _get_repository_name method on a request to a non shadow repo.
300 """
298 """
301 environ_stub = {}
299 environ_stub = {}
302 controller = StubVCSController(
300 controller = StubVCSController(
303 baseapp, baseapp.config, request_stub.registry)
301 baseapp.config.get_settings(), request_stub.registry)
304 controller._name = 'RepoGroup/MyRepo'
302 controller._name = 'RepoGroup/MyRepo'
305 controller.set_repo_names(environ_stub)
303 controller.set_repo_names(environ_stub)
306 assert not controller.is_shadow_repo
304 assert not controller.is_shadow_repo
307 assert (controller.url_repo_name ==
305 assert (controller.url_repo_name ==
308 controller.acl_repo_name ==
306 controller.acl_repo_name ==
309 controller.vcs_repo_name ==
307 controller.vcs_repo_name ==
310 controller._get_repository_name(environ_stub))
308 controller._get_repository_name(environ_stub))
311
309
312 def test_set_repo_names_with_shadow(
310 def test_set_repo_names_with_shadow(
313 self, baseapp, pr_util, config_stub, request_stub):
311 self, baseapp, pr_util, config_stub, request_stub):
314 """
312 """
315 Check that the set_repo_names method sets correct names on a request
313 Check that the set_repo_names method sets correct names on a request
316 to a shadow repo.
314 to a shadow repo.
317 """
315 """
318 from rhodecode.model.pull_request import PullRequestModel
316 from rhodecode.model.pull_request import PullRequestModel
319
317
320 pull_request = pr_util.create_pull_request()
318 pull_request = pr_util.create_pull_request()
321 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
319 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
322 target=pull_request.target_repo.repo_name,
320 target=pull_request.target_repo.repo_name,
323 pr_id=pull_request.pull_request_id,
321 pr_id=pull_request.pull_request_id,
324 pr_segment=TestShadowRepoRegularExpression.pr_segment,
322 pr_segment=TestShadowRepoRegularExpression.pr_segment,
325 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
323 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
326 controller = StubVCSController(
324 controller = StubVCSController(
327 baseapp, baseapp.config, request_stub.registry)
325 baseapp.config.get_settings(), request_stub.registry)
328 controller._name = shadow_url
326 controller._name = shadow_url
329 controller.set_repo_names({})
327 controller.set_repo_names({})
330
328
331 # Get file system path to shadow repo for assertions.
329 # Get file system path to shadow repo for assertions.
332 workspace_id = PullRequestModel()._workspace_id(pull_request)
330 workspace_id = PullRequestModel()._workspace_id(pull_request)
333 target_vcs = pull_request.target_repo.scm_instance()
331 target_vcs = pull_request.target_repo.scm_instance()
334 vcs_repo_name = target_vcs._get_shadow_repository_path(
332 vcs_repo_name = target_vcs._get_shadow_repository_path(
335 workspace_id)
333 workspace_id)
336
334
337 assert controller.vcs_repo_name == vcs_repo_name
335 assert controller.vcs_repo_name == vcs_repo_name
338 assert controller.url_repo_name == shadow_url
336 assert controller.url_repo_name == shadow_url
339 assert controller.acl_repo_name == pull_request.target_repo.repo_name
337 assert controller.acl_repo_name == pull_request.target_repo.repo_name
340 assert controller.is_shadow_repo
338 assert controller.is_shadow_repo
341
339
342 def test_set_repo_names_with_shadow_but_missing_pr(
340 def test_set_repo_names_with_shadow_but_missing_pr(
343 self, baseapp, pr_util, config_stub, request_stub):
341 self, baseapp, pr_util, config_stub, request_stub):
344 """
342 """
345 Checks that the set_repo_names method enforces matching target repos
343 Checks that the set_repo_names method enforces matching target repos
346 and pull request IDs.
344 and pull request IDs.
347 """
345 """
348 pull_request = pr_util.create_pull_request()
346 pull_request = pr_util.create_pull_request()
349 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
347 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
350 target=pull_request.target_repo.repo_name,
348 target=pull_request.target_repo.repo_name,
351 pr_id=999999999,
349 pr_id=999999999,
352 pr_segment=TestShadowRepoRegularExpression.pr_segment,
350 pr_segment=TestShadowRepoRegularExpression.pr_segment,
353 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
351 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
354 controller = StubVCSController(
352 controller = StubVCSController(
355 baseapp, baseapp.config, request_stub.registry)
353 baseapp.config.get_settings(), request_stub.registry)
356 controller._name = shadow_url
354 controller._name = shadow_url
357 controller.set_repo_names({})
355 controller.set_repo_names({})
358
356
359 assert not controller.is_shadow_repo
357 assert not controller.is_shadow_repo
360 assert (controller.url_repo_name ==
358 assert (controller.url_repo_name ==
361 controller.acl_repo_name ==
359 controller.acl_repo_name ==
362 controller.vcs_repo_name)
360 controller.vcs_repo_name)
363
361
364
362
365 @pytest.mark.usefixtures('db')
363 @pytest.mark.usefixtures('baseapp')
366 class TestGenerateVcsResponse(object):
364 class TestGenerateVcsResponse(object):
367
365
368 def test_ensures_that_start_response_is_called_early_enough(self):
366 def test_ensures_that_start_response_is_called_early_enough(self):
369 self.call_controller_with_response_body(iter(['a', 'b']))
367 self.call_controller_with_response_body(iter(['a', 'b']))
370 assert self.start_response.called
368 assert self.start_response.called
371
369
372 def test_invalidates_cache_after_body_is_consumed(self):
370 def test_invalidates_cache_after_body_is_consumed(self):
373 result = self.call_controller_with_response_body(iter(['a', 'b']))
371 result = self.call_controller_with_response_body(iter(['a', 'b']))
374 assert not self.was_cache_invalidated()
372 assert not self.was_cache_invalidated()
375 # Consume the result
373 # Consume the result
376 list(result)
374 list(result)
377 assert self.was_cache_invalidated()
375 assert self.was_cache_invalidated()
378
376
379 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
377 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
380 def test_handles_locking_exception(self, http_locked_rc):
378 def test_handles_locking_exception(self, http_locked_rc):
381 result = self.call_controller_with_response_body(
379 result = self.call_controller_with_response_body(
382 self.raise_result_iter(vcs_kind='repo_locked'))
380 self.raise_result_iter(vcs_kind='repo_locked'))
383 assert not http_locked_rc.called
381 assert not http_locked_rc.called
384 # Consume the result
382 # Consume the result
385 list(result)
383 list(result)
386 assert http_locked_rc.called
384 assert http_locked_rc.called
387
385
388 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
386 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
389 def test_handles_requirement_exception(self, http_requirement):
387 def test_handles_requirement_exception(self, http_requirement):
390 result = self.call_controller_with_response_body(
388 result = self.call_controller_with_response_body(
391 self.raise_result_iter(vcs_kind='requirement'))
389 self.raise_result_iter(vcs_kind='requirement'))
392 assert not http_requirement.called
390 assert not http_requirement.called
393 # Consume the result
391 # Consume the result
394 list(result)
392 list(result)
395 assert http_requirement.called
393 assert http_requirement.called
396
394
397 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
395 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
398 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
396 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
399 app_factory_patcher = mock.patch.object(
397 app_factory_patcher = mock.patch.object(
400 StubVCSController, '_create_wsgi_app')
398 StubVCSController, '_create_wsgi_app')
401 with app_factory_patcher as app_factory:
399 with app_factory_patcher as app_factory:
402 app_factory().side_effect = self.vcs_exception()
400 app_factory().side_effect = self.vcs_exception()
403 result = self.call_controller_with_response_body(['a'])
401 result = self.call_controller_with_response_body(['a'])
404 list(result)
402 list(result)
405 assert http_locked_rc.called
403 assert http_locked_rc.called
406
404
407 def test_raises_unknown_exceptions(self):
405 def test_raises_unknown_exceptions(self):
408 result = self.call_controller_with_response_body(
406 result = self.call_controller_with_response_body(
409 self.raise_result_iter(vcs_kind='unknown'))
407 self.raise_result_iter(vcs_kind='unknown'))
410 with pytest.raises(Exception):
408 with pytest.raises(Exception):
411 list(result)
409 list(result)
412
410
413 def test_prepare_callback_daemon_is_called(self):
411 def test_prepare_callback_daemon_is_called(self):
414 def side_effect(extras):
412 def side_effect(extras):
415 return DummyHooksCallbackDaemon(), extras
413 return DummyHooksCallbackDaemon(), extras
416
414
417 prepare_patcher = mock.patch.object(
415 prepare_patcher = mock.patch.object(
418 StubVCSController, '_prepare_callback_daemon')
416 StubVCSController, '_prepare_callback_daemon')
419 with prepare_patcher as prepare_mock:
417 with prepare_patcher as prepare_mock:
420 prepare_mock.side_effect = side_effect
418 prepare_mock.side_effect = side_effect
421 self.call_controller_with_response_body(iter(['a', 'b']))
419 self.call_controller_with_response_body(iter(['a', 'b']))
422 assert prepare_mock.called
420 assert prepare_mock.called
423 assert prepare_mock.call_count == 1
421 assert prepare_mock.call_count == 1
424
422
425 def call_controller_with_response_body(self, response_body):
423 def call_controller_with_response_body(self, response_body):
426 settings = {
424 settings = {
427 'base_path': 'fake_base_path',
425 'base_path': 'fake_base_path',
428 'vcs.hooks.protocol': 'http',
426 'vcs.hooks.protocol': 'http',
429 'vcs.hooks.direct_calls': False,
427 'vcs.hooks.direct_calls': False,
430 }
428 }
431 registry = AttributeDict()
429 registry = AttributeDict()
432 controller = StubVCSController(None, settings, registry)
430 controller = StubVCSController(settings, registry)
433 controller._invalidate_cache = mock.Mock()
431 controller._invalidate_cache = mock.Mock()
434 controller.stub_response_body = response_body
432 controller.stub_response_body = response_body
435 self.start_response = mock.Mock()
433 self.start_response = mock.Mock()
436 result = controller._generate_vcs_response(
434 result = controller._generate_vcs_response(
437 environ={}, start_response=self.start_response,
435 environ={}, start_response=self.start_response,
438 repo_path='fake_repo_path',
436 repo_path='fake_repo_path',
439 extras={}, action='push')
437 extras={}, action='push')
440 self.controller = controller
438 self.controller = controller
441 return result
439 return result
442
440
443 def raise_result_iter(self, vcs_kind='repo_locked'):
441 def raise_result_iter(self, vcs_kind='repo_locked'):
444 """
442 """
445 Simulates an exception raised by the vcs layer, of the given vcs_kind.
443 Simulates an exception raised by the vcs layer, of the given vcs_kind.
446 """
444 """
447 raise self.vcs_exception(vcs_kind=vcs_kind)
445 raise self.vcs_exception(vcs_kind=vcs_kind)
448 yield "never_reached"
446 yield "never_reached"
449
447
450 def vcs_exception(self, vcs_kind='repo_locked'):
448 def vcs_exception(self, vcs_kind='repo_locked'):
451 locked_exception = Exception('TEST_MESSAGE')
449 locked_exception = Exception('TEST_MESSAGE')
452 locked_exception._vcs_kind = vcs_kind
450 locked_exception._vcs_kind = vcs_kind
453 return locked_exception
451 return locked_exception
454
452
455 def was_cache_invalidated(self):
453 def was_cache_invalidated(self):
456 return self.controller._invalidate_cache.called
454 return self.controller._invalidate_cache.called
457
455
458
456
459 class TestInitializeGenerator(object):
457 class TestInitializeGenerator(object):
460
458
461 def test_drains_first_element(self):
459 def test_drains_first_element(self):
462 gen = self.factory(['__init__', 1, 2])
460 gen = self.factory(['__init__', 1, 2])
463 result = list(gen)
461 result = list(gen)
464 assert result == [1, 2]
462 assert result == [1, 2]
465
463
466 @pytest.mark.parametrize('values', [
464 @pytest.mark.parametrize('values', [
467 [],
465 [],
468 [1, 2],
466 [1, 2],
469 ])
467 ])
470 def test_raises_value_error(self, values):
468 def test_raises_value_error(self, values):
471 with pytest.raises(ValueError):
469 with pytest.raises(ValueError):
472 self.factory(values)
470 self.factory(values)
473
471
474 @simplevcs.initialize_generator
472 @simplevcs.initialize_generator
475 def factory(self, iterable):
473 def factory(self, iterable):
476 for elem in iterable:
474 for elem in iterable:
477 yield elem
475 yield elem
478
476
479
477
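
For context, the TestInitializeGenerator cases above pin down the contract of simplevcs.initialize_generator: the wrapped generator must yield the literal '__init__' marker first, the decorator consumes that marker, and anything else raises ValueError. A rough, purely illustrative decorator honouring that contract (not RhodeCode's actual implementation):

    import functools

    def initialize_generator_sketch(func):
        """Drain a leading '__init__' marker from a generator, else raise ValueError."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            gen = func(*args, **kwargs)
            try:
                marker = next(gen)
            except StopIteration:
                raise ValueError('generator yielded nothing, expected "__init__" first')
            if marker != '__init__':
                raise ValueError('first yielded value must be "__init__"')
            return gen
        return wrapper
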
480 class TestPrepareHooksDaemon(object):
478 class TestPrepareHooksDaemon(object):
481 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
479 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
482 expected_extras = {'extra1': 'value1'}
480 expected_extras = {'extra1': 'value1'}
483 daemon = DummyHooksCallbackDaemon()
481 daemon = DummyHooksCallbackDaemon()
484
482
485 controller = StubVCSController(None, app_settings, request_stub.registry)
483 controller = StubVCSController(app_settings, request_stub.registry)
486 prepare_patcher = mock.patch.object(
484 prepare_patcher = mock.patch.object(
487 simplevcs, 'prepare_callback_daemon',
485 simplevcs, 'prepare_callback_daemon',
488 return_value=(daemon, expected_extras))
486 return_value=(daemon, expected_extras))
489 with prepare_patcher as prepare_mock:
487 with prepare_patcher as prepare_mock:
490 callback_daemon, extras = controller._prepare_callback_daemon(
488 callback_daemon, extras = controller._prepare_callback_daemon(
491 expected_extras.copy())
489 expected_extras.copy())
492 prepare_mock.assert_called_once_with(
490 prepare_mock.assert_called_once_with(
493 expected_extras,
491 expected_extras,
494 protocol=app_settings['vcs.hooks.protocol'],
492 protocol=app_settings['vcs.hooks.protocol'],
495 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
493 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
496
494
497 assert callback_daemon == daemon
495 assert callback_daemon == daemon
498 assert extras == expected_extras
496 assert extras == expected_extras
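
The recurring change across the hunks above is the SimpleVCS constructor: it no longer receives the WSGI application and the Pylons config, only the Pyramid settings dict and the registry. A minimal sketch of the new call shape, assuming the baseapp and request_stub fixtures used throughout this file:

    # old call shape: StubVCSController(baseapp, baseapp.config, request_stub.registry)
    # new call shape: settings dict plus registry only
    settings = baseapp.config.get_settings()  # plain dict of application settings
    controller = StubVCSController(settings, request_stub.registry)
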
@@ -1,53 +1,51 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24
24
25 @pytest.mark.usefixtures('autologin_user', 'app')
25 @pytest.mark.usefixtures('autologin_user', 'app')
26 def test_vcs_available_returns_summary_page(app, backend):
26 def test_vcs_available_returns_summary_page(app, backend):
27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
28 response = app.get(url)
28 response = app.get(url)
29 assert response.status_code == 200
29 assert response.status_code == 200
30 assert 'Summary' in response.body
30 assert 'Summary' in response.body
31
31
32
32
33 @pytest.mark.usefixtures('autologin_user', 'app')
33 @pytest.mark.usefixtures('autologin_user', 'app')
34 def test_vcs_unavailable_returns_vcs_error_page(app, backend):
34 def test_vcs_unavailable_returns_vcs_error_page(app, backend):
35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
36
36
37 # Depending on the used VCSServer protocol we have to patch a different
37 # Depending on the used VCSServer protocol we have to patch a different
38 # RemoteRepo class to raise an exception. For the test it doesn't matter
38 # RemoteRepo class to raise an exception. For the test it doesn't matter
39 # if http is used, it just requires the exception to be raised.
39 # if http is used, it just requires the exception to be raised.
40 from rhodecode.lib.vcs.client_http import RemoteRepo
40 from rhodecode.lib.vcs.client_http import RemoteRepo
41
41
42 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
42 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
43
43
44 # Patch remote repo to raise an exception instead of making a RPC.
44 # Patch remote repo to raise an exception instead of making a RPC.
45 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
45 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
46 remote_mock.side_effect = VCSCommunicationError()
46 remote_mock.side_effect = VCSCommunicationError()
47 # Patch pylons error handling middleware to not re-raise exceptions.
47
48 with mock.patch.object(PylonsErrorHandlingMiddleware, 'reraise') as r:
48 response = app.get(url, expect_errors=True)
49 r.return_value = False
50 response = app.get(url, expect_errors=True)
51
49
52 assert response.status_code == 502
50 assert response.status_code == 502
53 assert 'Could not connect to VCS Server' in response.body
51 assert 'Could not connect to VCS Server' in response.body
@@ -1,34 +1,34 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.action_parser import ActionParser
23 from rhodecode.lib.action_parser import ActionParser
24 from rhodecode.model.db import UserLog
24 from rhodecode.model.db import UserLog
25
25
26
26
27 @pytest.mark.parametrize('pr_key', [
27 @pytest.mark.parametrize('pr_key', [
28 'user_commented_pull_request',
28 'user_commented_pull_request',
29 'user_closed_pull_request',
29 'user_closed_pull_request',
30 'user_merged_pull_request'
30 'user_merged_pull_request'
31 ])
31 ])
32 def test_action_map_pr_values(baseapp, pr_key):
32 def test_action_map_pr_values(request_stub, baseapp, pr_key):
33 parser = ActionParser(UserLog(action="test:test"))
33 parser = ActionParser(request_stub, UserLog(action="test:test"))
34 assert pr_key in parser.action_map
34 assert pr_key in parser.action_map
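
The only change in this file is the ActionParser signature: it now takes the request as its first argument, ahead of the UserLog entry. A minimal usage sketch, assuming any Pyramid request object (the test passes the request_stub fixture):

    from rhodecode.lib.action_parser import ActionParser
    from rhodecode.model.db import UserLog

    # request_stub stands in for a real Pyramid request
    parser = ActionParser(request_stub, UserLog(action='test:test'))
    assert 'user_commented_pull_request' in parser.action_map
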
@@ -1,822 +1,805 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import textwrap
21 import textwrap
22
22
23 import pytest
23 import pytest
24
24
25 from rhodecode.lib.diffs import (
25 from rhodecode.lib.diffs import (
26 DiffProcessor, wrapped_diff,
26 DiffProcessor,
27 NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE,
27 NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE,
28 CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE)
28 CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixture import Fixture
30 from rhodecode.lib.vcs.backends.git.repository import GitDiff
30 from rhodecode.lib.vcs.backends.git.repository import GitDiff
31 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
31 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
32 from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff
32 from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff
33
33
34 fixture = Fixture()
34 fixture = Fixture()
35
35
36
36
37 def test_wrapped_diff_limited_file_diff(vcsbackend_random):
38 vcsbackend = vcsbackend_random
39 repo = vcsbackend.create_repo()
40 vcsbackend.add_file(repo, 'a_file', content="line 1\nline 2\nline3\n")
41 commit = repo.get_commit()
42 file_node = commit.get_node('a_file')
43
44 # Only limit the file diff to trigger the code path
45 result = wrapped_diff(
46 None, file_node, diff_limit=10000, file_limit=1)
47 data = result[5]
48
49 # Verify that the limits were applied
50 assert data['exceeds_limit'] is True
51 assert data['is_limited_diff'] is True
52
53
54 def test_diffprocessor_as_html_with_comments():
37 def test_diffprocessor_as_html_with_comments():
55 raw_diff = textwrap.dedent('''
38 raw_diff = textwrap.dedent('''
56 diff --git a/setup.py b/setup.py
39 diff --git a/setup.py b/setup.py
57 index 5b36422..cfd698e 100755
40 index 5b36422..cfd698e 100755
58 --- a/setup.py
41 --- a/setup.py
59 +++ b/setup.py
42 +++ b/setup.py
60 @@ -2,7 +2,7 @@
43 @@ -2,7 +2,7 @@
61 #!/usr/bin/python
44 #!/usr/bin/python
62 # Setup file for X
45 # Setup file for X
63 # Copyright (C) No one
46 # Copyright (C) No one
64 -
47 -
65 +x
48 +x
66 try:
49 try:
67 from setuptools import setup, Extension
50 from setuptools import setup, Extension
68 except ImportError:
51 except ImportError:
69 ''')
52 ''')
70 diff = GitDiff(raw_diff)
53 diff = GitDiff(raw_diff)
71 processor = DiffProcessor(diff)
54 processor = DiffProcessor(diff)
72 processor.prepare()
55 processor.prepare()
73
56
74 # Note that the cell with the context in line 5 (in the html) has the
57 # Note that the cell with the context in line 5 (in the html) has the
75 # no-comment class, which will prevent the add comment icon to be displayed.
58 # no-comment class, which will prevent the add comment icon to be displayed.
76 expected_html = textwrap.dedent('''
59 expected_html = textwrap.dedent('''
77 <table class="code-difftable">
60 <table class="code-difftable">
78 <tr class="line context">
61 <tr class="line context">
79 <td class="add-comment-line"><span class="add-comment-content"></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
62 <td class="add-comment-line"><span class="add-comment-content"></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
80 <td class="lineno old">...</td>
63 <td class="lineno old">...</td>
81 <td class="lineno new">...</td>
64 <td class="lineno new">...</td>
82 <td class="code no-comment">
65 <td class="code no-comment">
83 <pre>@@ -2,7 +2,7 @@
66 <pre>@@ -2,7 +2,7 @@
84 </pre>
67 </pre>
85 </td>
68 </td>
86 </tr>
69 </tr>
87 <tr class="line unmod">
70 <tr class="line unmod">
88 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
71 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
89 <td id="setuppy_o2" class="lineno old"><a href="#setuppy_o2" class="tooltip"
72 <td id="setuppy_o2" class="lineno old"><a href="#setuppy_o2" class="tooltip"
90 title="Click to select line">2</a></td>
73 title="Click to select line">2</a></td>
91 <td id="setuppy_n2" class="lineno new"><a href="#setuppy_n2" class="tooltip"
74 <td id="setuppy_n2" class="lineno new"><a href="#setuppy_n2" class="tooltip"
92 title="Click to select line">2</a></td>
75 title="Click to select line">2</a></td>
93 <td class="code">
76 <td class="code">
94 <pre>#!/usr/bin/python
77 <pre>#!/usr/bin/python
95 </pre>
78 </pre>
96 </td>
79 </td>
97 </tr>
80 </tr>
98 <tr class="line unmod">
81 <tr class="line unmod">
99 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
82 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
100 <td id="setuppy_o3" class="lineno old"><a href="#setuppy_o3" class="tooltip"
83 <td id="setuppy_o3" class="lineno old"><a href="#setuppy_o3" class="tooltip"
101 title="Click to select line">3</a></td>
84 title="Click to select line">3</a></td>
102 <td id="setuppy_n3" class="lineno new"><a href="#setuppy_n3" class="tooltip"
85 <td id="setuppy_n3" class="lineno new"><a href="#setuppy_n3" class="tooltip"
103 title="Click to select line">3</a></td>
86 title="Click to select line">3</a></td>
104 <td class="code">
87 <td class="code">
105 <pre># Setup file for X
88 <pre># Setup file for X
106 </pre>
89 </pre>
107 </td>
90 </td>
108 </tr>
91 </tr>
109 <tr class="line unmod">
92 <tr class="line unmod">
110 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
93 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
111 <td id="setuppy_o4" class="lineno old"><a href="#setuppy_o4" class="tooltip"
94 <td id="setuppy_o4" class="lineno old"><a href="#setuppy_o4" class="tooltip"
112 title="Click to select line">4</a></td>
95 title="Click to select line">4</a></td>
113 <td id="setuppy_n4" class="lineno new"><a href="#setuppy_n4" class="tooltip"
96 <td id="setuppy_n4" class="lineno new"><a href="#setuppy_n4" class="tooltip"
114 title="Click to select line">4</a></td>
97 title="Click to select line">4</a></td>
115 <td class="code">
98 <td class="code">
116 <pre># Copyright (C) No one
99 <pre># Copyright (C) No one
117 </pre>
100 </pre>
118 </td>
101 </td>
119 </tr>
102 </tr>
120 <tr class="line del">
103 <tr class="line del">
121 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
104 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
122 <td id="setuppy_o5" class="lineno old"><a href="#setuppy_o5" class="tooltip"
105 <td id="setuppy_o5" class="lineno old"><a href="#setuppy_o5" class="tooltip"
123 title="Click to select line">5</a></td>
106 title="Click to select line">5</a></td>
124 <td class="lineno new"><a href="#setuppy_n" class="tooltip"
107 <td class="lineno new"><a href="#setuppy_n" class="tooltip"
125 title="Click to select line"></a></td>
108 title="Click to select line"></a></td>
126 <td class="code">
109 <td class="code">
127 <pre>
110 <pre>
128 </pre>
111 </pre>
129 </td>
112 </td>
130 </tr>
113 </tr>
131 <tr class="line add">
114 <tr class="line add">
132 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
115 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
133 <td class="lineno old"><a href="#setuppy_o" class="tooltip"
116 <td class="lineno old"><a href="#setuppy_o" class="tooltip"
134 title="Click to select line"></a></td>
117 title="Click to select line"></a></td>
135 <td id="setuppy_n5" class="lineno new"><a href="#setuppy_n5" class="tooltip"
118 <td id="setuppy_n5" class="lineno new"><a href="#setuppy_n5" class="tooltip"
136 title="Click to select line">5</a></td>
119 title="Click to select line">5</a></td>
137 <td class="code">
120 <td class="code">
138 <pre><ins>x</ins>
121 <pre><ins>x</ins>
139 </pre>
122 </pre>
140 </td>
123 </td>
141 </tr>
124 </tr>
142 <tr class="line unmod">
125 <tr class="line unmod">
143 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
126 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
144 <td id="setuppy_o6" class="lineno old"><a href="#setuppy_o6" class="tooltip"
127 <td id="setuppy_o6" class="lineno old"><a href="#setuppy_o6" class="tooltip"
145 title="Click to select line">6</a></td>
128 title="Click to select line">6</a></td>
146 <td id="setuppy_n6" class="lineno new"><a href="#setuppy_n6" class="tooltip"
129 <td id="setuppy_n6" class="lineno new"><a href="#setuppy_n6" class="tooltip"
147 title="Click to select line">6</a></td>
130 title="Click to select line">6</a></td>
148 <td class="code">
131 <td class="code">
149 <pre>try:
132 <pre>try:
150 </pre>
133 </pre>
151 </td>
134 </td>
152 </tr>
135 </tr>
153 <tr class="line unmod">
136 <tr class="line unmod">
154 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
137 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
155 <td id="setuppy_o7" class="lineno old"><a href="#setuppy_o7" class="tooltip"
138 <td id="setuppy_o7" class="lineno old"><a href="#setuppy_o7" class="tooltip"
156 title="Click to select line">7</a></td>
139 title="Click to select line">7</a></td>
157 <td id="setuppy_n7" class="lineno new"><a href="#setuppy_n7" class="tooltip"
140 <td id="setuppy_n7" class="lineno new"><a href="#setuppy_n7" class="tooltip"
158 title="Click to select line">7</a></td>
141 title="Click to select line">7</a></td>
159 <td class="code">
142 <td class="code">
160 <pre> from setuptools import setup, Extension
143 <pre> from setuptools import setup, Extension
161 </pre>
144 </pre>
162 </td>
145 </td>
163 </tr>
146 </tr>
164 <tr class="line unmod">
147 <tr class="line unmod">
165 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
148 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
166 <td id="setuppy_o8" class="lineno old"><a href="#setuppy_o8" class="tooltip"
149 <td id="setuppy_o8" class="lineno old"><a href="#setuppy_o8" class="tooltip"
167 title="Click to select line">8</a></td>
150 title="Click to select line">8</a></td>
168 <td id="setuppy_n8" class="lineno new"><a href="#setuppy_n8" class="tooltip"
151 <td id="setuppy_n8" class="lineno new"><a href="#setuppy_n8" class="tooltip"
169 title="Click to select line">8</a></td>
152 title="Click to select line">8</a></td>
170 <td class="code">
153 <td class="code">
171 <pre>except ImportError:
154 <pre>except ImportError:
172 </pre>
155 </pre>
173 </td>
156 </td>
174 </tr>
157 </tr>
175 </table>
158 </table>
176 ''').strip()
159 ''').strip()
177 html = processor.as_html(enable_comments=True).replace('\t', ' ')
160 html = processor.as_html(enable_comments=True).replace('\t', ' ')
178
161
179 assert html == expected_html
162 assert html == expected_html
180
163
181
164
182 class TestMixedFilenameEncodings:
165 class TestMixedFilenameEncodings:
183
166
184 @pytest.fixture(scope="class")
167 @pytest.fixture(scope="class")
185 def raw_diff(self):
168 def raw_diff(self):
186 return fixture.load_resource(
169 return fixture.load_resource(
187 'hg_diff_mixed_filename_encodings.diff')
170 'hg_diff_mixed_filename_encodings.diff')
188
171
189 @pytest.fixture
172 @pytest.fixture
190 def processor(self, raw_diff):
173 def processor(self, raw_diff):
191 diff = MercurialDiff(raw_diff)
174 diff = MercurialDiff(raw_diff)
192 processor = DiffProcessor(diff)
175 processor = DiffProcessor(diff)
193 return processor
176 return processor
194
177
195 def test_filenames_are_decoded_to_unicode(self, processor):
178 def test_filenames_are_decoded_to_unicode(self, processor):
196 diff_data = processor.prepare()
179 diff_data = processor.prepare()
197 filenames = [item['filename'] for item in diff_data]
180 filenames = [item['filename'] for item in diff_data]
198 assert filenames == [
181 assert filenames == [
199 u'späcial-utf8.txt', u'sp�cial-cp1252.txt', u'sp�cial-latin1.txt']
182 u'späcial-utf8.txt', u'sp�cial-cp1252.txt', u'sp�cial-latin1.txt']
200
183
201 def test_raw_diff_is_decoded_to_unicode(self, processor):
184 def test_raw_diff_is_decoded_to_unicode(self, processor):
202 diff_data = processor.prepare()
185 diff_data = processor.prepare()
203 raw_diffs = [item['raw_diff'] for item in diff_data]
186 raw_diffs = [item['raw_diff'] for item in diff_data]
204 new_file_message = u'\nnew file mode 100644\n'
187 new_file_message = u'\nnew file mode 100644\n'
205 expected_raw_diffs = [
188 expected_raw_diffs = [
206 u' a/späcial-utf8.txt b/späcial-utf8.txt' + new_file_message,
189 u' a/späcial-utf8.txt b/späcial-utf8.txt' + new_file_message,
207 u' a/sp�cial-cp1252.txt b/sp�cial-cp1252.txt' + new_file_message,
190 u' a/sp�cial-cp1252.txt b/sp�cial-cp1252.txt' + new_file_message,
208 u' a/sp�cial-latin1.txt b/sp�cial-latin1.txt' + new_file_message]
191 u' a/sp�cial-latin1.txt b/sp�cial-latin1.txt' + new_file_message]
209 assert raw_diffs == expected_raw_diffs
192 assert raw_diffs == expected_raw_diffs
210
193
211 def test_as_raw_preserves_the_encoding(self, processor, raw_diff):
194 def test_as_raw_preserves_the_encoding(self, processor, raw_diff):
212 assert processor.as_raw() == raw_diff
195 assert processor.as_raw() == raw_diff
213
196
214
197
215 # TODO: mikhail: format the following data structure properly
198 # TODO: mikhail: format the following data structure properly
216 DIFF_FIXTURES = [
199 DIFF_FIXTURES = [
217 ('hg',
200 ('hg',
218 'hg_diff_add_single_binary_file.diff',
201 'hg_diff_add_single_binary_file.diff',
219 [('US Warszawa.jpg', 'A',
202 [('US Warszawa.jpg', 'A',
220 {'added': 0,
203 {'added': 0,
221 'deleted': 0,
204 'deleted': 0,
222 'binary': True,
205 'binary': True,
223 'ops': {NEW_FILENODE: 'new file 100755',
206 'ops': {NEW_FILENODE: 'new file 100755',
224 BIN_FILENODE: 'binary diff hidden'}}),
207 BIN_FILENODE: 'binary diff hidden'}}),
225 ]),
208 ]),
226 ('hg',
209 ('hg',
227 'hg_diff_mod_single_binary_file.diff',
210 'hg_diff_mod_single_binary_file.diff',
228 [('US Warszawa.jpg', 'M',
211 [('US Warszawa.jpg', 'M',
229 {'added': 0,
212 {'added': 0,
230 'deleted': 0,
213 'deleted': 0,
231 'binary': True,
214 'binary': True,
232 'ops': {MOD_FILENODE: 'modified file',
215 'ops': {MOD_FILENODE: 'modified file',
233 BIN_FILENODE: 'binary diff hidden'}}),
216 BIN_FILENODE: 'binary diff hidden'}}),
234 ]),
217 ]),
235 ('hg',
218 ('hg',
236 'hg_diff_mod_single_file_and_rename_and_chmod.diff',
219 'hg_diff_mod_single_file_and_rename_and_chmod.diff',
237 [('README', 'M',
220 [('README', 'M',
238 {'added': 3,
221 {'added': 3,
239 'deleted': 0,
222 'deleted': 0,
240 'binary': False,
223 'binary': False,
241 'ops': {MOD_FILENODE: 'modified file',
224 'ops': {MOD_FILENODE: 'modified file',
242 RENAMED_FILENODE: 'file renamed from README.rst to README',
225 RENAMED_FILENODE: 'file renamed from README.rst to README',
243 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
226 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
244 ]),
227 ]),
245 ('hg',
228 ('hg',
246 'hg_diff_mod_file_and_rename.diff',
229 'hg_diff_mod_file_and_rename.diff',
247 [('README.rst', 'M',
230 [('README.rst', 'M',
248 {'added': 3,
231 {'added': 3,
249 'deleted': 0,
232 'deleted': 0,
250 'binary': False,
233 'binary': False,
251 'ops': {MOD_FILENODE: 'modified file',
234 'ops': {MOD_FILENODE: 'modified file',
252 RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
235 RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
253 ]),
236 ]),
254 ('hg',
237 ('hg',
255 'hg_diff_del_single_binary_file.diff',
238 'hg_diff_del_single_binary_file.diff',
256 [('US Warszawa.jpg', 'D',
239 [('US Warszawa.jpg', 'D',
257 {'added': 0,
240 {'added': 0,
258 'deleted': 0,
241 'deleted': 0,
259 'binary': True,
242 'binary': True,
260 'ops': {DEL_FILENODE: 'deleted file',
243 'ops': {DEL_FILENODE: 'deleted file',
261 BIN_FILENODE: 'binary diff hidden'}}),
244 BIN_FILENODE: 'binary diff hidden'}}),
262 ]),
245 ]),
263 ('hg',
246 ('hg',
264 'hg_diff_chmod_and_mod_single_binary_file.diff',
247 'hg_diff_chmod_and_mod_single_binary_file.diff',
265 [('gravatar.png', 'M',
248 [('gravatar.png', 'M',
266 {'added': 0,
249 {'added': 0,
267 'deleted': 0,
250 'deleted': 0,
268 'binary': True,
251 'binary': True,
269 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
252 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
270 BIN_FILENODE: 'binary diff hidden'}}),
253 BIN_FILENODE: 'binary diff hidden'}}),
271 ]),
254 ]),
272 ('hg',
255 ('hg',
273 'hg_diff_chmod.diff',
256 'hg_diff_chmod.diff',
274 [('file', 'M',
257 [('file', 'M',
275 {'added': 0,
258 {'added': 0,
276 'deleted': 0,
259 'deleted': 0,
277 'binary': True,
260 'binary': True,
278 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
261 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
279 ]),
262 ]),
280 ('hg',
263 ('hg',
281 'hg_diff_rename_file.diff',
264 'hg_diff_rename_file.diff',
282 [('file_renamed', 'M',
265 [('file_renamed', 'M',
283 {'added': 0,
266 {'added': 0,
284 'deleted': 0,
267 'deleted': 0,
285 'binary': True,
268 'binary': True,
286 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
269 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
287 ]),
270 ]),
288 ('hg',
271 ('hg',
289 'hg_diff_rename_and_chmod_file.diff',
272 'hg_diff_rename_and_chmod_file.diff',
290 [('README', 'M',
273 [('README', 'M',
291 {'added': 0,
274 {'added': 0,
292 'deleted': 0,
275 'deleted': 0,
293 'binary': True,
276 'binary': True,
294 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
277 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
295 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
278 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
296 ]),
279 ]),
297 ('hg',
280 ('hg',
298 'hg_diff_binary_and_normal.diff',
281 'hg_diff_binary_and_normal.diff',
299 [('img/baseline-10px.png', 'A',
282 [('img/baseline-10px.png', 'A',
300 {'added': 0,
283 {'added': 0,
301 'deleted': 0,
284 'deleted': 0,
302 'binary': True,
285 'binary': True,
303 'ops': {NEW_FILENODE: 'new file 100644',
286 'ops': {NEW_FILENODE: 'new file 100644',
304 BIN_FILENODE: 'binary diff hidden'}}),
287 BIN_FILENODE: 'binary diff hidden'}}),
305 ('js/jquery/hashgrid.js', 'A',
288 ('js/jquery/hashgrid.js', 'A',
306 {'added': 340,
289 {'added': 340,
307 'deleted': 0,
290 'deleted': 0,
308 'binary': False,
291 'binary': False,
309 'ops': {NEW_FILENODE: 'new file 100755'}}),
292 'ops': {NEW_FILENODE: 'new file 100755'}}),
310 ('index.html', 'M',
293 ('index.html', 'M',
311 {'added': 3,
294 {'added': 3,
312 'deleted': 2,
295 'deleted': 2,
313 'binary': False,
296 'binary': False,
314 'ops': {MOD_FILENODE: 'modified file'}}),
297 'ops': {MOD_FILENODE: 'modified file'}}),
315 ('less/docs.less', 'M',
298 ('less/docs.less', 'M',
316 {'added': 34,
299 {'added': 34,
317 'deleted': 0,
300 'deleted': 0,
318 'binary': False,
301 'binary': False,
319 'ops': {MOD_FILENODE: 'modified file'}}),
302 'ops': {MOD_FILENODE: 'modified file'}}),
320 ('less/scaffolding.less', 'M',
303 ('less/scaffolding.less', 'M',
321 {'added': 1,
304 {'added': 1,
322 'deleted': 3,
305 'deleted': 3,
323 'binary': False,
306 'binary': False,
324 'ops': {MOD_FILENODE: 'modified file'}}),
307 'ops': {MOD_FILENODE: 'modified file'}}),
325 ('readme.markdown', 'M',
308 ('readme.markdown', 'M',
326 {'added': 1,
309 {'added': 1,
327 'deleted': 10,
310 'deleted': 10,
328 'binary': False,
311 'binary': False,
329 'ops': {MOD_FILENODE: 'modified file'}}),
312 'ops': {MOD_FILENODE: 'modified file'}}),
330 ('img/baseline-20px.png', 'D',
313 ('img/baseline-20px.png', 'D',
331 {'added': 0,
314 {'added': 0,
332 'deleted': 0,
315 'deleted': 0,
333 'binary': True,
316 'binary': True,
334 'ops': {DEL_FILENODE: 'deleted file',
317 'ops': {DEL_FILENODE: 'deleted file',
335 BIN_FILENODE: 'binary diff hidden'}}),
318 BIN_FILENODE: 'binary diff hidden'}}),
336 ('js/global.js', 'D',
319 ('js/global.js', 'D',
337 {'added': 0,
320 {'added': 0,
338 'deleted': 75,
321 'deleted': 75,
339 'binary': False,
322 'binary': False,
340 'ops': {DEL_FILENODE: 'deleted file'}})
323 'ops': {DEL_FILENODE: 'deleted file'}})
341 ]),
324 ]),
342 ('git',
325 ('git',
343 'git_diff_chmod.diff',
326 'git_diff_chmod.diff',
344 [('work-horus.xls', 'M',
327 [('work-horus.xls', 'M',
345 {'added': 0,
328 {'added': 0,
346 'deleted': 0,
329 'deleted': 0,
347 'binary': True,
330 'binary': True,
348 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
331 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
349 ]),
332 ]),
350 ('git',
333 ('git',
351 'git_diff_rename_file.diff',
334 'git_diff_rename_file.diff',
352 [('file.xls', 'M',
335 [('file.xls', 'M',
353 {'added': 0,
336 {'added': 0,
354 'deleted': 0,
337 'deleted': 0,
355 'binary': True,
338 'binary': True,
356 'ops': {
339 'ops': {
357 RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
340 RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
358 ]),
341 ]),
359 ('git',
342 ('git',
360 'git_diff_mod_single_binary_file.diff',
343 'git_diff_mod_single_binary_file.diff',
361 [('US Warszawa.jpg', 'M',
344 [('US Warszawa.jpg', 'M',
362 {'added': 0,
345 {'added': 0,
363 'deleted': 0,
346 'deleted': 0,
364 'binary': True,
347 'binary': True,
365 'ops': {MOD_FILENODE: 'modified file',
348 'ops': {MOD_FILENODE: 'modified file',
366 BIN_FILENODE: 'binary diff hidden'}})
349 BIN_FILENODE: 'binary diff hidden'}})
367 ]),
350 ]),
368 ('git',
351 ('git',
369 'git_diff_binary_and_normal.diff',
352 'git_diff_binary_and_normal.diff',
370 [('img/baseline-10px.png', 'A',
353 [('img/baseline-10px.png', 'A',
371 {'added': 0,
354 {'added': 0,
372 'deleted': 0,
355 'deleted': 0,
373 'binary': True,
356 'binary': True,
374 'ops': {NEW_FILENODE: 'new file 100644',
357 'ops': {NEW_FILENODE: 'new file 100644',
375 BIN_FILENODE: 'binary diff hidden'}}),
358 BIN_FILENODE: 'binary diff hidden'}}),
376 ('js/jquery/hashgrid.js', 'A',
359 ('js/jquery/hashgrid.js', 'A',
377 {'added': 340,
360 {'added': 340,
378 'deleted': 0,
361 'deleted': 0,
379 'binary': False,
362 'binary': False,
380 'ops': {NEW_FILENODE: 'new file 100755'}}),
363 'ops': {NEW_FILENODE: 'new file 100755'}}),
381 ('index.html', 'M',
364 ('index.html', 'M',
382 {'added': 3,
365 {'added': 3,
383 'deleted': 2,
366 'deleted': 2,
384 'binary': False,
367 'binary': False,
385 'ops': {MOD_FILENODE: 'modified file'}}),
368 'ops': {MOD_FILENODE: 'modified file'}}),
386 ('less/docs.less', 'M',
369 ('less/docs.less', 'M',
387 {'added': 34,
370 {'added': 34,
388 'deleted': 0,
371 'deleted': 0,
389 'binary': False,
372 'binary': False,
390 'ops': {MOD_FILENODE: 'modified file'}}),
373 'ops': {MOD_FILENODE: 'modified file'}}),
391 ('less/scaffolding.less', 'M',
374 ('less/scaffolding.less', 'M',
392 {'added': 1,
375 {'added': 1,
393 'deleted': 3,
376 'deleted': 3,
394 'binary': False,
377 'binary': False,
395 'ops': {MOD_FILENODE: 'modified file'}}),
378 'ops': {MOD_FILENODE: 'modified file'}}),
396 ('readme.markdown', 'M',
379 ('readme.markdown', 'M',
397 {'added': 1,
380 {'added': 1,
398 'deleted': 10,
381 'deleted': 10,
399 'binary': False,
382 'binary': False,
400 'ops': {MOD_FILENODE: 'modified file'}}),
383 'ops': {MOD_FILENODE: 'modified file'}}),
401 ('img/baseline-20px.png', 'D',
384 ('img/baseline-20px.png', 'D',
402 {'added': 0,
385 {'added': 0,
403 'deleted': 0,
386 'deleted': 0,
404 'binary': True,
387 'binary': True,
405 'ops': {DEL_FILENODE: 'deleted file',
388 'ops': {DEL_FILENODE: 'deleted file',
406 BIN_FILENODE: 'binary diff hidden'}}),
389 BIN_FILENODE: 'binary diff hidden'}}),
407 ('js/global.js', 'D',
390 ('js/global.js', 'D',
408 {'added': 0,
391 {'added': 0,
409 'deleted': 75,
392 'deleted': 75,
410 'binary': False,
393 'binary': False,
411 'ops': {DEL_FILENODE: 'deleted file'}}),
394 'ops': {DEL_FILENODE: 'deleted file'}}),
412 ]),
395 ]),
413 ('hg',
396 ('hg',
414 'diff_with_diff_data.diff',
397 'diff_with_diff_data.diff',
415 [('vcs/backends/base.py', 'M',
398 [('vcs/backends/base.py', 'M',
416 {'added': 18,
399 {'added': 18,
417 'deleted': 2,
400 'deleted': 2,
418 'binary': False,
401 'binary': False,
419 'ops': {MOD_FILENODE: 'modified file'}}),
402 'ops': {MOD_FILENODE: 'modified file'}}),
420 ('vcs/backends/git/repository.py', 'M',
403 ('vcs/backends/git/repository.py', 'M',
421 {'added': 46,
404 {'added': 46,
422 'deleted': 15,
405 'deleted': 15,
423 'binary': False,
406 'binary': False,
424 'ops': {MOD_FILENODE: 'modified file'}}),
407 'ops': {MOD_FILENODE: 'modified file'}}),
425 ('vcs/backends/hg.py', 'M',
408 ('vcs/backends/hg.py', 'M',
426 {'added': 22,
409 {'added': 22,
427 'deleted': 3,
410 'deleted': 3,
428 'binary': False,
411 'binary': False,
429 'ops': {MOD_FILENODE: 'modified file'}}),
412 'ops': {MOD_FILENODE: 'modified file'}}),
430 ('vcs/tests/test_git.py', 'M',
413 ('vcs/tests/test_git.py', 'M',
431 {'added': 5,
414 {'added': 5,
432 'deleted': 5,
415 'deleted': 5,
433 'binary': False,
416 'binary': False,
434 'ops': {MOD_FILENODE: 'modified file'}}),
417 'ops': {MOD_FILENODE: 'modified file'}}),
435 ('vcs/tests/test_repository.py', 'M',
418 ('vcs/tests/test_repository.py', 'M',
436 {'added': 174,
419 {'added': 174,
437 'deleted': 2,
420 'deleted': 2,
438 'binary': False,
421 'binary': False,
439 'ops': {MOD_FILENODE: 'modified file'}}),
422 'ops': {MOD_FILENODE: 'modified file'}}),
440 ]),
423 ]),
441 ('hg',
424 ('hg',
442 'hg_diff_copy_file.diff',
425 'hg_diff_copy_file.diff',
443 [('file2', 'M',
426 [('file2', 'M',
444 {'added': 0,
427 {'added': 0,
445 'deleted': 0,
428 'deleted': 0,
446 'binary': True,
429 'binary': True,
447 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
430 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
448 ]),
431 ]),
449 ('hg',
432 ('hg',
450 'hg_diff_copy_and_modify_file.diff',
433 'hg_diff_copy_and_modify_file.diff',
451 [('file3', 'M',
434 [('file3', 'M',
452 {'added': 1,
435 {'added': 1,
453 'deleted': 0,
436 'deleted': 0,
454 'binary': False,
437 'binary': False,
455 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
438 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
456 MOD_FILENODE: 'modified file'}}),
439 MOD_FILENODE: 'modified file'}}),
457 ]),
440 ]),
458 ('hg',
441 ('hg',
459 'hg_diff_copy_and_chmod_file.diff',
442 'hg_diff_copy_and_chmod_file.diff',
460 [('file4', 'M',
443 [('file4', 'M',
461 {'added': 0,
444 {'added': 0,
462 'deleted': 0,
445 'deleted': 0,
463 'binary': True,
446 'binary': True,
464 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
447 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
465 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
448 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
466 ]),
449 ]),
467 ('hg',
450 ('hg',
468 'hg_diff_copy_chmod_and_edit_file.diff',
451 'hg_diff_copy_chmod_and_edit_file.diff',
469 [('file5', 'M',
452 [('file5', 'M',
470 {'added': 2,
453 {'added': 2,
471 'deleted': 1,
454 'deleted': 1,
472 'binary': False,
455 'binary': False,
473 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
456 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
474 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
457 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
475 MOD_FILENODE: 'modified file'}})]),
458 MOD_FILENODE: 'modified file'}})]),
476
459
477 # Diffs to validate rename and copy file with space in its name
460 # Diffs to validate rename and copy file with space in its name
478 ('git',
461 ('git',
479 'git_diff_rename_file_with_spaces.diff',
462 'git_diff_rename_file_with_spaces.diff',
480 [('file_with_ two spaces.txt', 'M',
463 [('file_with_ two spaces.txt', 'M',
481 {'added': 0,
464 {'added': 0,
482 'deleted': 0,
465 'deleted': 0,
483 'binary': True,
466 'binary': True,
484 'ops': {
467 'ops': {
485 RENAMED_FILENODE: (
468 RENAMED_FILENODE: (
486 'file renamed from file_with_ spaces.txt to file_with_ '
469 'file renamed from file_with_ spaces.txt to file_with_ '
487 ' two spaces.txt')}
470 ' two spaces.txt')}
488 }), ]),
471 }), ]),
489 ('hg',
472 ('hg',
490 'hg_diff_rename_file_with_spaces.diff',
473 'hg_diff_rename_file_with_spaces.diff',
491 [('file_changed _.txt', 'M',
474 [('file_changed _.txt', 'M',
492 {'added': 0,
475 {'added': 0,
493 'deleted': 0,
476 'deleted': 0,
494 'binary': True,
477 'binary': True,
495 'ops': {
478 'ops': {
496 RENAMED_FILENODE: (
479 RENAMED_FILENODE: (
497 'file renamed from file_ with update.txt to file_changed'
480 'file renamed from file_ with update.txt to file_changed'
498 ' _.txt')}
481 ' _.txt')}
499 }), ]),
482 }), ]),
500 ('hg',
483 ('hg',
501 'hg_diff_copy_file_with_spaces.diff',
484 'hg_diff_copy_file_with_spaces.diff',
502 [('file_copied_ with spaces.txt', 'M',
485 [('file_copied_ with spaces.txt', 'M',
503 {'added': 0,
486 {'added': 0,
504 'deleted': 0,
487 'deleted': 0,
505 'binary': True,
488 'binary': True,
506 'ops': {
489 'ops': {
507 COPIED_FILENODE: (
490 COPIED_FILENODE: (
508 'file copied from file_changed_without_spaces.txt to'
491 'file copied from file_changed_without_spaces.txt to'
509 ' file_copied_ with spaces.txt')}
492 ' file_copied_ with spaces.txt')}
510 }),
493 }),
511 ]),
494 ]),
512
495
513 # special signs from git
496 # special signs from git
514 ('git',
497 ('git',
515 'git_diff_binary_special_files.diff',
498 'git_diff_binary_special_files.diff',
516 [('css/_Icon\\r', 'A',
499 [('css/_Icon\\r', 'A',
517 {'added': 0,
500 {'added': 0,
518 'deleted': 0,
501 'deleted': 0,
519 'binary': True,
502 'binary': True,
520 'ops': {NEW_FILENODE: 'new file 100644',
503 'ops': {NEW_FILENODE: 'new file 100644',
521 BIN_FILENODE: 'binary diff hidden'}
504 BIN_FILENODE: 'binary diff hidden'}
522 }),
505 }),
523 ]),
506 ]),
524 ('git',
507 ('git',
525 'git_diff_binary_special_files_2.diff',
508 'git_diff_binary_special_files_2.diff',
526 [('css/Icon\\r', 'A',
509 [('css/Icon\\r', 'A',
527 {'added': 0,
510 {'added': 0,
528 'deleted': 0,
511 'deleted': 0,
529 'binary': True,
512 'binary': True,
530 'ops': {NEW_FILENODE: 'new file 100644', }
513 'ops': {NEW_FILENODE: 'new file 100644', }
531 }),
514 }),
532 ]),
515 ]),
533
516
534 ('svn',
517 ('svn',
535 'svn_diff_binary_add_file.diff',
518 'svn_diff_binary_add_file.diff',
536 [('intl.dll', 'A',
519 [('intl.dll', 'A',
537 {'added': 0,
520 {'added': 0,
538 'deleted': 0,
521 'deleted': 0,
539 'binary': False,
522 'binary': False,
540 'ops': {NEW_FILENODE: 'new file 10644',
523 'ops': {NEW_FILENODE: 'new file 10644',
541 #TODO(Marcink): depends on binary detection on svn patches
524 #TODO(Marcink): depends on binary detection on svn patches
542 # BIN_FILENODE: 'binary diff hidden'
525 # BIN_FILENODE: 'binary diff hidden'
543 }
526 }
544 }),
527 }),
545 ]),
528 ]),
546
529
547 ('svn',
530 ('svn',
548 'svn_diff_multiple_changes.diff',
531 'svn_diff_multiple_changes.diff',
549 [('trunk/doc/images/SettingsOverlay.png', 'M',
532 [('trunk/doc/images/SettingsOverlay.png', 'M',
550 {'added': 0,
533 {'added': 0,
551 'deleted': 0,
534 'deleted': 0,
552 'binary': False,
535 'binary': False,
553 'ops': {MOD_FILENODE: 'modified file',
536 'ops': {MOD_FILENODE: 'modified file',
554 #TODO(Marcink): depends on binary detection on svn patches
537 #TODO(Marcink): depends on binary detection on svn patches
555 # BIN_FILENODE: 'binary diff hidden'
538 # BIN_FILENODE: 'binary diff hidden'
556 }
539 }
557 }),
540 }),
558 ('trunk/doc/source/de/tsvn_ch04.xml', 'M',
541 ('trunk/doc/source/de/tsvn_ch04.xml', 'M',
559 {'added': 89,
542 {'added': 89,
560 'deleted': 34,
543 'deleted': 34,
561 'binary': False,
544 'binary': False,
562 'ops': {MOD_FILENODE: 'modified file'}
545 'ops': {MOD_FILENODE: 'modified file'}
563 }),
546 }),
564 ('trunk/doc/source/en/tsvn_ch04.xml', 'M',
547 ('trunk/doc/source/en/tsvn_ch04.xml', 'M',
565 {'added': 66,
548 {'added': 66,
566 'deleted': 21,
549 'deleted': 21,
567 'binary': False,
550 'binary': False,
568 'ops': {MOD_FILENODE: 'modified file'}
551 'ops': {MOD_FILENODE: 'modified file'}
569 }),
552 }),
570 ('trunk/src/Changelog.txt', 'M',
553 ('trunk/src/Changelog.txt', 'M',
571 {'added': 2,
554 {'added': 2,
572 'deleted': 0,
555 'deleted': 0,
573 'binary': False,
556 'binary': False,
574 'ops': {MOD_FILENODE: 'modified file'}
557 'ops': {MOD_FILENODE: 'modified file'}
575 }),
558 }),
576 ('trunk/src/Resources/TortoiseProcENG.rc', 'M',
559 ('trunk/src/Resources/TortoiseProcENG.rc', 'M',
577 {'added': 19,
560 {'added': 19,
578 'deleted': 13,
561 'deleted': 13,
579 'binary': False,
562 'binary': False,
580 'ops': {MOD_FILENODE: 'modified file'}
563 'ops': {MOD_FILENODE: 'modified file'}
581 }),
564 }),
582 ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M',
565 ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M',
583 {'added': 16,
566 {'added': 16,
584 'deleted': 1,
567 'deleted': 1,
585 'binary': False,
568 'binary': False,
586 'ops': {MOD_FILENODE: 'modified file'}
569 'ops': {MOD_FILENODE: 'modified file'}
587 }),
570 }),
588 ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M',
571 ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M',
589 {'added': 3,
572 {'added': 3,
590 'deleted': 0,
573 'deleted': 0,
591 'binary': False,
574 'binary': False,
592 'ops': {MOD_FILENODE: 'modified file'}
575 'ops': {MOD_FILENODE: 'modified file'}
593 }),
576 }),
594 ('trunk/src/TortoiseProc/resource.h', 'M',
577 ('trunk/src/TortoiseProc/resource.h', 'M',
595 {'added': 2,
578 {'added': 2,
596 'deleted': 0,
579 'deleted': 0,
597 'binary': False,
580 'binary': False,
598 'ops': {MOD_FILENODE: 'modified file'}
581 'ops': {MOD_FILENODE: 'modified file'}
599 }),
582 }),
600 ('trunk/src/TortoiseShell/ShellCache.h', 'M',
583 ('trunk/src/TortoiseShell/ShellCache.h', 'M',
601 {'added': 50,
584 {'added': 50,
602 'deleted': 1,
585 'deleted': 1,
603 'binary': False,
586 'binary': False,
604 'ops': {MOD_FILENODE: 'modified file'}
587 'ops': {MOD_FILENODE: 'modified file'}
605 }),
588 }),
606 ]),
589 ]),
607
590
608
591
609 # TODO: mikhail: do we still need this?
592 # TODO: mikhail: do we still need this?
610 # (
593 # (
611 # 'hg',
594 # 'hg',
612 # 'large_diff.diff',
595 # 'large_diff.diff',
613 # [
596 # [
614 # ('.hgignore', 'A', {
597 # ('.hgignore', 'A', {
615 # 'deleted': 0, 'binary': False, 'added': 3, 'ops': {
598 # 'deleted': 0, 'binary': False, 'added': 3, 'ops': {
616 # 1: 'new file 100644'}}),
599 # 1: 'new file 100644'}}),
617 # (
600 # (
618 # 'MANIFEST.in', 'A',
601 # 'MANIFEST.in', 'A',
619 # {'deleted': 0, 'binary': False, 'added': 3, 'ops': {
602 # {'deleted': 0, 'binary': False, 'added': 3, 'ops': {
620 # 1: 'new file 100644'}}),
603 # 1: 'new file 100644'}}),
621 # (
604 # (
622 # 'README.txt', 'A',
605 # 'README.txt', 'A',
623 # {'deleted': 0, 'binary': False, 'added': 19, 'ops': {
606 # {'deleted': 0, 'binary': False, 'added': 19, 'ops': {
624 # 1: 'new file 100644'}}),
607 # 1: 'new file 100644'}}),
625 # (
608 # (
626 # 'development.ini', 'A', {
609 # 'development.ini', 'A', {
627 # 'deleted': 0, 'binary': False, 'added': 116, 'ops': {
610 # 'deleted': 0, 'binary': False, 'added': 116, 'ops': {
628 # 1: 'new file 100644'}}),
611 # 1: 'new file 100644'}}),
629 # (
612 # (
630 # 'docs/index.txt', 'A', {
613 # 'docs/index.txt', 'A', {
631 # 'deleted': 0, 'binary': False, 'added': 19, 'ops': {
614 # 'deleted': 0, 'binary': False, 'added': 19, 'ops': {
632 # 1: 'new file 100644'}}),
615 # 1: 'new file 100644'}}),
633 # (
616 # (
634 # 'ez_setup.py', 'A', {
617 # 'ez_setup.py', 'A', {
635 # 'deleted': 0, 'binary': False, 'added': 276, 'ops': {
618 # 'deleted': 0, 'binary': False, 'added': 276, 'ops': {
636 # 1: 'new file 100644'}}),
619 # 1: 'new file 100644'}}),
637 # (
620 # (
638 # 'hgapp.py', 'A', {
621 # 'hgapp.py', 'A', {
639 # 'deleted': 0, 'binary': False, 'added': 26, 'ops': {
622 # 'deleted': 0, 'binary': False, 'added': 26, 'ops': {
640 # 1: 'new file 100644'}}),
623 # 1: 'new file 100644'}}),
641 # (
624 # (
642 # 'hgwebdir.config', 'A', {
625 # 'hgwebdir.config', 'A', {
643 # 'deleted': 0, 'binary': False, 'added': 21, 'ops': {
626 # 'deleted': 0, 'binary': False, 'added': 21, 'ops': {
644 # 1: 'new file 100644'}}),
627 # 1: 'new file 100644'}}),
645 # (
628 # (
646 # 'pylons_app.egg-info/PKG-INFO', 'A', {
629 # 'pylons_app.egg-info/PKG-INFO', 'A', {
647 # 'deleted': 0, 'binary': False, 'added': 10, 'ops': {
630 # 'deleted': 0, 'binary': False, 'added': 10, 'ops': {
648 # 1: 'new file 100644'}}),
631 # 1: 'new file 100644'}}),
649 # (
632 # (
650 # 'pylons_app.egg-info/SOURCES.txt', 'A', {
633 # 'pylons_app.egg-info/SOURCES.txt', 'A', {
651 # 'deleted': 0, 'binary': False, 'added': 33, 'ops': {
634 # 'deleted': 0, 'binary': False, 'added': 33, 'ops': {
652 # 1: 'new file 100644'}}),
635 # 1: 'new file 100644'}}),
653 # (
636 # (
654 # 'pylons_app.egg-info/dependency_links.txt', 'A', {
637 # 'pylons_app.egg-info/dependency_links.txt', 'A', {
655 # 'deleted': 0, 'binary': False, 'added': 1, 'ops': {
638 # 'deleted': 0, 'binary': False, 'added': 1, 'ops': {
656 # 1: 'new file 100644'}}),
639 # 1: 'new file 100644'}}),
657 # ]
640 # ]
658 # ),
641 # ),
659 ]
642 ]
660
643
661 DIFF_FIXTURES_WITH_CONTENT = [
644 DIFF_FIXTURES_WITH_CONTENT = [
662 (
645 (
663 'hg', 'hg_diff_single_file_change_newline.diff',
646 'hg', 'hg_diff_single_file_change_newline.diff',
664 [
647 [
665 (
648 (
666 'file_b', # filename
649 'file_b', # filename
667 'A', # change
650 'A', # change
668 { # stats
651 { # stats
669 'added': 1,
652 'added': 1,
670 'deleted': 0,
653 'deleted': 0,
671 'binary': False,
654 'binary': False,
672 'ops': {NEW_FILENODE: 'new file 100644', }
655 'ops': {NEW_FILENODE: 'new file 100644', }
673 },
656 },
674 '@@ -0,0 +1 @@\n+test_content b\n' # diff
657 '@@ -0,0 +1 @@\n+test_content b\n' # diff
675 ),
658 ),
676 ],
659 ],
677 ),
660 ),
678 (
661 (
679 'hg', 'hg_diff_double_file_change_newline.diff',
662 'hg', 'hg_diff_double_file_change_newline.diff',
680 [
663 [
681 (
664 (
682 'file_b', # filename
665 'file_b', # filename
683 'A', # change
666 'A', # change
684 { # stats
667 { # stats
685 'added': 1,
668 'added': 1,
686 'deleted': 0,
669 'deleted': 0,
687 'binary': False,
670 'binary': False,
688 'ops': {NEW_FILENODE: 'new file 100644', }
671 'ops': {NEW_FILENODE: 'new file 100644', }
689 },
672 },
690 '@@ -0,0 +1 @@\n+test_content b\n' # diff
673 '@@ -0,0 +1 @@\n+test_content b\n' # diff
691 ),
674 ),
692 (
675 (
693 'file_c', # filename
676 'file_c', # filename
694 'A', # change
677 'A', # change
695 { # stats
678 { # stats
696 'added': 1,
679 'added': 1,
697 'deleted': 0,
680 'deleted': 0,
698 'binary': False,
681 'binary': False,
699 'ops': {NEW_FILENODE: 'new file 100644', }
682 'ops': {NEW_FILENODE: 'new file 100644', }
700 },
683 },
701 '@@ -0,0 +1 @@\n+test_content c\n' # diff
684 '@@ -0,0 +1 @@\n+test_content c\n' # diff
702 ),
685 ),
703 ],
686 ],
704 ),
687 ),
705 (
688 (
706 'hg', 'hg_diff_double_file_change_double_newline.diff',
689 'hg', 'hg_diff_double_file_change_double_newline.diff',
707 [
690 [
708 (
691 (
709 'file_b', # filename
692 'file_b', # filename
710 'A', # change
693 'A', # change
711 { # stats
694 { # stats
712 'added': 1,
695 'added': 1,
713 'deleted': 0,
696 'deleted': 0,
714 'binary': False,
697 'binary': False,
715 'ops': {NEW_FILENODE: 'new file 100644', }
698 'ops': {NEW_FILENODE: 'new file 100644', }
716 },
699 },
717 '@@ -0,0 +1 @@\n+test_content b\n\n' # diff
700 '@@ -0,0 +1 @@\n+test_content b\n\n' # diff
718 ),
701 ),
719 (
702 (
720 'file_c', # filename
703 'file_c', # filename
721 'A', # change
704 'A', # change
722 { # stats
705 { # stats
723 'added': 1,
706 'added': 1,
724 'deleted': 0,
707 'deleted': 0,
725 'binary': False,
708 'binary': False,
726 'ops': {NEW_FILENODE: 'new file 100644', }
709 'ops': {NEW_FILENODE: 'new file 100644', }
727 },
710 },
728 '@@ -0,0 +1 @@\n+test_content c\n' # diff
711 '@@ -0,0 +1 @@\n+test_content c\n' # diff
729 ),
712 ),
730 ],
713 ],
731 ),
714 ),
732 (
715 (
733 'hg', 'hg_diff_four_file_change_newline.diff',
716 'hg', 'hg_diff_four_file_change_newline.diff',
734 [
717 [
735 (
718 (
736 'file', # filename
719 'file', # filename
737 'A', # change
720 'A', # change
738 { # stats
721 { # stats
739 'added': 1,
722 'added': 1,
740 'deleted': 0,
723 'deleted': 0,
741 'binary': False,
724 'binary': False,
742 'ops': {NEW_FILENODE: 'new file 100644', }
725 'ops': {NEW_FILENODE: 'new file 100644', }
743 },
726 },
744 '@@ -0,0 +1,1 @@\n+file\n' # diff
727 '@@ -0,0 +1,1 @@\n+file\n' # diff
745 ),
728 ),
746 (
729 (
747 'file2', # filename
730 'file2', # filename
748 'A', # change
731 'A', # change
749 { # stats
732 { # stats
750 'added': 1,
733 'added': 1,
751 'deleted': 0,
734 'deleted': 0,
752 'binary': False,
735 'binary': False,
753 'ops': {NEW_FILENODE: 'new file 100644', }
736 'ops': {NEW_FILENODE: 'new file 100644', }
754 },
737 },
755 '@@ -0,0 +1,1 @@\n+another line\n' # diff
738 '@@ -0,0 +1,1 @@\n+another line\n' # diff
756 ),
739 ),
757 (
740 (
758 'file3', # filename
741 'file3', # filename
759 'A', # change
742 'A', # change
760 { # stats
743 { # stats
761 'added': 1,
744 'added': 1,
762 'deleted': 0,
745 'deleted': 0,
763 'binary': False,
746 'binary': False,
764 'ops': {NEW_FILENODE: 'new file 100644', }
747 'ops': {NEW_FILENODE: 'new file 100644', }
765 },
748 },
766 '@@ -0,0 +1,1 @@\n+newline\n' # diff
749 '@@ -0,0 +1,1 @@\n+newline\n' # diff
767 ),
750 ),
768 (
751 (
769 'file4', # filename
752 'file4', # filename
770 'A', # change
753 'A', # change
771 { # stats
754 { # stats
772 'added': 1,
755 'added': 1,
773 'deleted': 0,
756 'deleted': 0,
774 'binary': False,
757 'binary': False,
775 'ops': {NEW_FILENODE: 'new file 100644', }
758 'ops': {NEW_FILENODE: 'new file 100644', }
776 },
759 },
777 '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff
760 '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff
778 ),
761 ),
779 ],
762 ],
780 ),
763 ),
781
764
782 ]
765 ]
783
766
784
767
785 diff_class = {
768 diff_class = {
786 'git': GitDiff,
769 'git': GitDiff,
787 'hg': MercurialDiff,
770 'hg': MercurialDiff,
788 'svn': SubversionDiff,
771 'svn': SubversionDiff,
789 }
772 }
790
773
791
774
792 @pytest.fixture(params=DIFF_FIXTURES)
775 @pytest.fixture(params=DIFF_FIXTURES)
793 def diff_fixture(request):
776 def diff_fixture(request):
794 vcs, diff_fixture, expected = request.param
777 vcs, diff_fixture, expected = request.param
795 diff_txt = fixture.load_resource(diff_fixture)
778 diff_txt = fixture.load_resource(diff_fixture)
796 diff = diff_class[vcs](diff_txt)
779 diff = diff_class[vcs](diff_txt)
797 return diff, expected
780 return diff, expected
798
781
799
782
800 def test_diff_lib(diff_fixture):
783 def test_diff_lib(diff_fixture):
801 diff, expected_data = diff_fixture
784 diff, expected_data = diff_fixture
802 diff_proc = DiffProcessor(diff)
785 diff_proc = DiffProcessor(diff)
803 diff_proc_d = diff_proc.prepare()
786 diff_proc_d = diff_proc.prepare()
804 data = [(x['filename'], x['operation'], x['stats']) for x in diff_proc_d]
787 data = [(x['filename'], x['operation'], x['stats']) for x in diff_proc_d]
805 assert expected_data == data
788 assert expected_data == data
806
789
807
790
808 @pytest.fixture(params=DIFF_FIXTURES_WITH_CONTENT)
791 @pytest.fixture(params=DIFF_FIXTURES_WITH_CONTENT)
809 def diff_fixture_w_content(request):
792 def diff_fixture_w_content(request):
810 vcs, diff_fixture, expected = request.param
793 vcs, diff_fixture, expected = request.param
811 diff_txt = fixture.load_resource(diff_fixture)
794 diff_txt = fixture.load_resource(diff_fixture)
812 diff = diff_class[vcs](diff_txt)
795 diff = diff_class[vcs](diff_txt)
813 return diff, expected
796 return diff, expected
814
797
815
798
816 def test_diff_lib_newlines(diff_fixture_w_content):
799 def test_diff_lib_newlines(diff_fixture_w_content):
817 diff, expected_data = diff_fixture_w_content
800 diff, expected_data = diff_fixture_w_content
818 diff_proc = DiffProcessor(diff)
801 diff_proc = DiffProcessor(diff)
819 diff_proc_d = diff_proc.prepare()
802 diff_proc_d = diff_proc.prepare()
820 data = [(x['filename'], x['operation'], x['stats'], x['raw_diff'])
803 data = [(x['filename'], x['operation'], x['stats'], x['raw_diff'])
821 for x in diff_proc_d]
804 for x in diff_proc_d]
822 assert expected_data == data
805 assert expected_data == data
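
Note: the parametrized tests above all reduce to one pattern: load a fixture diff, wrap it in the backend-specific diff class, and feed it to DiffProcessor. The sketch below is illustrative only; it reuses the module-level names shown in the file above (fixture, diff_class, DiffProcessor), and the helper name itself is hypothetical.

def summarize_diff_fixture(vcs, fixture_name):
    """Return (filename, operation, stats) tuples for one fixture diff file.

    Mirrors test_diff_lib: for a given fixture the result should match the
    corresponding DIFF_FIXTURES entry.
    """
    diff_txt = fixture.load_resource(fixture_name)   # raw diff text from the fixtures dir
    diff = diff_class[vcs](diff_txt)                 # GitDiff / MercurialDiff / SubversionDiff
    diff_data = DiffProcessor(diff).prepare()        # parsed per-file entries
    return [(item['filename'], item['operation'], item['stats'])
            for item in diff_data]

# Example: summarize_diff_fixture('git', 'git_diff_chmod.diff') should yield the
# same (filename, operation, stats) tuple that DIFF_FIXTURES records for that file.
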
@@ -1,166 +1,166 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import decimal
22 import decimal
23 import io
23 import io
24 import textwrap
24 import textwrap
25
25
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib.ext_json import json
28 from rhodecode.lib.ext_json import json
29 from rhodecode.lib.ext_json import formatted_json
29 from rhodecode.lib.ext_json import formatted_json
30 from rhodecode.translation import _, _pluralize
30 from rhodecode.translation import _, _pluralize
31
31
32
32
33 class Timezone(datetime.tzinfo):
33 class Timezone(datetime.tzinfo):
34 def __init__(self, hours):
34 def __init__(self, hours):
35 self.hours = hours
35 self.hours = hours
36
36
37 def utcoffset(self, unused_dt):
37 def utcoffset(self, unused_dt):
38 return datetime.timedelta(hours=self.hours)
38 return datetime.timedelta(hours=self.hours)
39
39
40
40
41 def test_dumps_set():
41 def test_dumps_set():
42 result = json.dumps(set((1, 2, 3)))
42 result = json.dumps(set((1, 2, 3)))
43 # We cannot infer what the order of result is going to be
43 # We cannot infer what the order of result is going to be
44 result = json.loads(result)
44 result = json.loads(result)
45 assert isinstance(result, list)
45 assert isinstance(result, list)
46 assert [1, 2, 3] == sorted(result)
46 assert [1, 2, 3] == sorted(result)
47
47
48
48
49 def test_dumps_decimal():
49 def test_dumps_decimal():
50 assert '"1.5"' == json.dumps(decimal.Decimal('1.5'))
50 assert '"1.5"' == json.dumps(decimal.Decimal('1.5'))
51
51
52
52
53 def test_dumps_complex():
53 def test_dumps_complex():
54 assert "[0.0, 1.0]" == json.dumps(1j)
54 assert "[0.0, 1.0]" == json.dumps(1j)
55 assert "[1.0, 0.0]" == json.dumps(1 + 0j)
55 assert "[1.0, 0.0]" == json.dumps(1 + 0j)
56 assert "[1.1, 1.2]" == json.dumps(1.1 + 1.2j)
56 assert "[1.1, 1.2]" == json.dumps(1.1 + 1.2j)
57
57
58
58
59 def test_dumps_object_with_json_method():
59 def test_dumps_object_with_json_method():
60 class SerializableObject(object):
60 class SerializableObject(object):
61 def __json__(self):
61 def __json__(self):
62 return 'foo'
62 return 'foo'
63
63
64 assert '"foo"' == json.dumps(SerializableObject())
64 assert '"foo"' == json.dumps(SerializableObject())
65
65
66
66
67 def test_dumps_object_with_json_attribute():
67 def test_dumps_object_with_json_attribute():
68 class SerializableObject(object):
68 class SerializableObject(object):
69 __json__ = 'foo'
69 __json__ = 'foo'
70
70
71 assert '"foo"' == json.dumps(SerializableObject())
71 assert '"foo"' == json.dumps(SerializableObject())
72
72
73
73
74 def test_dumps_time():
74 def test_dumps_time():
75 assert '"03:14:15.926"' == json.dumps(datetime.time(3, 14, 15, 926535))
75 assert '"03:14:15.926"' == json.dumps(datetime.time(3, 14, 15, 926535))
76
76
77
77
78 def test_dumps_time_no_microseconds():
78 def test_dumps_time_no_microseconds():
79 assert '"03:14:15"' == json.dumps(datetime.time(3, 14, 15))
79 assert '"03:14:15"' == json.dumps(datetime.time(3, 14, 15))
80
80
81
81
82 def test_dumps_time_with_timezone():
82 def test_dumps_time_with_timezone():
83 with pytest.raises(TypeError) as excinfo:
83 with pytest.raises(TypeError) as excinfo:
84 json.dumps(datetime.time(3, 14, 15, 926535, Timezone(0)))
84 json.dumps(datetime.time(3, 14, 15, 926535, Timezone(0)))
85
85
86 error_msg = str(excinfo.value)
86 error_msg = str(excinfo.value)
87 assert 'Time-zone aware times are not JSON serializable' in error_msg
87 assert 'Time-zone aware times are not JSON serializable' in error_msg
88
88
89
89
90 def test_dumps_date():
90 def test_dumps_date():
91 assert '"1969-07-20"' == json.dumps(datetime.date(1969, 7, 20))
91 assert '"1969-07-20"' == json.dumps(datetime.date(1969, 7, 20))
92
92
93
93
94 def test_dumps_datetime():
94 def test_dumps_datetime():
95 json_data = json.dumps(datetime.datetime(1969, 7, 20, 3, 14, 15, 926535))
95 json_data = json.dumps(datetime.datetime(1969, 7, 20, 3, 14, 15, 926535))
96 assert '"1969-07-20T03:14:15.926"' == json_data
96 assert '"1969-07-20T03:14:15.926"' == json_data
97
97
98
98
99 def test_dumps_datetime_no_microseconds():
99 def test_dumps_datetime_no_microseconds():
100 json_data = json.dumps(datetime.datetime(1969, 7, 20, 3, 14, 15))
100 json_data = json.dumps(datetime.datetime(1969, 7, 20, 3, 14, 15))
101 assert '"1969-07-20T03:14:15"' == json_data
101 assert '"1969-07-20T03:14:15"' == json_data
102
102
103
103
104 def test_dumps_datetime_with_utc_timezone():
104 def test_dumps_datetime_with_utc_timezone():
105 json_data = json.dumps(
105 json_data = json.dumps(
106 datetime.datetime(1969, 7, 20, 3, 14, 15, 926535, Timezone(0)))
106 datetime.datetime(1969, 7, 20, 3, 14, 15, 926535, Timezone(0)))
107 assert '"1969-07-20T03:14:15.926Z"' == json_data
107 assert '"1969-07-20T03:14:15.926Z"' == json_data
108
108
109
109
110 def test_dumps_datetime_with_plus1_timezone():
110 def test_dumps_datetime_with_plus1_timezone():
111 json_data = json.dumps(
111 json_data = json.dumps(
112 datetime.datetime(1969, 7, 20, 3, 14, 15, 926535, Timezone(1)))
112 datetime.datetime(1969, 7, 20, 3, 14, 15, 926535, Timezone(1)))
113 assert '"1969-07-20T03:14:15.926+01:00"' == json_data
113 assert '"1969-07-20T03:14:15.926+01:00"' == json_data
114
114
115
115
116 def test_dumps_unserializable_class():
116 def test_dumps_unserializable_class():
117 unserializable_obj = object()
117 unserializable_obj = object()
118 with pytest.raises(TypeError) as excinfo:
118 with pytest.raises(TypeError) as excinfo:
119 json.dumps(unserializable_obj)
119 json.dumps(unserializable_obj)
120
120
121 assert repr(unserializable_obj) in str(excinfo.value)
121 assert repr(unserializable_obj) in str(excinfo.value)
122 assert 'is not JSON serializable' in str(excinfo.value)
122 assert 'is not JSON serializable' in str(excinfo.value)
123
123
124
124
125 def test_dump_is_like_dumps():
125 def test_dump_is_like_dumps():
126 data = {
126 data = {
127 'decimal': decimal.Decimal('1.5'),
127 'decimal': decimal.Decimal('1.5'),
128 'set': set([1]), # Just one element to guarantee the order
128 'set': set([1]), # Just one element to guarantee the order
129 'complex': 1 - 1j,
129 'complex': 1 - 1j,
130 'datetime': datetime.datetime(1969, 7, 20, 3, 14, 15, 926535),
130 'datetime': datetime.datetime(1969, 7, 20, 3, 14, 15, 926535),
131 'time': datetime.time(3, 14, 15, 926535),
131 'time': datetime.time(3, 14, 15, 926535),
132 'date': datetime.date(1969, 7, 20),
132 'date': datetime.date(1969, 7, 20),
133 }
133 }
134 json_buffer = io.BytesIO()
134 json_buffer = io.BytesIO()
135 json.dump(data, json_buffer)
135 json.dump(data, json_buffer)
136
136
137 assert json.dumps(data) == json_buffer.getvalue()
137 assert json.dumps(data) == json_buffer.getvalue()
138
138
139
139
140 def test_formatted_json():
140 def test_formatted_json():
141 data = {
141 data = {
142 'b': {'2': 2, '1': 1},
142 'b': {'2': 2, '1': 1},
143 'a': {'3': 3, '4': 4},
143 'a': {'3': 3, '4': 4},
144 }
144 }
145
145
146 expected_data = textwrap.dedent('''
146 expected_data = textwrap.dedent('''
147 {
147 {
148 "a": {
148 "a": {
149 "3": 3,
149 "3": 3,
150 "4": 4
150 "4": 4
151 },
151 },
152 "b": {
152 "b": {
153 "1": 1,
153 "1": 1,
154 "2": 2
154 "2": 2
155 }
155 }
156 }''').strip()
156 }''').strip()
157
157
158 assert formatted_json(data) == expected_data
158 assert formatted_json(data) == expected_data
159
159
160
160
161 def test_pylons_lazy_translation_string(baseapp):
161 def test_lazy_translation_string(baseapp):
162 data = {'label': _('hello')}
162 data = {'label': _('hello')}
163 data2 = {'label2': _pluralize('singular', 'plural', 1)}
163 data2 = {'label2': _pluralize('singular', 'plural', 1)}
164
164
165 assert json.dumps(data) == '{"label": "hello"}'
165 assert json.dumps(data) == '{"label": "hello"}'
166 assert json.dumps(data2) == '{"label2": "singular"}'
166 assert json.dumps(data2) == '{"label2": "singular"}'
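
Note: the ext_json tests above hinge on the __json__ hook: an object exposing it serializes through rhodecode.lib.ext_json.json alongside the natively handled types (datetime, date, time, Decimal, set, complex). A minimal sketch, assuming only the behaviour asserted above; the IssueStub class is hypothetical.

import datetime

from rhodecode.lib.ext_json import json


class IssueStub(object):
    """Hypothetical domain object that opts into ext_json serialization."""

    def __init__(self, title, created_on):
        self.title = title
        self.created_on = created_on  # a datetime.datetime

    def __json__(self):
        # The returned structure is encoded in turn, so the datetime inside it
        # should come out as an ISO 8601 string, matching what the tests above
        # assert for bare datetimes.
        return {'title': self.title, 'created_on': self.created_on}


# json.dumps(IssueStub('fix diff tests', datetime.datetime(1969, 7, 20, 3, 14, 15)))
# is expected to produce '{"created_on": "1969-07-20T03:14:15", "title": "fix diff tests"}'
# (key order depends on the underlying encoder).
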
@@ -1,859 +1,859 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 self.merge_patcher = mock.patch.object(
53 self.merge_patcher = mock.patch.object(
54 BackendClass, 'merge', return_value=MergeResponse(
54 BackendClass, 'merge', return_value=MergeResponse(
55 False, False, None, MergeFailureReason.UNKNOWN))
55 False, False, None, MergeFailureReason.UNKNOWN))
56 self.workspace_remove_patcher = mock.patch.object(
56 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
57 BackendClass, 'cleanup_merge_workspace')
58
58
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
61 self.comment_patcher = mock.patch(
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
63 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
64 self.notification_patcher = mock.patch(
65 'rhodecode.model.notification.NotificationModel.create')
65 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
66 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
67 self.helper_patcher = mock.patch(
68 'rhodecode.lib.helpers.url')
68 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
69 self.helper_patcher.start()
70
70
71 self.hook_patcher = mock.patch.object(PullRequestModel,
71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 '_trigger_pull_request_hook')
72 '_trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
73 self.hook_mock = self.hook_patcher.start()
74
74
75 self.invalidation_patcher = mock.patch(
75 self.invalidation_patcher = mock.patch(
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
77 self.invalidation_mock = self.invalidation_patcher.start()
78
78
79 self.pull_request = pr_util.create_pull_request(
79 self.pull_request = pr_util.create_pull_request(
80 mergeable=True, name_suffix=u'ąć')
80 mergeable=True, name_suffix=u'ąć')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84
84
85 @request.addfinalizer
85 @request.addfinalizer
86 def cleanup_pull_request():
86 def cleanup_pull_request():
87 calls = [mock.call(
87 calls = [mock.call(
88 self.pull_request, self.pull_request.author, 'create')]
88 self.pull_request, self.pull_request.author, 'create')]
89 self.hook_mock.assert_has_calls(calls)
89 self.hook_mock.assert_has_calls(calls)
90
90
91 self.workspace_remove_patcher.stop()
91 self.workspace_remove_patcher.stop()
92 self.merge_patcher.stop()
92 self.merge_patcher.stop()
93 self.comment_patcher.stop()
93 self.comment_patcher.stop()
94 self.notification_patcher.stop()
94 self.notification_patcher.stop()
95 self.helper_patcher.stop()
95 self.helper_patcher.stop()
96 self.hook_patcher.stop()
96 self.hook_patcher.stop()
97 self.invalidation_patcher.stop()
97 self.invalidation_patcher.stop()
98
98
99 return self.pull_request
99 return self.pull_request
100
100
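The fixture above starts several patchers by hand and relies on a pytest finalizer to stop them again. As a minimal, self-contained sketch of that lifecycle, assuming standard `mock` and `pytest` behaviour (the `Backend` class and names below are illustrative, not part of RhodeCode):

import mock
import pytest


class Backend(object):
    """Illustrative stand-in for a real VCS backend class."""
    def merge(self):
        return 'real merge'


@pytest.fixture
def patched_merge(request):
    patcher = mock.patch.object(Backend, 'merge', return_value='mocked merge')
    merge_mock = patcher.start()        # patch is active from here on
    request.addfinalizer(patcher.stop)  # and undone after the test, pass or fail
    return merge_mock


def test_merge_is_patched(patched_merge):
    assert Backend().merge() == 'mocked merge'
    assert patched_merge.called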
101 def test_get_all(self, pull_request):
101 def test_get_all(self, pull_request):
102 prs = PullRequestModel().get_all(pull_request.target_repo)
102 prs = PullRequestModel().get_all(pull_request.target_repo)
103 assert isinstance(prs, list)
103 assert isinstance(prs, list)
104 assert len(prs) == 1
104 assert len(prs) == 1
105
105
106 def test_count_all(self, pull_request):
106 def test_count_all(self, pull_request):
107 pr_count = PullRequestModel().count_all(pull_request.target_repo)
107 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 assert pr_count == 1
108 assert pr_count == 1
109
109
110 def test_get_awaiting_review(self, pull_request):
110 def test_get_awaiting_review(self, pull_request):
111 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
111 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 assert isinstance(prs, list)
112 assert isinstance(prs, list)
113 assert len(prs) == 1
113 assert len(prs) == 1
114
114
115 def test_count_awaiting_review(self, pull_request):
115 def test_count_awaiting_review(self, pull_request):
116 pr_count = PullRequestModel().count_awaiting_review(
116 pr_count = PullRequestModel().count_awaiting_review(
117 pull_request.target_repo)
117 pull_request.target_repo)
118 assert pr_count == 1
118 assert pr_count == 1
119
119
120 def test_get_awaiting_my_review(self, pull_request):
120 def test_get_awaiting_my_review(self, pull_request):
121 PullRequestModel().update_reviewers(
121 PullRequestModel().update_reviewers(
122 pull_request, [(pull_request.author, ['author'], False)],
122 pull_request, [(pull_request.author, ['author'], False)],
123 pull_request.author)
123 pull_request.author)
124 prs = PullRequestModel().get_awaiting_my_review(
124 prs = PullRequestModel().get_awaiting_my_review(
125 pull_request.target_repo, user_id=pull_request.author.user_id)
125 pull_request.target_repo, user_id=pull_request.author.user_id)
126 assert isinstance(prs, list)
126 assert isinstance(prs, list)
127 assert len(prs) == 1
127 assert len(prs) == 1
128
128
129 def test_count_awaiting_my_review(self, pull_request):
129 def test_count_awaiting_my_review(self, pull_request):
130 PullRequestModel().update_reviewers(
130 PullRequestModel().update_reviewers(
131 pull_request, [(pull_request.author, ['author'], False)],
131 pull_request, [(pull_request.author, ['author'], False)],
132 pull_request.author)
132 pull_request.author)
133 pr_count = PullRequestModel().count_awaiting_my_review(
133 pr_count = PullRequestModel().count_awaiting_my_review(
134 pull_request.target_repo, user_id=pull_request.author.user_id)
134 pull_request.target_repo, user_id=pull_request.author.user_id)
135 assert pr_count == 1
135 assert pr_count == 1
136
136
137 def test_delete_calls_cleanup_merge(self, pull_request):
137 def test_delete_calls_cleanup_merge(self, pull_request):
138 PullRequestModel().delete(pull_request, pull_request.author)
138 PullRequestModel().delete(pull_request, pull_request.author)
139
139
140 self.workspace_remove_mock.assert_called_once_with(
140 self.workspace_remove_mock.assert_called_once_with(
141 self.workspace_id)
141 self.workspace_id)
142
142
143 def test_close_calls_cleanup_and_hook(self, pull_request):
143 def test_close_calls_cleanup_and_hook(self, pull_request):
144 PullRequestModel().close_pull_request(
144 PullRequestModel().close_pull_request(
145 pull_request, pull_request.author)
145 pull_request, pull_request.author)
146
146
147 self.workspace_remove_mock.assert_called_once_with(
147 self.workspace_remove_mock.assert_called_once_with(
148 self.workspace_id)
148 self.workspace_id)
149 self.hook_mock.assert_called_with(
149 self.hook_mock.assert_called_with(
150 self.pull_request, self.pull_request.author, 'close')
150 self.pull_request, self.pull_request.author, 'close')
151
151
152 def test_merge_status(self, pull_request):
152 def test_merge_status(self, pull_request):
153 self.merge_mock.return_value = MergeResponse(
153 self.merge_mock.return_value = MergeResponse(
154 True, False, None, MergeFailureReason.NONE)
154 True, False, None, MergeFailureReason.NONE)
155
155
156 assert pull_request._last_merge_source_rev is None
156 assert pull_request._last_merge_source_rev is None
157 assert pull_request._last_merge_target_rev is None
157 assert pull_request._last_merge_target_rev is None
158 assert pull_request.last_merge_status is None
158 assert pull_request.last_merge_status is None
159
159
160 status, msg = PullRequestModel().merge_status(pull_request)
160 status, msg = PullRequestModel().merge_status(pull_request)
161 assert status is True
161 assert status is True
162 assert msg.eval() == 'This pull request can be automatically merged.'
162 assert msg.eval() == 'This pull request can be automatically merged.'
163 self.merge_mock.assert_called_with(
163 self.merge_mock.assert_called_with(
164 pull_request.target_ref_parts,
164 pull_request.target_ref_parts,
165 pull_request.source_repo.scm_instance(),
165 pull_request.source_repo.scm_instance(),
166 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
166 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
167 use_rebase=False, close_branch=False)
167 use_rebase=False, close_branch=False)
168
168
169 assert pull_request._last_merge_source_rev == self.source_commit
169 assert pull_request._last_merge_source_rev == self.source_commit
170 assert pull_request._last_merge_target_rev == self.target_commit
170 assert pull_request._last_merge_target_rev == self.target_commit
171 assert pull_request.last_merge_status is MergeFailureReason.NONE
171 assert pull_request.last_merge_status is MergeFailureReason.NONE
172
172
173 self.merge_mock.reset_mock()
173 self.merge_mock.reset_mock()
174 status, msg = PullRequestModel().merge_status(pull_request)
174 status, msg = PullRequestModel().merge_status(pull_request)
175 assert status is True
175 assert status is True
176 assert msg.eval() == 'This pull request can be automatically merged.'
176 assert msg.eval() == 'This pull request can be automatically merged.'
177 assert self.merge_mock.called is False
177 assert self.merge_mock.called is False
178
178
179 def test_merge_status_known_failure(self, pull_request):
179 def test_merge_status_known_failure(self, pull_request):
180 self.merge_mock.return_value = MergeResponse(
180 self.merge_mock.return_value = MergeResponse(
181 False, False, None, MergeFailureReason.MERGE_FAILED)
181 False, False, None, MergeFailureReason.MERGE_FAILED)
182
182
183 assert pull_request._last_merge_source_rev is None
183 assert pull_request._last_merge_source_rev is None
184 assert pull_request._last_merge_target_rev is None
184 assert pull_request._last_merge_target_rev is None
185 assert pull_request.last_merge_status is None
185 assert pull_request.last_merge_status is None
186
186
187 status, msg = PullRequestModel().merge_status(pull_request)
187 status, msg = PullRequestModel().merge_status(pull_request)
188 assert status is False
188 assert status is False
189 assert (
189 assert (
190 msg.eval() ==
190 msg.eval() ==
191 'This pull request cannot be merged because of merge conflicts.')
191 'This pull request cannot be merged because of merge conflicts.')
192 self.merge_mock.assert_called_with(
192 self.merge_mock.assert_called_with(
193 pull_request.target_ref_parts,
193 pull_request.target_ref_parts,
194 pull_request.source_repo.scm_instance(),
194 pull_request.source_repo.scm_instance(),
195 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
195 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
196 use_rebase=False, close_branch=False)
196 use_rebase=False, close_branch=False)
197
197
198 assert pull_request._last_merge_source_rev == self.source_commit
198 assert pull_request._last_merge_source_rev == self.source_commit
199 assert pull_request._last_merge_target_rev == self.target_commit
199 assert pull_request._last_merge_target_rev == self.target_commit
200 assert (
200 assert (
201 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
201 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
202
202
203 self.merge_mock.reset_mock()
203 self.merge_mock.reset_mock()
204 status, msg = PullRequestModel().merge_status(pull_request)
204 status, msg = PullRequestModel().merge_status(pull_request)
205 assert status is False
205 assert status is False
206 assert (
206 assert (
207 msg.eval() ==
207 msg.eval() ==
208 'This pull request cannot be merged because of merge conflicts.')
208 'This pull request cannot be merged because of merge conflicts.')
209 assert self.merge_mock.called is False
209 assert self.merge_mock.called is False
210
210
211 def test_merge_status_unknown_failure(self, pull_request):
211 def test_merge_status_unknown_failure(self, pull_request):
212 self.merge_mock.return_value = MergeResponse(
212 self.merge_mock.return_value = MergeResponse(
213 False, False, None, MergeFailureReason.UNKNOWN)
213 False, False, None, MergeFailureReason.UNKNOWN)
214
214
215 assert pull_request._last_merge_source_rev is None
215 assert pull_request._last_merge_source_rev is None
216 assert pull_request._last_merge_target_rev is None
216 assert pull_request._last_merge_target_rev is None
217 assert pull_request.last_merge_status is None
217 assert pull_request.last_merge_status is None
218
218
219 status, msg = PullRequestModel().merge_status(pull_request)
219 status, msg = PullRequestModel().merge_status(pull_request)
220 assert status is False
220 assert status is False
221 assert msg.eval() == (
221 assert msg.eval() == (
222 'This pull request cannot be merged because of an unhandled'
222 'This pull request cannot be merged because of an unhandled'
223 ' exception.')
223 ' exception.')
224 self.merge_mock.assert_called_with(
224 self.merge_mock.assert_called_with(
225 pull_request.target_ref_parts,
225 pull_request.target_ref_parts,
226 pull_request.source_repo.scm_instance(),
226 pull_request.source_repo.scm_instance(),
227 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
227 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
228 use_rebase=False, close_branch=False)
228 use_rebase=False, close_branch=False)
229
229
230 assert pull_request._last_merge_source_rev is None
230 assert pull_request._last_merge_source_rev is None
231 assert pull_request._last_merge_target_rev is None
231 assert pull_request._last_merge_target_rev is None
232 assert pull_request.last_merge_status is None
232 assert pull_request.last_merge_status is None
233
233
234 self.merge_mock.reset_mock()
234 self.merge_mock.reset_mock()
235 status, msg = PullRequestModel().merge_status(pull_request)
235 status, msg = PullRequestModel().merge_status(pull_request)
236 assert status is False
236 assert status is False
237 assert msg.eval() == (
237 assert msg.eval() == (
238 'This pull request cannot be merged because of an unhandled'
238 'This pull request cannot be merged because of an unhandled'
239 ' exception.')
239 ' exception.')
240 assert self.merge_mock.called is True
240 assert self.merge_mock.called is True
241
241
242 def test_merge_status_when_target_is_locked(self, pull_request):
242 def test_merge_status_when_target_is_locked(self, pull_request):
243 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
243 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
244 status, msg = PullRequestModel().merge_status(pull_request)
244 status, msg = PullRequestModel().merge_status(pull_request)
245 assert status is False
245 assert status is False
246 assert msg.eval() == (
246 assert msg.eval() == (
247 'This pull request cannot be merged because the target repository'
247 'This pull request cannot be merged because the target repository'
248 ' is locked.')
248 ' is locked.')
249
249
250 def test_merge_status_requirements_check_target(self, pull_request):
250 def test_merge_status_requirements_check_target(self, pull_request):
251
251
252 def has_largefiles(self, repo):
252 def has_largefiles(self, repo):
253 return repo == pull_request.source_repo
253 return repo == pull_request.source_repo
254
254
255 patcher = mock.patch.object(
255 patcher = mock.patch.object(
256 PullRequestModel, '_has_largefiles', has_largefiles)
256 PullRequestModel, '_has_largefiles', has_largefiles)
257 with patcher:
257 with patcher:
258 status, msg = PullRequestModel().merge_status(pull_request)
258 status, msg = PullRequestModel().merge_status(pull_request)
259
259
260 assert status is False
260 assert status is False
261 assert msg == 'Target repository large files support is disabled.'
261 assert msg == 'Target repository large files support is disabled.'
262
262
263 def test_merge_status_requirements_check_source(self, pull_request):
263 def test_merge_status_requirements_check_source(self, pull_request):
264
264
265 def has_largefiles(self, repo):
265 def has_largefiles(self, repo):
266 return repo == pull_request.target_repo
266 return repo == pull_request.target_repo
267
267
268 patcher = mock.patch.object(
268 patcher = mock.patch.object(
269 PullRequestModel, '_has_largefiles', has_largefiles)
269 PullRequestModel, '_has_largefiles', has_largefiles)
270 with patcher:
270 with patcher:
271 status, msg = PullRequestModel().merge_status(pull_request)
271 status, msg = PullRequestModel().merge_status(pull_request)
272
272
273 assert status is False
273 assert status is False
274 assert msg == 'Source repository large files support is disabled.'
274 assert msg == 'Source repository large files support is disabled.'
275
275
276 def test_merge(self, pull_request, merge_extras):
276 def test_merge(self, pull_request, merge_extras):
277 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
277 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
278 merge_ref = Reference(
278 merge_ref = Reference(
279 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
279 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
280 self.merge_mock.return_value = MergeResponse(
280 self.merge_mock.return_value = MergeResponse(
281 True, True, merge_ref, MergeFailureReason.NONE)
281 True, True, merge_ref, MergeFailureReason.NONE)
282
282
283 merge_extras['repository'] = pull_request.target_repo.repo_name
283 merge_extras['repository'] = pull_request.target_repo.repo_name
284 PullRequestModel().merge(
284 PullRequestModel().merge(
285 pull_request, pull_request.author, extras=merge_extras)
285 pull_request, pull_request.author, extras=merge_extras)
286
286
287 message = (
287 message = (
288 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
288 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
289 u'\n\n {pr_title}'.format(
289 u'\n\n {pr_title}'.format(
290 pr_id=pull_request.pull_request_id,
290 pr_id=pull_request.pull_request_id,
291 source_repo=safe_unicode(
291 source_repo=safe_unicode(
292 pull_request.source_repo.scm_instance().name),
292 pull_request.source_repo.scm_instance().name),
293 source_ref_name=pull_request.source_ref_parts.name,
293 source_ref_name=pull_request.source_ref_parts.name,
294 pr_title=safe_unicode(pull_request.title)
294 pr_title=safe_unicode(pull_request.title)
295 )
295 )
296 )
296 )
297 self.merge_mock.assert_called_with(
297 self.merge_mock.assert_called_with(
298 pull_request.target_ref_parts,
298 pull_request.target_ref_parts,
299 pull_request.source_repo.scm_instance(),
299 pull_request.source_repo.scm_instance(),
300 pull_request.source_ref_parts, self.workspace_id,
300 pull_request.source_ref_parts, self.workspace_id,
301 user_name=user.username, user_email=user.email, message=message,
301 user_name=user.username, user_email=user.email, message=message,
302 use_rebase=False, close_branch=False
302 use_rebase=False, close_branch=False
303 )
303 )
304 self.invalidation_mock.assert_called_once_with(
304 self.invalidation_mock.assert_called_once_with(
305 pull_request.target_repo.repo_name)
305 pull_request.target_repo.repo_name)
306
306
307 self.hook_mock.assert_called_with(
307 self.hook_mock.assert_called_with(
308 self.pull_request, self.pull_request.author, 'merge')
308 self.pull_request, self.pull_request.author, 'merge')
309
309
310 pull_request = PullRequest.get(pull_request.pull_request_id)
310 pull_request = PullRequest.get(pull_request.pull_request_id)
311 assert (
311 assert (
312 pull_request.merge_rev ==
312 pull_request.merge_rev ==
313 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
313 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
314
314
315 def test_merge_failed(self, pull_request, merge_extras):
315 def test_merge_failed(self, pull_request, merge_extras):
316 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
316 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
317 merge_ref = Reference(
317 merge_ref = Reference(
318 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
318 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
319 self.merge_mock.return_value = MergeResponse(
319 self.merge_mock.return_value = MergeResponse(
320 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
320 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
321
321
322 merge_extras['repository'] = pull_request.target_repo.repo_name
322 merge_extras['repository'] = pull_request.target_repo.repo_name
323 PullRequestModel().merge(
323 PullRequestModel().merge(
324 pull_request, pull_request.author, extras=merge_extras)
324 pull_request, pull_request.author, extras=merge_extras)
325
325
326 message = (
326 message = (
327 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
327 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
328 u'\n\n {pr_title}'.format(
328 u'\n\n {pr_title}'.format(
329 pr_id=pull_request.pull_request_id,
329 pr_id=pull_request.pull_request_id,
330 source_repo=safe_unicode(
330 source_repo=safe_unicode(
331 pull_request.source_repo.scm_instance().name),
331 pull_request.source_repo.scm_instance().name),
332 source_ref_name=pull_request.source_ref_parts.name,
332 source_ref_name=pull_request.source_ref_parts.name,
333 pr_title=safe_unicode(pull_request.title)
333 pr_title=safe_unicode(pull_request.title)
334 )
334 )
335 )
335 )
336 self.merge_mock.assert_called_with(
336 self.merge_mock.assert_called_with(
337 pull_request.target_ref_parts,
337 pull_request.target_ref_parts,
338 pull_request.source_repo.scm_instance(),
338 pull_request.source_repo.scm_instance(),
339 pull_request.source_ref_parts, self.workspace_id,
339 pull_request.source_ref_parts, self.workspace_id,
340 user_name=user.username, user_email=user.email, message=message,
340 user_name=user.username, user_email=user.email, message=message,
341 use_rebase=False, close_branch=False
341 use_rebase=False, close_branch=False
342 )
342 )
343
343
344 pull_request = PullRequest.get(pull_request.pull_request_id)
344 pull_request = PullRequest.get(pull_request.pull_request_id)
345 assert self.invalidation_mock.called is False
345 assert self.invalidation_mock.called is False
346 assert pull_request.merge_rev is None
346 assert pull_request.merge_rev is None
347
347
348 def test_get_commit_ids(self, pull_request):
348 def test_get_commit_ids(self, pull_request):
349 # The PR has not been merged yet, so expect an exception
349 # The PR has not been merged yet, so expect an exception
350 with pytest.raises(ValueError):
350 with pytest.raises(ValueError):
351 PullRequestModel()._get_commit_ids(pull_request)
351 PullRequestModel()._get_commit_ids(pull_request)
352
352
353 # Merge revision is in the revisions list
353 # Merge revision is in the revisions list
354 pull_request.merge_rev = pull_request.revisions[0]
354 pull_request.merge_rev = pull_request.revisions[0]
355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
356 assert commit_ids == pull_request.revisions
356 assert commit_ids == pull_request.revisions
357
357
358 # Merge revision is not in the revisions list
358 # Merge revision is not in the revisions list
359 pull_request.merge_rev = 'f000' * 10
359 pull_request.merge_rev = 'f000' * 10
360 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
360 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
361 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
361 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
362
362
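For orientation, a hedged sketch of the commit-id behaviour the test above exercises; the real implementation is `PullRequestModel._get_commit_ids` in rhodecode.model.pull_request and may differ in detail:

def get_commit_ids(revisions, merge_rev):
    # Illustrative only: mirrors the assertions in test_get_commit_ids above.
    if merge_rev is None:
        raise ValueError('This pull request was not merged yet')
    if merge_rev in revisions:
        return revisions
    return revisions + [merge_rev]


assert get_commit_ids(['a1', 'b2'], 'a1') == ['a1', 'b2']
assert get_commit_ids(['a1', 'b2'], 'f000' * 10) == ['a1', 'b2', 'f000' * 10]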
363 def test_get_diff_from_pr_version(self, pull_request):
363 def test_get_diff_from_pr_version(self, pull_request):
364 source_repo = pull_request.source_repo
364 source_repo = pull_request.source_repo
365 source_ref_id = pull_request.source_ref_parts.commit_id
365 source_ref_id = pull_request.source_ref_parts.commit_id
366 target_ref_id = pull_request.target_ref_parts.commit_id
366 target_ref_id = pull_request.target_ref_parts.commit_id
367 diff = PullRequestModel()._get_diff_from_pr_or_version(
367 diff = PullRequestModel()._get_diff_from_pr_or_version(
368 source_repo, source_ref_id, target_ref_id, context=6)
368 source_repo, source_ref_id, target_ref_id, context=6)
369 assert 'file_1' in diff.raw
369 assert 'file_1' in diff.raw
370
370
371 def test_generate_title_returns_unicode(self):
371 def test_generate_title_returns_unicode(self):
372 title = PullRequestModel().generate_pullrequest_title(
372 title = PullRequestModel().generate_pullrequest_title(
373 source='source-dummy',
373 source='source-dummy',
374 source_ref='source-ref-dummy',
374 source_ref='source-ref-dummy',
375 target='target-dummy',
375 target='target-dummy',
376 )
376 )
377 assert type(title) == unicode
377 assert type(title) == unicode
378
378
379
379
380 @pytest.mark.usefixtures('config_stub')
380 @pytest.mark.usefixtures('config_stub')
381 class TestIntegrationMerge(object):
381 class TestIntegrationMerge(object):
382 @pytest.mark.parametrize('extra_config', (
382 @pytest.mark.parametrize('extra_config', (
383 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
383 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
384 ))
384 ))
385 def test_merge_triggers_push_hooks(
385 def test_merge_triggers_push_hooks(
386 self, pr_util, user_admin, capture_rcextensions, merge_extras,
386 self, pr_util, user_admin, capture_rcextensions, merge_extras,
387 extra_config):
387 extra_config):
388 pull_request = pr_util.create_pull_request(
388 pull_request = pr_util.create_pull_request(
389 approved=True, mergeable=True)
389 approved=True, mergeable=True)
390 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
390 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
391 merge_extras['repository'] = pull_request.target_repo.repo_name
391 merge_extras['repository'] = pull_request.target_repo.repo_name
392 Session().commit()
392 Session().commit()
393
393
394 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
394 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
395 merge_state = PullRequestModel().merge(
395 merge_state = PullRequestModel().merge(
396 pull_request, user_admin, extras=merge_extras)
396 pull_request, user_admin, extras=merge_extras)
397
397
398 assert merge_state.executed
398 assert merge_state.executed
399 assert 'pre_push' in capture_rcextensions
399 assert 'pre_push' in capture_rcextensions
400 assert 'post_push' in capture_rcextensions
400 assert 'post_push' in capture_rcextensions
401
401
402 def test_merge_can_be_rejected_by_pre_push_hook(
402 def test_merge_can_be_rejected_by_pre_push_hook(
403 self, pr_util, user_admin, capture_rcextensions, merge_extras):
403 self, pr_util, user_admin, capture_rcextensions, merge_extras):
404 pull_request = pr_util.create_pull_request(
404 pull_request = pr_util.create_pull_request(
405 approved=True, mergeable=True)
405 approved=True, mergeable=True)
406 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
406 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
407 merge_extras['repository'] = pull_request.target_repo.repo_name
407 merge_extras['repository'] = pull_request.target_repo.repo_name
408 Session().commit()
408 Session().commit()
409
409
410 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
410 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
411 pre_pull.side_effect = RepositoryError("Disallow push!")
411 pre_pull.side_effect = RepositoryError("Disallow push!")
412 merge_status = PullRequestModel().merge(
412 merge_status = PullRequestModel().merge(
413 pull_request, user_admin, extras=merge_extras)
413 pull_request, user_admin, extras=merge_extras)
414
414
415 assert not merge_status.executed
415 assert not merge_status.executed
416 assert 'pre_push' not in capture_rcextensions
416 assert 'pre_push' not in capture_rcextensions
417 assert 'post_push' not in capture_rcextensions
417 assert 'post_push' not in capture_rcextensions
418
418
419 def test_merge_fails_if_target_is_locked(
419 def test_merge_fails_if_target_is_locked(
420 self, pr_util, user_regular, merge_extras):
420 self, pr_util, user_regular, merge_extras):
421 pull_request = pr_util.create_pull_request(
421 pull_request = pr_util.create_pull_request(
422 approved=True, mergeable=True)
422 approved=True, mergeable=True)
423 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
423 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
424 pull_request.target_repo.locked = locked_by
424 pull_request.target_repo.locked = locked_by
425 # TODO: johbo: Check if this can work based on the database; currently
425 # TODO: johbo: Check if this can work based on the database; currently
426 # all data is pre-computed, which is why just updating the DB is not
426 # all data is pre-computed, which is why just updating the DB is not
427 # enough.
427 # enough.
428 merge_extras['locked_by'] = locked_by
428 merge_extras['locked_by'] = locked_by
429 merge_extras['repository'] = pull_request.target_repo.repo_name
429 merge_extras['repository'] = pull_request.target_repo.repo_name
430 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
430 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
431 Session().commit()
431 Session().commit()
432 merge_status = PullRequestModel().merge(
432 merge_status = PullRequestModel().merge(
433 pull_request, user_regular, extras=merge_extras)
433 pull_request, user_regular, extras=merge_extras)
434 assert not merge_status.executed
434 assert not merge_status.executed
435
435
436
436
437 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
437 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
438 (False, 1, 0),
438 (False, 1, 0),
439 (True, 0, 1),
439 (True, 0, 1),
440 ])
440 ])
441 def test_outdated_comments(
441 def test_outdated_comments(
442 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
442 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
443 pull_request = pr_util.create_pull_request()
443 pull_request = pr_util.create_pull_request()
444 pr_util.create_inline_comment(file_path='not_in_updated_diff')
444 pr_util.create_inline_comment(file_path='not_in_updated_diff')
445
445
446 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
446 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
447 pr_util.add_one_commit()
447 pr_util.add_one_commit()
448 assert_inline_comments(
448 assert_inline_comments(
449 pull_request, visible=inlines_count, outdated=outdated_count)
449 pull_request, visible=inlines_count, outdated=outdated_count)
450 outdated_comment_mock.assert_called_with(pull_request)
450 outdated_comment_mock.assert_called_with(pull_request)
451
451
452
452
453 @pytest.fixture
453 @pytest.fixture
454 def merge_extras(user_regular):
454 def merge_extras(user_regular):
455 """
455 """
456 Context for the vcs operation when running a merge.
456 Context for the vcs operation when running a merge.
457 """
457 """
458 extras = {
458 extras = {
459 'ip': '127.0.0.1',
459 'ip': '127.0.0.1',
460 'username': user_regular.username,
460 'username': user_regular.username,
461 'action': 'push',
461 'action': 'push',
462 'repository': 'fake_target_repo_name',
462 'repository': 'fake_target_repo_name',
463 'scm': 'git',
463 'scm': 'git',
464 'config': 'fake_config_ini_path',
464 'config': 'fake_config_ini_path',
465 'make_lock': None,
465 'make_lock': None,
466 'locked_by': [None, None, None],
466 'locked_by': [None, None, None],
467 'server_url': 'http://test.example.com:5000',
467 'server_url': 'http://test.example.com:5000',
468 'hooks': ['push', 'pull'],
468 'hooks': ['push', 'pull'],
469 'is_shadow_repo': False,
469 'is_shadow_repo': False,
470 }
470 }
471 return extras
471 return extras
472
472
473
473
474 @pytest.mark.usefixtures('config_stub')
474 @pytest.mark.usefixtures('config_stub')
475 class TestUpdateCommentHandling(object):
475 class TestUpdateCommentHandling(object):
476
476
477 @pytest.fixture(autouse=True, scope='class')
477 @pytest.fixture(autouse=True, scope='class')
478 def enable_outdated_comments(self, request, baseapp):
478 def enable_outdated_comments(self, request, baseapp):
479 config_patch = mock.patch.dict(
479 config_patch = mock.patch.dict(
480 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
480 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
481 config_patch.start()
481 config_patch.start()
482
482
483 @request.addfinalizer
483 @request.addfinalizer
484 def cleanup():
484 def cleanup():
485 config_patch.stop()
485 config_patch.stop()
486
486
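The class-scoped fixture above flips a single key in `rhodecode.CONFIG` via `mock.patch.dict`. A minimal standalone illustration of that mechanism, assuming the standard `mock` library (the dict below is a stand-in, not the real CONFIG):

import mock

CONFIG = {'existing_key': 'kept'}

config_patch = mock.patch.dict(
    CONFIG, {'rhodecode_use_outdated_comments': True})
config_patch.start()
assert CONFIG['rhodecode_use_outdated_comments'] is True

config_patch.stop()  # the overlay is removed and the original dict restored
assert 'rhodecode_use_outdated_comments' not in CONFIG
assert CONFIG == {'existing_key': 'kept'}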
487 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
487 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
488 commits = [
488 commits = [
489 {'message': 'a'},
489 {'message': 'a'},
490 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
490 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
491 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
491 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
492 ]
492 ]
493 pull_request = pr_util.create_pull_request(
493 pull_request = pr_util.create_pull_request(
494 commits=commits, target_head='a', source_head='b', revisions=['b'])
494 commits=commits, target_head='a', source_head='b', revisions=['b'])
495 pr_util.create_inline_comment(file_path='file_b')
495 pr_util.create_inline_comment(file_path='file_b')
496 pr_util.add_one_commit(head='c')
496 pr_util.add_one_commit(head='c')
497
497
498 assert_inline_comments(pull_request, visible=1, outdated=0)
498 assert_inline_comments(pull_request, visible=1, outdated=0)
499
499
500 def test_comment_stays_unflagged_on_change_above(self, pr_util):
500 def test_comment_stays_unflagged_on_change_above(self, pr_util):
501 original_content = ''.join(
501 original_content = ''.join(
502 ['line {}\n'.format(x) for x in range(1, 11)])
502 ['line {}\n'.format(x) for x in range(1, 11)])
503 updated_content = 'new_line_at_top\n' + original_content
503 updated_content = 'new_line_at_top\n' + original_content
504 commits = [
504 commits = [
505 {'message': 'a'},
505 {'message': 'a'},
506 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
506 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
507 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
507 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
508 ]
508 ]
509 pull_request = pr_util.create_pull_request(
509 pull_request = pr_util.create_pull_request(
510 commits=commits, target_head='a', source_head='b', revisions=['b'])
510 commits=commits, target_head='a', source_head='b', revisions=['b'])
511
511
512 with outdated_comments_patcher():
512 with outdated_comments_patcher():
513 comment = pr_util.create_inline_comment(
513 comment = pr_util.create_inline_comment(
514 line_no=u'n8', file_path='file_b')
514 line_no=u'n8', file_path='file_b')
515 pr_util.add_one_commit(head='c')
515 pr_util.add_one_commit(head='c')
516
516
517 assert_inline_comments(pull_request, visible=1, outdated=0)
517 assert_inline_comments(pull_request, visible=1, outdated=0)
518 assert comment.line_no == u'n9'
518 assert comment.line_no == u'n9'
519
519
520 def test_comment_stays_unflagged_on_change_below(self, pr_util):
520 def test_comment_stays_unflagged_on_change_below(self, pr_util):
521 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
521 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
522 updated_content = original_content + 'new_line_at_end\n'
522 updated_content = original_content + 'new_line_at_end\n'
523 commits = [
523 commits = [
524 {'message': 'a'},
524 {'message': 'a'},
525 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
525 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
526 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
526 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
527 ]
527 ]
528 pull_request = pr_util.create_pull_request(
528 pull_request = pr_util.create_pull_request(
529 commits=commits, target_head='a', source_head='b', revisions=['b'])
529 commits=commits, target_head='a', source_head='b', revisions=['b'])
530 pr_util.create_inline_comment(file_path='file_b')
530 pr_util.create_inline_comment(file_path='file_b')
531 pr_util.add_one_commit(head='c')
531 pr_util.add_one_commit(head='c')
532
532
533 assert_inline_comments(pull_request, visible=1, outdated=0)
533 assert_inline_comments(pull_request, visible=1, outdated=0)
534
534
535 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
535 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
536 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
536 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
537 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
537 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
538 change_lines = list(base_lines)
538 change_lines = list(base_lines)
539 change_lines.insert(6, 'line 6a added\n')
539 change_lines.insert(6, 'line 6a added\n')
540
540
541 # Changes at the edges of the visible diff context (first and last line)
541 # Changes at the edges of the visible diff context (first and last line)
542 update_lines = list(change_lines)
542 update_lines = list(change_lines)
543 update_lines[0] = 'line 1 changed\n'
543 update_lines[0] = 'line 1 changed\n'
544 update_lines[-1] = 'line 12 changed\n'
544 update_lines[-1] = 'line 12 changed\n'
545
545
546 def file_b(lines):
546 def file_b(lines):
547 return FileNode('file_b', ''.join(lines))
547 return FileNode('file_b', ''.join(lines))
548
548
549 commits = [
549 commits = [
550 {'message': 'a', 'added': [file_b(base_lines)]},
550 {'message': 'a', 'added': [file_b(base_lines)]},
551 {'message': 'b', 'changed': [file_b(change_lines)]},
551 {'message': 'b', 'changed': [file_b(change_lines)]},
552 {'message': 'c', 'changed': [file_b(update_lines)]},
552 {'message': 'c', 'changed': [file_b(update_lines)]},
553 ]
553 ]
554
554
555 pull_request = pr_util.create_pull_request(
555 pull_request = pr_util.create_pull_request(
556 commits=commits, target_head='a', source_head='b', revisions=['b'])
556 commits=commits, target_head='a', source_head='b', revisions=['b'])
557 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
557 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
558
558
559 with outdated_comments_patcher():
559 with outdated_comments_patcher():
560 pr_util.add_one_commit(head='c')
560 pr_util.add_one_commit(head='c')
561 assert_inline_comments(pull_request, visible=0, outdated=1)
561 assert_inline_comments(pull_request, visible=0, outdated=1)
562
562
563 @pytest.mark.parametrize("change, content", [
563 @pytest.mark.parametrize("change, content", [
564 ('changed', 'changed\n'),
564 ('changed', 'changed\n'),
565 ('removed', ''),
565 ('removed', ''),
566 ], ids=['changed', 'removed'])
566 ], ids=['changed', 'removed'])
567 def test_comment_flagged_on_change(self, pr_util, change, content):
567 def test_comment_flagged_on_change(self, pr_util, change, content):
568 commits = [
568 commits = [
569 {'message': 'a'},
569 {'message': 'a'},
570 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
570 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
571 {'message': 'c', change: [FileNode('file_b', content)]},
571 {'message': 'c', change: [FileNode('file_b', content)]},
572 ]
572 ]
573 pull_request = pr_util.create_pull_request(
573 pull_request = pr_util.create_pull_request(
574 commits=commits, target_head='a', source_head='b', revisions=['b'])
574 commits=commits, target_head='a', source_head='b', revisions=['b'])
575 pr_util.create_inline_comment(file_path='file_b')
575 pr_util.create_inline_comment(file_path='file_b')
576
576
577 with outdated_comments_patcher():
577 with outdated_comments_patcher():
578 pr_util.add_one_commit(head='c')
578 pr_util.add_one_commit(head='c')
579 assert_inline_comments(pull_request, visible=0, outdated=1)
579 assert_inline_comments(pull_request, visible=0, outdated=1)
580
580
581
581
582 @pytest.mark.usefixtures('config_stub')
582 @pytest.mark.usefixtures('config_stub')
583 class TestUpdateChangedFiles(object):
583 class TestUpdateChangedFiles(object):
584
584
585 def test_no_changes_on_unchanged_diff(self, pr_util):
585 def test_no_changes_on_unchanged_diff(self, pr_util):
586 commits = [
586 commits = [
587 {'message': 'a'},
587 {'message': 'a'},
588 {'message': 'b',
588 {'message': 'b',
589 'added': [FileNode('file_b', 'test_content b\n')]},
589 'added': [FileNode('file_b', 'test_content b\n')]},
590 {'message': 'c',
590 {'message': 'c',
591 'added': [FileNode('file_c', 'test_content c\n')]},
591 'added': [FileNode('file_c', 'test_content c\n')]},
592 ]
592 ]
593 # open a PR from a to b, adding file_b
593 # open a PR from a to b, adding file_b
594 pull_request = pr_util.create_pull_request(
594 pull_request = pr_util.create_pull_request(
595 commits=commits, target_head='a', source_head='b', revisions=['b'],
595 commits=commits, target_head='a', source_head='b', revisions=['b'],
596 name_suffix='per-file-review')
596 name_suffix='per-file-review')
597
597
598 # update the PR, adding a new file file_c
598 # update the PR, adding a new file file_c
599 pr_util.add_one_commit(head='c')
599 pr_util.add_one_commit(head='c')
600
600
601 assert_pr_file_changes(
601 assert_pr_file_changes(
602 pull_request,
602 pull_request,
603 added=['file_c'],
603 added=['file_c'],
604 modified=[],
604 modified=[],
605 removed=[])
605 removed=[])
606
606
607 def test_modify_and_undo_modification_diff(self, pr_util):
607 def test_modify_and_undo_modification_diff(self, pr_util):
608 commits = [
608 commits = [
609 {'message': 'a'},
609 {'message': 'a'},
610 {'message': 'b',
610 {'message': 'b',
611 'added': [FileNode('file_b', 'test_content b\n')]},
611 'added': [FileNode('file_b', 'test_content b\n')]},
612 {'message': 'c',
612 {'message': 'c',
613 'changed': [FileNode('file_b', 'test_content b modified\n')]},
613 'changed': [FileNode('file_b', 'test_content b modified\n')]},
614 {'message': 'd',
614 {'message': 'd',
615 'changed': [FileNode('file_b', 'test_content b\n')]},
615 'changed': [FileNode('file_b', 'test_content b\n')]},
616 ]
616 ]
617 # open a PR from a to b, adding file_b
617 # open a PR from a to b, adding file_b
618 pull_request = pr_util.create_pull_request(
618 pull_request = pr_util.create_pull_request(
619 commits=commits, target_head='a', source_head='b', revisions=['b'],
619 commits=commits, target_head='a', source_head='b', revisions=['b'],
620 name_suffix='per-file-review')
620 name_suffix='per-file-review')
621
621
622 # update the PR, modifying file_b
622 # update the PR, modifying file_b
623 pr_util.add_one_commit(head='c')
623 pr_util.add_one_commit(head='c')
624
624
625 assert_pr_file_changes(
625 assert_pr_file_changes(
626 pull_request,
626 pull_request,
627 added=[],
627 added=[],
628 modified=['file_b'],
628 modified=['file_b'],
629 removed=[])
629 removed=[])
630
630
631 # move the head again to d, which rolls back the change,
631 # move the head again to d, which rolls back the change,
632 # meaning we should indicate no changes
632 # meaning we should indicate no changes
633 pr_util.add_one_commit(head='d')
633 pr_util.add_one_commit(head='d')
634
634
635 assert_pr_file_changes(
635 assert_pr_file_changes(
636 pull_request,
636 pull_request,
637 added=[],
637 added=[],
638 modified=[],
638 modified=[],
639 removed=[])
639 removed=[])
640
640
641 def test_updated_all_files_in_pr(self, pr_util):
641 def test_updated_all_files_in_pr(self, pr_util):
642 commits = [
642 commits = [
643 {'message': 'a'},
643 {'message': 'a'},
644 {'message': 'b', 'added': [
644 {'message': 'b', 'added': [
645 FileNode('file_a', 'test_content a\n'),
645 FileNode('file_a', 'test_content a\n'),
646 FileNode('file_b', 'test_content b\n'),
646 FileNode('file_b', 'test_content b\n'),
647 FileNode('file_c', 'test_content c\n')]},
647 FileNode('file_c', 'test_content c\n')]},
648 {'message': 'c', 'changed': [
648 {'message': 'c', 'changed': [
649 FileNode('file_a', 'test_content a changed\n'),
649 FileNode('file_a', 'test_content a changed\n'),
650 FileNode('file_b', 'test_content b changed\n'),
650 FileNode('file_b', 'test_content b changed\n'),
651 FileNode('file_c', 'test_content c changed\n')]},
651 FileNode('file_c', 'test_content c changed\n')]},
652 ]
652 ]
653 # open a PR from a to b, changing 3 files
653 # open a PR from a to b, changing 3 files
654 pull_request = pr_util.create_pull_request(
654 pull_request = pr_util.create_pull_request(
655 commits=commits, target_head='a', source_head='b', revisions=['b'],
655 commits=commits, target_head='a', source_head='b', revisions=['b'],
656 name_suffix='per-file-review')
656 name_suffix='per-file-review')
657
657
658 pr_util.add_one_commit(head='c')
658 pr_util.add_one_commit(head='c')
659
659
660 assert_pr_file_changes(
660 assert_pr_file_changes(
661 pull_request,
661 pull_request,
662 added=[],
662 added=[],
663 modified=['file_a', 'file_b', 'file_c'],
663 modified=['file_a', 'file_b', 'file_c'],
664 removed=[])
664 removed=[])
665
665
666 def test_updated_and_removed_all_files_in_pr(self, pr_util):
666 def test_updated_and_removed_all_files_in_pr(self, pr_util):
667 commits = [
667 commits = [
668 {'message': 'a'},
668 {'message': 'a'},
669 {'message': 'b', 'added': [
669 {'message': 'b', 'added': [
670 FileNode('file_a', 'test_content a\n'),
670 FileNode('file_a', 'test_content a\n'),
671 FileNode('file_b', 'test_content b\n'),
671 FileNode('file_b', 'test_content b\n'),
672 FileNode('file_c', 'test_content c\n')]},
672 FileNode('file_c', 'test_content c\n')]},
673 {'message': 'c', 'removed': [
673 {'message': 'c', 'removed': [
674 FileNode('file_a', 'test_content a changed\n'),
674 FileNode('file_a', 'test_content a changed\n'),
675 FileNode('file_b', 'test_content b changed\n'),
675 FileNode('file_b', 'test_content b changed\n'),
676 FileNode('file_c', 'test_content c changed\n')]},
676 FileNode('file_c', 'test_content c changed\n')]},
677 ]
677 ]
678 # open a PR from a to b, removing 3 files
678 # open a PR from a to b, removing 3 files
679 pull_request = pr_util.create_pull_request(
679 pull_request = pr_util.create_pull_request(
680 commits=commits, target_head='a', source_head='b', revisions=['b'],
680 commits=commits, target_head='a', source_head='b', revisions=['b'],
681 name_suffix='per-file-review')
681 name_suffix='per-file-review')
682
682
683 pr_util.add_one_commit(head='c')
683 pr_util.add_one_commit(head='c')
684
684
685 assert_pr_file_changes(
685 assert_pr_file_changes(
686 pull_request,
686 pull_request,
687 added=[],
687 added=[],
688 modified=[],
688 modified=[],
689 removed=['file_a', 'file_b', 'file_c'])
689 removed=['file_a', 'file_b', 'file_c'])
690
690
691
691
692 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
692 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
693 model = PullRequestModel()
693 model = PullRequestModel()
694 pull_request = pr_util.create_pull_request()
694 pull_request = pr_util.create_pull_request()
695 pr_util.update_source_repository()
695 pr_util.update_source_repository()
696
696
697 model.update_commits(pull_request)
697 model.update_commits(pull_request)
698
698
699 # Expect that it has a version entry now
699 # Expect that it has a version entry now
700 assert len(model.get_versions(pull_request)) == 1
700 assert len(model.get_versions(pull_request)) == 1
701
701
702
702
703 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
703 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
704 pull_request = pr_util.create_pull_request()
704 pull_request = pr_util.create_pull_request()
705 model = PullRequestModel()
705 model = PullRequestModel()
706 model.update_commits(pull_request)
706 model.update_commits(pull_request)
707
707
708 # Expect that it still has no versions
708 # Expect that it still has no versions
709 assert len(model.get_versions(pull_request)) == 0
709 assert len(model.get_versions(pull_request)) == 0
710
710
711
711
712 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
712 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
713 model = PullRequestModel()
713 model = PullRequestModel()
714 pull_request = pr_util.create_pull_request()
714 pull_request = pr_util.create_pull_request()
715 comment = pr_util.create_comment()
715 comment = pr_util.create_comment()
716 pr_util.update_source_repository()
716 pr_util.update_source_repository()
717
717
718 model.update_commits(pull_request)
718 model.update_commits(pull_request)
719
719
720 # Expect that the comment is linked to the pr version now
720 # Expect that the comment is linked to the pr version now
721 assert comment.pull_request_version == model.get_versions(pull_request)[0]
721 assert comment.pull_request_version == model.get_versions(pull_request)[0]
722
722
723
723
724 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
724 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
725 model = PullRequestModel()
725 model = PullRequestModel()
726 pull_request = pr_util.create_pull_request()
726 pull_request = pr_util.create_pull_request()
727 pr_util.update_source_repository()
727 pr_util.update_source_repository()
728 pr_util.update_source_repository()
728 pr_util.update_source_repository()
729
729
730 model.update_commits(pull_request)
730 model.update_commits(pull_request)
731
731
732 # Expect to find a new comment about the change
732 # Expect to find a new comment about the change
733 expected_message = textwrap.dedent(
733 expected_message = textwrap.dedent(
734 """\
734 """\
735 Pull request updated. Auto status change to |under_review|
735 Pull request updated. Auto status change to |under_review|
736
736
737 .. role:: added
737 .. role:: added
738 .. role:: removed
738 .. role:: removed
739 .. parsed-literal::
739 .. parsed-literal::
740
740
741 Changed commits:
741 Changed commits:
742 * :added:`1 added`
742 * :added:`1 added`
743 * :removed:`0 removed`
743 * :removed:`0 removed`
744
744
745 Changed files:
745 Changed files:
746 * `A file_2 <#a_c--92ed3b5f07b4>`_
746 * `A file_2 <#a_c--92ed3b5f07b4>`_
747
747
748 .. |under_review| replace:: *"Under Review"*"""
748 .. |under_review| replace:: *"Under Review"*"""
749 )
749 )
750 pull_request_comments = sorted(
750 pull_request_comments = sorted(
751 pull_request.comments, key=lambda c: c.modified_at)
751 pull_request.comments, key=lambda c: c.modified_at)
752 update_comment = pull_request_comments[-1]
752 update_comment = pull_request_comments[-1]
753 assert update_comment.text == expected_message
753 assert update_comment.text == expected_message
754
754
755
755
756 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
756 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
757 pull_request = pr_util.create_pull_request()
757 pull_request = pr_util.create_pull_request()
758
758
759 # Avoiding default values
759 # Avoiding default values
760 pull_request.status = PullRequest.STATUS_CLOSED
760 pull_request.status = PullRequest.STATUS_CLOSED
761 pull_request._last_merge_source_rev = "0" * 40
761 pull_request._last_merge_source_rev = "0" * 40
762 pull_request._last_merge_target_rev = "1" * 40
762 pull_request._last_merge_target_rev = "1" * 40
763 pull_request.last_merge_status = 1
763 pull_request.last_merge_status = 1
764 pull_request.merge_rev = "2" * 40
764 pull_request.merge_rev = "2" * 40
765
765
766 # Remember automatic values
766 # Remember automatic values
767 created_on = pull_request.created_on
767 created_on = pull_request.created_on
768 updated_on = pull_request.updated_on
768 updated_on = pull_request.updated_on
769
769
770 # Create a new version of the pull request
770 # Create a new version of the pull request
771 version = PullRequestModel()._create_version_from_snapshot(pull_request)
771 version = PullRequestModel()._create_version_from_snapshot(pull_request)
772
772
773 # Check attributes
773 # Check attributes
774 assert version.title == pr_util.create_parameters['title']
774 assert version.title == pr_util.create_parameters['title']
775 assert version.description == pr_util.create_parameters['description']
775 assert version.description == pr_util.create_parameters['description']
776 assert version.status == PullRequest.STATUS_CLOSED
776 assert version.status == PullRequest.STATUS_CLOSED
777
777
778 # versions get an updated created_on timestamp
778 # versions get an updated created_on timestamp
779 assert version.created_on != created_on
779 assert version.created_on != created_on
780
780
781 assert version.updated_on == updated_on
781 assert version.updated_on == updated_on
782 assert version.user_id == pull_request.user_id
782 assert version.user_id == pull_request.user_id
783 assert version.revisions == pr_util.create_parameters['revisions']
783 assert version.revisions == pr_util.create_parameters['revisions']
784 assert version.source_repo == pr_util.source_repository
784 assert version.source_repo == pr_util.source_repository
785 assert version.source_ref == pr_util.create_parameters['source_ref']
785 assert version.source_ref == pr_util.create_parameters['source_ref']
786 assert version.target_repo == pr_util.target_repository
786 assert version.target_repo == pr_util.target_repository
787 assert version.target_ref == pr_util.create_parameters['target_ref']
787 assert version.target_ref == pr_util.create_parameters['target_ref']
788 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
788 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
789 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
789 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
790 assert version.last_merge_status == pull_request.last_merge_status
790 assert version.last_merge_status == pull_request.last_merge_status
791 assert version.merge_rev == pull_request.merge_rev
791 assert version.merge_rev == pull_request.merge_rev
792 assert version.pull_request == pull_request
792 assert version.pull_request == pull_request
793
793
794
794
795 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
795 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
796 version1 = pr_util.create_version_of_pull_request()
796 version1 = pr_util.create_version_of_pull_request()
797 comment_linked = pr_util.create_comment(linked_to=version1)
797 comment_linked = pr_util.create_comment(linked_to=version1)
798 comment_unlinked = pr_util.create_comment()
798 comment_unlinked = pr_util.create_comment()
799 version2 = pr_util.create_version_of_pull_request()
799 version2 = pr_util.create_version_of_pull_request()
800
800
801 PullRequestModel()._link_comments_to_version(version2)
801 PullRequestModel()._link_comments_to_version(version2)
802
802
803 # Expect that only the new comment is linked to version2
803 # Expect that only the new comment is linked to version2
804 assert (
804 assert (
805 comment_unlinked.pull_request_version_id ==
805 comment_unlinked.pull_request_version_id ==
806 version2.pull_request_version_id)
806 version2.pull_request_version_id)
807 assert (
807 assert (
808 comment_linked.pull_request_version_id ==
808 comment_linked.pull_request_version_id ==
809 version1.pull_request_version_id)
809 version1.pull_request_version_id)
810 assert (
810 assert (
811 comment_unlinked.pull_request_version_id !=
811 comment_unlinked.pull_request_version_id !=
812 comment_linked.pull_request_version_id)
812 comment_linked.pull_request_version_id)
813
813
814
814
815 def test_calculate_commits():
815 def test_calculate_commits():
816 old_ids = [1, 2, 3]
816 old_ids = [1, 2, 3]
817 new_ids = [1, 3, 4, 5]
817 new_ids = [1, 3, 4, 5]
818 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
818 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
819 assert change.added == [4, 5]
819 assert change.added == [4, 5]
820 assert change.common == [1, 3]
820 assert change.common == [1, 3]
821 assert change.removed == [2]
821 assert change.removed == [2]
822 assert change.total == [1, 3, 4, 5]
822 assert change.total == [1, 3, 4, 5]
823
823
824
824
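A hedged sketch of one way the change calculation checked above could work; the actual `PullRequestModel._calculate_commit_id_changes` may differ, but the assertions mirror `test_calculate_commits`:

import collections

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])


def calculate_commit_id_changes(old_ids, new_ids):
    # preserve the order of the incoming id lists, as commit order matters
    added = [c for c in new_ids if c not in old_ids]
    common = [c for c in new_ids if c in old_ids]
    removed = [c for c in old_ids if c not in new_ids]
    return ChangeTuple(added, common, removed, new_ids)


change = calculate_commit_id_changes([1, 2, 3], [1, 3, 4, 5])
assert change.added == [4, 5]
assert change.common == [1, 3]
assert change.removed == [2]
assert change.total == [1, 3, 4, 5]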
825 def assert_inline_comments(pull_request, visible=None, outdated=None):
825 def assert_inline_comments(pull_request, visible=None, outdated=None):
826 if visible is not None:
826 if visible is not None:
827 inline_comments = CommentsModel().get_inline_comments(
827 inline_comments = CommentsModel().get_inline_comments(
828 pull_request.target_repo.repo_id, pull_request=pull_request)
828 pull_request.target_repo.repo_id, pull_request=pull_request)
829 inline_cnt = CommentsModel().get_inline_comments_count(
829 inline_cnt = CommentsModel().get_inline_comments_count(
830 inline_comments)
830 inline_comments)
831 assert inline_cnt == visible
831 assert inline_cnt == visible
832 if outdated is not None:
832 if outdated is not None:
833 outdated_comments = CommentsModel().get_outdated_comments(
833 outdated_comments = CommentsModel().get_outdated_comments(
834 pull_request.target_repo.repo_id, pull_request)
834 pull_request.target_repo.repo_id, pull_request)
835 assert len(outdated_comments) == outdated
835 assert len(outdated_comments) == outdated
836
836
837
837
838 def assert_pr_file_changes(
838 def assert_pr_file_changes(
839 pull_request, added=None, modified=None, removed=None):
839 pull_request, added=None, modified=None, removed=None):
840 pr_versions = PullRequestModel().get_versions(pull_request)
840 pr_versions = PullRequestModel().get_versions(pull_request)
841 # always use the first version, i.e. the original PR, to calculate changes
841 # always use the first version, i.e. the original PR, to calculate changes
842 pull_request_version = pr_versions[0]
842 pull_request_version = pr_versions[0]
843 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
843 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
844 pull_request, pull_request_version)
844 pull_request, pull_request_version)
845 file_changes = PullRequestModel()._calculate_file_changes(
845 file_changes = PullRequestModel()._calculate_file_changes(
846 old_diff_data, new_diff_data)
846 old_diff_data, new_diff_data)
847
847
848 assert added == file_changes.added, \
848 assert added == file_changes.added, \
849 'expected added:%s vs value:%s' % (added, file_changes.added)
849 'expected added:%s vs value:%s' % (added, file_changes.added)
850 assert modified == file_changes.modified, \
850 assert modified == file_changes.modified, \
851 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
851 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
852 assert removed == file_changes.removed, \
852 assert removed == file_changes.removed, \
853 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
853 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
854
854
855
855
856 def outdated_comments_patcher(use_outdated=True):
856 def outdated_comments_patcher(use_outdated=True):
857 return mock.patch.object(
857 return mock.patch.object(
858 CommentsModel, 'use_outdated_comments',
858 CommentsModel, 'use_outdated_comments',
859 return_value=use_outdated)
859 return_value=use_outdated)
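
The `outdated_comments_patcher` helper above is just a thin wrapper around `mock.patch.object`: it swaps `CommentsModel.use_outdated_comments` for a mock that returns a fixed value, so individual tests can force the outdated-comments behaviour on or off. A minimal, self-contained sketch of the same patching pattern, using a made-up stand-in class instead of the real `CommentsModel`:

import mock


class FakeCommentsModel(object):
    # illustrative stand-in only, not RhodeCode's CommentsModel
    def use_outdated_comments(self, pull_request):
        return False


def fake_outdated_comments_patcher(use_outdated=True):
    # patching on the class makes every instance see the forced value
    return mock.patch.object(
        FakeCommentsModel, 'use_outdated_comments',
        return_value=use_outdated)


with fake_outdated_comments_patcher(use_outdated=True):
    assert FakeCommentsModel().use_outdated_comments(None) is True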
@@ -1,336 +1,336 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import stat
import sys

import pytest
from mock import Mock, patch, DEFAULT

import rhodecode
from rhodecode.model import db, scm
from rhodecode.tests import no_newline_id_generator


def test_scm_instance_config(backend):
    repo = backend.create_repo()
    with patch.multiple('rhodecode.model.db.Repository',
                        _get_instance=DEFAULT,
                        _get_instance_cached=DEFAULT) as mocks:
        repo.scm_instance()
        mocks['_get_instance'].assert_called_with(
            config=None, cache=False)

        config = {'some': 'value'}
        repo.scm_instance(config=config)
        mocks['_get_instance'].assert_called_with(
            config=config, cache=False)

        with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
            repo.scm_instance(config=config)
            mocks['_get_instance_cached'].assert_called()


def test__get_instance_config(backend):
    repo = backend.create_repo()
    vcs_class = Mock()
    with patch.multiple('rhodecode.lib.vcs.backends',
                        get_scm=DEFAULT,
                        get_backend=DEFAULT) as mocks:
        mocks['get_scm'].return_value = backend.alias
        mocks['get_backend'].return_value = vcs_class
        with patch('rhodecode.model.db.Repository._config') as config_mock:
            repo._get_instance()
            vcs_class.assert_called_with(
                repo_path=repo.repo_full_path, config=config_mock,
                create=False, with_wire={'cache': True})

        new_config = {'override': 'old_config'}
        repo._get_instance(config=new_config)
        vcs_class.assert_called_with(
            repo_path=repo.repo_full_path, config=new_config, create=False,
            with_wire={'cache': True})


def test_mark_for_invalidation_config(backend):
    repo = backend.create_repo()
    with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
        scm.ScmModel().mark_for_invalidation(repo.repo_name)
        _, kwargs = _mock.call_args
        assert kwargs['config'].__dict__ == repo._config.__dict__


def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
    commits = [{'message': 'A'}, {'message': 'B'}]
    repo = backend.create_repo(commits=commits)
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == 1


def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
    repo = backend.create_repo()
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == -1


def test_strip_with_multiple_heads(backend_hg):
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
        {'message': 'B', 'parents': ['A']},
        {'message': 'a1'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    model = scm.ScmModel()
    model.strip(repo, commit_ids['b'], branch=None)

    vcs_repo = repo.scm_instance()
    rest_commit_ids = [c.raw_id for c in vcs_repo.get_changesets()]
    assert len(rest_commit_ids) == 4
    assert commit_ids['b'] not in rest_commit_ids


def test_strip_with_single_heads(backend_hg):
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    model = scm.ScmModel()
    model.strip(repo, commit_ids['b'], branch=None)

    vcs_repo = repo.scm_instance()
    rest_commit_ids = [c.raw_id for c in vcs_repo.get_changesets()]
    assert len(rest_commit_ids) == 2
    assert commit_ids['b'] not in rest_commit_ids


def test_get_nodes_returns_unicode_flat(backend_random):
    repo = backend_random.repo
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
        flat=True)
    assert_contains_only_unicode(directories)
    assert_contains_only_unicode(files)


def test_get_nodes_returns_unicode_non_flat(backend_random):
    repo = backend_random.repo
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
        flat=False)
    # johbo: Checking only the names for now, since that is the critical
    # part.
    assert_contains_only_unicode([d['name'] for d in directories])
    assert_contains_only_unicode([f['name'] for f in files])


def test_get_nodes_max_file_bytes(backend_random):
    repo = backend_random.repo
    max_file_bytes = 10
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
        extended_info=True, flat=False)
    assert any(file['content'] and len(file['content']) > max_file_bytes
               for file in files)

    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
        extended_info=True, flat=False, max_file_bytes=max_file_bytes)
    assert all(
        file['content'] is None if file['size'] > max_file_bytes else True
        for file in files)


def assert_contains_only_unicode(structure):
    assert structure
    for value in structure:
        assert isinstance(value, unicode)


@pytest.mark.backends("hg", "git")
def test_get_non_unicode_reference(backend):
    model = scm.ScmModel()
    non_unicode_list = ["Adını".decode("cp1254")]

    def scm_instance():
        return Mock(
            branches=non_unicode_list, bookmarks=non_unicode_list,
            tags=non_unicode_list, alias=backend.alias)

    repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
-    choices, __ = model.get_repo_landing_revs(repo=repo)
+    choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
    if backend.alias == 'hg':
        valid_choices = [
            'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
            u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
    else:
        valid_choices = [
            'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
            u'tag:Ad\xc4\xb1n\xc4\xb1']

    assert choices == valid_choices


class TestInstallSvnHooks(object):
    HOOK_FILES = ('pre-commit', 'post-commit')

    def test_new_hooks_are_created(self, backend_svn):
        model = scm.ScmModel()
        repo = backend_svn.create_repo()
        vcs_repo = repo.scm_instance()
        model.install_svn_hooks(vcs_repo)

        hooks_path = os.path.join(vcs_repo.path, 'hooks')
        assert os.path.isdir(hooks_path)
        for file_name in self.HOOK_FILES:
            file_path = os.path.join(hooks_path, file_name)
            self._check_hook_file_mode(file_path)
            self._check_hook_file_content(file_path)

    def test_rc_hooks_are_replaced(self, backend_svn):
        model = scm.ScmModel()
        repo = backend_svn.create_repo()
        vcs_repo = repo.scm_instance()
        hooks_path = os.path.join(vcs_repo.path, 'hooks')
        file_paths = [os.path.join(hooks_path, f) for f in self.HOOK_FILES]

        for file_path in file_paths:
            self._create_fake_hook(
                file_path, content="RC_HOOK_VER = 'abcde'\n")

        model.install_svn_hooks(vcs_repo)

        for file_path in file_paths:
            self._check_hook_file_content(file_path)

    def test_non_rc_hooks_are_not_replaced_without_force_create(
            self, backend_svn):
        model = scm.ScmModel()
        repo = backend_svn.create_repo()
        vcs_repo = repo.scm_instance()
        hooks_path = os.path.join(vcs_repo.path, 'hooks')
        file_paths = [os.path.join(hooks_path, f) for f in self.HOOK_FILES]
        non_rc_content = "exit 0\n"

        for file_path in file_paths:
            self._create_fake_hook(file_path, content=non_rc_content)

        model.install_svn_hooks(vcs_repo)

        for file_path in file_paths:
            with open(file_path, 'rt') as hook_file:
                content = hook_file.read()
            assert content == non_rc_content

    def test_non_rc_hooks_are_replaced_with_force_create(self, backend_svn):
        model = scm.ScmModel()
        repo = backend_svn.create_repo()
        vcs_repo = repo.scm_instance()
        hooks_path = os.path.join(vcs_repo.path, 'hooks')
        file_paths = [os.path.join(hooks_path, f) for f in self.HOOK_FILES]
        non_rc_content = "exit 0\n"

        for file_path in file_paths:
            self._create_fake_hook(file_path, content=non_rc_content)

        model.install_svn_hooks(vcs_repo, force_create=True)

        for file_path in file_paths:
            self._check_hook_file_content(file_path)

    def _check_hook_file_mode(self, file_path):
        assert os.path.exists(file_path)
        stat_info = os.stat(file_path)

        file_mode = stat.S_IMODE(stat_info.st_mode)
        expected_mode = int('755', 8)
        assert expected_mode == file_mode

    def _check_hook_file_content(self, file_path):
        with open(file_path, 'rt') as hook_file:
            content = hook_file.read()

        expected_env = '#!{}'.format(sys.executable)
        expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
            rhodecode.__version__)
        assert content.strip().startswith(expected_env)
        assert expected_rc_version in content

    def _create_fake_hook(self, file_path, content):
        with open(file_path, 'w') as hook_file:
            hook_file.write(content)


class TestCheckRhodecodeHook(object):

    @patch('os.path.exists', Mock(return_value=False))
    def test_returns_true_when_no_hook_found(self):
        result = scm._check_rhodecode_hook('/tmp/fake_hook_file.py')
        assert result

    @pytest.mark.parametrize("file_content, expected_result", [
        ("RC_HOOK_VER = '3.3.3'\n", True),
        ("RC_HOOK = '3.3.3'\n", False),
    ], ids=no_newline_id_generator)
    @patch('os.path.exists', Mock(return_value=True))
    def test_signatures(self, file_content, expected_result):
        hook_content_patcher = patch.object(
            scm, '_read_hook', return_value=file_content)
        with hook_content_patcher:
            result = scm._check_rhodecode_hook('/tmp/fake_hook_file.py')

        assert result is expected_result


class TestInstallHooks(object):
    def test_hooks_are_installed_for_git_repo(self, backend_git):
        repo = backend_git.create_repo()
        model = scm.ScmModel()
        scm_repo = repo.scm_instance()
        with patch.object(model, 'install_git_hook') as hooks_mock:
            model.install_hooks(scm_repo, repo_type='git')
        hooks_mock.assert_called_once_with(scm_repo)

    def test_hooks_are_installed_for_svn_repo(self, backend_svn):
        repo = backend_svn.create_repo()
        scm_repo = repo.scm_instance()
        model = scm.ScmModel()
        with patch.object(scm.ScmModel, 'install_svn_hooks') as hooks_mock:
            model.install_hooks(scm_repo, repo_type='svn')
        hooks_mock.assert_called_once_with(scm_repo)

    @pytest.mark.parametrize('hook_method', [
        'install_svn_hooks',
        'install_git_hook'])
    def test_mercurial_doesnt_trigger_hooks(self, backend_hg, hook_method):
        repo = backend_hg.create_repo()
        scm_repo = repo.scm_instance()
        model = scm.ScmModel()
        with patch.object(scm.ScmModel, hook_method) as hooks_mock:
            model.install_hooks(scm_repo, repo_type='hg')
        assert hooks_mock.call_count == 0
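
`TestInstallSvnHooks._check_hook_file_mode` above boils down to comparing only the permission bits of the installed hook file, which is what `stat.S_IMODE` extracts. A standalone sketch of that check against a throw-away file (independent of RhodeCode, path created via `tempfile`):

import os
import stat
import tempfile

# create a throw-away "hook" file and mark it executable, as the installer would
fd, hook_path = tempfile.mkstemp()
os.close(fd)
os.chmod(hook_path, 0o755)

# S_IMODE drops the file-type bits, leaving only the permission bits to compare
file_mode = stat.S_IMODE(os.stat(hook_path).st_mode)
assert file_mode == int('755', 8)

os.remove(hook_path)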
@@ -1,301 +1,295 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import formencode
import pytest

from rhodecode.tests import (
    HG_REPO, TEST_USER_REGULAR2_EMAIL, TEST_USER_REGULAR2_LOGIN,
    TEST_USER_REGULAR2_PASS, TEST_USER_ADMIN_LOGIN, TESTS_TMP_PATH,
    ldap_lib_installed)

from rhodecode.model import validators as v
from rhodecode.model.user_group import UserGroupModel

from rhodecode.model.meta import Session
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.db import ChangesetStatus, Repository
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.tests.fixture import Fixture

fixture = Fixture()

pytestmark = pytest.mark.usefixtures('baseapp')


@pytest.fixture
def localizer():
    def func(msg):
        return msg
    return func


def test_Message_extractor(localizer):
    validator = v.ValidUsername(localizer)
    pytest.raises(formencode.Invalid, validator.to_python, 'default')

    class StateObj(object):
        pass

    pytest.raises(
        formencode.Invalid, validator.to_python, 'default', StateObj)


def test_ValidUsername(localizer):
    validator = v.ValidUsername(localizer)

    pytest.raises(formencode.Invalid, validator.to_python, 'default')
    pytest.raises(formencode.Invalid, validator.to_python, 'new_user')
    pytest.raises(formencode.Invalid, validator.to_python, '.,')
    pytest.raises(
        formencode.Invalid, validator.to_python, TEST_USER_ADMIN_LOGIN)
    assert 'test' == validator.to_python('test')

    validator = v.ValidUsername(localizer, edit=True, old_data={'user_id': 1})


def test_ValidRepoUser(localizer):
    validator = v.ValidRepoUser(localizer)
    pytest.raises(formencode.Invalid, validator.to_python, 'nouser')
    assert TEST_USER_ADMIN_LOGIN == \
        validator.to_python(TEST_USER_ADMIN_LOGIN)


def test_ValidUserGroup(localizer):
    validator = v.ValidUserGroup(localizer)
    pytest.raises(formencode.Invalid, validator.to_python, 'default')
    pytest.raises(formencode.Invalid, validator.to_python, '.,')

    gr = fixture.create_user_group('test')
    gr2 = fixture.create_user_group('tes2')
    Session().commit()
    pytest.raises(formencode.Invalid, validator.to_python, 'test')
    assert gr.users_group_id is not None
    validator = v.ValidUserGroup(localizer,
        edit=True,
        old_data={'users_group_id': gr2.users_group_id})

    pytest.raises(formencode.Invalid, validator.to_python, 'test')
    pytest.raises(formencode.Invalid, validator.to_python, 'TesT')
    pytest.raises(formencode.Invalid, validator.to_python, 'TEST')
    UserGroupModel().delete(gr)
    UserGroupModel().delete(gr2)
    Session().commit()


@pytest.fixture(scope='function')
def repo_group(request):
    model = RepoGroupModel()
    gr = model.create(
        group_name='test_gr', group_description='desc', just_db=True,
        owner=TEST_USER_ADMIN_LOGIN)

    def cleanup():
        model.delete(gr)

    request.addfinalizer(cleanup)

    return gr


def test_ValidRepoGroup_same_name_as_repo(localizer):
    validator = v.ValidRepoGroup(localizer)
    with pytest.raises(formencode.Invalid) as excinfo:
        validator.to_python({'group_name': HG_REPO})
    expected_msg = 'Repository with name "vcs_test_hg" already exists'
    assert expected_msg in str(excinfo.value)


def test_ValidRepoGroup_group_exists(localizer, repo_group):
    validator = v.ValidRepoGroup(localizer)
    with pytest.raises(formencode.Invalid) as excinfo:
        validator.to_python({'group_name': repo_group.group_name})
    expected_msg = 'Group "test_gr" already exists'
    assert expected_msg in str(excinfo.value)


def test_ValidRepoGroup_invalid_parent(localizer, repo_group):
    validator = v.ValidRepoGroup(localizer, edit=True,
        old_data={'group_id': repo_group.group_id})
    with pytest.raises(formencode.Invalid) as excinfo:
        validator.to_python({
            'group_name': repo_group.group_name + 'n',
            'group_parent_id': repo_group.group_id,
        })
    expected_msg = 'Cannot assign this group as parent'
    assert expected_msg in str(excinfo.value)


def test_ValidRepoGroup_edit_group_no_root_permission(localizer, repo_group):
    validator = v.ValidRepoGroup(localizer,
        edit=True, old_data={'group_id': repo_group.group_id},
        can_create_in_root=False)

    # Cannot change parent
    with pytest.raises(formencode.Invalid) as excinfo:
        validator.to_python({'group_parent_id': '25'})
    expected_msg = 'no permission to store repository group in root location'
    assert expected_msg in str(excinfo.value)

    # Changing all the other fields is allowed
    validator.to_python({'group_name': 'foo', 'group_parent_id': '-1'})
    validator.to_python(
        {'user': TEST_USER_REGULAR2_LOGIN, 'group_parent_id': '-1'})
    validator.to_python({'group_description': 'bar', 'group_parent_id': '-1'})
    validator.to_python({'enable_locking': 'true', 'group_parent_id': '-1'})


def test_ValidPassword(localizer):
    validator = v.ValidPassword(localizer)
    assert 'lol' == validator.to_python('lol')
    assert None == validator.to_python(None)
    pytest.raises(formencode.Invalid, validator.to_python, 'ąćżź')


def test_ValidPasswordsMatch(localizer):
    validator = v.ValidPasswordsMatch(localizer)
    pytest.raises(
        formencode.Invalid,
        validator.to_python, {'password': 'pass',
                              'password_confirmation': 'pass2'})

    pytest.raises(
        formencode.Invalid,
        validator.to_python, {'new_password': 'pass',
                              'password_confirmation': 'pass2'})

    assert {'new_password': 'pass', 'password_confirmation': 'pass'} == \
        validator.to_python({'new_password': 'pass',
                             'password_confirmation': 'pass'})

    assert {'password': 'pass', 'password_confirmation': 'pass'} == \
        validator.to_python({'password': 'pass',
                             'password_confirmation': 'pass'})


def test_ValidAuth(localizer, config_stub):
    config_stub.testing_securitypolicy()
    config_stub.include('rhodecode.authentication')

    validator = v.ValidAuth(localizer)
    valid_creds = {
        'username': TEST_USER_REGULAR2_LOGIN,
        'password': TEST_USER_REGULAR2_PASS,
    }
    invalid_creds = {
        'username': 'err',
        'password': 'err',
    }
    assert valid_creds == validator.to_python(valid_creds)
    pytest.raises(
        formencode.Invalid, validator.to_python, invalid_creds)


-def test_ValidAuthToken(localizer):
-    validator = v.ValidAuthToken(localizer)
-    pytest.raises(formencode.Invalid, validator.to_python, 'BadToken')
-    validator
-
-
def test_ValidRepoName(localizer):
    validator = v.ValidRepoName(localizer)

    pytest.raises(
        formencode.Invalid, validator.to_python, {'repo_name': ''})

    pytest.raises(
        formencode.Invalid, validator.to_python, {'repo_name': HG_REPO})

    gr = RepoGroupModel().create(group_name='group_test',
                                 group_description='desc',
                                 owner=TEST_USER_ADMIN_LOGIN)
    pytest.raises(
        formencode.Invalid, validator.to_python, {'repo_name': gr.group_name})

    # TODO: write an error case for that, i.e. create a repo within a group
    # pytest.raises(formencode.Invalid,
    #               validator.to_python, {'repo_name': 'some',
    #                                     'repo_group': gr.group_id})


def test_ValidForkName(localizer):
    # this uses ValidRepoName validator
    assert True

@pytest.mark.parametrize("name, expected", [
    ('test', 'test'), ('lolz!', 'lolz'), (' aavv', 'aavv'),
    ('ala ma kota', 'ala-ma-kota'), ('@nooo', 'nooo'),
    ('$!haha lolz !', 'haha-lolz'), ('$$$$$', ''), ('{}OK!', 'OK'),
    ('/]re po', 're-po')])
def test_SlugifyName(name, expected, localizer):
    validator = v.SlugifyName(localizer)
    assert expected == validator.to_python(name)


def test_ValidForkType(localizer):
    validator = v.ValidForkType(localizer, old_data={'repo_type': 'hg'})
    assert 'hg' == validator.to_python('hg')
    pytest.raises(formencode.Invalid, validator.to_python, 'git')


def test_ValidPath(localizer):
    validator = v.ValidPath(localizer)
    assert TESTS_TMP_PATH == validator.to_python(TESTS_TMP_PATH)
    pytest.raises(
        formencode.Invalid, validator.to_python, '/no_such_dir')


def test_UniqSystemEmail(localizer):
    validator = v.UniqSystemEmail(localizer, old_data={})

    assert 'mail@python.org' == validator.to_python('MaiL@Python.org')

    email = TEST_USER_REGULAR2_EMAIL
    pytest.raises(formencode.Invalid, validator.to_python, email)


def test_ValidSystemEmail(localizer):
    validator = v.ValidSystemEmail(localizer)
    email = TEST_USER_REGULAR2_EMAIL

    assert email == validator.to_python(email)
    pytest.raises(formencode.Invalid, validator.to_python, 'err')


def test_NotReviewedRevisions(localizer):
    repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
    validator = v.NotReviewedRevisions(localizer, repo_id)
    rev = '0' * 40
    # add status for a rev, that should throw an error because it is already
    # reviewed
    new_status = ChangesetStatus()
    new_status.author = ChangesetStatusModel()._get_user(TEST_USER_ADMIN_LOGIN)
    new_status.repo = ChangesetStatusModel()._get_repo(HG_REPO)
    new_status.status = ChangesetStatus.STATUS_APPROVED
    new_status.comment = None
    new_status.revision = rev
    Session().add(new_status)
    Session().commit()
    try:
        pytest.raises(formencode.Invalid, validator.to_python, [rev])
    finally:
        Session().delete(new_status)
        Session().commit()
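
Every validator test above leans on the same formencode contract: `to_python()` returns the converted value on success and raises `formencode.Invalid` otherwise, which is exactly what the repeated `pytest.raises(formencode.Invalid, ...)` calls assert. A minimal sketch of that contract using a stock formencode validator rather than one of RhodeCode's own:

import formencode
from formencode import validators

# success: the value is converted and returned
assert validators.Int().to_python('42') == 42

# failure: Invalid is raised, which is what pytest.raises() checks for above
try:
    validators.Int().to_python('not-a-number')
    raise AssertionError('expected formencode.Invalid')
except formencode.Invalid:
    pass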
@@ -1,66 +1,64 @@
# Copyright (C) 2016-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

from pyramid.i18n import TranslationStringFactory, TranslationString

# Create a translation string factory for the 'rhodecode' domain.
from pyramid.threadlocal import get_current_request

_ = TranslationStringFactory('rhodecode')

-temp_translation_factory = _
-

class _LazyString(object):
    def __init__(self, *args, **kw):
        self.args = args
        self.kw = kw

    def eval(self):
        req = get_current_request()
        translator = _
        if req:
            translator = req.translate
        return translator(*self.args, **self.kw)

    def __unicode__(self):
        return unicode(self.eval())

    def __str__(self):
        return self.eval()

    def __repr__(self):
        return self.__str__()

    def __mod__(self, other):
        return self.eval() % other

    def format(self, *args):
        return self.eval().format(*args)


def lazy_ugettext(*args, **kw):
    """ Lazily evaluated version of _() """
    return _LazyString(*args, **kw)


def _pluralize(msgid1, msgid2, n, mapping=None):
    if n == 1:
        return _(msgid1, mapping=mapping)
    else:
        return _(msgid2, mapping=mapping)
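
The point of `_LazyString` is deferral: the message is captured at import time, but the translator is only looked up when the string is actually rendered, so per-request translations are honoured. A rough standalone sketch of the same idea, with a hypothetical `LazyMessage` class and a plain callable in place of Pyramid's request machinery:

class LazyMessage(object):
    # store the message id now, translate only when rendered
    def __init__(self, msgid, translator=lambda s: s):
        self.msgid = msgid
        self.translator = translator

    def __str__(self):
        # translation happens at render time, not at definition time
        return self.translator(self.msgid)


# defined once at import time, rendered later with whatever translator is active
message = LazyMessage('Repository created')
assert str(message) == 'Repository created'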