hooks: added new hooks for comments on pull requests and commits....
dan
r4305:de8db8da default
@@ -0,0 +1,60 @@
# Example to trigger a CI call action on specific comment text, e.g. chatops, or a CI
# rebuild on mention of a CI bot.

# NOTE: `has_kwargs` and `HookResponse` are used below; in an rcextensions setup they
# are typically available from the package utils (assumed import, adjust to your
# rcextensions layout):
from .utils import has_kwargs, HookResponse


@has_kwargs({
    'repo_name': '',
    'repo_type': '',
    'description': '',
    'private': '',
    'created_on': '',
    'enable_downloads': '',
    'repo_id': '',
    'user_id': '',
    'enable_statistics': '',
    'clone_uri': '',
    'fork_id': '',
    'group_id': '',
    'created_by': '',
    'repository': '',
    'comment': '',
    'commit': ''
})
def _comment_commit_repo_hook(*args, **kwargs):
    """
    POST CREATE REPOSITORY COMMENT ON COMMIT HOOK. This function will be executed
    after a comment is made on a commit in this repository.
    """
    from .helpers import http_call, extra_fields
    from .utils import UrlTemplate
    # returns list of dicts with key-val fetched from extra fields
    repo_extra_fields = extra_fields.run(**kwargs)

    import rhodecode
    from rc_integrations.jenkins_ci import csrf_call, get_auth, requests_retry_call

    endpoint_url = extra_fields.get_field(
        repo_extra_fields, key='ci_endpoint_url',
        default='http://ci.rc.com/job/rc-ce-commits/build?COMMIT_ID=${commit}')
    mention_text = extra_fields.get_field(
        repo_extra_fields, key='ci_mention_text',
        default='@jenkins build')

    endpoint_url = UrlTemplate(endpoint_url).safe_substitute(
        commit=kwargs['commit']['raw_id'])

    trigger_ci = False
    comment = kwargs['comment']['comment_text']
    if mention_text in comment:
        trigger_ci = True

    if trigger_ci is False:
        return HookResponse(0, '')

    # call some CI based on the special comment mention marker
    data = {
        'project': kwargs['repository'],
    }
    response = http_call.run(url=endpoint_url, params=data)

    return HookResponse(0, '')
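
# For illustration, a standalone sketch of the trigger logic above. It assumes
# UrlTemplate behaves like Python's string.Template (suggested by the ${commit}
# placeholder and the safe_substitute call); names and values are made up here.
from string import Template

def _example_mention_trigger():
    mention_text = '@jenkins build'
    comment_text = 'looks good, @jenkins build please'
    url_template = 'http://ci.rc.com/job/rc-ce-commits/build?COMMIT_ID=${commit}'

    if mention_text in comment_text:
        # substitute the ${commit} placeholder; unknown placeholders stay intact
        return Template(url_template).safe_substitute(commit='abcdef0123456789')
    return None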
@@ -1,2343 +1,2348 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2011-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import time

import rhodecode
from rhodecode.api import (
    jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
from rhodecode.api.utils import (
    has_superadmin_permission, Optional, OAttr, get_repo_or_error,
    get_user_group_or_error, get_user_or_error, validate_repo_permissions,
    get_perm_or_error, parse_args, get_origin, build_commit_data,
    validate_set_owner_permissions)
from rhodecode.lib import audit_logger, rc_cache
from rhodecode.lib import repo_maintenance
from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi
from rhodecode.lib.celerylib.utils import get_task_id
from rhodecode.lib.utils2 import str2bool, time_to_datetime, safe_str, safe_int
from rhodecode.lib.ext_json import json
from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
from rhodecode.lib.vcs import RepositoryError
from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
    ChangesetComment)
from rhodecode.model.permission import PermissionModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.scm import ScmModel, RepoList
from rhodecode.model.settings import SettingsModel, VcsSettingsModel
from rhodecode.model import validation_schema
from rhodecode.model.validation_schema.schemas import repo_schema

log = logging.getLogger(__name__)


@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section of the output returns the user groups and users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

      {
        "error": null,
        "id": <repo_id>,
        "result": {
          "clone_uri": null,
          "created_on": "timestamp",
          "description": "repo description",
          "enable_downloads": false,
          "enable_locking": false,
          "enable_statistics": false,
          "followers": [
            {
              "active": true,
              "admin": false,
              "api_key": "****************************************",
              "api_keys": [
                "****************************************"
              ],
              "email": "user@example.com",
              "emails": [
                "user@example.com"
              ],
              "extern_name": "rhodecode",
              "extern_type": "rhodecode",
              "firstname": "username",
              "ip_addresses": [],
              "language": null,
              "last_login": "2015-09-16T17:16:35.854",
              "lastname": "surname",
              "user_id": <user_id>,
              "username": "name"
            }
          ],
          "fork_of": "parent-repo",
          "landing_rev": [
            "rev",
            "tip"
          ],
          "last_changeset": {
            "author": "User <user@example.com>",
            "branch": "default",
            "date": "timestamp",
            "message": "last commit message",
            "parents": [
              {
                "raw_id": "commit-id"
              }
            ],
            "raw_id": "commit-id",
            "revision": <revision number>,
            "short_id": "short id"
          },
          "lock_reason": null,
          "locked_by": null,
          "locked_date": null,
          "owner": "owner-name",
          "permissions": [
            {
              "name": "super-admin-name",
              "origin": "super-admin",
              "permission": "repository.admin",
              "type": "user"
            },
            {
              "name": "owner-name",
              "origin": "owner",
              "permission": "repository.admin",
              "type": "user"
            },
            {
              "name": "user-group-name",
              "origin": "permission",
              "permission": "repository.write",
              "type": "user_group"
            }
          ],
          "private": true,
          "repo_id": 676,
          "repo_name": "user-group/repo-name",
          "repo_type": "hg"
        }
      }
    """

    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    include_secrets = False
    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        # check if we have at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    permissions = []
    for _user in repo.permissions():
        user_data = {
            'name': _user.username,
            'permission': _user.permission,
            'origin': get_origin(_user),
            'type': "user",
        }
        permissions.append(user_data)

    for _user_group in repo.permission_user_groups():
        user_group_data = {
            'name': _user_group.users_group_name,
            'permission': _user_group.permission,
            'origin': get_origin(_user_group),
            'type': "user_group",
        }
        permissions.append(user_group_data)

    following_users = [
        user.user.get_api_data(include_secrets=include_secrets)
        for user in repo.followers]

    if not cache:
        repo.update_commit_cache()
    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users
    return data


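# A sketch (not part of this module) of invoking get_repo over the JSON-RPC API
# with `requests`; the endpoint URL, token and repository name are placeholders.
def _example_get_repo_call():
    import requests

    payload = {
        'id': 1,
        'auth_token': 'SECRET_AUTH_TOKEN',           # token with at least read rights
        'method': 'get_repo',
        'args': {'repoid': 'user-group/repo-name',   # repository name or numeric id
                 'cache': True},
    }
    response = requests.post('https://rhodecode.example.com/_admin/api', json=payload)
    return response.json()['result']                 # dict shaped like the example output

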
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify the root repository group to fetch repositories from.
        Filters the returned repositories to members of the given root group.
    :type root: Optional(None)
    :param traverse: traverse the given root into subrepositories. With this flag
        set to False, only top-level repositories from `root` are returned.
        If root is empty, only top-level repositories are returned.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: [
        {
          "repo_id" : "<repo_id>",
          "repo_name" : "<reponame>",
          "repo_type" : "<repo_type>",
          "clone_uri" : "<clone_uri>",
          "private" : "<bool>",
          "created_on" : "<datetimecreated>",
          "description" : "<description>",
          "landing_rev": "<landing_rev>",
          "owner": "<repo_owner>",
          "fork_of": "<name_of_fork_parent>",
          "enable_downloads": "<bool>",
          "enable_locking": "<bool>",
          "enable_statistics": "<bool>",
        },
        ...
      ]
      error: null
    """

    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    if root:
        # verify parent existence, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))

        if traverse:
            repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = RepoModel().get_repos_for_root(root=parent)
    else:
        if traverse:
            repos = RepoModel().get_all()
        else:
            # return just top-level
            repos = RepoModel().get_repos_for_root(root=None)

    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]


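# Illustration of the `root` / `traverse` parameters described above; the group
# name is a placeholder.
_example_get_repos_args = {
    'root': 'foo/bar',   # only repositories inside this repository group
    'traverse': False,   # top-level members of `foo/bar` only, no subgroups
}

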
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additional parameters define the amount of detail returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full'. 'full' gives diff
        details such as the diff itself, the number of changed files, etc.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    _cs_json = cs.__json__()
    _cs_json['diff'] = build_commit_data(cs, changes_details)
    if changes_details == 'full':
        _cs_json['refs'] = cs._get_refs()
    return _cs_json


@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid options are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        if cnt >= limit != -1:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret


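# Note on the loop guard above: `cnt >= limit != -1` is a chained comparison,
# equivalent to `cnt >= limit and limit != -1`, so a limit of -1 disables the cap.
def _example_limit_guard():
    def should_stop(cnt, limit):
        return cnt >= limit != -1

    assert should_stop(5, 5) is True    # limit reached -> stop
    assert should_stop(5, -1) is False  # -1 means "no limit"

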
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and/or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content for files smaller than this
        size, in bytes.
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: [
        {
          "binary": false,
          "content": "File line",
          "extension": "md",
          "lines": 2,
          "md5": "059fa5d29b19c0657e384749480f6422",
          "mimetype": "text/x-minidsrc",
          "name": "file.md",
          "size": 580,
          "type": "file"
        },
        ...
      ]
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }
        return _map[ret_type]
    except KeyError:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )


@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(None), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns a different set of information about nodes.
        The valid options are ``minimal``, ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content for files smaller than this
        size, in bytes.
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled, fetching
        files is slower but more memory efficient.
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "binary": false,
        "extension": "py",
        "lines": 35,
        "content": "....",
        "md5": "76318336366b0f17ee249e11b0c99c41",
        "mimetype": "text/x-python",
        "name": "python.py",
        "size": 817,
        "type": "file"
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        raise JSONRPCError(
            'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)
    except NodeDoesNotExistError:
        raise JSONRPCError('There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
            repo.repo_name, file_path, commit_id))
    except Exception:
        log.exception("Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError('failed to get repo: `{}` file at path {}'.format(
            repo.repo_name, file_path))

    return node


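# Hypothetical `args` for get_repo_file, with placeholder values, showing the
# `details` levels documented above.
_example_get_repo_file_args = {
    'repoid': 'user-group/repo-name',
    'commit_id': 'tip',        # any commit id / ref the backend can resolve
    'file_path': 'README.rst',
    'details': 'full',         # 'minimal' | 'minimal+search' | 'basic' | 'full'
    'cache': False,            # bypass the internal file cache
}

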
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed
    outside of that context.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)


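# The code above resolves commit_id to a full sha before caching. A minimal sketch
# of why, using a plain dict in place of the real cache region (this is not
# RhodeCode's actual cache backend):
_example_fts_cache = {}

def _example_cached_fts_tree(repo_id, commit_id, root_path, compute):
    # a moving ref such as 'master' would keep returning a stale entry after new
    # commits land; an immutable full sha keeps the cache key stable and correct
    key = (repo_id, commit_id, root_path)
    if key not in _example_fts_cache:
        _example_fts_cache[key] = compute(repo_id, commit_id, root_path)
    return _example_fts_cache[key]

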
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      "result": {
        "bookmarks": {
          "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
        },
        "branches": {
          "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
        },
        "branches_closed": {},
        "tags": {
          "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
          "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
          "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
          "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17"
        }
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        vcs_instance = repo.scm_instance()
        refs = vcs_instance.refs()
        return refs
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )


707 @jsonrpc_method()
707 @jsonrpc_method()
708 def create_repo(
708 def create_repo(
709 request, apiuser, repo_name, repo_type,
709 request, apiuser, repo_name, repo_type,
710 owner=Optional(OAttr('apiuser')),
710 owner=Optional(OAttr('apiuser')),
711 description=Optional(''),
711 description=Optional(''),
712 private=Optional(False),
712 private=Optional(False),
713 clone_uri=Optional(None),
713 clone_uri=Optional(None),
714 push_uri=Optional(None),
714 push_uri=Optional(None),
715 landing_rev=Optional(None),
715 landing_rev=Optional(None),
716 enable_statistics=Optional(False),
716 enable_statistics=Optional(False),
717 enable_locking=Optional(False),
717 enable_locking=Optional(False),
718 enable_downloads=Optional(False),
718 enable_downloads=Optional(False),
719 copy_permissions=Optional(False)):
719 copy_permissions=Optional(False)):
720 """
720 """
721 Creates a repository.
721 Creates a repository.
722
722
723 * If the repository name contains "/", repository will be created inside
723 * If the repository name contains "/", repository will be created inside
724 a repository group or nested repository groups
724 a repository group or nested repository groups
725
725
726 For example "foo/bar/repo1" will create |repo| called "repo1" inside
726 For example "foo/bar/repo1" will create |repo| called "repo1" inside
727 group "foo/bar". You have to have permissions to access and write to
727 group "foo/bar". You have to have permissions to access and write to
728 the last repository group ("bar" in this example)
728 the last repository group ("bar" in this example)
729
729
730 This command can only be run using an |authtoken| with at least
730 This command can only be run using an |authtoken| with at least
731 permissions to create repositories, or write permissions to
731 permissions to create repositories, or write permissions to
732 parent repository groups.
732 parent repository groups.
733
733
734 :param apiuser: This is filled automatically from the |authtoken|.
734 :param apiuser: This is filled automatically from the |authtoken|.
735 :type apiuser: AuthUser
735 :type apiuser: AuthUser
736 :param repo_name: Set the repository name.
736 :param repo_name: Set the repository name.
737 :type repo_name: str
737 :type repo_name: str
738 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
738 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
739 :type repo_type: str
739 :type repo_type: str
740 :param owner: user_id or username
740 :param owner: user_id or username
741 :type owner: Optional(str)
741 :type owner: Optional(str)
742 :param description: Set the repository description.
742 :param description: Set the repository description.
743 :type description: Optional(str)
743 :type description: Optional(str)
744 :param private: set repository as private
744 :param private: set repository as private
745 :type private: bool
745 :type private: bool
746 :param clone_uri: set clone_uri
746 :param clone_uri: set clone_uri
747 :type clone_uri: str
747 :type clone_uri: str
748 :param push_uri: set push_uri
748 :param push_uri: set push_uri
749 :type push_uri: str
749 :type push_uri: str
750 :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
750 :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
751 :type landing_rev: str
751 :type landing_rev: str
752 :param enable_locking:
752 :param enable_locking:
753 :type enable_locking: bool
753 :type enable_locking: bool
754 :param enable_downloads:
754 :param enable_downloads:
755 :type enable_downloads: bool
755 :type enable_downloads: bool
756 :param enable_statistics:
756 :param enable_statistics:
757 :type enable_statistics: bool
757 :type enable_statistics: bool
758 :param copy_permissions: Copy permission from group in which the
758 :param copy_permissions: Copy permission from group in which the
759 repository is being created.
759 repository is being created.
760 :type copy_permissions: bool
760 :type copy_permissions: bool
761
761
762
762
763 Example output:
763 Example output:
764
764
765 .. code-block:: bash
765 .. code-block:: bash
766
766
767 id : <id_given_in_input>
767 id : <id_given_in_input>
768 result: {
768 result: {
769 "msg": "Created new repository `<reponame>`",
769 "msg": "Created new repository `<reponame>`",
770 "success": true,
770 "success": true,
771 "task": "<celery task id or None if done sync>"
771 "task": "<celery task id or None if done sync>"
772 }
772 }
773 error: null
773 error: null
774
774
775
775
776 Example error output:
776 Example error output:
777
777
778 .. code-block:: bash
778 .. code-block:: bash
779
779
780 id : <id_given_in_input>
780 id : <id_given_in_input>
781 result : null
781 result : null
782 error : {
782 error : {
783 'failed to create repository `<repo_name>`'
783 'failed to create repository `<repo_name>`'
784 }
784 }
785
785
786 """
786 """
787
787
788 owner = validate_set_owner_permissions(apiuser, owner)
788 owner = validate_set_owner_permissions(apiuser, owner)
789
789
790 description = Optional.extract(description)
790 description = Optional.extract(description)
791 copy_permissions = Optional.extract(copy_permissions)
791 copy_permissions = Optional.extract(copy_permissions)
792 clone_uri = Optional.extract(clone_uri)
792 clone_uri = Optional.extract(clone_uri)
793 push_uri = Optional.extract(push_uri)
793 push_uri = Optional.extract(push_uri)
794
794
795 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
795 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
796 if isinstance(private, Optional):
796 if isinstance(private, Optional):
797 private = defs.get('repo_private') or Optional.extract(private)
797 private = defs.get('repo_private') or Optional.extract(private)
798 if isinstance(repo_type, Optional):
798 if isinstance(repo_type, Optional):
799 repo_type = defs.get('repo_type')
799 repo_type = defs.get('repo_type')
800 if isinstance(enable_statistics, Optional):
800 if isinstance(enable_statistics, Optional):
801 enable_statistics = defs.get('repo_enable_statistics')
801 enable_statistics = defs.get('repo_enable_statistics')
802 if isinstance(enable_locking, Optional):
802 if isinstance(enable_locking, Optional):
803 enable_locking = defs.get('repo_enable_locking')
803 enable_locking = defs.get('repo_enable_locking')
804 if isinstance(enable_downloads, Optional):
804 if isinstance(enable_downloads, Optional):
805 enable_downloads = defs.get('repo_enable_downloads')
805 enable_downloads = defs.get('repo_enable_downloads')
806
806
807 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
807 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
808 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
808 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
809 ref_choices = list(set(ref_choices + [landing_ref]))
809 ref_choices = list(set(ref_choices + [landing_ref]))
810
810
811 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
811 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
812
812
813 schema = repo_schema.RepoSchema().bind(
813 schema = repo_schema.RepoSchema().bind(
814 repo_type_options=rhodecode.BACKENDS.keys(),
814 repo_type_options=rhodecode.BACKENDS.keys(),
815 repo_ref_options=ref_choices,
815 repo_ref_options=ref_choices,
816 repo_type=repo_type,
816 repo_type=repo_type,
817 # user caller
817 # user caller
818 user=apiuser)
818 user=apiuser)
819
819
820 try:
820 try:
821 schema_data = schema.deserialize(dict(
821 schema_data = schema.deserialize(dict(
822 repo_name=repo_name,
822 repo_name=repo_name,
823 repo_type=repo_type,
823 repo_type=repo_type,
824 repo_owner=owner.username,
824 repo_owner=owner.username,
825 repo_description=description,
825 repo_description=description,
826 repo_landing_commit_ref=landing_commit_ref,
826 repo_landing_commit_ref=landing_commit_ref,
827 repo_clone_uri=clone_uri,
827 repo_clone_uri=clone_uri,
828 repo_push_uri=push_uri,
828 repo_push_uri=push_uri,
829 repo_private=private,
829 repo_private=private,
830 repo_copy_permissions=copy_permissions,
830 repo_copy_permissions=copy_permissions,
831 repo_enable_statistics=enable_statistics,
831 repo_enable_statistics=enable_statistics,
832 repo_enable_downloads=enable_downloads,
832 repo_enable_downloads=enable_downloads,
833 repo_enable_locking=enable_locking))
833 repo_enable_locking=enable_locking))
834 except validation_schema.Invalid as err:
834 except validation_schema.Invalid as err:
835 raise JSONRPCValidationError(colander_exc=err)
835 raise JSONRPCValidationError(colander_exc=err)
836
836
837 try:
837 try:
838 data = {
838 data = {
839 'owner': owner,
839 'owner': owner,
840 'repo_name': schema_data['repo_group']['repo_name_without_group'],
840 'repo_name': schema_data['repo_group']['repo_name_without_group'],
841 'repo_name_full': schema_data['repo_name'],
841 'repo_name_full': schema_data['repo_name'],
842 'repo_group': schema_data['repo_group']['repo_group_id'],
842 'repo_group': schema_data['repo_group']['repo_group_id'],
843 'repo_type': schema_data['repo_type'],
843 'repo_type': schema_data['repo_type'],
844 'repo_description': schema_data['repo_description'],
844 'repo_description': schema_data['repo_description'],
845 'repo_private': schema_data['repo_private'],
845 'repo_private': schema_data['repo_private'],
846 'clone_uri': schema_data['repo_clone_uri'],
846 'clone_uri': schema_data['repo_clone_uri'],
847 'push_uri': schema_data['repo_push_uri'],
847 'push_uri': schema_data['repo_push_uri'],
848 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
848 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
849 'enable_statistics': schema_data['repo_enable_statistics'],
849 'enable_statistics': schema_data['repo_enable_statistics'],
850 'enable_locking': schema_data['repo_enable_locking'],
850 'enable_locking': schema_data['repo_enable_locking'],
851 'enable_downloads': schema_data['repo_enable_downloads'],
851 'enable_downloads': schema_data['repo_enable_downloads'],
852 'repo_copy_permissions': schema_data['repo_copy_permissions'],
852 'repo_copy_permissions': schema_data['repo_copy_permissions'],
853 }
853 }
854
854
855 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
855 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
856 task_id = get_task_id(task)
856 task_id = get_task_id(task)
857 # no commit, it's done in RepoModel, or async via celery
857 # no commit, it's done in RepoModel, or async via celery
858 return {
858 return {
859 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
859 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
860 'success': True, # cannot return the repo data here since creation
860 'success': True, # cannot return the repo data here since creation
861 # can be done async
861 # can be done async
862 'task': task_id
862 'task': task_id
863 }
863 }
864 except Exception:
864 except Exception:
865 log.exception(
865 log.exception(
866 u"Exception while trying to create the repository %s",
866 u"Exception while trying to create the repository %s",
867 schema_data['repo_name'])
867 schema_data['repo_name'])
868 raise JSONRPCError(
868 raise JSONRPCError(
869 'failed to create repository `%s`' % (schema_data['repo_name'],))
869 'failed to create repository `%s`' % (schema_data['repo_name'],))
870
870
871
871
872 @jsonrpc_method()
872 @jsonrpc_method()
873 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
873 def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
874 description=Optional('')):
874 description=Optional('')):
875 """
875 """
876 Adds an extra field to a repository.
876 Adds an extra field to a repository.
877
877
878 This command can only be run using an |authtoken| with at least
878 This command can only be run using an |authtoken| with at least
879 admin permissions to the |repo|.
879 admin permissions to the |repo|.
880
880
881 :param apiuser: This is filled automatically from the |authtoken|.
881 :param apiuser: This is filled automatically from the |authtoken|.
882 :type apiuser: AuthUser
882 :type apiuser: AuthUser
883 :param repoid: Set the repository name or repository id.
883 :param repoid: Set the repository name or repository id.
884 :type repoid: str or int
884 :type repoid: str or int
885 :param key: Create a unique field key for this repository.
885 :param key: Create a unique field key for this repository.
886 :type key: str
886 :type key: str
887 :param label:
887 :param label:
888 :type label: Optional(str)
888 :type label: Optional(str)
889 :param description:
889 :param description:
890 :type description: Optional(str)
890 :type description: Optional(str)
891 """
891 """
892 repo = get_repo_or_error(repoid)
892 repo = get_repo_or_error(repoid)
893 if not has_superadmin_permission(apiuser):
893 if not has_superadmin_permission(apiuser):
894 _perms = ('repository.admin',)
894 _perms = ('repository.admin',)
895 validate_repo_permissions(apiuser, repoid, repo, _perms)
895 validate_repo_permissions(apiuser, repoid, repo, _perms)
896
896
897 label = Optional.extract(label) or key
897 label = Optional.extract(label) or key
898 description = Optional.extract(description)
898 description = Optional.extract(description)
899
899
900 field = RepositoryField.get_by_key_name(key, repo)
900 field = RepositoryField.get_by_key_name(key, repo)
901 if field:
901 if field:
902 raise JSONRPCError('Field with key '
902 raise JSONRPCError('Field with key '
903 '`%s` already exists for repo `%s`' % (key, repoid))
903 '`%s` already exists for repo `%s`' % (key, repoid))
904
904
905 try:
905 try:
906 RepoModel().add_repo_field(repo, key, field_label=label,
906 RepoModel().add_repo_field(repo, key, field_label=label,
907 field_desc=description)
907 field_desc=description)
908 Session().commit()
908 Session().commit()
909 return {
909 return {
910 'msg': "Added new repository field `%s`" % (key,),
910 'msg': "Added new repository field `%s`" % (key,),
911 'success': True,
911 'success': True,
912 }
912 }
913 except Exception:
913 except Exception:
914 log.exception("Exception occurred while trying to add field to repo")
914 log.exception("Exception occurred while trying to add field to repo")
915 raise JSONRPCError(
915 raise JSONRPCError(
916 'failed to create new field for repository `%s`' % (repoid,))
916 'failed to create new field for repository `%s`' % (repoid,))
917
917
918
918
919 @jsonrpc_method()
919 @jsonrpc_method()
920 def remove_field_from_repo(request, apiuser, repoid, key):
920 def remove_field_from_repo(request, apiuser, repoid, key):
921 """
921 """
922 Removes an extra field from a repository.
922 Removes an extra field from a repository.
923
923
924 This command can only be run using an |authtoken| with at least
924 This command can only be run using an |authtoken| with at least
925 admin permissions to the |repo|.
925 admin permissions to the |repo|.
926
926
927 :param apiuser: This is filled automatically from the |authtoken|.
927 :param apiuser: This is filled automatically from the |authtoken|.
928 :type apiuser: AuthUser
928 :type apiuser: AuthUser
929 :param repoid: Set the repository name or repository ID.
929 :param repoid: Set the repository name or repository ID.
930 :type repoid: str or int
930 :type repoid: str or int
931 :param key: Set the unique field key for this repository.
931 :param key: Set the unique field key for this repository.
932 :type key: str
932 :type key: str
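
Example output (an illustrative sketch; values are placeholders,
the message format follows the success return of this call):

.. code-block:: bash

    id : <id_given_in_input>
    result : {
        "msg": "Deleted repository field `<field_key>`",
        "success": true
    }
    error : null
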
933 """
933 """
934
934
935 repo = get_repo_or_error(repoid)
935 repo = get_repo_or_error(repoid)
936 if not has_superadmin_permission(apiuser):
936 if not has_superadmin_permission(apiuser):
937 _perms = ('repository.admin',)
937 _perms = ('repository.admin',)
938 validate_repo_permissions(apiuser, repoid, repo, _perms)
938 validate_repo_permissions(apiuser, repoid, repo, _perms)
939
939
940 field = RepositoryField.get_by_key_name(key, repo)
940 field = RepositoryField.get_by_key_name(key, repo)
941 if not field:
941 if not field:
942 raise JSONRPCError('Field with key `%s` does not '
942 raise JSONRPCError('Field with key `%s` does not '
943 'exist for repo `%s`' % (key, repoid))
943 'exist for repo `%s`' % (key, repoid))
944
944
945 try:
945 try:
946 RepoModel().delete_repo_field(repo, field_key=key)
946 RepoModel().delete_repo_field(repo, field_key=key)
947 Session().commit()
947 Session().commit()
948 return {
948 return {
949 'msg': "Deleted repository field `%s`" % (key,),
949 'msg': "Deleted repository field `%s`" % (key,),
950 'success': True,
950 'success': True,
951 }
951 }
952 except Exception:
952 except Exception:
953 log.exception(
953 log.exception(
954 "Exception occurred while trying to delete field from repo")
954 "Exception occurred while trying to delete field from repo")
955 raise JSONRPCError(
955 raise JSONRPCError(
956 'failed to delete field for repository `%s`' % (repoid,))
956 'failed to delete field for repository `%s`' % (repoid,))
957
957
958
958
959 @jsonrpc_method()
959 @jsonrpc_method()
960 def update_repo(
960 def update_repo(
961 request, apiuser, repoid, repo_name=Optional(None),
961 request, apiuser, repoid, repo_name=Optional(None),
962 owner=Optional(OAttr('apiuser')), description=Optional(''),
962 owner=Optional(OAttr('apiuser')), description=Optional(''),
963 private=Optional(False),
963 private=Optional(False),
964 clone_uri=Optional(None), push_uri=Optional(None),
964 clone_uri=Optional(None), push_uri=Optional(None),
965 landing_rev=Optional(None), fork_of=Optional(None),
965 landing_rev=Optional(None), fork_of=Optional(None),
966 enable_statistics=Optional(False),
966 enable_statistics=Optional(False),
967 enable_locking=Optional(False),
967 enable_locking=Optional(False),
968 enable_downloads=Optional(False), fields=Optional('')):
968 enable_downloads=Optional(False), fields=Optional('')):
969 """
969 """
970 Updates a repository with the given information.
970 Updates a repository with the given information.
971
971
972 This command can only be run using an |authtoken| with at least
972 This command can only be run using an |authtoken| with at least
973 admin permissions to the |repo|.
973 admin permissions to the |repo|.
974
974
975 * If the repository name contains "/", the repository will be updated
975 * If the repository name contains "/", the repository will be updated
976 and moved into the matching repository group or nested repository groups
976 and moved into the matching repository group or nested repository groups
977
977
978 For example repoid=repo-test repo_name="foo/bar/repo-test" will update |repo|
978 For example repoid=repo-test repo_name="foo/bar/repo-test" will update |repo|
979 called "repo-test" and place it inside group "foo/bar".
979 called "repo-test" and place it inside group "foo/bar".
980 You have to have permissions to access and write to the last repository
980 You have to have permissions to access and write to the last repository
981 group ("bar" in this example)
981 group ("bar" in this example)
982
982
983 :param apiuser: This is filled automatically from the |authtoken|.
983 :param apiuser: This is filled automatically from the |authtoken|.
984 :type apiuser: AuthUser
984 :type apiuser: AuthUser
985 :param repoid: repository name or repository ID.
985 :param repoid: repository name or repository ID.
986 :type repoid: str or int
986 :type repoid: str or int
987 :param repo_name: Update the |repo| name, including the
987 :param repo_name: Update the |repo| name, including the
988 repository group it's in.
988 repository group it's in.
989 :type repo_name: str
989 :type repo_name: str
990 :param owner: Set the |repo| owner.
990 :param owner: Set the |repo| owner.
991 :type owner: str
991 :type owner: str
992 :param fork_of: Set the |repo| as fork of another |repo|.
992 :param fork_of: Set the |repo| as fork of another |repo|.
993 :type fork_of: str
993 :type fork_of: str
994 :param description: Update the |repo| description.
994 :param description: Update the |repo| description.
995 :type description: str
995 :type description: str
996 :param private: Set the |repo| as private. (True | False)
996 :param private: Set the |repo| as private. (True | False)
997 :type private: bool
997 :type private: bool
998 :param clone_uri: Update the |repo| clone URI.
998 :param clone_uri: Update the |repo| clone URI.
999 :type clone_uri: str
999 :type clone_uri: str
1000 :param landing_rev: Set the |repo| landing revision, e.g. branch:default, book:dev, rev:abcd
1000 :param landing_rev: Set the |repo| landing revision, e.g. branch:default, book:dev, rev:abcd
1001 :type landing_rev: str
1001 :type landing_rev: str
1002 :param enable_statistics: Enable statistics on the |repo|, (True | False).
1002 :param enable_statistics: Enable statistics on the |repo|, (True | False).
1003 :type enable_statistics: bool
1003 :type enable_statistics: bool
1004 :param enable_locking: Enable |repo| locking.
1004 :param enable_locking: Enable |repo| locking.
1005 :type enable_locking: bool
1005 :type enable_locking: bool
1006 :param enable_downloads: Enable downloads from the |repo|, (True | False).
1006 :param enable_downloads: Enable downloads from the |repo|, (True | False).
1007 :type enable_downloads: bool
1007 :type enable_downloads: bool
1008 :param fields: Add extra fields to the |repo|. Use the following
1008 :param fields: Add extra fields to the |repo|. Use the following
1009 example format: ``field_key=field_val,field_key2=fieldval2``.
1009 example format: ``field_key=field_val,field_key2=fieldval2``.
1010 Escape ', ' with \,
1010 Escape ', ' with \,
1011 :type fields: str
1011 :type fields: str
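
Example output (an illustrative sketch; the repository object is abbreviated
and all values are placeholders):

.. code-block:: bash

    id : <id_given_in_input>
    result : {
        "msg": "updated repo ID:<repo_id> <reponame>",
        "repository": <repository object>
    }
    error : null
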
1012 """
1012 """
1013
1013
1014 repo = get_repo_or_error(repoid)
1014 repo = get_repo_or_error(repoid)
1015
1015
1016 include_secrets = False
1016 include_secrets = False
1017 if not has_superadmin_permission(apiuser):
1017 if not has_superadmin_permission(apiuser):
1018 validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
1018 validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
1019 else:
1019 else:
1020 include_secrets = True
1020 include_secrets = True
1021
1021
1022 updates = dict(
1022 updates = dict(
1023 repo_name=repo_name
1023 repo_name=repo_name
1024 if not isinstance(repo_name, Optional) else repo.repo_name,
1024 if not isinstance(repo_name, Optional) else repo.repo_name,
1025
1025
1026 fork_id=fork_of
1026 fork_id=fork_of
1027 if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,
1027 if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,
1028
1028
1029 user=owner
1029 user=owner
1030 if not isinstance(owner, Optional) else repo.user.username,
1030 if not isinstance(owner, Optional) else repo.user.username,
1031
1031
1032 repo_description=description
1032 repo_description=description
1033 if not isinstance(description, Optional) else repo.description,
1033 if not isinstance(description, Optional) else repo.description,
1034
1034
1035 repo_private=private
1035 repo_private=private
1036 if not isinstance(private, Optional) else repo.private,
1036 if not isinstance(private, Optional) else repo.private,
1037
1037
1038 clone_uri=clone_uri
1038 clone_uri=clone_uri
1039 if not isinstance(clone_uri, Optional) else repo.clone_uri,
1039 if not isinstance(clone_uri, Optional) else repo.clone_uri,
1040
1040
1041 push_uri=push_uri
1041 push_uri=push_uri
1042 if not isinstance(push_uri, Optional) else repo.push_uri,
1042 if not isinstance(push_uri, Optional) else repo.push_uri,
1043
1043
1044 repo_landing_rev=landing_rev
1044 repo_landing_rev=landing_rev
1045 if not isinstance(landing_rev, Optional) else repo._landing_revision,
1045 if not isinstance(landing_rev, Optional) else repo._landing_revision,
1046
1046
1047 repo_enable_statistics=enable_statistics
1047 repo_enable_statistics=enable_statistics
1048 if not isinstance(enable_statistics, Optional) else repo.enable_statistics,
1048 if not isinstance(enable_statistics, Optional) else repo.enable_statistics,
1049
1049
1050 repo_enable_locking=enable_locking
1050 repo_enable_locking=enable_locking
1051 if not isinstance(enable_locking, Optional) else repo.enable_locking,
1051 if not isinstance(enable_locking, Optional) else repo.enable_locking,
1052
1052
1053 repo_enable_downloads=enable_downloads
1053 repo_enable_downloads=enable_downloads
1054 if not isinstance(enable_downloads, Optional) else repo.enable_downloads)
1054 if not isinstance(enable_downloads, Optional) else repo.enable_downloads)
1055
1055
1056 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1056 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1057 ref_choices, _labels = ScmModel().get_repo_landing_revs(
1057 ref_choices, _labels = ScmModel().get_repo_landing_revs(
1058 request.translate, repo=repo)
1058 request.translate, repo=repo)
1059 ref_choices = list(set(ref_choices + [landing_ref]))
1059 ref_choices = list(set(ref_choices + [landing_ref]))
1060
1060
1061 old_values = repo.get_api_data()
1061 old_values = repo.get_api_data()
1062 repo_type = repo.repo_type
1062 repo_type = repo.repo_type
1063 schema = repo_schema.RepoSchema().bind(
1063 schema = repo_schema.RepoSchema().bind(
1064 repo_type_options=rhodecode.BACKENDS.keys(),
1064 repo_type_options=rhodecode.BACKENDS.keys(),
1065 repo_ref_options=ref_choices,
1065 repo_ref_options=ref_choices,
1066 repo_type=repo_type,
1066 repo_type=repo_type,
1067 # user caller
1067 # user caller
1068 user=apiuser,
1068 user=apiuser,
1069 old_values=old_values)
1069 old_values=old_values)
1070 try:
1070 try:
1071 schema_data = schema.deserialize(dict(
1071 schema_data = schema.deserialize(dict(
1072 # we save old value, users cannot change type
1072 # we save old value, users cannot change type
1073 repo_type=repo_type,
1073 repo_type=repo_type,
1074
1074
1075 repo_name=updates['repo_name'],
1075 repo_name=updates['repo_name'],
1076 repo_owner=updates['user'],
1076 repo_owner=updates['user'],
1077 repo_description=updates['repo_description'],
1077 repo_description=updates['repo_description'],
1078 repo_clone_uri=updates['clone_uri'],
1078 repo_clone_uri=updates['clone_uri'],
1079 repo_push_uri=updates['push_uri'],
1079 repo_push_uri=updates['push_uri'],
1080 repo_fork_of=updates['fork_id'],
1080 repo_fork_of=updates['fork_id'],
1081 repo_private=updates['repo_private'],
1081 repo_private=updates['repo_private'],
1082 repo_landing_commit_ref=updates['repo_landing_rev'],
1082 repo_landing_commit_ref=updates['repo_landing_rev'],
1083 repo_enable_statistics=updates['repo_enable_statistics'],
1083 repo_enable_statistics=updates['repo_enable_statistics'],
1084 repo_enable_downloads=updates['repo_enable_downloads'],
1084 repo_enable_downloads=updates['repo_enable_downloads'],
1085 repo_enable_locking=updates['repo_enable_locking']))
1085 repo_enable_locking=updates['repo_enable_locking']))
1086 except validation_schema.Invalid as err:
1086 except validation_schema.Invalid as err:
1087 raise JSONRPCValidationError(colander_exc=err)
1087 raise JSONRPCValidationError(colander_exc=err)
1088
1088
1089 # save validated data back into the updates dict
1089 # save validated data back into the updates dict
1090 validated_updates = dict(
1090 validated_updates = dict(
1091 repo_name=schema_data['repo_group']['repo_name_without_group'],
1091 repo_name=schema_data['repo_group']['repo_name_without_group'],
1092 repo_group=schema_data['repo_group']['repo_group_id'],
1092 repo_group=schema_data['repo_group']['repo_group_id'],
1093
1093
1094 user=schema_data['repo_owner'],
1094 user=schema_data['repo_owner'],
1095 repo_description=schema_data['repo_description'],
1095 repo_description=schema_data['repo_description'],
1096 repo_private=schema_data['repo_private'],
1096 repo_private=schema_data['repo_private'],
1097 clone_uri=schema_data['repo_clone_uri'],
1097 clone_uri=schema_data['repo_clone_uri'],
1098 push_uri=schema_data['repo_push_uri'],
1098 push_uri=schema_data['repo_push_uri'],
1099 repo_landing_rev=schema_data['repo_landing_commit_ref'],
1099 repo_landing_rev=schema_data['repo_landing_commit_ref'],
1100 repo_enable_statistics=schema_data['repo_enable_statistics'],
1100 repo_enable_statistics=schema_data['repo_enable_statistics'],
1101 repo_enable_locking=schema_data['repo_enable_locking'],
1101 repo_enable_locking=schema_data['repo_enable_locking'],
1102 repo_enable_downloads=schema_data['repo_enable_downloads'],
1102 repo_enable_downloads=schema_data['repo_enable_downloads'],
1103 )
1103 )
1104
1104
1105 if schema_data['repo_fork_of']:
1105 if schema_data['repo_fork_of']:
1106 fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
1106 fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
1107 validated_updates['fork_id'] = fork_repo.repo_id
1107 validated_updates['fork_id'] = fork_repo.repo_id
1108
1108
1109 # extra fields
1109 # extra fields
1110 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
1110 fields = parse_args(Optional.extract(fields), key_prefix='ex_')
1111 if fields:
1111 if fields:
1112 validated_updates.update(fields)
1112 validated_updates.update(fields)
1113
1113
1114 try:
1114 try:
1115 RepoModel().update(repo, **validated_updates)
1115 RepoModel().update(repo, **validated_updates)
1116 audit_logger.store_api(
1116 audit_logger.store_api(
1117 'repo.edit', action_data={'old_data': old_values},
1117 'repo.edit', action_data={'old_data': old_values},
1118 user=apiuser, repo=repo)
1118 user=apiuser, repo=repo)
1119 Session().commit()
1119 Session().commit()
1120 return {
1120 return {
1121 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
1121 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
1122 'repository': repo.get_api_data(include_secrets=include_secrets)
1122 'repository': repo.get_api_data(include_secrets=include_secrets)
1123 }
1123 }
1124 except Exception:
1124 except Exception:
1125 log.exception(
1125 log.exception(
1126 u"Exception while trying to update the repository %s",
1126 u"Exception while trying to update the repository %s",
1127 repoid)
1127 repoid)
1128 raise JSONRPCError('failed to update repo `%s`' % repoid)
1128 raise JSONRPCError('failed to update repo `%s`' % repoid)
1129
1129
1130
1130
1131 @jsonrpc_method()
1131 @jsonrpc_method()
1132 def fork_repo(request, apiuser, repoid, fork_name,
1132 def fork_repo(request, apiuser, repoid, fork_name,
1133 owner=Optional(OAttr('apiuser')),
1133 owner=Optional(OAttr('apiuser')),
1134 description=Optional(''),
1134 description=Optional(''),
1135 private=Optional(False),
1135 private=Optional(False),
1136 clone_uri=Optional(None),
1136 clone_uri=Optional(None),
1137 landing_rev=Optional(None),
1137 landing_rev=Optional(None),
1138 copy_permissions=Optional(False)):
1138 copy_permissions=Optional(False)):
1139 """
1139 """
1140 Creates a fork of the specified |repo|.
1140 Creates a fork of the specified |repo|.
1141
1141
1142 * If the fork_name contains "/", the fork will be created inside
1142 * If the fork_name contains "/", the fork will be created inside
1143 a repository group or nested repository groups
1143 a repository group or nested repository groups
1144
1144
1145 For example "foo/bar/fork-repo" will create a fork called "fork-repo"
1145 For example "foo/bar/fork-repo" will create a fork called "fork-repo"
1146 inside group "foo/bar". You have to have permissions to access and
1146 inside group "foo/bar". You have to have permissions to access and
1147 write to the last repository group ("bar" in this example)
1147 write to the last repository group ("bar" in this example)
1148
1148
1149 This command can only be run using an |authtoken| with at least
1149 This command can only be run using an |authtoken| with at least
1150 read permissions on the forked repo, and fork creation permission for the user.
1150 read permissions on the forked repo, and fork creation permission for the user.
1151
1151
1152 :param apiuser: This is filled automatically from the |authtoken|.
1152 :param apiuser: This is filled automatically from the |authtoken|.
1153 :type apiuser: AuthUser
1153 :type apiuser: AuthUser
1154 :param repoid: Set repository name or repository ID.
1154 :param repoid: Set repository name or repository ID.
1155 :type repoid: str or int
1155 :type repoid: str or int
1156 :param fork_name: Set the fork name, including its repository group membership.
1156 :param fork_name: Set the fork name, including its repository group membership.
1157 :type fork_name: str
1157 :type fork_name: str
1158 :param owner: Set the fork owner.
1158 :param owner: Set the fork owner.
1159 :type owner: str
1159 :type owner: str
1160 :param description: Set the fork description.
1160 :param description: Set the fork description.
1161 :type description: str
1161 :type description: str
1162 :param copy_permissions: Copy permissions from parent |repo|. The
1162 :param copy_permissions: Copy permissions from parent |repo|. The
1163 default is False.
1163 default is False.
1164 :type copy_permissions: bool
1164 :type copy_permissions: bool
1165 :param private: Make the fork private. The default is False.
1165 :param private: Make the fork private. The default is False.
1166 :type private: bool
1166 :type private: bool
1167 :param landing_rev: Set the landing revision, e.g. branch:default, book:dev, rev:abcd
1167 :param landing_rev: Set the landing revision, e.g. branch:default, book:dev, rev:abcd
1168
1168
1169 Example output:
1169 Example output:
1170
1170
1171 .. code-block:: bash
1171 .. code-block:: bash
1172
1172
1173 id : <id_for_response>
1173 id : <id_for_response>
1174 api_key : "<api_key>"
1174 api_key : "<api_key>"
1175 args: {
1175 args: {
1176 "repoid" : "<reponame or repo_id>",
1176 "repoid" : "<reponame or repo_id>",
1177 "fork_name": "<forkname>",
1177 "fork_name": "<forkname>",
1178 "owner": "<username or user_id = Optional(=apiuser)>",
1178 "owner": "<username or user_id = Optional(=apiuser)>",
1179 "description": "<description>",
1179 "description": "<description>",
1180 "copy_permissions": "<bool>",
1180 "copy_permissions": "<bool>",
1181 "private": "<bool>",
1181 "private": "<bool>",
1182 "landing_rev": "<landing_rev>"
1182 "landing_rev": "<landing_rev>"
1183 }
1183 }
1184
1184
1185 Example output:
1185 Example output:
1186
1186
1187 .. code-block:: bash
1187 .. code-block:: bash
1188
1188
1189 id : <id_given_in_input>
1189 id : <id_given_in_input>
1190 result: {
1190 result: {
1191 "msg": "Created fork of `<reponame>` as `<forkname>`",
1191 "msg": "Created fork of `<reponame>` as `<forkname>`",
1192 "success": true,
1192 "success": true,
1193 "task": "<celery task id or None if done sync>"
1193 "task": "<celery task id or None if done sync>"
1194 }
1194 }
1195 error: null
1195 error: null
1196
1196
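Example error output (an illustrative sketch; the repository names are
placeholders, the message format follows the failure path of this call):

.. code-block:: bash

    id : <id_given_in_input>
    result : null
    error : {
        'failed to fork repository `<reponame>` as `<forkname>`'
    }
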
1197 """
1197 """
1198
1198
1199 repo = get_repo_or_error(repoid)
1199 repo = get_repo_or_error(repoid)
1200 repo_name = repo.repo_name
1200 repo_name = repo.repo_name
1201
1201
1202 if not has_superadmin_permission(apiuser):
1202 if not has_superadmin_permission(apiuser):
1203 # check if we have at least read permission for
1203 # check if we have at least read permission for
1204 # this repo that we fork !
1204 # this repo that we fork !
1205 _perms = (
1205 _perms = (
1206 'repository.admin', 'repository.write', 'repository.read')
1206 'repository.admin', 'repository.write', 'repository.read')
1207 validate_repo_permissions(apiuser, repoid, repo, _perms)
1207 validate_repo_permissions(apiuser, repoid, repo, _perms)
1208
1208
1209 # check if the regular user has at least fork permissions as well
1209 # check if the regular user has at least fork permissions as well
1210 if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
1210 if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
1211 raise JSONRPCForbidden()
1211 raise JSONRPCForbidden()
1212
1212
1213 # check if user can set owner parameter
1213 # check if user can set owner parameter
1214 owner = validate_set_owner_permissions(apiuser, owner)
1214 owner = validate_set_owner_permissions(apiuser, owner)
1215
1215
1216 description = Optional.extract(description)
1216 description = Optional.extract(description)
1217 copy_permissions = Optional.extract(copy_permissions)
1217 copy_permissions = Optional.extract(copy_permissions)
1218 clone_uri = Optional.extract(clone_uri)
1218 clone_uri = Optional.extract(clone_uri)
1219
1219
1220 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1220 landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
1221 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
1221 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
1222 ref_choices = list(set(ref_choices + [landing_ref]))
1222 ref_choices = list(set(ref_choices + [landing_ref]))
1223 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
1223 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
1224
1224
1225 private = Optional.extract(private)
1225 private = Optional.extract(private)
1226
1226
1227 schema = repo_schema.RepoSchema().bind(
1227 schema = repo_schema.RepoSchema().bind(
1228 repo_type_options=rhodecode.BACKENDS.keys(),
1228 repo_type_options=rhodecode.BACKENDS.keys(),
1229 repo_ref_options=ref_choices,
1229 repo_ref_options=ref_choices,
1230 repo_type=repo.repo_type,
1230 repo_type=repo.repo_type,
1231 # user caller
1231 # user caller
1232 user=apiuser)
1232 user=apiuser)
1233
1233
1234 try:
1234 try:
1235 schema_data = schema.deserialize(dict(
1235 schema_data = schema.deserialize(dict(
1236 repo_name=fork_name,
1236 repo_name=fork_name,
1237 repo_type=repo.repo_type,
1237 repo_type=repo.repo_type,
1238 repo_owner=owner.username,
1238 repo_owner=owner.username,
1239 repo_description=description,
1239 repo_description=description,
1240 repo_landing_commit_ref=landing_commit_ref,
1240 repo_landing_commit_ref=landing_commit_ref,
1241 repo_clone_uri=clone_uri,
1241 repo_clone_uri=clone_uri,
1242 repo_private=private,
1242 repo_private=private,
1243 repo_copy_permissions=copy_permissions))
1243 repo_copy_permissions=copy_permissions))
1244 except validation_schema.Invalid as err:
1244 except validation_schema.Invalid as err:
1245 raise JSONRPCValidationError(colander_exc=err)
1245 raise JSONRPCValidationError(colander_exc=err)
1246
1246
1247 try:
1247 try:
1248 data = {
1248 data = {
1249 'fork_parent_id': repo.repo_id,
1249 'fork_parent_id': repo.repo_id,
1250
1250
1251 'repo_name': schema_data['repo_group']['repo_name_without_group'],
1251 'repo_name': schema_data['repo_group']['repo_name_without_group'],
1252 'repo_name_full': schema_data['repo_name'],
1252 'repo_name_full': schema_data['repo_name'],
1253 'repo_group': schema_data['repo_group']['repo_group_id'],
1253 'repo_group': schema_data['repo_group']['repo_group_id'],
1254 'repo_type': schema_data['repo_type'],
1254 'repo_type': schema_data['repo_type'],
1255 'description': schema_data['repo_description'],
1255 'description': schema_data['repo_description'],
1256 'private': schema_data['repo_private'],
1256 'private': schema_data['repo_private'],
1257 'copy_permissions': schema_data['repo_copy_permissions'],
1257 'copy_permissions': schema_data['repo_copy_permissions'],
1258 'landing_rev': schema_data['repo_landing_commit_ref'],
1258 'landing_rev': schema_data['repo_landing_commit_ref'],
1259 }
1259 }
1260
1260
1261 task = RepoModel().create_fork(data, cur_user=owner.user_id)
1261 task = RepoModel().create_fork(data, cur_user=owner.user_id)
1262 # no commit, it's done in RepoModel, or async via celery
1262 # no commit, it's done in RepoModel, or async via celery
1263 task_id = get_task_id(task)
1263 task_id = get_task_id(task)
1264
1264
1265 return {
1265 return {
1266 'msg': 'Created fork of `%s` as `%s`' % (
1266 'msg': 'Created fork of `%s` as `%s`' % (
1267 repo.repo_name, schema_data['repo_name']),
1267 repo.repo_name, schema_data['repo_name']),
1268 'success': True, # cannot return the repo data here since fork
1268 'success': True, # cannot return the repo data here since fork
1269 # can be done async
1269 # can be done async
1270 'task': task_id
1270 'task': task_id
1271 }
1271 }
1272 except Exception:
1272 except Exception:
1273 log.exception(
1273 log.exception(
1274 u"Exception while trying to create fork %s",
1274 u"Exception while trying to create fork %s",
1275 schema_data['repo_name'])
1275 schema_data['repo_name'])
1276 raise JSONRPCError(
1276 raise JSONRPCError(
1277 'failed to fork repository `%s` as `%s`' % (
1277 'failed to fork repository `%s` as `%s`' % (
1278 repo_name, schema_data['repo_name']))
1278 repo_name, schema_data['repo_name']))
1279
1279
1280
1280
1281 @jsonrpc_method()
1281 @jsonrpc_method()
1282 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1282 def delete_repo(request, apiuser, repoid, forks=Optional('')):
1283 """
1283 """
1284 Deletes a repository.
1284 Deletes a repository.
1285
1285
1286 * When the `forks` parameter is set it's possible to detach or delete
1286 * When the `forks` parameter is set it's possible to detach or delete
1287 forks of the deleted repository.
1287 forks of the deleted repository.
1288
1288
1289 This command can only be run using an |authtoken| with admin
1289 This command can only be run using an |authtoken| with admin
1290 permissions on the |repo|.
1290 permissions on the |repo|.
1291
1291
1292 :param apiuser: This is filled automatically from the |authtoken|.
1292 :param apiuser: This is filled automatically from the |authtoken|.
1293 :type apiuser: AuthUser
1293 :type apiuser: AuthUser
1294 :param repoid: Set the repository name or repository ID.
1294 :param repoid: Set the repository name or repository ID.
1295 :type repoid: str or int
1295 :type repoid: str or int
1296 :param forks: Set to `detach` or `delete` forks from the |repo|.
1296 :param forks: Set to `detach` or `delete` forks from the |repo|.
1297 :type forks: Optional(str)
1297 :type forks: Optional(str)
1298
1298
1299 Example output:
1299 Example output:
1300
1300
1301 .. code-block:: bash
1301 .. code-block:: bash
1302
1302
1303 id : <id_given_in_input>
1303 id : <id_given_in_input>
1304 result: {
1304 result: {
1305 "msg": "Deleted repository `<reponame>`",
1305 "msg": "Deleted repository `<reponame>`",
1306 "success": true
1306 "success": true
1307 }
1307 }
1308 error: null
1308 error: null
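
Example error output (an illustrative sketch; the repository name is a
placeholder, the message format follows the failure path of this call):

.. code-block:: bash

    id : <id_given_in_input>
    result : null
    error : {
        'failed to delete repository `<reponame>`'
    }
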
1309 """
1309 """
1310
1310
1311 repo = get_repo_or_error(repoid)
1311 repo = get_repo_or_error(repoid)
1312 repo_name = repo.repo_name
1312 repo_name = repo.repo_name
1313 if not has_superadmin_permission(apiuser):
1313 if not has_superadmin_permission(apiuser):
1314 _perms = ('repository.admin',)
1314 _perms = ('repository.admin',)
1315 validate_repo_permissions(apiuser, repoid, repo, _perms)
1315 validate_repo_permissions(apiuser, repoid, repo, _perms)
1316
1316
1317 try:
1317 try:
1318 handle_forks = Optional.extract(forks)
1318 handle_forks = Optional.extract(forks)
1319 _forks_msg = ''
1319 _forks_msg = ''
1320 _forks = [f for f in repo.forks]
1320 _forks = [f for f in repo.forks]
1321 if handle_forks == 'detach':
1321 if handle_forks == 'detach':
1322 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1322 _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
1323 elif handle_forks == 'delete':
1323 elif handle_forks == 'delete':
1324 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1324 _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
1325 elif _forks:
1325 elif _forks:
1326 raise JSONRPCError(
1326 raise JSONRPCError(
1327 'Cannot delete `%s`, it still contains attached forks' %
1327 'Cannot delete `%s`, it still contains attached forks' %
1328 (repo.repo_name,)
1328 (repo.repo_name,)
1329 )
1329 )
1330 old_data = repo.get_api_data()
1330 old_data = repo.get_api_data()
1331 RepoModel().delete(repo, forks=forks)
1331 RepoModel().delete(repo, forks=forks)
1332
1332
1333 repo = audit_logger.RepoWrap(repo_id=None,
1333 repo = audit_logger.RepoWrap(repo_id=None,
1334 repo_name=repo.repo_name)
1334 repo_name=repo.repo_name)
1335
1335
1336 audit_logger.store_api(
1336 audit_logger.store_api(
1337 'repo.delete', action_data={'old_data': old_data},
1337 'repo.delete', action_data={'old_data': old_data},
1338 user=apiuser, repo=repo)
1338 user=apiuser, repo=repo)
1339
1339
1340 ScmModel().mark_for_invalidation(repo_name, delete=True)
1340 ScmModel().mark_for_invalidation(repo_name, delete=True)
1341 Session().commit()
1341 Session().commit()
1342 return {
1342 return {
1343 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
1343 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
1344 'success': True
1344 'success': True
1345 }
1345 }
1346 except Exception:
1346 except Exception:
1347 log.exception("Exception occurred while trying to delete repo")
1347 log.exception("Exception occurred while trying to delete repo")
1348 raise JSONRPCError(
1348 raise JSONRPCError(
1349 'failed to delete repository `%s`' % (repo_name,)
1349 'failed to delete repository `%s`' % (repo_name,)
1350 )
1350 )
1351
1351
1352
1352
1353 #TODO: marcink, change name ?
1353 #TODO: marcink, change name ?
1354 @jsonrpc_method()
1354 @jsonrpc_method()
1355 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1355 def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
1356 """
1356 """
1357 Invalidates the cache for the specified repository.
1357 Invalidates the cache for the specified repository.
1358
1358
1359 This command can only be run using an |authtoken| with admin rights to
1359 This command can only be run using an |authtoken| with admin rights to
1360 the specified repository.
1360 the specified repository.
1361
1361
1362 This command takes the following options:
1362 This command takes the following options:
1363
1363
1364 :param apiuser: This is filled automatically from |authtoken|.
1364 :param apiuser: This is filled automatically from |authtoken|.
1365 :type apiuser: AuthUser
1365 :type apiuser: AuthUser
1366 :param repoid: Sets the repository name or repository ID.
1366 :param repoid: Sets the repository name or repository ID.
1367 :type repoid: str or int
1367 :type repoid: str or int
1368 :param delete_keys: This deletes the invalidated keys instead of
1368 :param delete_keys: This deletes the invalidated keys instead of
1369 just flagging them.
1369 just flagging them.
1370 :type delete_keys: Optional(``True`` | ``False``)
1370 :type delete_keys: Optional(``True`` | ``False``)
1371
1371
1372 Example output:
1372 Example output:
1373
1373
1374 .. code-block:: bash
1374 .. code-block:: bash
1375
1375
1376 id : <id_given_in_input>
1376 id : <id_given_in_input>
1377 result : {
1377 result : {
1378 'msg': Cache for repository `<repository name>` was invalidated,
1378 'msg': Cache for repository `<repository name>` was invalidated,
1379 'repository': <repository name>
1379 'repository': <repository name>
1380 }
1380 }
1381 error : null
1381 error : null
1382
1382
1383 Example error output:
1383 Example error output:
1384
1384
1385 .. code-block:: bash
1385 .. code-block:: bash
1386
1386
1387 id : <id_given_in_input>
1387 id : <id_given_in_input>
1388 result : null
1388 result : null
1389 error : {
1389 error : {
1390 'Error occurred during cache invalidation action'
1390 'Error occurred during cache invalidation action'
1391 }
1391 }
1392
1392
1393 """
1393 """
1394
1394
1395 repo = get_repo_or_error(repoid)
1395 repo = get_repo_or_error(repoid)
1396 if not has_superadmin_permission(apiuser):
1396 if not has_superadmin_permission(apiuser):
1397 _perms = ('repository.admin', 'repository.write',)
1397 _perms = ('repository.admin', 'repository.write',)
1398 validate_repo_permissions(apiuser, repoid, repo, _perms)
1398 validate_repo_permissions(apiuser, repoid, repo, _perms)
1399
1399
1400 delete = Optional.extract(delete_keys)
1400 delete = Optional.extract(delete_keys)
1401 try:
1401 try:
1402 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1402 ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
1403 return {
1403 return {
1404 'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
1404 'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
1405 'repository': repo.repo_name
1405 'repository': repo.repo_name
1406 }
1406 }
1407 except Exception:
1407 except Exception:
1408 log.exception(
1408 log.exception(
1409 "Exception occurred while trying to invalidate repo cache")
1409 "Exception occurred while trying to invalidate repo cache")
1410 raise JSONRPCError(
1410 raise JSONRPCError(
1411 'Error occurred during cache invalidation action'
1411 'Error occurred during cache invalidation action'
1412 )
1412 )
1413
1413
1414
1414
1415 #TODO: marcink, change name ?
1415 #TODO: marcink, change name ?
1416 @jsonrpc_method()
1416 @jsonrpc_method()
1417 def lock(request, apiuser, repoid, locked=Optional(None),
1417 def lock(request, apiuser, repoid, locked=Optional(None),
1418 userid=Optional(OAttr('apiuser'))):
1418 userid=Optional(OAttr('apiuser'))):
1419 """
1419 """
1420 Sets the lock state of the specified |repo| by the given user.
1420 Sets the lock state of the specified |repo| by the given user.
1421 For more information, see :ref:`repo-locking`.
1421 For more information, see :ref:`repo-locking`.
1422
1422
1423 * If the ``userid`` option is not set, the repository is locked to the
1423 * If the ``userid`` option is not set, the repository is locked to the
1424 user who called the method.
1424 user who called the method.
1425 * If the ``locked`` parameter is not set, the current lock state of the
1425 * If the ``locked`` parameter is not set, the current lock state of the
1426 repository is displayed.
1426 repository is displayed.
1427
1427
1428 This command can only be run using an |authtoken| with admin rights to
1428 This command can only be run using an |authtoken| with admin rights to
1429 the specified repository.
1429 the specified repository.
1430
1430
1431 This command takes the following options:
1431 This command takes the following options:
1432
1432
1433 :param apiuser: This is filled automatically from the |authtoken|.
1433 :param apiuser: This is filled automatically from the |authtoken|.
1434 :type apiuser: AuthUser
1434 :type apiuser: AuthUser
1435 :param repoid: Sets the repository name or repository ID.
1435 :param repoid: Sets the repository name or repository ID.
1436 :type repoid: str or int
1436 :type repoid: str or int
1437 :param locked: Sets the lock state.
1437 :param locked: Sets the lock state.
1438 :type locked: Optional(``True`` | ``False``)
1438 :type locked: Optional(``True`` | ``False``)
1439 :param userid: Set the repository lock to this user.
1439 :param userid: Set the repository lock to this user.
1440 :type userid: Optional(str or int)
1440 :type userid: Optional(str or int)
1441
1441
1442 Example output:
1442 Example output:
1443
1443
1444 .. code-block:: bash
1444 .. code-block:: bash
1445
1445
1446 id : <id_given_in_input>
1446 id : <id_given_in_input>
1447 result : {
1447 result : {
1448 'repo': '<reponame>',
1448 'repo': '<reponame>',
1449 'locked': <bool: lock state>,
1449 'locked': <bool: lock state>,
1450 'locked_since': <int: lock timestamp>,
1450 'locked_since': <int: lock timestamp>,
1451 'locked_by': <username of person who made the lock>,
1451 'locked_by': <username of person who made the lock>,
1452 'lock_reason': <str: reason for locking>,
1452 'lock_reason': <str: reason for locking>,
1453 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1453 'lock_state_changed': <bool: True if lock state has been changed in this request>,
1454 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1454 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
1455 or
1455 or
1456 'msg': 'Repo `<repository name>` not locked.'
1456 'msg': 'Repo `<repository name>` not locked.'
1457 or
1457 or
1458 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1458 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
1459 }
1459 }
1460 error : null
1460 error : null
1461
1461
1462 Example error output:
1462 Example error output:
1463
1463
1464 .. code-block:: bash
1464 .. code-block:: bash
1465
1465
1466 id : <id_given_in_input>
1466 id : <id_given_in_input>
1467 result : null
1467 result : null
1468 error : {
1468 error : {
1469 'Error occurred locking repository `<reponame>`'
1469 'Error occurred locking repository `<reponame>`'
1470 }
1470 }
1471 """
1471 """
1472
1472
1473 repo = get_repo_or_error(repoid)
1473 repo = get_repo_or_error(repoid)
1474 if not has_superadmin_permission(apiuser):
1474 if not has_superadmin_permission(apiuser):
1475 # check if we have at least write permission for this repo !
1475 # check if we have at least write permission for this repo !
1476 _perms = ('repository.admin', 'repository.write',)
1476 _perms = ('repository.admin', 'repository.write',)
1477 validate_repo_permissions(apiuser, repoid, repo, _perms)
1477 validate_repo_permissions(apiuser, repoid, repo, _perms)
1478
1478
1479 # make sure a normal user does not pass someone else's userid,
1479 # make sure a normal user does not pass someone else's userid,
1480 # they are not allowed to do that
1480 # they are not allowed to do that
1481 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1481 if not isinstance(userid, Optional) and userid != apiuser.user_id:
1482 raise JSONRPCError('userid is not the same as your user')
1482 raise JSONRPCError('userid is not the same as your user')
1483
1483
1484 if isinstance(userid, Optional):
1484 if isinstance(userid, Optional):
1485 userid = apiuser.user_id
1485 userid = apiuser.user_id
1486
1486
1487 user = get_user_or_error(userid)
1487 user = get_user_or_error(userid)
1488
1488
1489 if isinstance(locked, Optional):
1489 if isinstance(locked, Optional):
1490 lockobj = repo.locked
1490 lockobj = repo.locked
1491
1491
1492 if lockobj[0] is None:
1492 if lockobj[0] is None:
1493 _d = {
1493 _d = {
1494 'repo': repo.repo_name,
1494 'repo': repo.repo_name,
1495 'locked': False,
1495 'locked': False,
1496 'locked_since': None,
1496 'locked_since': None,
1497 'locked_by': None,
1497 'locked_by': None,
1498 'lock_reason': None,
1498 'lock_reason': None,
1499 'lock_state_changed': False,
1499 'lock_state_changed': False,
1500 'msg': 'Repo `%s` not locked.' % repo.repo_name
1500 'msg': 'Repo `%s` not locked.' % repo.repo_name
1501 }
1501 }
1502 return _d
1502 return _d
1503 else:
1503 else:
1504 _user_id, _time, _reason = lockobj
1504 _user_id, _time, _reason = lockobj
1505 lock_user = get_user_or_error(_user_id)
1505 lock_user = get_user_or_error(_user_id)
1506 _d = {
1506 _d = {
1507 'repo': repo.repo_name,
1507 'repo': repo.repo_name,
1508 'locked': True,
1508 'locked': True,
1509 'locked_since': _time,
1509 'locked_since': _time,
1510 'locked_by': lock_user.username,
1510 'locked_by': lock_user.username,
1511 'lock_reason': _reason,
1511 'lock_reason': _reason,
1512 'lock_state_changed': False,
1512 'lock_state_changed': False,
1513 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1513 'msg': ('Repo `%s` locked by `%s` on `%s`.'
1514 % (repo.repo_name, lock_user.username,
1514 % (repo.repo_name, lock_user.username,
1515 json.dumps(time_to_datetime(_time))))
1515 json.dumps(time_to_datetime(_time))))
1516 }
1516 }
1517 return _d
1517 return _d
1518
1518
1519 # force locked state through a flag
1519 # force locked state through a flag
1520 else:
1520 else:
1521 locked = str2bool(locked)
1521 locked = str2bool(locked)
1522 lock_reason = Repository.LOCK_API
1522 lock_reason = Repository.LOCK_API
1523 try:
1523 try:
1524 if locked:
1524 if locked:
1525 lock_time = time.time()
1525 lock_time = time.time()
1526 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1526 Repository.lock(repo, user.user_id, lock_time, lock_reason)
1527 else:
1527 else:
1528 lock_time = None
1528 lock_time = None
1529 Repository.unlock(repo)
1529 Repository.unlock(repo)
1530 _d = {
1530 _d = {
1531 'repo': repo.repo_name,
1531 'repo': repo.repo_name,
1532 'locked': locked,
1532 'locked': locked,
1533 'locked_since': lock_time,
1533 'locked_since': lock_time,
1534 'locked_by': user.username,
1534 'locked_by': user.username,
1535 'lock_reason': lock_reason,
1535 'lock_reason': lock_reason,
1536 'lock_state_changed': True,
1536 'lock_state_changed': True,
1537 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1537 'msg': ('User `%s` set lock state for repo `%s` to `%s`'
1538 % (user.username, repo.repo_name, locked))
1538 % (user.username, repo.repo_name, locked))
1539 }
1539 }
1540 return _d
1540 return _d
1541 except Exception:
1541 except Exception:
1542 log.exception(
1542 log.exception(
1543 "Exception occurred while trying to lock repository")
1543 "Exception occurred while trying to lock repository")
1544 raise JSONRPCError(
1544 raise JSONRPCError(
1545 'Error occurred locking repository `%s`' % repo.repo_name
1545 'Error occurred locking repository `%s`' % repo.repo_name
1546 )
1546 )
1547
1547
1548
1548
1549 @jsonrpc_method()
1549 @jsonrpc_method()
1550 def comment_commit(
1550 def comment_commit(
1551 request, apiuser, repoid, commit_id, message, status=Optional(None),
1551 request, apiuser, repoid, commit_id, message, status=Optional(None),
1552 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1552 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1553 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1553 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1554 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1554 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1555 """
1555 """
1556 Set a commit comment, and optionally change the status of the commit.
1556 Set a commit comment, and optionally change the status of the commit.
1557
1557
1558 :param apiuser: This is filled automatically from the |authtoken|.
1558 :param apiuser: This is filled automatically from the |authtoken|.
1559 :type apiuser: AuthUser
1559 :type apiuser: AuthUser
1560 :param repoid: Set the repository name or repository ID.
1560 :param repoid: Set the repository name or repository ID.
1561 :type repoid: str or int
1561 :type repoid: str or int
1562 :param commit_id: Specify the commit_id for which to set a comment.
1562 :param commit_id: Specify the commit_id for which to set a comment.
1563 :type commit_id: str
1563 :type commit_id: str
1564 :param message: The comment text.
1564 :param message: The comment text.
1565 :type message: str
1565 :type message: str
1566 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1566 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1567 'approved', 'rejected', 'under_review'
1567 'approved', 'rejected', 'under_review'
1568 :type status: str
1568 :type status: str
1569 :param comment_type: Comment type, one of: 'note', 'todo'
1569 :param comment_type: Comment type, one of: 'note', 'todo'
1570 :type comment_type: Optional(str), default: 'note'
1570 :type comment_type: Optional(str), default: 'note'
1571 :param resolves_comment_id: id of comment which this one will resolve
1571 :param resolves_comment_id: id of comment which this one will resolve
1572 :type resolves_comment_id: Optional(int)
1572 :type resolves_comment_id: Optional(int)
1573 :param extra_recipients: list of user ids or usernames to add
1573 :param extra_recipients: list of user ids or usernames to add
1574 notifications for this comment. Acts like a CC for notification
1574 notifications for this comment. Acts like a CC for notification
1575 :type extra_recipients: Optional(list)
1575 :type extra_recipients: Optional(list)
1576 :param userid: Set the user name of the comment creator.
1576 :param userid: Set the user name of the comment creator.
1577 :type userid: Optional(str or int)
1577 :type userid: Optional(str or int)
1578 :param send_email: Define if this comment should also send email notification
1578 :param send_email: Define if this comment should also send email notification
1579 :type send_email: Optional(bool)
1579 :type send_email: Optional(bool)
1580
1580
1581 Example output:
1581 Example output:
1582
1582
1583 .. code-block:: bash
1583 .. code-block:: bash
1584
1584
1585 {
1585 {
1586 "id" : <id_given_in_input>,
1586 "id" : <id_given_in_input>,
1587 "result" : {
1587 "result" : {
1588 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1588 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1589 "status_change": null or <status>,
1589 "status_change": null or <status>,
1590 "success": true
1590 "success": true
1591 },
1591 },
1592 "error" : null
1592 "error" : null
1593 }
1593 }
1594
1594
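Example error output (an illustrative sketch; the repository name is a
placeholder, the message format follows the failure path of this call):

.. code-block:: bash

    {
        "id" : <id_given_in_input>,
        "result" : null,
        "error" : "failed to set comment on repository `<reponame>`"
    }
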
1595 """
1595 """
1596 repo = get_repo_or_error(repoid)
1596 repo = get_repo_or_error(repoid)
1597 if not has_superadmin_permission(apiuser):
1597 if not has_superadmin_permission(apiuser):
1598 _perms = ('repository.read', 'repository.write', 'repository.admin')
1598 _perms = ('repository.read', 'repository.write', 'repository.admin')
1599 validate_repo_permissions(apiuser, repoid, repo, _perms)
1599 validate_repo_permissions(apiuser, repoid, repo, _perms)
1600
1600
1601 try:
1601 try:
1602 commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id
1602 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1603 commit_id = commit.raw_id
1603 except Exception as e:
1604 except Exception as e:
1604 log.exception('Failed to fetch commit')
1605 log.exception('Failed to fetch commit')
1605 raise JSONRPCError(safe_str(e))
1606 raise JSONRPCError(safe_str(e))
1606
1607
1607 if isinstance(userid, Optional):
1608 if isinstance(userid, Optional):
1608 userid = apiuser.user_id
1609 userid = apiuser.user_id
1609
1610
1610 user = get_user_or_error(userid)
1611 user = get_user_or_error(userid)
1611 status = Optional.extract(status)
1612 status = Optional.extract(status)
1612 comment_type = Optional.extract(comment_type)
1613 comment_type = Optional.extract(comment_type)
1613 resolves_comment_id = Optional.extract(resolves_comment_id)
1614 resolves_comment_id = Optional.extract(resolves_comment_id)
1614 extra_recipients = Optional.extract(extra_recipients)
1615 extra_recipients = Optional.extract(extra_recipients)
1615 send_email = Optional.extract(send_email, binary=True)
1616 send_email = Optional.extract(send_email, binary=True)
1616
1617
1617 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1618 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1618 if status and status not in allowed_statuses:
1619 if status and status not in allowed_statuses:
1619 raise JSONRPCError('Bad status, must be one '
1619 raise JSONRPCError('Bad status, must be one '
1620 'of %s, got %s' % (allowed_statuses, status,))
1620 'of %s, got %s' % (allowed_statuses, status,))
1621
1622
1622 if resolves_comment_id:
1623 if resolves_comment_id:
1623 comment = ChangesetComment.get(resolves_comment_id)
1624 comment = ChangesetComment.get(resolves_comment_id)
1624 if not comment:
1625 if not comment:
1625 raise JSONRPCError(
1626 raise JSONRPCError(
1626 'Invalid resolves_comment_id `%s` for this commit.'
1627 'Invalid resolves_comment_id `%s` for this commit.'
1627 % resolves_comment_id)
1628 % resolves_comment_id)
1628 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1629 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1629 raise JSONRPCError(
1630 raise JSONRPCError(
1630 'Comment `%s` is wrong type for setting status to resolved.'
1631 'Comment `%s` is wrong type for setting status to resolved.'
1631 % resolves_comment_id)
1632 % resolves_comment_id)
1632
1633
1633 try:
1634 try:
1634 rc_config = SettingsModel().get_all_settings()
1635 rc_config = SettingsModel().get_all_settings()
1635 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1636 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1636 status_change_label = ChangesetStatus.get_status_lbl(status)
1637 status_change_label = ChangesetStatus.get_status_lbl(status)
1637 comment = CommentsModel().create(
1638 comment = CommentsModel().create(
1638 message, repo, user, commit_id=commit_id,
1639 message, repo, user, commit_id=commit_id,
1639 status_change=status_change_label,
1640 status_change=status_change_label,
1640 status_change_type=status,
1641 status_change_type=status,
1641 renderer=renderer,
1642 renderer=renderer,
1642 comment_type=comment_type,
1643 comment_type=comment_type,
1643 resolves_comment_id=resolves_comment_id,
1644 resolves_comment_id=resolves_comment_id,
1644 auth_user=apiuser,
1645 auth_user=apiuser,
1645 extra_recipients=extra_recipients,
1646 extra_recipients=extra_recipients,
1646 send_email=send_email
1647 send_email=send_email
1647 )
1648 )
1648 if status:
1649 if status:
1649 # also do a status change
1650 # also do a status change
1650 try:
1651 try:
1651 ChangesetStatusModel().set_status(
1652 ChangesetStatusModel().set_status(
1652 repo, status, user, comment, revision=commit_id,
1653 repo, status, user, comment, revision=commit_id,
1653 dont_allow_on_closed_pull_request=True
1654 dont_allow_on_closed_pull_request=True
1654 )
1655 )
1655 except StatusChangeOnClosedPullRequestError:
1656 except StatusChangeOnClosedPullRequestError:
1656 log.exception(
1657 log.exception(
1657 "Exception occurred while trying to change repo commit status")
1658 "Exception occurred while trying to change repo commit status")
1658 msg = ('Changing status on a commit associated with '
1659 msg = ('Changing status on a commit associated with '
1659 'a closed pull request is not allowed')
1660 'a closed pull request is not allowed')
1660 raise JSONRPCError(msg)
1661 raise JSONRPCError(msg)
1661
1662
1663 CommentsModel().trigger_commit_comment_hook(
1664 repo, apiuser, 'create',
1665 data={'comment': comment, 'commit': commit})
1666
1662 Session().commit()
1667 Session().commit()
1663 return {
1668 return {
1664 'msg': (
1669 'msg': (
1665 'Commented on commit `%s` for repository `%s`' % (
1670 'Commented on commit `%s` for repository `%s`' % (
1666 comment.revision, repo.repo_name)),
1671 comment.revision, repo.repo_name)),
1667 'status_change': status,
1672 'status_change': status,
1668 'success': True,
1673 'success': True,
1669 }
1674 }
1670 except JSONRPCError:
1675 except JSONRPCError:
1671 # catch any internal errors and re-raise them, so the global
1676 # catch any internal errors and re-raise them, so the global
1672 # catch below does not silence them
1677 # catch below does not silence them
1673 raise
1678 raise
1674 except Exception:
1679 except Exception:
1675 log.exception("Exception occurred while trying to comment on commit")
1680 log.exception("Exception occurred while trying to comment on commit")
1676 raise JSONRPCError(
1681 raise JSONRPCError(
1677 'failed to set comment on repository `%s`' % (repo.repo_name,)
1682 'failed to set comment on repository `%s`' % (repo.repo_name,)
1678 )
1683 )
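
A minimal client-side sketch of calling this endpoint over RhodeCode's JSON-RPC API; a comment created this way, with an optional status change, will also fire the commit-comment hook triggered above. The server URL, token and commit hash are placeholders, and the requests dependency is an assumption; any HTTP client that can POST JSON works:

    import requests  # assumed third-party HTTP client

    API_URL = 'https://rhodecode.example.com/_admin/api'   # placeholder server
    AUTH_TOKEN = '<auth_token>'                             # placeholder token

    payload = {
        'id': 1,
        'auth_token': AUTH_TOKEN,
        'method': 'comment_commit',
        'args': {
            'repoid': 'my-repo',                  # repository name or ID
            'commit_id': '<40_char_commit_hash>', # placeholder commit hash
            'message': 'Looks good, approving.',
            'status': 'approved',                 # optional status change
            'comment_type': 'note',
        },
    }
    response = requests.post(API_URL, json=payload).json()
    print(response['result'])  # e.g. {'msg': ..., 'status_change': 'approved', 'success': True}
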
1679
1684
1680
1685
1681 @jsonrpc_method()
1686 @jsonrpc_method()
1682 def get_repo_comments(request, apiuser, repoid,
1687 def get_repo_comments(request, apiuser, repoid,
1683 commit_id=Optional(None), comment_type=Optional(None),
1688 commit_id=Optional(None), comment_type=Optional(None),
1684 userid=Optional(None)):
1689 userid=Optional(None)):
1685 """
1690 """
1686 Get all comments for a repository
1691 Get all comments for a repository
1687
1692
1688 :param apiuser: This is filled automatically from the |authtoken|.
1693 :param apiuser: This is filled automatically from the |authtoken|.
1689 :type apiuser: AuthUser
1694 :type apiuser: AuthUser
1690 :param repoid: Set the repository name or repository ID.
1695 :param repoid: Set the repository name or repository ID.
1691 :type repoid: str or int
1696 :type repoid: str or int
1692 :param commit_id: Optionally filter the comments by the commit_id
1697 :param commit_id: Optionally filter the comments by the commit_id
1693 :type commit_id: Optional(str), default: None
1698 :type commit_id: Optional(str), default: None
1694 :param comment_type: Optionally filter the comments by the comment_type
1699 :param comment_type: Optionally filter the comments by the comment_type
1695 one of: 'note', 'todo'
1700 one of: 'note', 'todo'
1696 :type comment_type: Optional(str), default: None
1701 :type comment_type: Optional(str), default: None
1697 :param userid: Optionally filter the comments by the author of the comment
1702 :param userid: Optionally filter the comments by the author of the comment
1698 :type userid: Optional(str or int), Default: None
1703 :type userid: Optional(str or int), Default: None
1699
1704
1700 Example error output:
1705 Example error output:
1701
1706
1702 .. code-block:: bash
1707 .. code-block:: bash
1703
1708
1704 {
1709 {
1705 "id" : <id_given_in_input>,
1710 "id" : <id_given_in_input>,
1706 "result" : [
1711 "result" : [
1707 {
1712 {
1708 "comment_author": <USER_DETAILS>,
1713 "comment_author": <USER_DETAILS>,
1709 "comment_created_on": "2017-02-01T14:38:16.309",
1714 "comment_created_on": "2017-02-01T14:38:16.309",
1710 "comment_f_path": "file.txt",
1715 "comment_f_path": "file.txt",
1711 "comment_id": 282,
1716 "comment_id": 282,
1712 "comment_lineno": "n1",
1717 "comment_lineno": "n1",
1713 "comment_resolved_by": null,
1718 "comment_resolved_by": null,
1714 "comment_status": [],
1719 "comment_status": [],
1715 "comment_text": "This file needs a header",
1720 "comment_text": "This file needs a header",
1716 "comment_type": "todo"
1721 "comment_type": "todo"
1717 }
1722 }
1718 ],
1723 ],
1719 "error" : null
1724 "error" : null
1720 }
1725 }
1721
1726
1722 """
1727 """
1723 repo = get_repo_or_error(repoid)
1728 repo = get_repo_or_error(repoid)
1724 if not has_superadmin_permission(apiuser):
1729 if not has_superadmin_permission(apiuser):
1725 _perms = ('repository.read', 'repository.write', 'repository.admin')
1730 _perms = ('repository.read', 'repository.write', 'repository.admin')
1726 validate_repo_permissions(apiuser, repoid, repo, _perms)
1731 validate_repo_permissions(apiuser, repoid, repo, _perms)
1727
1732
1728 commit_id = Optional.extract(commit_id)
1733 commit_id = Optional.extract(commit_id)
1729
1734
1730 userid = Optional.extract(userid)
1735 userid = Optional.extract(userid)
1731 if userid:
1736 if userid:
1732 user = get_user_or_error(userid)
1737 user = get_user_or_error(userid)
1733 else:
1738 else:
1734 user = None
1739 user = None
1735
1740
1736 comment_type = Optional.extract(comment_type)
1741 comment_type = Optional.extract(comment_type)
1737 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1742 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1738 raise JSONRPCError(
1743 raise JSONRPCError(
1739 'comment_type must be one of `{}` got {}'.format(
1744 'comment_type must be one of `{}` got {}'.format(
1740 ChangesetComment.COMMENT_TYPES, comment_type)
1745 ChangesetComment.COMMENT_TYPES, comment_type)
1741 )
1746 )
1742
1747
1743 comments = CommentsModel().get_repository_comments(
1748 comments = CommentsModel().get_repository_comments(
1744 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1749 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1745 return comments
1750 return comments
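
A hedged usage sketch for this endpoint: listing the TODO comments attached to a single commit. Placeholder URL, token and hash as before; the envelope keys (id, auth_token, method, args) follow RhodeCode's JSON-RPC convention:

    import requests  # assumed HTTP client

    payload = {
        'id': 2,
        'auth_token': '<auth_token>',
        'method': 'get_repo_comments',
        'args': {
            'repoid': 'my-repo',
            'commit_id': '<40_char_commit_hash>',  # optional filter
            'comment_type': 'todo',                # optional filter: 'note' or 'todo'
        },
    }
    comments = requests.post('https://rhodecode.example.com/_admin/api',
                             json=payload).json()['result']
    for comment in comments:
        print(comment['comment_id'], comment['comment_f_path'], comment['comment_text'])
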
1746
1751
1747
1752
1748 @jsonrpc_method()
1753 @jsonrpc_method()
1749 def grant_user_permission(request, apiuser, repoid, userid, perm):
1754 def grant_user_permission(request, apiuser, repoid, userid, perm):
1750 """
1755 """
1751 Grant permissions for the specified user on the given repository,
1756 Grant permissions for the specified user on the given repository,
1752 or update existing permissions if found.
1757 or update existing permissions if found.
1753
1758
1754 This command can only be run using an |authtoken| with admin
1759 This command can only be run using an |authtoken| with admin
1755 permissions on the |repo|.
1760 permissions on the |repo|.
1756
1761
1757 :param apiuser: This is filled automatically from the |authtoken|.
1762 :param apiuser: This is filled automatically from the |authtoken|.
1758 :type apiuser: AuthUser
1763 :type apiuser: AuthUser
1759 :param repoid: Set the repository name or repository ID.
1764 :param repoid: Set the repository name or repository ID.
1760 :type repoid: str or int
1765 :type repoid: str or int
1761 :param userid: Set the user name.
1766 :param userid: Set the user name.
1762 :type userid: str
1767 :type userid: str
1763 :param perm: Set the user permissions, using the following format
1768 :param perm: Set the user permissions, using the following format
1764 ``(repository.(none|read|write|admin))``
1769 ``(repository.(none|read|write|admin))``
1765 :type perm: str
1770 :type perm: str
1766
1771
1767 Example output:
1772 Example output:
1768
1773
1769 .. code-block:: bash
1774 .. code-block:: bash
1770
1775
1771 id : <id_given_in_input>
1776 id : <id_given_in_input>
1772 result: {
1777 result: {
1773 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1778 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1774 "success": true
1779 "success": true
1775 }
1780 }
1776 error: null
1781 error: null
1777 """
1782 """
1778
1783
1779 repo = get_repo_or_error(repoid)
1784 repo = get_repo_or_error(repoid)
1780 user = get_user_or_error(userid)
1785 user = get_user_or_error(userid)
1781 perm = get_perm_or_error(perm)
1786 perm = get_perm_or_error(perm)
1782 if not has_superadmin_permission(apiuser):
1787 if not has_superadmin_permission(apiuser):
1783 _perms = ('repository.admin',)
1788 _perms = ('repository.admin',)
1784 validate_repo_permissions(apiuser, repoid, repo, _perms)
1789 validate_repo_permissions(apiuser, repoid, repo, _perms)
1785
1790
1786 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1791 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1787 try:
1792 try:
1788 changes = RepoModel().update_permissions(
1793 changes = RepoModel().update_permissions(
1789 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1794 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1790
1795
1791 action_data = {
1796 action_data = {
1792 'added': changes['added'],
1797 'added': changes['added'],
1793 'updated': changes['updated'],
1798 'updated': changes['updated'],
1794 'deleted': changes['deleted'],
1799 'deleted': changes['deleted'],
1795 }
1800 }
1796 audit_logger.store_api(
1801 audit_logger.store_api(
1797 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1802 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1798 Session().commit()
1803 Session().commit()
1799 PermissionModel().flush_user_permission_caches(changes)
1804 PermissionModel().flush_user_permission_caches(changes)
1800
1805
1801 return {
1806 return {
1802 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1807 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1803 perm.permission_name, user.username, repo.repo_name
1808 perm.permission_name, user.username, repo.repo_name
1804 ),
1809 ),
1805 'success': True
1810 'success': True
1806 }
1811 }
1807 except Exception:
1812 except Exception:
1808 log.exception("Exception occurred while trying to edit permissions for repo")
1813 log.exception("Exception occurred while trying to edit permissions for repo")
1809 raise JSONRPCError(
1814 raise JSONRPCError(
1810 'failed to edit permission for user: `%s` in repo: `%s`' % (
1815 'failed to edit permission for user: `%s` in repo: `%s`' % (
1811 userid, repoid
1816 userid, repoid
1812 )
1817 )
1813 )
1818 )
1814
1819
1815
1820
1816 @jsonrpc_method()
1821 @jsonrpc_method()
1817 def revoke_user_permission(request, apiuser, repoid, userid):
1822 def revoke_user_permission(request, apiuser, repoid, userid):
1818 """
1823 """
1819 Revoke permission for a user on the specified repository.
1824 Revoke permission for a user on the specified repository.
1820
1825
1821 This command can only be run using an |authtoken| with admin
1826 This command can only be run using an |authtoken| with admin
1822 permissions on the |repo|.
1827 permissions on the |repo|.
1823
1828
1824 :param apiuser: This is filled automatically from the |authtoken|.
1829 :param apiuser: This is filled automatically from the |authtoken|.
1825 :type apiuser: AuthUser
1830 :type apiuser: AuthUser
1826 :param repoid: Set the repository name or repository ID.
1831 :param repoid: Set the repository name or repository ID.
1827 :type repoid: str or int
1832 :type repoid: str or int
1828 :param userid: Set the user name of revoked user.
1833 :param userid: Set the user name of revoked user.
1829 :type userid: str or int
1834 :type userid: str or int
1830
1835
1831 Example error output:
1836 Example error output:
1832
1837
1833 .. code-block:: bash
1838 .. code-block:: bash
1834
1839
1835 id : <id_given_in_input>
1840 id : <id_given_in_input>
1836 result: {
1841 result: {
1837 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1842 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
1838 "success": true
1843 "success": true
1839 }
1844 }
1840 error: null
1845 error: null
1841 """
1846 """
1842
1847
1843 repo = get_repo_or_error(repoid)
1848 repo = get_repo_or_error(repoid)
1844 user = get_user_or_error(userid)
1849 user = get_user_or_error(userid)
1845 if not has_superadmin_permission(apiuser):
1850 if not has_superadmin_permission(apiuser):
1846 _perms = ('repository.admin',)
1851 _perms = ('repository.admin',)
1847 validate_repo_permissions(apiuser, repoid, repo, _perms)
1852 validate_repo_permissions(apiuser, repoid, repo, _perms)
1848
1853
1849 perm_deletions = [[user.user_id, None, "user"]]
1854 perm_deletions = [[user.user_id, None, "user"]]
1850 try:
1855 try:
1851 changes = RepoModel().update_permissions(
1856 changes = RepoModel().update_permissions(
1852 repo=repo, perm_deletions=perm_deletions, cur_user=user)
1857 repo=repo, perm_deletions=perm_deletions, cur_user=user)
1853
1858
1854 action_data = {
1859 action_data = {
1855 'added': changes['added'],
1860 'added': changes['added'],
1856 'updated': changes['updated'],
1861 'updated': changes['updated'],
1857 'deleted': changes['deleted'],
1862 'deleted': changes['deleted'],
1858 }
1863 }
1859 audit_logger.store_api(
1864 audit_logger.store_api(
1860 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1865 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1861 Session().commit()
1866 Session().commit()
1862 PermissionModel().flush_user_permission_caches(changes)
1867 PermissionModel().flush_user_permission_caches(changes)
1863
1868
1864 return {
1869 return {
1865 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
1870 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
1866 user.username, repo.repo_name
1871 user.username, repo.repo_name
1867 ),
1872 ),
1868 'success': True
1873 'success': True
1869 }
1874 }
1870 except Exception:
1875 except Exception:
1871 log.exception("Exception occurred while trying to revoke permissions on repo")
1876 log.exception("Exception occurred while trying to revoke permissions on repo")
1872 raise JSONRPCError(
1877 raise JSONRPCError(
1873 'failed to edit permission for user: `%s` in repo: `%s`' % (
1878 'failed to edit permission for user: `%s` in repo: `%s`' % (
1874 userid, repoid
1879 userid, repoid
1875 )
1880 )
1876 )
1881 )
1877
1882
1878
1883
1879 @jsonrpc_method()
1884 @jsonrpc_method()
1880 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1885 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
1881 """
1886 """
1882 Grant permission for a user group on the specified repository,
1887 Grant permission for a user group on the specified repository,
1883 or update existing permissions.
1888 or update existing permissions.
1884
1889
1885 This command can only be run using an |authtoken| with admin
1890 This command can only be run using an |authtoken| with admin
1886 permissions on the |repo|.
1891 permissions on the |repo|.
1887
1892
1888 :param apiuser: This is filled automatically from the |authtoken|.
1893 :param apiuser: This is filled automatically from the |authtoken|.
1889 :type apiuser: AuthUser
1894 :type apiuser: AuthUser
1890 :param repoid: Set the repository name or repository ID.
1895 :param repoid: Set the repository name or repository ID.
1891 :type repoid: str or int
1896 :type repoid: str or int
1892 :param usergroupid: Specify the ID of the user group.
1897 :param usergroupid: Specify the ID of the user group.
1893 :type usergroupid: str or int
1898 :type usergroupid: str or int
1894 :param perm: Set the user group permissions using the following
1899 :param perm: Set the user group permissions using the following
1895 format: (repository.(none|read|write|admin))
1900 format: (repository.(none|read|write|admin))
1896 :type perm: str
1901 :type perm: str
1897
1902
1898 Example output:
1903 Example output:
1899
1904
1900 .. code-block:: bash
1905 .. code-block:: bash
1901
1906
1902 id : <id_given_in_input>
1907 id : <id_given_in_input>
1903 result : {
1908 result : {
1904 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1909 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
1905 "success": true
1910 "success": true
1906
1911
1907 }
1912 }
1908 error : null
1913 error : null
1909
1914
1910 Example error output:
1915 Example error output:
1911
1916
1912 .. code-block:: bash
1917 .. code-block:: bash
1913
1918
1914 id : <id_given_in_input>
1919 id : <id_given_in_input>
1915 result : null
1920 result : null
1916 error : {
1921 error : {
1917 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1922 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
1918 }
1923 }
1919
1924
1920 """
1925 """
1921
1926
1922 repo = get_repo_or_error(repoid)
1927 repo = get_repo_or_error(repoid)
1923 perm = get_perm_or_error(perm)
1928 perm = get_perm_or_error(perm)
1924 if not has_superadmin_permission(apiuser):
1929 if not has_superadmin_permission(apiuser):
1925 _perms = ('repository.admin',)
1930 _perms = ('repository.admin',)
1926 validate_repo_permissions(apiuser, repoid, repo, _perms)
1931 validate_repo_permissions(apiuser, repoid, repo, _perms)
1927
1932
1928 user_group = get_user_group_or_error(usergroupid)
1933 user_group = get_user_group_or_error(usergroupid)
1929 if not has_superadmin_permission(apiuser):
1934 if not has_superadmin_permission(apiuser):
1930 # check if we have at least read permission for this user group !
1935 # check if we have at least read permission for this user group !
1931 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1936 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
1932 if not HasUserGroupPermissionAnyApi(*_perms)(
1937 if not HasUserGroupPermissionAnyApi(*_perms)(
1933 user=apiuser, user_group_name=user_group.users_group_name):
1938 user=apiuser, user_group_name=user_group.users_group_name):
1934 raise JSONRPCError(
1939 raise JSONRPCError(
1935 'user group `%s` does not exist' % (usergroupid,))
1940 'user group `%s` does not exist' % (usergroupid,))
1936
1941
1937 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
1942 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
1938 try:
1943 try:
1939 changes = RepoModel().update_permissions(
1944 changes = RepoModel().update_permissions(
1940 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1945 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1941 action_data = {
1946 action_data = {
1942 'added': changes['added'],
1947 'added': changes['added'],
1943 'updated': changes['updated'],
1948 'updated': changes['updated'],
1944 'deleted': changes['deleted'],
1949 'deleted': changes['deleted'],
1945 }
1950 }
1946 audit_logger.store_api(
1951 audit_logger.store_api(
1947 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1952 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1948 Session().commit()
1953 Session().commit()
1949 PermissionModel().flush_user_permission_caches(changes)
1954 PermissionModel().flush_user_permission_caches(changes)
1950
1955
1951 return {
1956 return {
1952 'msg': 'Granted perm: `%s` for user group: `%s` in '
1957 'msg': 'Granted perm: `%s` for user group: `%s` in '
1953 'repo: `%s`' % (
1958 'repo: `%s`' % (
1954 perm.permission_name, user_group.users_group_name,
1959 perm.permission_name, user_group.users_group_name,
1955 repo.repo_name
1960 repo.repo_name
1956 ),
1961 ),
1957 'success': True
1962 'success': True
1958 }
1963 }
1959 except Exception:
1964 except Exception:
1960 log.exception(
1965 log.exception(
1961 "Exception occurred while trying change permission on repo")
1966 "Exception occurred while trying change permission on repo")
1962 raise JSONRPCError(
1967 raise JSONRPCError(
1963 'failed to edit permission for user group: `%s` in '
1968 'failed to edit permission for user group: `%s` in '
1964 'repo: `%s`' % (
1969 'repo: `%s`' % (
1965 usergroupid, repo.repo_name
1970 usergroupid, repo.repo_name
1966 )
1971 )
1967 )
1972 )
1968
1973
1969
1974
1970 @jsonrpc_method()
1975 @jsonrpc_method()
1971 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1976 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
1972 """
1977 """
1973 Revoke the permissions of a user group on a given repository.
1978 Revoke the permissions of a user group on a given repository.
1974
1979
1975 This command can only be run using an |authtoken| with admin
1980 This command can only be run using an |authtoken| with admin
1976 permissions on the |repo|.
1981 permissions on the |repo|.
1977
1982
1978 :param apiuser: This is filled automatically from the |authtoken|.
1983 :param apiuser: This is filled automatically from the |authtoken|.
1979 :type apiuser: AuthUser
1984 :type apiuser: AuthUser
1980 :param repoid: Set the repository name or repository ID.
1985 :param repoid: Set the repository name or repository ID.
1981 :type repoid: str or int
1986 :type repoid: str or int
1982 :param usergroupid: Specify the user group ID.
1987 :param usergroupid: Specify the user group ID.
1983 :type usergroupid: str or int
1988 :type usergroupid: str or int
1984
1989
1985 Example output:
1990 Example output:
1986
1991
1987 .. code-block:: bash
1992 .. code-block:: bash
1988
1993
1989 id : <id_given_in_input>
1994 id : <id_given_in_input>
1990 result: {
1995 result: {
1991 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1996 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
1992 "success": true
1997 "success": true
1993 }
1998 }
1994 error: null
1999 error: null
1995 """
2000 """
1996
2001
1997 repo = get_repo_or_error(repoid)
2002 repo = get_repo_or_error(repoid)
1998 if not has_superadmin_permission(apiuser):
2003 if not has_superadmin_permission(apiuser):
1999 _perms = ('repository.admin',)
2004 _perms = ('repository.admin',)
2000 validate_repo_permissions(apiuser, repoid, repo, _perms)
2005 validate_repo_permissions(apiuser, repoid, repo, _perms)
2001
2006
2002 user_group = get_user_group_or_error(usergroupid)
2007 user_group = get_user_group_or_error(usergroupid)
2003 if not has_superadmin_permission(apiuser):
2008 if not has_superadmin_permission(apiuser):
2004 # check if we have at least read permission for this user group !
2009 # check if we have at least read permission for this user group !
2005 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2010 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2006 if not HasUserGroupPermissionAnyApi(*_perms)(
2011 if not HasUserGroupPermissionAnyApi(*_perms)(
2007 user=apiuser, user_group_name=user_group.users_group_name):
2012 user=apiuser, user_group_name=user_group.users_group_name):
2008 raise JSONRPCError(
2013 raise JSONRPCError(
2009 'user group `%s` does not exist' % (usergroupid,))
2014 'user group `%s` does not exist' % (usergroupid,))
2010
2015
2011 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2016 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2012 try:
2017 try:
2013 changes = RepoModel().update_permissions(
2018 changes = RepoModel().update_permissions(
2014 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2019 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2015 action_data = {
2020 action_data = {
2016 'added': changes['added'],
2021 'added': changes['added'],
2017 'updated': changes['updated'],
2022 'updated': changes['updated'],
2018 'deleted': changes['deleted'],
2023 'deleted': changes['deleted'],
2019 }
2024 }
2020 audit_logger.store_api(
2025 audit_logger.store_api(
2021 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2026 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2022 Session().commit()
2027 Session().commit()
2023 PermissionModel().flush_user_permission_caches(changes)
2028 PermissionModel().flush_user_permission_caches(changes)
2024
2029
2025 return {
2030 return {
2026 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
2031 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
2027 user_group.users_group_name, repo.repo_name
2032 user_group.users_group_name, repo.repo_name
2028 ),
2033 ),
2029 'success': True
2034 'success': True
2030 }
2035 }
2031 except Exception:
2036 except Exception:
2032 log.exception("Exception occurred while trying to revoke "
2037 log.exception("Exception occurred while trying to revoke "
2033 "user group permission on repo")
2038 "user group permission on repo")
2034 raise JSONRPCError(
2039 raise JSONRPCError(
2035 'failed to edit permission for user group: `%s` in '
2040 'failed to edit permission for user group: `%s` in '
2036 'repo: `%s`' % (
2041 'repo: `%s`' % (
2037 user_group.users_group_name, repo.repo_name
2042 user_group.users_group_name, repo.repo_name
2038 )
2043 )
2039 )
2044 )
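
The four permission endpoints above (grant_user_permission, revoke_user_permission, grant_user_group_permission, revoke_user_group_permission) share the same call shape, so one hedged sketch covers them all. The rc_api helper, server URL and token below are illustrative assumptions, not part of the codebase:

    import requests  # assumed HTTP client

    API_URL = 'https://rhodecode.example.com/_admin/api'   # placeholder server
    AUTH_TOKEN = '<auth_token_with_repository.admin>'       # placeholder token

    def rc_api(method, **args):
        # illustrative wrapper around the JSON-RPC envelope (id/auth_token/method/args)
        payload = {'id': 1, 'auth_token': AUTH_TOKEN, 'method': method, 'args': args}
        return requests.post(API_URL, json=payload).json()

    # grant, then later revoke, write access for a single user
    rc_api('grant_user_permission', repoid='my-repo', userid='jane',
           perm='repository.write')
    rc_api('revoke_user_permission', repoid='my-repo', userid='jane')

    # the user-group variants take a usergroupid instead of a userid
    rc_api('grant_user_group_permission', repoid='my-repo',
           usergroupid='developers', perm='repository.read')
    rc_api('revoke_user_group_permission', repoid='my-repo',
           usergroupid='developers')
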
2040
2045
2041
2046
2042 @jsonrpc_method()
2047 @jsonrpc_method()
2043 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2048 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2044 """
2049 """
2045 Triggers a pull on the given repository from a remote location. You
2050 Triggers a pull on the given repository from a remote location. You
2046 can use this to keep remote repositories up-to-date.
2051 can use this to keep remote repositories up-to-date.
2047
2052
2048 This command can only be run using an |authtoken| with admin
2053 This command can only be run using an |authtoken| with admin
2049 rights to the specified repository. For more information,
2054 rights to the specified repository. For more information,
2050 see :ref:`config-token-ref`.
2055 see :ref:`config-token-ref`.
2051
2056
2052 This command takes the following options:
2057 This command takes the following options:
2053
2058
2054 :param apiuser: This is filled automatically from the |authtoken|.
2059 :param apiuser: This is filled automatically from the |authtoken|.
2055 :type apiuser: AuthUser
2060 :type apiuser: AuthUser
2056 :param repoid: The repository name or repository ID.
2061 :param repoid: The repository name or repository ID.
2057 :type repoid: str or int
2062 :type repoid: str or int
2058 :param remote_uri: Optional remote URI to pass in for pull
2063 :param remote_uri: Optional remote URI to pass in for pull
2059 :type remote_uri: str
2064 :type remote_uri: str
2060
2065
2061 Example output:
2066 Example output:
2062
2067
2063 .. code-block:: bash
2068 .. code-block:: bash
2064
2069
2065 id : <id_given_in_input>
2070 id : <id_given_in_input>
2066 result : {
2071 result : {
2067 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2072 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2068 "repository": "<repository name>"
2073 "repository": "<repository name>"
2069 }
2074 }
2070 error : null
2075 error : null
2071
2076
2072 Example error output:
2077 Example error output:
2073
2078
2074 .. code-block:: bash
2079 .. code-block:: bash
2075
2080
2076 id : <id_given_in_input>
2081 id : <id_given_in_input>
2077 result : null
2082 result : null
2078 error : {
2083 error : {
2079 "Unable to push changes from `<remote_url>`"
2084 "Unable to push changes from `<remote_url>`"
2080 }
2085 }
2081
2086
2082 """
2087 """
2083
2088
2084 repo = get_repo_or_error(repoid)
2089 repo = get_repo_or_error(repoid)
2085 remote_uri = Optional.extract(remote_uri)
2090 remote_uri = Optional.extract(remote_uri)
2086 remote_uri_display = remote_uri or repo.clone_uri_hidden
2091 remote_uri_display = remote_uri or repo.clone_uri_hidden
2087 if not has_superadmin_permission(apiuser):
2092 if not has_superadmin_permission(apiuser):
2088 _perms = ('repository.admin',)
2093 _perms = ('repository.admin',)
2089 validate_repo_permissions(apiuser, repoid, repo, _perms)
2094 validate_repo_permissions(apiuser, repoid, repo, _perms)
2090
2095
2091 try:
2096 try:
2092 ScmModel().pull_changes(
2097 ScmModel().pull_changes(
2093 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2098 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2094 return {
2099 return {
2095 'msg': 'Pulled from url `%s` on repo `%s`' % (
2100 'msg': 'Pulled from url `%s` on repo `%s`' % (
2096 remote_uri_display, repo.repo_name),
2101 remote_uri_display, repo.repo_name),
2097 'repository': repo.repo_name
2102 'repository': repo.repo_name
2098 }
2103 }
2099 except Exception:
2104 except Exception:
2100 log.exception("Exception occurred while trying to "
2105 log.exception("Exception occurred while trying to "
2101 "pull changes from remote location")
2106 "pull changes from remote location")
2102 raise JSONRPCError(
2107 raise JSONRPCError(
2103 'Unable to pull changes from `%s`' % remote_uri_display
2108 'Unable to pull changes from `%s`' % remote_uri_display
2104 )
2109 )
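
A short sketch of triggering a pull; remote_uri is optional, exactly as the docstring describes, and the stored clone URI is used when it is omitted. URL, token and remote are placeholders:

    import requests  # assumed HTTP client

    payload = {
        'id': 3,
        'auth_token': '<admin_auth_token>',
        'method': 'pull',
        'args': {
            'repoid': 'my-mirror',
            # optional; omit to pull from the repository's configured clone URI
            'remote_uri': 'https://upstream.example.com/project.git',
        },
    }
    print(requests.post('https://rhodecode.example.com/_admin/api',
                        json=payload).json()['result'])
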
2105
2110
2106
2111
2107 @jsonrpc_method()
2112 @jsonrpc_method()
2108 def strip(request, apiuser, repoid, revision, branch):
2113 def strip(request, apiuser, repoid, revision, branch):
2109 """
2114 """
2110 Strips the given revision from the specified repository.
2115 Strips the given revision from the specified repository.
2111
2116
2112 * This will remove the revision and all of its descendants.
2117 * This will remove the revision and all of its descendants.
2113
2118
2114 This command can only be run using an |authtoken| with admin rights to
2119 This command can only be run using an |authtoken| with admin rights to
2115 the specified repository.
2120 the specified repository.
2116
2121
2117 This command takes the following options:
2122 This command takes the following options:
2118
2123
2119 :param apiuser: This is filled automatically from the |authtoken|.
2124 :param apiuser: This is filled automatically from the |authtoken|.
2120 :type apiuser: AuthUser
2125 :type apiuser: AuthUser
2121 :param repoid: The repository name or repository ID.
2126 :param repoid: The repository name or repository ID.
2122 :type repoid: str or int
2127 :type repoid: str or int
2123 :param revision: The revision you wish to strip.
2128 :param revision: The revision you wish to strip.
2124 :type revision: str
2129 :type revision: str
2125 :param branch: The branch from which to strip the revision.
2130 :param branch: The branch from which to strip the revision.
2126 :type branch: str
2131 :type branch: str
2127
2132
2128 Example output:
2133 Example output:
2129
2134
2130 .. code-block:: bash
2135 .. code-block:: bash
2131
2136
2132 id : <id_given_in_input>
2137 id : <id_given_in_input>
2133 result : {
2138 result : {
2134 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2139 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2135 "repository": "<repository name>"
2140 "repository": "<repository name>"
2136 }
2141 }
2137 error : null
2142 error : null
2138
2143
2139 Example error output:
2144 Example error output:
2140
2145
2141 .. code-block:: bash
2146 .. code-block:: bash
2142
2147
2143 id : <id_given_in_input>
2148 id : <id_given_in_input>
2144 result : null
2149 result : null
2145 error : {
2150 error : {
2146 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2151 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2147 }
2152 }
2148
2153
2149 """
2154 """
2150
2155
2151 repo = get_repo_or_error(repoid)
2156 repo = get_repo_or_error(repoid)
2152 if not has_superadmin_permission(apiuser):
2157 if not has_superadmin_permission(apiuser):
2153 _perms = ('repository.admin',)
2158 _perms = ('repository.admin',)
2154 validate_repo_permissions(apiuser, repoid, repo, _perms)
2159 validate_repo_permissions(apiuser, repoid, repo, _perms)
2155
2160
2156 try:
2161 try:
2157 ScmModel().strip(repo, revision, branch)
2162 ScmModel().strip(repo, revision, branch)
2158 audit_logger.store_api(
2163 audit_logger.store_api(
2159 'repo.commit.strip', action_data={'commit_id': revision},
2164 'repo.commit.strip', action_data={'commit_id': revision},
2160 repo=repo,
2165 repo=repo,
2161 user=apiuser, commit=True)
2166 user=apiuser, commit=True)
2162
2167
2163 return {
2168 return {
2164 'msg': 'Stripped commit %s from repo `%s`' % (
2169 'msg': 'Stripped commit %s from repo `%s`' % (
2165 revision, repo.repo_name),
2170 revision, repo.repo_name),
2166 'repository': repo.repo_name
2171 'repository': repo.repo_name
2167 }
2172 }
2168 except Exception:
2173 except Exception:
2169 log.exception("Exception while trying to strip")
2174 log.exception("Exception while trying to strip")
2170 raise JSONRPCError(
2175 raise JSONRPCError(
2171 'Unable to strip commit %s from repo `%s`' % (
2176 'Unable to strip commit %s from repo `%s`' % (
2172 revision, repo.repo_name)
2177 revision, repo.repo_name)
2173 )
2178 )
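
Because strip permanently removes the commit and all of its descendants, an explicit sketch is worth keeping next to the docstring. Values are placeholders and the same JSON-RPC envelope is assumed:

    import requests  # assumed HTTP client

    # WARNING: destructive; the stripped commits are removed from the repository.
    payload = {
        'id': 4,
        'auth_token': '<admin_auth_token>',
        'method': 'strip',
        'args': {
            'repoid': 'my-repo',
            'revision': '<40_char_commit_hash>',  # commit to strip
            'branch': 'default',                  # branch the commit belongs to
        },
    }
    print(requests.post('https://rhodecode.example.com/_admin/api',
                        json=payload).json()['result'])
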
2174
2179
2175
2180
2176 @jsonrpc_method()
2181 @jsonrpc_method()
2177 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2182 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2178 """
2183 """
2179 Returns all settings for a repository. If a key is given, only the setting
2184 Returns all settings for a repository. If a key is given, only the setting
2180 identified by that key is returned, or null if it does not exist.
2185 identified by that key is returned, or null if it does not exist.
2181
2186
2182 :param apiuser: This is filled automatically from the |authtoken|.
2187 :param apiuser: This is filled automatically from the |authtoken|.
2183 :type apiuser: AuthUser
2188 :type apiuser: AuthUser
2184 :param repoid: The repository name or repository id.
2189 :param repoid: The repository name or repository id.
2185 :type repoid: str or int
2190 :type repoid: str or int
2186 :param key: Key of the setting to return.
2191 :param key: Key of the setting to return.
2186 :type key: Optional(str)
2191 :type key: Optional(str)
2188
2193
2189 Example output:
2194 Example output:
2190
2195
2191 .. code-block:: bash
2196 .. code-block:: bash
2192
2197
2193 {
2198 {
2194 "error": null,
2199 "error": null,
2195 "id": 237,
2200 "id": 237,
2196 "result": {
2201 "result": {
2197 "extensions_largefiles": true,
2202 "extensions_largefiles": true,
2198 "extensions_evolve": true,
2203 "extensions_evolve": true,
2199 "hooks_changegroup_push_logger": true,
2204 "hooks_changegroup_push_logger": true,
2200 "hooks_changegroup_repo_size": false,
2205 "hooks_changegroup_repo_size": false,
2201 "hooks_outgoing_pull_logger": true,
2206 "hooks_outgoing_pull_logger": true,
2202 "phases_publish": "True",
2207 "phases_publish": "True",
2203 "rhodecode_hg_use_rebase_for_merging": true,
2208 "rhodecode_hg_use_rebase_for_merging": true,
2204 "rhodecode_pr_merge_enabled": true,
2209 "rhodecode_pr_merge_enabled": true,
2205 "rhodecode_use_outdated_comments": true
2210 "rhodecode_use_outdated_comments": true
2206 }
2211 }
2207 }
2212 }
2208 """
2213 """
2209
2214
2210 # Restrict access to this api method to admins only.
2215 # Restrict access to this api method to admins only.
2211 if not has_superadmin_permission(apiuser):
2216 if not has_superadmin_permission(apiuser):
2212 raise JSONRPCForbidden()
2217 raise JSONRPCForbidden()
2213
2218
2214 try:
2219 try:
2215 repo = get_repo_or_error(repoid)
2220 repo = get_repo_or_error(repoid)
2216 settings_model = VcsSettingsModel(repo=repo)
2221 settings_model = VcsSettingsModel(repo=repo)
2217 settings = settings_model.get_global_settings()
2222 settings = settings_model.get_global_settings()
2218 settings.update(settings_model.get_repo_settings())
2223 settings.update(settings_model.get_repo_settings())
2219
2224
2220 # If only a single setting is requested fetch it from all settings.
2225 # If only a single setting is requested fetch it from all settings.
2221 key = Optional.extract(key)
2226 key = Optional.extract(key)
2222 if key is not None:
2227 if key is not None:
2223 settings = settings.get(key, None)
2228 settings = settings.get(key, None)
2224 except Exception:
2229 except Exception:
2225 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2230 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2226 log.exception(msg)
2231 log.exception(msg)
2227 raise JSONRPCError(msg)
2232 raise JSONRPCError(msg)
2228
2233
2229 return settings
2234 return settings
2230
2235
2231
2236
2232 @jsonrpc_method()
2237 @jsonrpc_method()
2233 def set_repo_settings(request, apiuser, repoid, settings):
2238 def set_repo_settings(request, apiuser, repoid, settings):
2234 """
2239 """
2235 Update repository settings. Returns true on success.
2240 Update repository settings. Returns true on success.
2236
2241
2237 :param apiuser: This is filled automatically from the |authtoken|.
2242 :param apiuser: This is filled automatically from the |authtoken|.
2238 :type apiuser: AuthUser
2243 :type apiuser: AuthUser
2239 :param repoid: The repository name or repository id.
2244 :param repoid: The repository name or repository id.
2240 :type repoid: str or int
2245 :type repoid: str or int
2241 :param settings: The new settings for the repository.
2246 :param settings: The new settings for the repository.
2242 :type settings: dict
2247 :type settings: dict
2243
2248
2244 Example output:
2249 Example output:
2245
2250
2246 .. code-block:: bash
2251 .. code-block:: bash
2247
2252
2248 {
2253 {
2249 "error": null,
2254 "error": null,
2250 "id": 237,
2255 "id": 237,
2251 "result": true
2256 "result": true
2252 }
2257 }
2253 """
2258 """
2254 # Restrict access to this api method to admins only.
2259 # Restrict access to this api method to admins only.
2255 if not has_superadmin_permission(apiuser):
2260 if not has_superadmin_permission(apiuser):
2256 raise JSONRPCForbidden()
2261 raise JSONRPCForbidden()
2257
2262
2258 if type(settings) is not dict:
2263 if type(settings) is not dict:
2259 raise JSONRPCError('Settings have to be a JSON Object.')
2264 raise JSONRPCError('Settings have to be a JSON Object.')
2260
2265
2261 try:
2266 try:
2262 settings_model = VcsSettingsModel(repo=repoid)
2267 settings_model = VcsSettingsModel(repo=repoid)
2263
2268
2264 # Merge global, repo and incoming settings.
2269 # Merge global, repo and incoming settings.
2265 new_settings = settings_model.get_global_settings()
2270 new_settings = settings_model.get_global_settings()
2266 new_settings.update(settings_model.get_repo_settings())
2271 new_settings.update(settings_model.get_repo_settings())
2267 new_settings.update(settings)
2272 new_settings.update(settings)
2268
2273
2269 # Update the settings.
2274 # Update the settings.
2270 inherit_global_settings = new_settings.get(
2275 inherit_global_settings = new_settings.get(
2271 'inherit_global_settings', False)
2276 'inherit_global_settings', False)
2272 settings_model.create_or_update_repo_settings(
2277 settings_model.create_or_update_repo_settings(
2273 new_settings, inherit_global_settings=inherit_global_settings)
2278 new_settings, inherit_global_settings=inherit_global_settings)
2274 Session().commit()
2279 Session().commit()
2275 except Exception:
2280 except Exception:
2276 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2281 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2277 log.exception(msg)
2282 log.exception(msg)
2278 raise JSONRPCError(msg)
2283 raise JSONRPCError(msg)
2279
2284
2280 # Indicate success.
2285 # Indicate success.
2281 return True
2286 return True
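
get_repo_settings and set_repo_settings pair naturally: read the effective settings, change a key, write the result back (the server merges global, repo and incoming values as shown above), and both calls are restricted to super-admin tokens. A hedged sketch under the same placeholder URL/token assumptions:

    import requests  # assumed HTTP client

    API_URL = 'https://rhodecode.example.com/_admin/api'   # placeholder
    AUTH_TOKEN = '<super_admin_auth_token>'                 # placeholder

    def rc_api(method, **args):
        # illustrative JSON-RPC wrapper; returns only the 'result' part
        payload = {'id': 1, 'auth_token': AUTH_TOKEN, 'method': method, 'args': args}
        return requests.post(API_URL, json=payload).json()['result']

    current = rc_api('get_repo_settings', repoid='my-repo')
    print(current.get('rhodecode_pr_merge_enabled'))

    # only the keys you pass need changing; the rest keep their current values
    rc_api('set_repo_settings', repoid='my-repo',
           settings={'rhodecode_pr_merge_enabled': False})
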
2282
2287
2283
2288
2284 @jsonrpc_method()
2289 @jsonrpc_method()
2285 def maintenance(request, apiuser, repoid):
2290 def maintenance(request, apiuser, repoid):
2286 """
2291 """
2287 Triggers a maintenance run on the given repository.
2292 Triggers a maintenance run on the given repository.
2288
2293
2289 This command can only be run using an |authtoken| with admin
2294 This command can only be run using an |authtoken| with admin
2290 rights to the specified repository. For more information,
2295 rights to the specified repository. For more information,
2291 see :ref:`config-token-ref`.
2296 see :ref:`config-token-ref`.
2292
2297
2293 This command takes the following options:
2298 This command takes the following options:
2294
2299
2295 :param apiuser: This is filled automatically from the |authtoken|.
2300 :param apiuser: This is filled automatically from the |authtoken|.
2296 :type apiuser: AuthUser
2301 :type apiuser: AuthUser
2297 :param repoid: The repository name or repository ID.
2302 :param repoid: The repository name or repository ID.
2298 :type repoid: str or int
2303 :type repoid: str or int
2299
2304
2300 Example output:
2305 Example output:
2301
2306
2302 .. code-block:: bash
2307 .. code-block:: bash
2303
2308
2304 id : <id_given_in_input>
2309 id : <id_given_in_input>
2305 result : {
2310 result : {
2306 "msg": "executed maintenance command",
2311 "msg": "executed maintenance command",
2307 "executed_actions": [
2312 "executed_actions": [
2308 <action_message>, <action_message2>...
2313 <action_message>, <action_message2>...
2309 ],
2314 ],
2310 "repository": "<repository name>"
2315 "repository": "<repository name>"
2311 }
2316 }
2312 error : null
2317 error : null
2313
2318
2314 Example error output:
2319 Example error output:
2315
2320
2316 .. code-block:: bash
2321 .. code-block:: bash
2317
2322
2318 id : <id_given_in_input>
2323 id : <id_given_in_input>
2319 result : null
2324 result : null
2320 error : {
2325 error : {
2321 "Unable to execute maintenance on `<reponame>`"
2326 "Unable to execute maintenance on `<reponame>`"
2322 }
2327 }
2323
2328
2324 """
2329 """
2325
2330
2326 repo = get_repo_or_error(repoid)
2331 repo = get_repo_or_error(repoid)
2327 if not has_superadmin_permission(apiuser):
2332 if not has_superadmin_permission(apiuser):
2328 _perms = ('repository.admin',)
2333 _perms = ('repository.admin',)
2329 validate_repo_permissions(apiuser, repoid, repo, _perms)
2334 validate_repo_permissions(apiuser, repoid, repo, _perms)
2330
2335
2331 try:
2336 try:
2332 maintenance = repo_maintenance.RepoMaintenance()
2337 maintenance = repo_maintenance.RepoMaintenance()
2333 executed_actions = maintenance.execute(repo)
2338 executed_actions = maintenance.execute(repo)
2334
2339
2335 return {
2340 return {
2336 'msg': 'executed maintenance command',
2341 'msg': 'executed maintenance command',
2337 'executed_actions': executed_actions,
2342 'executed_actions': executed_actions,
2338 'repository': repo.repo_name
2343 'repository': repo.repo_name
2339 }
2344 }
2340 except Exception:
2345 except Exception:
2341 log.exception("Exception occurred while trying to run maintenance")
2346 log.exception("Exception occurred while trying to run maintenance")
2342 raise JSONRPCError(
2347 raise JSONRPCError(
2343 'Unable to execute maintenance on `%s`' % repo.repo_name)
2348 'Unable to execute maintenance on `%s`' % repo.repo_name)
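
maintenance uses the same envelope and takes only repoid; a short sketch with the usual placeholder URL and token:

    import requests  # assumed HTTP client

    payload = {
        'id': 5,
        'auth_token': '<admin_auth_token>',
        'method': 'maintenance',
        'args': {'repoid': 'my-repo'},
    }
    result = requests.post('https://rhodecode.example.com/_admin/api',
                           json=payload).json()['result']
    print(result['executed_actions'])
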
@@ -1,601 +1,606 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23 import collections
23 import collections
24
24
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from pyramid.renderers import render
27 from pyramid.renderers import render
28 from pyramid.response import Response
28 from pyramid.response import Response
29
29
30 from rhodecode.apps._base import RepoAppView
30 from rhodecode.apps._base import RepoAppView
31 from rhodecode.apps.file_store import utils as store_utils
31 from rhodecode.apps.file_store import utils as store_utils
32 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
32 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
33
33
34 from rhodecode.lib import diffs, codeblocks
34 from rhodecode.lib import diffs, codeblocks
35 from rhodecode.lib.auth import (
35 from rhodecode.lib.auth import (
36 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
36 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
37
37
38 from rhodecode.lib.compat import OrderedDict
38 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.diffs import (
39 from rhodecode.lib.diffs import (
40 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
40 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
41 get_diff_whitespace_flag)
41 get_diff_whitespace_flag)
42 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
42 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
43 import rhodecode.lib.helpers as h
43 import rhodecode.lib.helpers as h
44 from rhodecode.lib.utils2 import safe_unicode, str2bool
44 from rhodecode.lib.utils2 import safe_unicode, str2bool
45 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.lib.vcs.backends.base import EmptyCommit
46 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
47 RepositoryError, CommitDoesNotExistError)
47 RepositoryError, CommitDoesNotExistError)
48 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore
48 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore
49 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.comment import CommentsModel
50 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 def _update_with_GET(params, request):
57 def _update_with_GET(params, request):
58 for k in ['diff1', 'diff2', 'diff']:
58 for k in ['diff1', 'diff2', 'diff']:
59 params[k] += request.GET.getall(k)
59 params[k] += request.GET.getall(k)
60
60
61
61
62 class RepoCommitsView(RepoAppView):
62 class RepoCommitsView(RepoAppView):
63 def load_default_context(self):
63 def load_default_context(self):
64 c = self._get_local_tmpl_context(include_app_defaults=True)
64 c = self._get_local_tmpl_context(include_app_defaults=True)
65 c.rhodecode_repo = self.rhodecode_vcs_repo
65 c.rhodecode_repo = self.rhodecode_vcs_repo
66
66
67 return c
67 return c
68
68
69 def _is_diff_cache_enabled(self, target_repo):
69 def _is_diff_cache_enabled(self, target_repo):
70 caching_enabled = self._get_general_setting(
70 caching_enabled = self._get_general_setting(
71 target_repo, 'rhodecode_diff_cache')
71 target_repo, 'rhodecode_diff_cache')
72 log.debug('Diff caching enabled: %s', caching_enabled)
72 log.debug('Diff caching enabled: %s', caching_enabled)
73 return caching_enabled
73 return caching_enabled
74
74
75 def _commit(self, commit_id_range, method):
75 def _commit(self, commit_id_range, method):
76 _ = self.request.translate
76 _ = self.request.translate
77 c = self.load_default_context()
77 c = self.load_default_context()
78 c.fulldiff = self.request.GET.get('fulldiff')
78 c.fulldiff = self.request.GET.get('fulldiff')
79
79
80 # fetch the global flags for ignoring whitespace and for diff context lines
80 # fetch the global flags for ignoring whitespace and for diff context lines
81 diff_context = get_diff_context(self.request)
81 diff_context = get_diff_context(self.request)
82 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
82 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
83
83
84 # diff_limit will cut off the whole diff if the limit is applied
84 # diff_limit will cut off the whole diff if the limit is applied
85 # otherwise it will just hide the big files from the front-end
85 # otherwise it will just hide the big files from the front-end
86 diff_limit = c.visual.cut_off_limit_diff
86 diff_limit = c.visual.cut_off_limit_diff
87 file_limit = c.visual.cut_off_limit_file
87 file_limit = c.visual.cut_off_limit_file
88
88
89 # get ranges of commit ids if present
89 # get ranges of commit ids if present
90 commit_range = commit_id_range.split('...')[:2]
90 commit_range = commit_id_range.split('...')[:2]
91
91
92 try:
92 try:
93 pre_load = ['affected_files', 'author', 'branch', 'date',
93 pre_load = ['affected_files', 'author', 'branch', 'date',
94 'message', 'parents']
94 'message', 'parents']
95 if self.rhodecode_vcs_repo.alias == 'hg':
95 if self.rhodecode_vcs_repo.alias == 'hg':
96 pre_load += ['hidden', 'obsolete', 'phase']
96 pre_load += ['hidden', 'obsolete', 'phase']
97
97
98 if len(commit_range) == 2:
98 if len(commit_range) == 2:
99 commits = self.rhodecode_vcs_repo.get_commits(
99 commits = self.rhodecode_vcs_repo.get_commits(
100 start_id=commit_range[0], end_id=commit_range[1],
100 start_id=commit_range[0], end_id=commit_range[1],
101 pre_load=pre_load, translate_tags=False)
101 pre_load=pre_load, translate_tags=False)
102 commits = list(commits)
102 commits = list(commits)
103 else:
103 else:
104 commits = [self.rhodecode_vcs_repo.get_commit(
104 commits = [self.rhodecode_vcs_repo.get_commit(
105 commit_id=commit_id_range, pre_load=pre_load)]
105 commit_id=commit_id_range, pre_load=pre_load)]
106
106
107 c.commit_ranges = commits
107 c.commit_ranges = commits
108 if not c.commit_ranges:
108 if not c.commit_ranges:
109 raise RepositoryError('The commit range returned an empty result')
109 raise RepositoryError('The commit range returned an empty result')
110 except CommitDoesNotExistError as e:
110 except CommitDoesNotExistError as e:
111 msg = _('No such commit exists. Org exception: `{}`').format(e)
111 msg = _('No such commit exists. Org exception: `{}`').format(e)
112 h.flash(msg, category='error')
112 h.flash(msg, category='error')
113 raise HTTPNotFound()
113 raise HTTPNotFound()
114 except Exception:
114 except Exception:
115 log.exception("General failure")
115 log.exception("General failure")
116 raise HTTPNotFound()
116 raise HTTPNotFound()
117
117
118 c.changes = OrderedDict()
118 c.changes = OrderedDict()
119 c.lines_added = 0
119 c.lines_added = 0
120 c.lines_deleted = 0
120 c.lines_deleted = 0
121
121
122 # auto-collapse if we have more commits than the limit
122 # auto-collapse if we have more commits than the limit
123 collapse_limit = diffs.DiffProcessor._collapse_commits_over
123 collapse_limit = diffs.DiffProcessor._collapse_commits_over
124 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
124 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
125
125
126 c.commit_statuses = ChangesetStatus.STATUSES
126 c.commit_statuses = ChangesetStatus.STATUSES
127 c.inline_comments = []
127 c.inline_comments = []
128 c.files = []
128 c.files = []
129
129
130 c.statuses = []
130 c.statuses = []
131 c.comments = []
131 c.comments = []
132 c.unresolved_comments = []
132 c.unresolved_comments = []
133 c.resolved_comments = []
133 c.resolved_comments = []
134 if len(c.commit_ranges) == 1:
134 if len(c.commit_ranges) == 1:
135 commit = c.commit_ranges[0]
135 commit = c.commit_ranges[0]
136 c.comments = CommentsModel().get_comments(
136 c.comments = CommentsModel().get_comments(
137 self.db_repo.repo_id,
137 self.db_repo.repo_id,
138 revision=commit.raw_id)
138 revision=commit.raw_id)
139 c.statuses.append(ChangesetStatusModel().get_status(
139 c.statuses.append(ChangesetStatusModel().get_status(
140 self.db_repo.repo_id, commit.raw_id))
140 self.db_repo.repo_id, commit.raw_id))
141 # comments from PR
141 # comments from PR
142 statuses = ChangesetStatusModel().get_statuses(
142 statuses = ChangesetStatusModel().get_statuses(
143 self.db_repo.repo_id, commit.raw_id,
143 self.db_repo.repo_id, commit.raw_id,
144 with_revisions=True)
144 with_revisions=True)
145 prs = set(st.pull_request for st in statuses
145 prs = set(st.pull_request for st in statuses
146 if st.pull_request is not None)
146 if st.pull_request is not None)
147 # from associated statuses, check the pull requests, and
147 # from associated statuses, check the pull requests, and
148 # show comments from them
148 # show comments from them
149 for pr in prs:
149 for pr in prs:
150 c.comments.extend(pr.comments)
150 c.comments.extend(pr.comments)
151
151
152 c.unresolved_comments = CommentsModel()\
152 c.unresolved_comments = CommentsModel()\
153 .get_commit_unresolved_todos(commit.raw_id)
153 .get_commit_unresolved_todos(commit.raw_id)
154 c.resolved_comments = CommentsModel()\
154 c.resolved_comments = CommentsModel()\
155 .get_commit_resolved_todos(commit.raw_id)
155 .get_commit_resolved_todos(commit.raw_id)
156
156
157 diff = None
157 diff = None
158 # Iterate over ranges (default commit view is always one commit)
158 # Iterate over ranges (default commit view is always one commit)
159 for commit in c.commit_ranges:
159 for commit in c.commit_ranges:
160 c.changes[commit.raw_id] = []
160 c.changes[commit.raw_id] = []
161
161
162 commit2 = commit
162 commit2 = commit
163 commit1 = commit.first_parent
163 commit1 = commit.first_parent
164
164
165 if method == 'show':
165 if method == 'show':
166 inline_comments = CommentsModel().get_inline_comments(
166 inline_comments = CommentsModel().get_inline_comments(
167 self.db_repo.repo_id, revision=commit.raw_id)
167 self.db_repo.repo_id, revision=commit.raw_id)
168 c.inline_cnt = CommentsModel().get_inline_comments_count(
168 c.inline_cnt = CommentsModel().get_inline_comments_count(
169 inline_comments)
169 inline_comments)
170 c.inline_comments = inline_comments
170 c.inline_comments = inline_comments
171
171
172 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
172 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
173 self.db_repo)
173 self.db_repo)
174 cache_file_path = diff_cache_exist(
174 cache_file_path = diff_cache_exist(
175 cache_path, 'diff', commit.raw_id,
175 cache_path, 'diff', commit.raw_id,
176 hide_whitespace_changes, diff_context, c.fulldiff)
176 hide_whitespace_changes, diff_context, c.fulldiff)
177
177
178 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
178 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
179 force_recache = str2bool(self.request.GET.get('force_recache'))
179 force_recache = str2bool(self.request.GET.get('force_recache'))
180
180
181 cached_diff = None
181 cached_diff = None
182 if caching_enabled:
182 if caching_enabled:
183 cached_diff = load_cached_diff(cache_file_path)
183 cached_diff = load_cached_diff(cache_file_path)
184
184
185 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
185 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
186 if not force_recache and has_proper_diff_cache:
186 if not force_recache and has_proper_diff_cache:
187 diffset = cached_diff['diff']
187 diffset = cached_diff['diff']
188 else:
188 else:
189 vcs_diff = self.rhodecode_vcs_repo.get_diff(
189 vcs_diff = self.rhodecode_vcs_repo.get_diff(
190 commit1, commit2,
190 commit1, commit2,
191 ignore_whitespace=hide_whitespace_changes,
191 ignore_whitespace=hide_whitespace_changes,
192 context=diff_context)
192 context=diff_context)
193
193
194 diff_processor = diffs.DiffProcessor(
194 diff_processor = diffs.DiffProcessor(
195 vcs_diff, format='newdiff', diff_limit=diff_limit,
195 vcs_diff, format='newdiff', diff_limit=diff_limit,
196 file_limit=file_limit, show_full_diff=c.fulldiff)
196 file_limit=file_limit, show_full_diff=c.fulldiff)
197
197
198 _parsed = diff_processor.prepare()
198 _parsed = diff_processor.prepare()
199
199
200 diffset = codeblocks.DiffSet(
200 diffset = codeblocks.DiffSet(
201 repo_name=self.db_repo_name,
201 repo_name=self.db_repo_name,
202 source_node_getter=codeblocks.diffset_node_getter(commit1),
202 source_node_getter=codeblocks.diffset_node_getter(commit1),
203 target_node_getter=codeblocks.diffset_node_getter(commit2))
203 target_node_getter=codeblocks.diffset_node_getter(commit2))
204
204
205 diffset = self.path_filter.render_patchset_filtered(
205 diffset = self.path_filter.render_patchset_filtered(
206 diffset, _parsed, commit1.raw_id, commit2.raw_id)
206 diffset, _parsed, commit1.raw_id, commit2.raw_id)
207
207
208 # save cached diff
208 # save cached diff
209 if caching_enabled:
209 if caching_enabled:
210 cache_diff(cache_file_path, diffset, None)
210 cache_diff(cache_file_path, diffset, None)
211
211
212 c.limited_diff = diffset.limited_diff
212 c.limited_diff = diffset.limited_diff
213 c.changes[commit.raw_id] = diffset
213 c.changes[commit.raw_id] = diffset
214 else:
214 else:
215 # TODO(marcink): no cache usage here...
215 # TODO(marcink): no cache usage here...
216 _diff = self.rhodecode_vcs_repo.get_diff(
216 _diff = self.rhodecode_vcs_repo.get_diff(
217 commit1, commit2,
217 commit1, commit2,
218 ignore_whitespace=hide_whitespace_changes, context=diff_context)
218 ignore_whitespace=hide_whitespace_changes, context=diff_context)
219 diff_processor = diffs.DiffProcessor(
219 diff_processor = diffs.DiffProcessor(
220 _diff, format='newdiff', diff_limit=diff_limit,
220 _diff, format='newdiff', diff_limit=diff_limit,
221 file_limit=file_limit, show_full_diff=c.fulldiff)
221 file_limit=file_limit, show_full_diff=c.fulldiff)
222 # for downloads/raw we only need the RAW diff, nothing else
222 # for downloads/raw we only need the RAW diff, nothing else
223 diff = self.path_filter.get_raw_patch(diff_processor)
223 diff = self.path_filter.get_raw_patch(diff_processor)
224 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
224 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
225
225
226 # sort comments by how they were generated
226 # sort comments by how they were generated
227 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
227 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
228
228
229 if len(c.commit_ranges) == 1:
229 if len(c.commit_ranges) == 1:
230 c.commit = c.commit_ranges[0]
230 c.commit = c.commit_ranges[0]
231 c.parent_tmpl = ''.join(
231 c.parent_tmpl = ''.join(
232 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
232 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
233
233
234 if method == 'download':
234 if method == 'download':
235 response = Response(diff)
235 response = Response(diff)
236 response.content_type = 'text/plain'
236 response.content_type = 'text/plain'
237 response.content_disposition = (
237 response.content_disposition = (
238 'attachment; filename=%s.diff' % commit_id_range[:12])
238 'attachment; filename=%s.diff' % commit_id_range[:12])
239 return response
239 return response
240 elif method == 'patch':
240 elif method == 'patch':
241 c.diff = safe_unicode(diff)
241 c.diff = safe_unicode(diff)
242 patch = render(
242 patch = render(
243 'rhodecode:templates/changeset/patch_changeset.mako',
243 'rhodecode:templates/changeset/patch_changeset.mako',
244 self._get_template_context(c), self.request)
244 self._get_template_context(c), self.request)
245 response = Response(patch)
245 response = Response(patch)
246 response.content_type = 'text/plain'
246 response.content_type = 'text/plain'
247 return response
247 return response
248 elif method == 'raw':
248 elif method == 'raw':
249 response = Response(diff)
249 response = Response(diff)
250 response.content_type = 'text/plain'
250 response.content_type = 'text/plain'
251 return response
251 return response
252 elif method == 'show':
252 elif method == 'show':
253 if len(c.commit_ranges) == 1:
253 if len(c.commit_ranges) == 1:
254 html = render(
254 html = render(
255 'rhodecode:templates/changeset/changeset.mako',
255 'rhodecode:templates/changeset/changeset.mako',
256 self._get_template_context(c), self.request)
256 self._get_template_context(c), self.request)
257 return Response(html)
257 return Response(html)
258 else:
258 else:
259 c.ancestor = None
259 c.ancestor = None
260 c.target_repo = self.db_repo
260 c.target_repo = self.db_repo
261 html = render(
261 html = render(
262 'rhodecode:templates/changeset/changeset_range.mako',
262 'rhodecode:templates/changeset/changeset_range.mako',
263 self._get_template_context(c), self.request)
263 self._get_template_context(c), self.request)
264 return Response(html)
264 return Response(html)
265
265
266 raise HTTPBadRequest()
266 raise HTTPBadRequest()
267
267
268 @LoginRequired()
268 @LoginRequired()
269 @HasRepoPermissionAnyDecorator(
269 @HasRepoPermissionAnyDecorator(
270 'repository.read', 'repository.write', 'repository.admin')
270 'repository.read', 'repository.write', 'repository.admin')
271 @view_config(
271 @view_config(
272 route_name='repo_commit', request_method='GET',
272 route_name='repo_commit', request_method='GET',
273 renderer=None)
273 renderer=None)
274 def repo_commit_show(self):
274 def repo_commit_show(self):
275 commit_id = self.request.matchdict['commit_id']
275 commit_id = self.request.matchdict['commit_id']
276 return self._commit(commit_id, method='show')
276 return self._commit(commit_id, method='show')
277
277
278 @LoginRequired()
278 @LoginRequired()
279 @HasRepoPermissionAnyDecorator(
279 @HasRepoPermissionAnyDecorator(
280 'repository.read', 'repository.write', 'repository.admin')
280 'repository.read', 'repository.write', 'repository.admin')
281 @view_config(
281 @view_config(
282 route_name='repo_commit_raw', request_method='GET',
282 route_name='repo_commit_raw', request_method='GET',
283 renderer=None)
283 renderer=None)
284 @view_config(
284 @view_config(
285 route_name='repo_commit_raw_deprecated', request_method='GET',
285 route_name='repo_commit_raw_deprecated', request_method='GET',
286 renderer=None)
286 renderer=None)
287 def repo_commit_raw(self):
287 def repo_commit_raw(self):
288 commit_id = self.request.matchdict['commit_id']
288 commit_id = self.request.matchdict['commit_id']
289 return self._commit(commit_id, method='raw')
289 return self._commit(commit_id, method='raw')
290
290
291 @LoginRequired()
291 @LoginRequired()
292 @HasRepoPermissionAnyDecorator(
292 @HasRepoPermissionAnyDecorator(
293 'repository.read', 'repository.write', 'repository.admin')
293 'repository.read', 'repository.write', 'repository.admin')
294 @view_config(
294 @view_config(
295 route_name='repo_commit_patch', request_method='GET',
295 route_name='repo_commit_patch', request_method='GET',
296 renderer=None)
296 renderer=None)
297 def repo_commit_patch(self):
297 def repo_commit_patch(self):
298 commit_id = self.request.matchdict['commit_id']
298 commit_id = self.request.matchdict['commit_id']
299 return self._commit(commit_id, method='patch')
299 return self._commit(commit_id, method='patch')
300
300
301 @LoginRequired()
301 @LoginRequired()
302 @HasRepoPermissionAnyDecorator(
302 @HasRepoPermissionAnyDecorator(
303 'repository.read', 'repository.write', 'repository.admin')
303 'repository.read', 'repository.write', 'repository.admin')
304 @view_config(
304 @view_config(
305 route_name='repo_commit_download', request_method='GET',
305 route_name='repo_commit_download', request_method='GET',
306 renderer=None)
306 renderer=None)
307 def repo_commit_download(self):
307 def repo_commit_download(self):
308 commit_id = self.request.matchdict['commit_id']
308 commit_id = self.request.matchdict['commit_id']
309 return self._commit(commit_id, method='download')
309 return self._commit(commit_id, method='download')
310
310
311 @LoginRequired()
311 @LoginRequired()
312 @NotAnonymous()
312 @NotAnonymous()
313 @HasRepoPermissionAnyDecorator(
313 @HasRepoPermissionAnyDecorator(
314 'repository.read', 'repository.write', 'repository.admin')
314 'repository.read', 'repository.write', 'repository.admin')
315 @CSRFRequired()
315 @CSRFRequired()
316 @view_config(
316 @view_config(
317 route_name='repo_commit_comment_create', request_method='POST',
317 route_name='repo_commit_comment_create', request_method='POST',
318 renderer='json_ext')
318 renderer='json_ext')
319 def repo_commit_comment_create(self):
319 def repo_commit_comment_create(self):
320 _ = self.request.translate
320 _ = self.request.translate
321 commit_id = self.request.matchdict['commit_id']
321 commit_id = self.request.matchdict['commit_id']
322
322
323 c = self.load_default_context()
323 c = self.load_default_context()
324 status = self.request.POST.get('changeset_status', None)
324 status = self.request.POST.get('changeset_status', None)
325 text = self.request.POST.get('text')
325 text = self.request.POST.get('text')
326 comment_type = self.request.POST.get('comment_type')
326 comment_type = self.request.POST.get('comment_type')
327 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
327 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
328
328
329 if status:
329 if status:
330 text = text or (_('Status change %(transition_icon)s %(status)s')
330 text = text or (_('Status change %(transition_icon)s %(status)s')
331 % {'transition_icon': '>',
331 % {'transition_icon': '>',
332 'status': ChangesetStatus.get_status_lbl(status)})
332 'status': ChangesetStatus.get_status_lbl(status)})
333
333
334 multi_commit_ids = []
334 multi_commit_ids = []
335 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
335 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
336 if _commit_id not in ['', None, EmptyCommit.raw_id]:
336 if _commit_id not in ['', None, EmptyCommit.raw_id]:
337 if _commit_id not in multi_commit_ids:
337 if _commit_id not in multi_commit_ids:
338 multi_commit_ids.append(_commit_id)
338 multi_commit_ids.append(_commit_id)
339
339
340 commit_ids = multi_commit_ids or [commit_id]
340 commit_ids = multi_commit_ids or [commit_id]
341
341
342 comment = None
342 comment = None
343 for current_id in filter(None, commit_ids):
343 for current_id in filter(None, commit_ids):
344 comment = CommentsModel().create(
344 comment = CommentsModel().create(
345 text=text,
345 text=text,
346 repo=self.db_repo.repo_id,
346 repo=self.db_repo.repo_id,
347 user=self._rhodecode_db_user.user_id,
347 user=self._rhodecode_db_user.user_id,
348 commit_id=current_id,
348 commit_id=current_id,
349 f_path=self.request.POST.get('f_path'),
349 f_path=self.request.POST.get('f_path'),
350 line_no=self.request.POST.get('line'),
350 line_no=self.request.POST.get('line'),
351 status_change=(ChangesetStatus.get_status_lbl(status)
351 status_change=(ChangesetStatus.get_status_lbl(status)
352 if status else None),
352 if status else None),
353 status_change_type=status,
353 status_change_type=status,
354 comment_type=comment_type,
354 comment_type=comment_type,
355 resolves_comment_id=resolves_comment_id,
355 resolves_comment_id=resolves_comment_id,
356 auth_user=self._rhodecode_user
356 auth_user=self._rhodecode_user
357 )
357 )
358
358
359 # get status if set !
359 # get status if set !
360 if status:
360 if status:
361 # if the latest status came from a pull request and that pull request
361 # if the latest status came from a pull request and that pull request
362 # is closed, disallow changing the status!
362 # is closed, disallow changing the status!
363 # dont_allow_on_closed_pull_request = True !
363 # dont_allow_on_closed_pull_request = True !
364
364
365 try:
365 try:
366 ChangesetStatusModel().set_status(
366 ChangesetStatusModel().set_status(
367 self.db_repo.repo_id,
367 self.db_repo.repo_id,
368 status,
368 status,
369 self._rhodecode_db_user.user_id,
369 self._rhodecode_db_user.user_id,
370 comment,
370 comment,
371 revision=current_id,
371 revision=current_id,
372 dont_allow_on_closed_pull_request=True
372 dont_allow_on_closed_pull_request=True
373 )
373 )
374 except StatusChangeOnClosedPullRequestError:
374 except StatusChangeOnClosedPullRequestError:
375 msg = _('Changing the status of a commit associated with '
375 msg = _('Changing the status of a commit associated with '
376 'a closed pull request is not allowed')
376 'a closed pull request is not allowed')
377 log.exception(msg)
377 log.exception(msg)
378 h.flash(msg, category='warning')
378 h.flash(msg, category='warning')
379 raise HTTPFound(h.route_path(
379 raise HTTPFound(h.route_path(
380 'repo_commit', repo_name=self.db_repo_name,
380 'repo_commit', repo_name=self.db_repo_name,
381 commit_id=current_id))
381 commit_id=current_id))
382
382
383 commit = self.db_repo.get_commit(current_id)
384 CommentsModel().trigger_commit_comment_hook(
385 self.db_repo, self._rhodecode_user, 'create',
386 data={'comment': comment, 'commit': commit})
387
383 # finalize, commit and redirect
388 # finalize, commit and redirect
384 Session().commit()
389 Session().commit()
385
390
386 data = {
391 data = {
387 'target_id': h.safeid(h.safe_unicode(
392 'target_id': h.safeid(h.safe_unicode(
388 self.request.POST.get('f_path'))),
393 self.request.POST.get('f_path'))),
389 }
394 }
390 if comment:
395 if comment:
391 c.co = comment
396 c.co = comment
392 rendered_comment = render(
397 rendered_comment = render(
393 'rhodecode:templates/changeset/changeset_comment_block.mako',
398 'rhodecode:templates/changeset/changeset_comment_block.mako',
394 self._get_template_context(c), self.request)
399 self._get_template_context(c), self.request)
395
400
396 data.update(comment.get_dict())
401 data.update(comment.get_dict())
397 data.update({'rendered_text': rendered_comment})
402 data.update({'rendered_text': rendered_comment})
398
403
399 return data
404 return data
400
405
401 @LoginRequired()
406 @LoginRequired()
402 @NotAnonymous()
407 @NotAnonymous()
403 @HasRepoPermissionAnyDecorator(
408 @HasRepoPermissionAnyDecorator(
404 'repository.read', 'repository.write', 'repository.admin')
409 'repository.read', 'repository.write', 'repository.admin')
405 @CSRFRequired()
410 @CSRFRequired()
406 @view_config(
411 @view_config(
407 route_name='repo_commit_comment_preview', request_method='POST',
412 route_name='repo_commit_comment_preview', request_method='POST',
408 renderer='string', xhr=True)
413 renderer='string', xhr=True)
409 def repo_commit_comment_preview(self):
414 def repo_commit_comment_preview(self):
410 # Technically a CSRF token is not needed as no state changes with this
415 # Technically a CSRF token is not needed as no state changes with this
411 # call. However, as this is a POST it's better to have it, so automated
416 # call. However, as this is a POST it's better to have it, so automated
412 # tools don't flag it as potential CSRF.
417 # tools don't flag it as potential CSRF.
413 # POST is required because the payload could be bigger than the maximum
418 # POST is required because the payload could be bigger than the maximum
414 # allowed by GET.
419 # allowed by GET.
415
420
416 text = self.request.POST.get('text')
421 text = self.request.POST.get('text')
417 renderer = self.request.POST.get('renderer') or 'rst'
422 renderer = self.request.POST.get('renderer') or 'rst'
418 if text:
423 if text:
419 return h.render(text, renderer=renderer, mentions=True,
424 return h.render(text, renderer=renderer, mentions=True,
420 repo_name=self.db_repo_name)
425 repo_name=self.db_repo_name)
421 return ''
426 return ''
422
427
423 @LoginRequired()
428 @LoginRequired()
424 @NotAnonymous()
429 @NotAnonymous()
425 @HasRepoPermissionAnyDecorator(
430 @HasRepoPermissionAnyDecorator(
426 'repository.read', 'repository.write', 'repository.admin')
431 'repository.read', 'repository.write', 'repository.admin')
427 @CSRFRequired()
432 @CSRFRequired()
428 @view_config(
433 @view_config(
429 route_name='repo_commit_comment_attachment_upload', request_method='POST',
434 route_name='repo_commit_comment_attachment_upload', request_method='POST',
430 renderer='json_ext', xhr=True)
435 renderer='json_ext', xhr=True)
431 def repo_commit_comment_attachment_upload(self):
436 def repo_commit_comment_attachment_upload(self):
432 c = self.load_default_context()
437 c = self.load_default_context()
433 upload_key = 'attachment'
438 upload_key = 'attachment'
434
439
435 file_obj = self.request.POST.get(upload_key)
440 file_obj = self.request.POST.get(upload_key)
436
441
437 if file_obj is None:
442 if file_obj is None:
438 self.request.response.status = 400
443 self.request.response.status = 400
439 return {'store_fid': None,
444 return {'store_fid': None,
440 'access_path': None,
445 'access_path': None,
441 'error': '{} data field is missing'.format(upload_key)}
446 'error': '{} data field is missing'.format(upload_key)}
442
447
443 if not hasattr(file_obj, 'filename'):
448 if not hasattr(file_obj, 'filename'):
444 self.request.response.status = 400
449 self.request.response.status = 400
445 return {'store_fid': None,
450 return {'store_fid': None,
446 'access_path': None,
451 'access_path': None,
447 'error': 'filename cannot be read from the data field'}
452 'error': 'filename cannot be read from the data field'}
448
453
449 filename = file_obj.filename
454 filename = file_obj.filename
450 file_display_name = filename
455 file_display_name = filename
451
456
452 metadata = {
457 metadata = {
453 'user_uploaded': {'username': self._rhodecode_user.username,
458 'user_uploaded': {'username': self._rhodecode_user.username,
454 'user_id': self._rhodecode_user.user_id,
459 'user_id': self._rhodecode_user.user_id,
455 'ip': self._rhodecode_user.ip_addr}}
460 'ip': self._rhodecode_user.ip_addr}}
456
461
457 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
462 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
458 allowed_extensions = [
463 allowed_extensions = [
459 '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
464 '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
460 '.pptx', '.txt', '.xlsx', '.zip']
465 '.pptx', '.txt', '.xlsx', '.zip']
461 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
466 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
462
467
463 try:
468 try:
464 storage = store_utils.get_file_storage(self.request.registry.settings)
469 storage = store_utils.get_file_storage(self.request.registry.settings)
465 store_uid, metadata = storage.save_file(
470 store_uid, metadata = storage.save_file(
466 file_obj.file, filename, extra_metadata=metadata,
471 file_obj.file, filename, extra_metadata=metadata,
467 extensions=allowed_extensions, max_filesize=max_file_size)
472 extensions=allowed_extensions, max_filesize=max_file_size)
468 except FileNotAllowedException:
473 except FileNotAllowedException:
469 self.request.response.status = 400
474 self.request.response.status = 400
470 permitted_extensions = ', '.join(allowed_extensions)
475 permitted_extensions = ', '.join(allowed_extensions)
471 error_msg = 'File `{}` is not allowed. ' \
476 error_msg = 'File `{}` is not allowed. ' \
472 'Only the following extensions are permitted: {}'.format(
477 'Only the following extensions are permitted: {}'.format(
473 filename, permitted_extensions)
478 filename, permitted_extensions)
474 return {'store_fid': None,
479 return {'store_fid': None,
475 'access_path': None,
480 'access_path': None,
476 'error': error_msg}
481 'error': error_msg}
477 except FileOverSizeException:
482 except FileOverSizeException:
478 self.request.response.status = 400
483 self.request.response.status = 400
479 limit_mb = h.format_byte_size_binary(max_file_size)
484 limit_mb = h.format_byte_size_binary(max_file_size)
480 return {'store_fid': None,
485 return {'store_fid': None,
481 'access_path': None,
486 'access_path': None,
482 'error': 'File {} exceeds the allowed limit of {}.'.format(
487 'error': 'File {} exceeds the allowed limit of {}.'.format(
483 filename, limit_mb)}
488 filename, limit_mb)}
484
489
485 try:
490 try:
486 entry = FileStore.create(
491 entry = FileStore.create(
487 file_uid=store_uid, filename=metadata["filename"],
492 file_uid=store_uid, filename=metadata["filename"],
488 file_hash=metadata["sha256"], file_size=metadata["size"],
493 file_hash=metadata["sha256"], file_size=metadata["size"],
489 file_display_name=file_display_name,
494 file_display_name=file_display_name,
490 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
495 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
491 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
496 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
492 scope_repo_id=self.db_repo.repo_id
497 scope_repo_id=self.db_repo.repo_id
493 )
498 )
494 Session().add(entry)
499 Session().add(entry)
495 Session().commit()
500 Session().commit()
496 log.debug('Stored upload in DB as %s', entry)
501 log.debug('Stored upload in DB as %s', entry)
497 except Exception:
502 except Exception:
498 log.exception('Failed to store file %s', filename)
503 log.exception('Failed to store file %s', filename)
499 self.request.response.status = 400
504 self.request.response.status = 400
500 return {'store_fid': None,
505 return {'store_fid': None,
501 'access_path': None,
506 'access_path': None,
502 'error': 'File {} failed to store in DB.'.format(filename)}
507 'error': 'File {} failed to store in DB.'.format(filename)}
503
508
504 Session().commit()
509 Session().commit()
505
510
506 return {
511 return {
507 'store_fid': store_uid,
512 'store_fid': store_uid,
508 'access_path': h.route_path(
513 'access_path': h.route_path(
509 'download_file', fid=store_uid),
514 'download_file', fid=store_uid),
510 'fqn_access_path': h.route_url(
515 'fqn_access_path': h.route_url(
511 'download_file', fid=store_uid),
516 'download_file', fid=store_uid),
512 'repo_access_path': h.route_path(
517 'repo_access_path': h.route_path(
513 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
518 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
514 'repo_fqn_access_path': h.route_url(
519 'repo_fqn_access_path': h.route_url(
515 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
520 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
516 }
521 }
517
522
518 @LoginRequired()
523 @LoginRequired()
519 @NotAnonymous()
524 @NotAnonymous()
520 @HasRepoPermissionAnyDecorator(
525 @HasRepoPermissionAnyDecorator(
521 'repository.read', 'repository.write', 'repository.admin')
526 'repository.read', 'repository.write', 'repository.admin')
522 @CSRFRequired()
527 @CSRFRequired()
523 @view_config(
528 @view_config(
524 route_name='repo_commit_comment_delete', request_method='POST',
529 route_name='repo_commit_comment_delete', request_method='POST',
525 renderer='json_ext')
530 renderer='json_ext')
526 def repo_commit_comment_delete(self):
531 def repo_commit_comment_delete(self):
527 commit_id = self.request.matchdict['commit_id']
532 commit_id = self.request.matchdict['commit_id']
528 comment_id = self.request.matchdict['comment_id']
533 comment_id = self.request.matchdict['comment_id']
529
534
530 comment = ChangesetComment.get_or_404(comment_id)
535 comment = ChangesetComment.get_or_404(comment_id)
531 if not comment:
536 if not comment:
532 log.debug('Comment with id:%s not found, skipping', comment_id)
537 log.debug('Comment with id:%s not found, skipping', comment_id)
533 # comment already deleted in another call probably
538 # comment already deleted in another call probably
534 return True
539 return True
535
540
536 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
541 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
537 super_admin = h.HasPermissionAny('hg.admin')()
542 super_admin = h.HasPermissionAny('hg.admin')()
538 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
543 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
539 is_repo_comment = comment.repo.repo_name == self.db_repo_name
544 is_repo_comment = comment.repo.repo_name == self.db_repo_name
540 comment_repo_admin = is_repo_admin and is_repo_comment
545 comment_repo_admin = is_repo_admin and is_repo_comment
541
546
542 if super_admin or comment_owner or comment_repo_admin:
547 if super_admin or comment_owner or comment_repo_admin:
543 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
548 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
544 Session().commit()
549 Session().commit()
545 return True
550 return True
546 else:
551 else:
547 log.warning('No permissions for user %s to delete comment_id: %s',
552 log.warning('No permissions for user %s to delete comment_id: %s',
548 self._rhodecode_db_user, comment_id)
553 self._rhodecode_db_user, comment_id)
549 raise HTTPNotFound()
554 raise HTTPNotFound()
550
555
551 @LoginRequired()
556 @LoginRequired()
552 @HasRepoPermissionAnyDecorator(
557 @HasRepoPermissionAnyDecorator(
553 'repository.read', 'repository.write', 'repository.admin')
558 'repository.read', 'repository.write', 'repository.admin')
554 @view_config(
559 @view_config(
555 route_name='repo_commit_data', request_method='GET',
560 route_name='repo_commit_data', request_method='GET',
556 renderer='json_ext', xhr=True)
561 renderer='json_ext', xhr=True)
557 def repo_commit_data(self):
562 def repo_commit_data(self):
558 commit_id = self.request.matchdict['commit_id']
563 commit_id = self.request.matchdict['commit_id']
559 self.load_default_context()
564 self.load_default_context()
560
565
561 try:
566 try:
562 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
567 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
563 except CommitDoesNotExistError as e:
568 except CommitDoesNotExistError as e:
564 return EmptyCommit(message=str(e))
569 return EmptyCommit(message=str(e))
565
570
566 @LoginRequired()
571 @LoginRequired()
567 @HasRepoPermissionAnyDecorator(
572 @HasRepoPermissionAnyDecorator(
568 'repository.read', 'repository.write', 'repository.admin')
573 'repository.read', 'repository.write', 'repository.admin')
569 @view_config(
574 @view_config(
570 route_name='repo_commit_children', request_method='GET',
575 route_name='repo_commit_children', request_method='GET',
571 renderer='json_ext', xhr=True)
576 renderer='json_ext', xhr=True)
572 def repo_commit_children(self):
577 def repo_commit_children(self):
573 commit_id = self.request.matchdict['commit_id']
578 commit_id = self.request.matchdict['commit_id']
574 self.load_default_context()
579 self.load_default_context()
575
580
576 try:
581 try:
577 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
582 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
578 children = commit.children
583 children = commit.children
579 except CommitDoesNotExistError:
584 except CommitDoesNotExistError:
580 children = []
585 children = []
581
586
582 result = {"results": children}
587 result = {"results": children}
583 return result
588 return result
584
589
585 @LoginRequired()
590 @LoginRequired()
586 @HasRepoPermissionAnyDecorator(
591 @HasRepoPermissionAnyDecorator(
587 'repository.read', 'repository.write', 'repository.admin')
592 'repository.read', 'repository.write', 'repository.admin')
588 @view_config(
593 @view_config(
589 route_name='repo_commit_parents', request_method='GET',
594 route_name='repo_commit_parents', request_method='GET',
590 renderer='json_ext')
595 renderer='json_ext')
591 def repo_commit_parents(self):
596 def repo_commit_parents(self):
592 commit_id = self.request.matchdict['commit_id']
597 commit_id = self.request.matchdict['commit_id']
593 self.load_default_context()
598 self.load_default_context()
594
599
595 try:
600 try:
596 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
601 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
597 parents = commit.parents
602 parents = commit.parents
598 except CommitDoesNotExistError:
603 except CommitDoesNotExistError:
599 parents = []
604 parents = []
600 result = {"results": parents}
605 result = {"results": parents}
601 return result
606 return result
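
The diff branch above (diff_cache_exist / load_cached_diff / cache_diff guarded by the force_recache GET flag) is a cache-or-recompute pattern: reuse a previously rendered diffset unless the cache is missing, incomplete, or a refresh is forced. A minimal self-contained sketch of that pattern, assuming a simple pickle file on disk; the function name and storage format are illustrative and do not mirror RhodeCode's actual cache helpers:

import os
import pickle

def load_or_compute_diff(cache_file_path, compute, force_recache=False):
    # reuse the cached diff unless a refresh is forced or the cache is missing/empty
    if not force_recache and os.path.exists(cache_file_path):
        with open(cache_file_path, 'rb') as f:
            cached = pickle.load(f)
        if cached.get('diff'):
            return cached['diff']
    # cache miss or forced refresh: compute the diff and store it for next time
    diff = compute()
    with open(cache_file_path, 'wb') as f:
        pickle.dump({'diff': diff}, f)
    return diff
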
@@ -1,56 +1,60 b''
1 # Copyright (C) 2016-2019 RhodeCode GmbH
1 # Copyright (C) 2016-2019 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 rcextensions module, please edit `hooks.py` to override hooks logic
20 rcextensions module, please edit `hooks.py` to override hooks logic
21 """
21 """
22
22
23 from .hooks import (
23 from .hooks import (
24 _create_repo_hook,
24 _create_repo_hook,
25 _create_repo_group_hook,
25 _create_repo_group_hook,
26 _pre_create_user_hook,
26 _pre_create_user_hook,
27 _create_user_hook,
27 _create_user_hook,
28 _comment_commit_repo_hook,
28 _delete_repo_hook,
29 _delete_repo_hook,
29 _delete_user_hook,
30 _delete_user_hook,
30 _pre_push_hook,
31 _pre_push_hook,
31 _push_hook,
32 _push_hook,
32 _pre_pull_hook,
33 _pre_pull_hook,
33 _pull_hook,
34 _pull_hook,
34 _create_pull_request_hook,
35 _create_pull_request_hook,
35 _review_pull_request_hook,
36 _review_pull_request_hook,
37 _comment_pull_request_hook,
36 _update_pull_request_hook,
38 _update_pull_request_hook,
37 _merge_pull_request_hook,
39 _merge_pull_request_hook,
38 _close_pull_request_hook,
40 _close_pull_request_hook,
39 )
41 )
40
42
41 # set as module attributes, we use those to call hooks. *do not change this*
43 # set as module attributes, we use those to call hooks. *do not change this*
42 CREATE_REPO_HOOK = _create_repo_hook
44 CREATE_REPO_HOOK = _create_repo_hook
45 COMMENT_COMMIT_REPO_HOOK = _comment_commit_repo_hook
43 CREATE_REPO_GROUP_HOOK = _create_repo_group_hook
46 CREATE_REPO_GROUP_HOOK = _create_repo_group_hook
44 PRE_CREATE_USER_HOOK = _pre_create_user_hook
47 PRE_CREATE_USER_HOOK = _pre_create_user_hook
45 CREATE_USER_HOOK = _create_user_hook
48 CREATE_USER_HOOK = _create_user_hook
46 DELETE_REPO_HOOK = _delete_repo_hook
49 DELETE_REPO_HOOK = _delete_repo_hook
47 DELETE_USER_HOOK = _delete_user_hook
50 DELETE_USER_HOOK = _delete_user_hook
48 PRE_PUSH_HOOK = _pre_push_hook
51 PRE_PUSH_HOOK = _pre_push_hook
49 PUSH_HOOK = _push_hook
52 PUSH_HOOK = _push_hook
50 PRE_PULL_HOOK = _pre_pull_hook
53 PRE_PULL_HOOK = _pre_pull_hook
51 PULL_HOOK = _pull_hook
54 PULL_HOOK = _pull_hook
52 CREATE_PULL_REQUEST = _create_pull_request_hook
55 CREATE_PULL_REQUEST = _create_pull_request_hook
53 REVIEW_PULL_REQUEST = _review_pull_request_hook
56 REVIEW_PULL_REQUEST = _review_pull_request_hook
57 COMMENT_PULL_REQUEST = _comment_pull_request_hook
54 UPDATE_PULL_REQUEST = _update_pull_request_hook
58 UPDATE_PULL_REQUEST = _update_pull_request_hook
55 MERGE_PULL_REQUEST = _merge_pull_request_hook
59 MERGE_PULL_REQUEST = _merge_pull_request_hook
56 CLOSE_PULL_REQUEST = _close_pull_request_hook
60 CLOSE_PULL_REQUEST = _close_pull_request_hook
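
Because every hook is a plain callable bound to a module attribute, a deployment can wrap one of them before it is exposed, for example to log each invocation. A small sketch, assuming it is placed in this same rcextensions __init__.py; the wrapper and logger names are illustrative and not part of the shipped template:

import functools
import logging

from .hooks import _comment_pull_request_hook

log = logging.getLogger('rhodecode.rcextensions.custom')

def _with_logging(hook):
    # wrap a hook callable so every invocation is logged before the original logic runs
    @functools.wraps(hook)
    def wrapper(*args, **kwargs):
        log.debug('running rcextensions hook %s', hook.__name__)
        return hook(*args, **kwargs)
    return wrapper

# keep the shipped behaviour, but log every pull request comment event
COMMENT_PULL_REQUEST = _with_logging(_comment_pull_request_hook)
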
@@ -1,37 +1,36 b''
1 # Example to trigger an HTTP call via an HTTP helper from the post_push hook
1 # Example to trigger an HTTP call via an HTTP helper from the post_push hook
2
2
3
3
4 @has_kwargs({
4 @has_kwargs({
5 'server_url': 'url of instance that triggered this hook',
5 'server_url': 'url of instance that triggered this hook',
6 'config': 'path to .ini config used',
6 'config': 'path to .ini config used',
7 'scm': 'type of version control "git", "hg", "svn"',
7 'scm': 'type of version control "git", "hg", "svn"',
8 'username': 'username of actor who triggered this event',
8 'username': 'username of actor who triggered this event',
9 'ip': 'ip address of actor who triggered this hook',
9 'ip': 'ip address of actor who triggered this hook',
10 'action': '',
10 'action': '',
11 'repository': 'repository name',
11 'repository': 'repository name',
12 'repo_store_path': 'full path to where repositories are stored',
12 'repo_store_path': 'full path to where repositories are stored',
13 'commit_ids': '',
13 'commit_ids': '',
14 'hook_type': '',
14 'hook_type': '',
15 'user_agent': '',
15 'user_agent': '',
16 })
16 })
17 def _push_hook(*args, **kwargs):
17 def _push_hook(*args, **kwargs):
18 """
18 """
19 POST PUSH HOOK. This function will be executed after each push; it runs
19 POST PUSH HOOK. This function will be executed after each push; it runs
20 after the built-in hook that RhodeCode uses for logging pushes
20 after the built-in hook that RhodeCode uses for logging pushes
21 """
21 """
22
22
23 from .helpers import http_call, extra_fields
23 from .helpers import http_call, extra_fields
24 # returns list of dicts with key-val fetched from extra fields
24 # returns list of dicts with key-val fetched from extra fields
25 repo_extra_fields = extra_fields.run(**kwargs)
25 repo_extra_fields = extra_fields.run(**kwargs)
26
26
27 if repo_extra_fields.get('endpoint_url'):
27 endpoint_url = extra_fields.get_field(repo_extra_fields, key='endpoint_url', default='')
28 field_metadata = repo_extra_fields['endpoint_url']
28
29 endpoint = field_metadata['field_value']
29 if endpoint_url:
30 if endpoint:
30 data = {
31 data = {
31 'project': kwargs['repository'],
32 'project': kwargs['repository'],
32 }
33 }
33 response = http_call.run(url=endpoint_url, params=data)
34 response = http_call.run(url=endpoint, params=data)
34 return HookResponse(0, 'Called endpoint {}, with response {}\n'.format(endpoint_url, response))
35 return HookResponse(0, 'Called endpoint {}, with response {}\n'.format(endpoint, response))
36
35
37 return HookResponse(0, '')
36 return HookResponse(0, '')
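
For reference, the http_call.run() call above boils down to an HTTP request against the configured endpoint. A self-contained sketch of such a call using the requests library; this is an assumption for illustration, not the bundled helper's actual implementation, whose retry and timeout behaviour may differ:

import requests

def call_endpoint(endpoint_url, data, timeout=10):
    # POST the payload and fail soft: a CI outage should not break the push itself
    try:
        response = requests.post(endpoint_url, params=data, timeout=timeout)
        return response.status_code, response.text
    except requests.RequestException as exc:
        return None, 'endpoint call failed: {}'.format(exc)
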
@@ -1,37 +1,35 b''
1 # Example to trigger a CI call via an HTTP helper from the post_push hook
1 # Example to trigger a CI call via an HTTP helper from the post_push hook
2
2
3
3
4 @has_kwargs({
4 @has_kwargs({
5 'server_url': 'url of instance that triggered this hook',
5 'server_url': 'url of instance that triggered this hook',
6 'config': 'path to .ini config used',
6 'config': 'path to .ini config used',
7 'scm': 'type of version control "git", "hg", "svn"',
7 'scm': 'type of version control "git", "hg", "svn"',
8 'username': 'username of actor who triggered this event',
8 'username': 'username of actor who triggered this event',
9 'ip': 'ip address of actor who triggered this hook',
9 'ip': 'ip address of actor who triggered this hook',
10 'action': '',
10 'action': '',
11 'repository': 'repository name',
11 'repository': 'repository name',
12 'repo_store_path': 'full path to where repositories are stored',
12 'repo_store_path': 'full path to where repositories are stored',
13 'commit_ids': '',
13 'commit_ids': '',
14 'hook_type': '',
14 'hook_type': '',
15 'user_agent': '',
15 'user_agent': '',
16 })
16 })
17 def _push_hook(*args, **kwargs):
17 def _push_hook(*args, **kwargs):
18 """
18 """
19 POST PUSH HOOK. This function will be executed after each push; it runs
19 POST PUSH HOOK. This function will be executed after each push; it runs
20 after the built-in hook that RhodeCode uses for logging pushes
20 after the built-in hook that RhodeCode uses for logging pushes
21 """
21 """
22
22
23 from .helpers import http_call, extra_fields
23 from .helpers import http_call, extra_fields
24 # returns list of dicts with key-val fetched from extra fields
24 # returns list of dicts with key-val fetched from extra fields
25 repo_extra_fields = extra_fields.run(**kwargs)
25 repo_extra_fields = extra_fields.run(**kwargs)
26
26
27 if repo_extra_fields.get('endpoint_url'):
27 endpoint_url = extra_fields.get_field(repo_extra_fields, key='endpoint_url', default='')
28 field_metadata = repo_extra_fields['endpoint_url']
28 if endpoint_url:
29 endpoint = field_metadata['field_value']
29 data = {
30 if endpoint:
30 'some_key': 'val'
31 data = {
31 }
32 'some_key': 'val'
32 response = http_call.run(url=endpoint_url, json_data=data)
33 }
33 return HookResponse(0, 'Called endpoint {}, with response {}'.format(endpoint_url, response))
34 response = http_call.run(url=endpoint, json_data=data)
35 return HookResponse(0, 'Called endpoint {}, with response {}'.format(endpoint, response))
36
34
37 return HookResponse(0, '')
35 return HookResponse(0, '')
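
An optional hardening step, not part of the example above: validate the endpoint_url extra field before calling it, so a malformed value produces a readable hook message instead of an obscure HTTP error. A sketch; the is_valid_http_url helper is hypothetical:

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2

def is_valid_http_url(value):
    # accept only absolute http(s) URLs, e.g. http://ci.example.com/job/build
    parsed = urlparse(value)
    return parsed.scheme in ('http', 'https') and bool(parsed.netloc)
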
@@ -1,79 +1,81 b''
1 # Example to validate commit message or author using some sort of rules
1 # Example to validate commit message or author using some sort of rules
2
2
3
3
4 @has_kwargs({
4 @has_kwargs({
5 'server_url': 'url of instance that triggered this hook',
5 'server_url': 'url of instance that triggered this hook',
6 'config': 'path to .ini config used',
6 'config': 'path to .ini config used',
7 'scm': 'type of version control "git", "hg", "svn"',
7 'scm': 'type of version control "git", "hg", "svn"',
8 'username': 'username of actor who triggered this event',
8 'username': 'username of actor who triggered this event',
9 'ip': 'ip address of actor who triggered this hook',
9 'ip': 'ip address of actor who triggered this hook',
10 'action': '',
10 'action': '',
11 'repository': 'repository name',
11 'repository': 'repository name',
12 'repo_store_path': 'full path to where repositories are stored',
12 'repo_store_path': 'full path to where repositories are stored',
13 'commit_ids': 'pre transaction metadata for commit ids',
13 'commit_ids': 'pre transaction metadata for commit ids',
14 'hook_type': '',
14 'hook_type': '',
15 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
15 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
16 })
16 })
17 def _pre_push_hook(*args, **kwargs):
17 def _pre_push_hook(*args, **kwargs):
18 """
18 """
19 Pre-push hook
19 Pre-push hook
20 To stop version control from storing the transaction and send a message to the user,
20 To stop version control from storing the transaction and send a message to the user,
21 return a non-zero HookResponse with a message, e.g. return HookResponse(1, 'Not allowed')
21 return a non-zero HookResponse with a message, e.g. return HookResponse(1, 'Not allowed')
22
22
23 This message will be shown back to the client during the PUSH operation
23 This message will be shown back to the client during the PUSH operation
24
24
25 Commit ids might look like this::
25 Commit ids might look like this::
26
26
27 [{u'hg_env|git_env': ...,
27 [{u'hg_env|git_env': ...,
28 u'multiple_heads': [],
28 u'multiple_heads': [],
29 u'name': u'default',
29 u'name': u'default',
30 u'new_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
30 u'new_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
31 u'old_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
31 u'old_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
32 u'ref': u'',
32 u'ref': u'',
33 u'total_commits': 2,
33 u'total_commits': 2,
34 u'type': u'branch'}]
34 u'type': u'branch'}]
35 """
35 """
36 import re
36 import re
37 from .helpers import extra_fields, extract_pre_commits
37 from .helpers import extra_fields, extract_pre_commits
38 from .utils import str2bool
38 from .utils import str2bool
39
39
40 # returns list of dicts with key-val fetched from extra fields
40 # returns list of dicts with key-val fetched from extra fields
41 repo_extra_fields = extra_fields.run(**kwargs)
41 repo_extra_fields = extra_fields.run(**kwargs)
42
42
43 # optionally use 'extra fields' to control the logic per repo
43 # optionally use 'extra fields' to control the logic per repo
44 validate_author = repo_extra_fields.get('validate_author', {}).get('field_value')
44 validate_author = extra_fields.get_field(
45 repo_extra_fields, key='validate_author', default=False)
45 should_validate = str2bool(validate_author)
46 should_validate = str2bool(validate_author)
46
47
47 # optionally store validation regex into extra fields
48 # optionally store validation regex into extra fields
48 validation_regex = repo_extra_fields.get('validation_regex', {}).get('field_value')
49 validation_regex = extra_fields.get_field(
50 repo_extra_fields, key='validation_regex', default='')
49
51
50 def validate_commit_message(commit_message, message_regex=None):
52 def validate_commit_message(commit_message, message_regex=None):
51 """
53 """
52 This function validates commit_message against some sort of rules.
54 This function validates commit_message against some sort of rules.
53 It should return a valid boolean, and a reason for failure
55 It should return a valid boolean, and a reason for failure
54 """
56 """
55
57
56 if "secret_string" in commit_message:
58 if "secret_string" in commit_message:
57 msg = "!!Push forbidden: secret string found in commit messages"
59 msg = "!!Push forbidden: secret string found in commit messages"
58 return False, msg
60 return False, msg
59
61
60 if message_regex:
62 if message_regex:
61 regexp = re.compile(message_regex)
63 regexp = re.compile(message_regex)
62 if not regexp.match(commit_message):
64 if not regexp.match(commit_message):
63 msg = "!!Push forbidden: commit message does not match regexp"
65 msg = "!!Push forbidden: commit message does not match regexp"
64 return False, msg
66 return False, msg
65
67
66 return True, ''
68 return True, ''
67
69
68 if should_validate:
70 if should_validate:
69 # returns list of dicts with key-val fetched from extra fields
71 # returns list of dicts with key-val fetched from extra fields
70 commit_list = extract_pre_commits.run(**kwargs)
72 commit_list = extract_pre_commits.run(**kwargs)
71
73
72 for commit_data in commit_list:
74 for commit_data in commit_list:
73 message = commit_data['message']
75 message = commit_data['message']
74
76
75 message_valid, reason = validate_commit_message(message, validation_regex)
77 message_valid, reason = validate_commit_message(message, validation_regex)
76 if not message_valid:
78 if not message_valid:
77 return HookResponse(1, reason)
79 return HookResponse(1, reason)
78
80
79 return HookResponse(0, '')
81 return HookResponse(0, '')
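
To make the regex branch above concrete, here is a standalone illustration with a pattern one might store in the validation_regex extra field; the pattern itself is only an example and is not shipped with rcextensions:

import re

# hypothetical value for the `validation_regex` extra field
example_regex = r'^(feat|fix|docs|chore)\(\w+\): .+'
regexp = re.compile(example_regex)

assert regexp.match('fix(auth): handle empty tokens')
assert not regexp.match('quick fix')   # such a message would be rejected with HookResponse(1, ...)
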
@@ -1,117 +1,120 b''
1 # Example to validate pushed files names and size using some sort of rules
1 # Example to validate pushed files names and size using some sort of rules
2
2
3
3
4
4
5 @has_kwargs({
5 @has_kwargs({
6 'server_url': 'url of instance that triggered this hook',
6 'server_url': 'url of instance that triggered this hook',
7 'config': 'path to .ini config used',
7 'config': 'path to .ini config used',
8 'scm': 'type of version control "git", "hg", "svn"',
8 'scm': 'type of version control "git", "hg", "svn"',
9 'username': 'username of actor who triggered this event',
9 'username': 'username of actor who triggered this event',
10 'ip': 'ip address of actor who triggered this hook',
10 'ip': 'ip address of actor who triggered this hook',
11 'action': '',
11 'action': '',
12 'repository': 'repository name',
12 'repository': 'repository name',
13 'repo_store_path': 'full path to where repositories are stored',
13 'repo_store_path': 'full path to where repositories are stored',
14 'commit_ids': 'pre transaction metadata for commit ids',
14 'commit_ids': 'pre transaction metadata for commit ids',
15 'hook_type': '',
15 'hook_type': '',
16 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
16 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
17 })
17 })
18 def _pre_push_hook(*args, **kwargs):
18 def _pre_push_hook(*args, **kwargs):
19 """
19 """
20 Pre-push hook
20 Pre-push hook
21 To stop version control from storing the transaction and send a message to the user,
21 To stop version control from storing the transaction and send a message to the user,
22 return a non-zero HookResponse with a message, e.g. return HookResponse(1, 'Not allowed')
22 return a non-zero HookResponse with a message, e.g. return HookResponse(1, 'Not allowed')
23
23
24 This message will be shown back to the client during the PUSH operation
24 This message will be shown back to the client during the PUSH operation
25
25
26 Commit ids might look like this::
26 Commit ids might look like this::
27
27
28 [{u'hg_env|git_env': ...,
28 [{u'hg_env|git_env': ...,
29 u'multiple_heads': [],
29 u'multiple_heads': [],
30 u'name': u'default',
30 u'name': u'default',
31 u'new_rev': u'd0b2ae0692e722e01d5677f27a104631cf798b69',
31 u'new_rev': u'd0b2ae0692e722e01d5677f27a104631cf798b69',
32 u'old_rev': u'd0b1ae0692e722e01d5677f27a104631cf798b69',
32 u'old_rev': u'd0b1ae0692e722e01d5677f27a104631cf798b69',
33 u'ref': u'',
33 u'ref': u'',
34 u'total_commits': 2,
34 u'total_commits': 2,
35 u'type': u'branch'}]
35 u'type': u'branch'}]
36 """
36 """
37 import fnmatch
37 import fnmatch
38 from .helpers import extra_fields, extract_pre_files
38 from .helpers import extra_fields, extract_pre_files
39 from .utils import str2bool, aslist
39 from .utils import str2bool, aslist
40 from rhodecode.lib.helpers import format_byte_size_binary
40 from rhodecode.lib.helpers import format_byte_size_binary
41
41
42 # returns list of dicts with key-val fetched from extra fields
42 # returns list of dicts with key-val fetched from extra fields
43 repo_extra_fields = extra_fields.run(**kwargs)
43 repo_extra_fields = extra_fields.run(**kwargs)
44
44
45 # optionally use 'extra fields' to control the logic per repo
45 # optionally use 'extra fields' to control the logic per repo
46 # e.g store a list of patterns to be forbidden e.g `*.exe, *.dump`
46 # e.g store a list of patterns to be forbidden e.g `*.exe, *.dump`
47 forbid_files = repo_extra_fields.get('forbid_files_glob', {}).get('field_value')
47 forbid_files = extra_fields.get_field(repo_extra_fields, key='forbid_files_glob',
48 convert_type=False, default=[])
48 forbid_files = aslist(forbid_files)
49 forbid_files = aslist(forbid_files)
49
50
50 # forbid_files = ['*'] # example pattern
51 # forbid_files = ['*'] # example pattern
51
52
52 # optionally get bytes limit for a single file, e.g 1024 for 1KB
53 # optionally get bytes limit for a single file, e.g 1024 for 1KB
53 forbid_size_over = repo_extra_fields.get('forbid_size_over', {}).get('field_value')
54 forbid_size_over = extra_fields.get_field(repo_extra_fields, key='forbid_size_over',
55 convert_type=False, default=0)
56
54 forbid_size_over = int(forbid_size_over or 0)
57 forbid_size_over = int(forbid_size_over or 0)
55
58
56 # forbid_size_over = 1024 # example 1024
59 # forbid_size_over = 1024 # example 1024
57
60
58 def validate_file_name_and_size(file_data, forbidden_files=None, size_limit=None):
61 def validate_file_name_and_size(file_data, forbidden_files=None, size_limit=None):
59 """
62 """
60 This function validates committed files against some sort of rules.
63 This function validates committed files against some sort of rules.
61 It should return a valid boolean, and a reason for failure
64 It should return a valid boolean, and a reason for failure
62
65
63 file_data =[
66 file_data =[
64 'raw_diff', 'old_revision', 'stats', 'original_filename', 'is_limited_diff',
67 'raw_diff', 'old_revision', 'stats', 'original_filename', 'is_limited_diff',
65 'chunks', 'new_revision', 'operation', 'exceeds_limit', 'filename'
68 'chunks', 'new_revision', 'operation', 'exceeds_limit', 'filename'
66 ]
69 ]
67 file_data['ops'] = {
70 file_data['ops'] = {
68 # is file binary
71 # is file binary
69 'binary': False,
72 'binary': False,
70
73
71 # lines
74 # lines
72 'added': 32,
75 'added': 32,
73 'deleted': 0
76 'deleted': 0
74
77
75 'ops': {3: 'modified file'},
78 'ops': {3: 'modified file'},
76 'new_mode': '100644',
79 'new_mode': '100644',
77 'old_mode': None
80 'old_mode': None
78 }
81 }
79 """
82 """
80 file_name = file_data['filename']
83 file_name = file_data['filename']
81 operation = file_data['operation'] # can be A(dded), M(odified), D(eleted)
84 operation = file_data['operation'] # can be A(dded), M(odified), D(eleted)
82
85
83 # check files names
86 # check files names
84 if forbidden_files:
87 if forbidden_files:
85 reason = 'File {} is forbidden to be pushed'.format(file_name)
88 reason = 'File {} is forbidden to be pushed'.format(file_name)
86 for forbidden_pattern in forbidden_files:
89 for forbidden_pattern in forbidden_files:
87 # here we can also filter for operation, e.g if check for only ADDED files
90 # here we can also filter for operation, e.g if check for only ADDED files
88 # if operation == 'A':
91 # if operation == 'A':
89 if fnmatch.fnmatch(file_name, forbidden_pattern):
92 if fnmatch.fnmatch(file_name, forbidden_pattern):
90 return False, reason
93 return False, reason
91
94
92 # validate A(dded) files and size
95 # validate A(dded) files and size
93 if size_limit and operation == 'A':
96 if size_limit and operation == 'A':
94 if 'file_size' in file_data:
97 if 'file_size' in file_data:
95 size = file_data['file_size']
98 size = file_data['file_size']
96 else:
99 else:
97 size = len(file_data['raw_diff'])
100 size = len(file_data['raw_diff'])
98
101
99 reason = 'File {} size of {} bytes exceeds limit {}'.format(
102 reason = 'File {} size of {} bytes exceeds limit {}'.format(
100 file_name, format_byte_size_binary(size),
103 file_name, format_byte_size_binary(size),
101 format_byte_size_binary(size_limit))
104 format_byte_size_binary(size_limit))
102 if size > size_limit:
105 if size > size_limit:
103 return False, reason
106 return False, reason
104
107
105 return True, ''
108 return True, ''
106
109
107 if forbid_files or forbid_size_over:
110 if forbid_files or forbid_size_over:
108 # returns list of dicts with key-val fetched from extra fields
111 # returns list of dicts with key-val fetched from extra fields
109 file_list = extract_pre_files.run(**kwargs)
112 file_list = extract_pre_files.run(**kwargs)
110
113
111 for file_data in file_list:
114 for file_data in file_list:
112 file_valid, reason = validate_file_name_and_size(
115 file_valid, reason = validate_file_name_and_size(
113 file_data, forbid_files, forbid_size_over)
116 file_data, forbid_files, forbid_size_over)
114 if not file_valid:
117 if not file_valid:
115 return HookResponse(1, reason)
118 return HookResponse(1, reason)
116
119
117 return HookResponse(0, '')
120 return HookResponse(0, '')
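
A standalone illustration of the two checks above, using example values that could be stored in the forbid_files_glob and forbid_size_over extra fields; the concrete values are assumptions for demonstration only:

import fnmatch

forbid_files = ['*.exe', '*.dump']    # e.g. extra field `forbid_files_glob` = "*.exe, *.dump"
forbid_size_over = 1 * 1024 * 1024    # e.g. extra field `forbid_size_over` = 1048576 (1 MB)

assert fnmatch.fnmatch('tools/installer.exe', '*.exe')    # such an added file would be rejected
assert not fnmatch.fnmatch('docs/readme.rst', '*.exe')    # this one passes the glob check
assert (2 * 1024 * 1024) > forbid_size_over               # a 2 MB added file exceeds the size limit
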
@@ -1,55 +1,88 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2019 RhodeCode GmbH
2 # Copyright (C) 2016-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 example usage in hooks::
21 example usage in hooks::
22
22
23 from .helpers import extra_fields
23 from .helpers import extra_fields
24 # returns list of dicts with key-val fetched from extra fields
24 # returns list of dicts with key-val fetched from extra fields
25 repo_extra_fields = extra_fields.run(**kwargs)
25 repo_extra_fields = extra_fields.run(**kwargs)
26 repo_extra_fields.get('endpoint_url')
26 repo_extra_fields.get('endpoint_url')
27
27
28 # the field stored the following example values
28 # the field stored the following example values
29 {u'created_on': datetime.datetime(),
29 {u'created_on': datetime.datetime(),
30 u'field_key': u'endpoint_url',
30 u'field_key': u'endpoint_url',
31 u'field_label': u'Endpoint URL',
31 u'field_label': u'Endpoint URL',
32 u'field_desc': u'Full HTTP endpoint to call if given',
32 u'field_desc': u'Full HTTP endpoint to call if given',
33 u'field_type': u'str',
33 u'field_type': u'str',
34 u'field_value': u'http://server.com/post',
34 u'field_value': u'http://server.com/post',
35 u'repo_field_id': 1,
35 u'repo_field_id': 1,
36 u'repository_id': 1}
36 u'repository_id': 1}
37 # for example to obtain the value:
37 # for example to obtain the value:
38 endpoint_field = repo_extra_fields.get('endpoint_url')
38 endpoint_field = repo_extra_fields.get('endpoint_url')
39 if endpoint_field:
39 if endpoint_field:
40 url = endpoint_field['field_value']
40 url = endpoint_field['field_value']
41
41
42 """
42 """
43
43
44
44
45 def run(*args, **kwargs):
45 def run(*args, **kwargs):
46 from rhodecode.model.db import Repository
46 from rhodecode.model.db import Repository
47 # use temp name then the main one propagated
47 # use temp name then the main one propagated
48 repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']
48 repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']
49 repo = Repository.get_by_repo_name(repo_name)
49 repo = Repository.get_by_repo_name(repo_name)
50
50
51 fields = {}
51 fields = {}
52 for field in repo.extra_fields:
52 for field in repo.extra_fields:
53 fields[field.field_key] = field.get_dict()
53 fields[field.field_key] = field.get_dict()
54
54
55 return fields
55 return fields
56
57
58 class _Undefined(object):
59 pass
60
61
62 def get_field(extra_fields_data, key, default=_Undefined(), convert_type=True):
63 """
64 field_value = get_field(extra_fields, key='ci_endpoint_url', default='')
65 """
66 from ..utils import str2bool, aslist
67
68 if key not in extra_fields_data:
69 if isinstance(default, _Undefined):
70 raise ValueError('key {} not present in extra_fields'.format(key))
71 return default
72
73 # NOTE(dan): from metadata we get field_label, field_value, field_desc, field_type
74 field_metadata = extra_fields_data[key]
75
76 field_value = field_metadata['field_value']
77
78 # NOTE(dan): empty value, use default
79 if not field_value and not isinstance(default, _Undefined):
80 return default
81
82 if convert_type:
83 # 'str', 'unicode', 'list', 'tuple'
84 _type = field_metadata['field_type']
85 if _type in ['list', 'tuple']:
86 field_value = aslist(field_value)
87
88 return field_value
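The get_field helper above honors defaults and only converts types when the extra field declares a list/tuple type. A minimal usage sketch, editorial only and not part of this changeset; it assumes the extra_fields helpers in this file, and the 'notify_emails' field key is a hypothetical example::

    from .helpers import extra_fields

    def _example_hook(*args, **kwargs):
        # dict of extra-field metadata, keyed by field_key
        fields = extra_fields.run(**kwargs)

        # a missing key with no default raises ValueError, so pass a default
        endpoint_url = extra_fields.get_field(
            fields, key='ci_endpoint_url', default='')

        # converted to a list only when the field_type is 'list' or 'tuple'
        notify_emails = extra_fields.get_field(
            fields, key='notify_emails', default=[], convert_type=True)
        return endpoint_url, notify_emails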
@@ -1,432 +1,492 b''
1 # Copyright (C) 2016-2019 RhodeCode GmbH
1 # Copyright (C) 2016-2019 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18 import logging
19 import logging
19 from .utils import DotDict, HookResponse, has_kwargs
20 from .utils import DotDict, HookResponse, has_kwargs
21
20 log = logging.getLogger('rhodecode.' + __name__)
22 log = logging.getLogger('rhodecode.' + __name__)
21
23
22
23 # Config shortcut to keep all configuration in one place
24 # Config shortcut to keep all configuration in one place
24 # Example: api_key = CONFIG.my_config.api_key
25 # Example: api_key = CONFIG.my_config.api_key
25 CONFIG = DotDict(
26 CONFIG = DotDict(
26 my_config=DotDict(
27 my_config=DotDict(
27 api_key='<secret>',
28 api_key='<secret>',
28 ),
29 ),
29
30
30 )
31 )
31
32
32
33
33 @has_kwargs({
34 @has_kwargs({
34 'repo_name': '',
35 'repo_name': '',
35 'repo_type': '',
36 'repo_type': '',
36 'description': '',
37 'description': '',
37 'private': '',
38 'private': '',
38 'created_on': '',
39 'created_on': '',
39 'enable_downloads': '',
40 'enable_downloads': '',
40 'repo_id': '',
41 'repo_id': '',
41 'user_id': '',
42 'user_id': '',
42 'enable_statistics': '',
43 'enable_statistics': '',
43 'clone_uri': '',
44 'clone_uri': '',
44 'fork_id': '',
45 'fork_id': '',
45 'group_id': '',
46 'group_id': '',
46 'created_by': ''
47 'created_by': ''
47 })
48 })
48 def _create_repo_hook(*args, **kwargs):
49 def _create_repo_hook(*args, **kwargs):
49 """
50 """
50 POST CREATE REPOSITORY HOOK. This function will be executed after
51 POST CREATE REPOSITORY HOOK. This function will be executed after
51 each repository is created. kwargs available:
52 each repository is created. kwargs available:
52
53
53 """
54 """
54 return HookResponse(0, '')
55 return HookResponse(0, '')
55
56
56
57
57 @has_kwargs({
58 @has_kwargs({
58 'group_name': '',
59 'repo_name': '',
59 'group_parent_id': '',
60 'repo_type': '',
61 'description': '',
62 'private': '',
63 'created_on': '',
64 'enable_downloads': '',
65 'repo_id': '',
66 'user_id': '',
67 'enable_statistics': '',
68 'clone_uri': '',
69 'fork_id': '',
70 'group_id': '',
71 'created_by': '',
72 'repository': '',
73 'comment': '',
74 'commit': ''
75 })
76 def _comment_commit_repo_hook(*args, **kwargs):
77 """
78 POST CREATE REPOSITORY COMMENT ON COMMIT HOOK. This function will be executed after
79 a comment is made on this repository commit.
80
81 """
82 return HookResponse(0, '')
83
84
85 @has_kwargs({
86 'group_name': '',
87 'group_parent_id': '',
60 'group_description': '',
88 'group_description': '',
61 'group_id': '',
89 'group_id': '',
62 'user_id': '',
90 'user_id': '',
63 'created_by': '',
91 'created_by': '',
64 'created_on': '',
92 'created_on': '',
65 'enable_locking': ''
93 'enable_locking': ''
66 })
94 })
67 def _create_repo_group_hook(*args, **kwargs):
95 def _create_repo_group_hook(*args, **kwargs):
68 """
96 """
69 POST CREATE REPOSITORY GROUP HOOK, this function will be
97 POST CREATE REPOSITORY GROUP HOOK, this function will be
70 executed after each repository group is created. kwargs available:
98 executed after each repository group is created. kwargs available:
71 """
99 """
72 return HookResponse(0, '')
100 return HookResponse(0, '')
73
101
74
102
75 @has_kwargs({
103 @has_kwargs({
76 'username': '',
104 'username': '',
77 'password': '',
105 'password': '',
78 'email': '',
106 'email': '',
79 'firstname': '',
107 'firstname': '',
80 'lastname': '',
108 'lastname': '',
81 'active': '',
109 'active': '',
82 'admin': '',
110 'admin': '',
83 'created_by': '',
111 'created_by': '',
84 })
112 })
85 def _pre_create_user_hook(*args, **kwargs):
113 def _pre_create_user_hook(*args, **kwargs):
86 """
114 """
87 PRE CREATE USER HOOK, this function will be executed before each
115 PRE CREATE USER HOOK, this function will be executed before each
88 user is created. It returns a HookResponse(status, reason).
116 user is created. It returns a HookResponse(status, reason).
89 If the status is non-zero the user creation will be stopped and the reason
117 If the status is non-zero the user creation will be stopped and the reason
90 will be displayed to the user.
118 will be displayed to the user.
91
119
92 Return HookResponse(1, reason) to block user creation
120 Return HookResponse(1, reason) to block user creation
93
121
94 """
122 """
95
123
96 reason = 'allowed'
124 reason = 'allowed'
97 return HookResponse(0, reason)
125 return HookResponse(0, reason)
98
126
99
127
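A hedged sketch of how the PRE CREATE USER hook above could block account creation. The email-domain policy and the example.com domain are purely illustrative; the module-level HookResponse import is assumed::

    def _pre_create_user_hook_example(*args, **kwargs):
        allowed_domain = 'example.com'  # hypothetical policy
        email = kwargs.get('email') or ''
        if not email.endswith('@' + allowed_domain):
            # a non-zero status stops the creation; the reason is shown to the user
            return HookResponse(1, 'only @{} accounts are allowed\n'.format(allowed_domain))
        return HookResponse(0, 'allowed')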
100 @has_kwargs({
128 @has_kwargs({
101 'username': '',
129 'username': '',
102 'full_name_or_username': '',
130 'full_name_or_username': '',
103 'full_contact': '',
131 'full_contact': '',
104 'user_id': '',
132 'user_id': '',
105 'name': '',
133 'name': '',
106 'firstname': '',
134 'firstname': '',
107 'short_contact': '',
135 'short_contact': '',
108 'admin': '',
136 'admin': '',
109 'lastname': '',
137 'lastname': '',
110 'ip_addresses': '',
138 'ip_addresses': '',
111 'extern_type': '',
139 'extern_type': '',
112 'extern_name': '',
140 'extern_name': '',
113 'email': '',
141 'email': '',
114 'api_key': '',
142 'api_key': '',
115 'api_keys': '',
143 'api_keys': '',
116 'last_login': '',
144 'last_login': '',
117 'full_name': '',
145 'full_name': '',
118 'active': '',
146 'active': '',
119 'password': '',
147 'password': '',
120 'emails': '',
148 'emails': '',
121 'inherit_default_permissions': '',
149 'inherit_default_permissions': '',
122 'created_by': '',
150 'created_by': '',
123 'created_on': '',
151 'created_on': '',
124 })
152 })
125 def _create_user_hook(*args, **kwargs):
153 def _create_user_hook(*args, **kwargs):
126 """
154 """
127 POST CREATE USER HOOK, this function will be executed after each user is created
155 POST CREATE USER HOOK, this function will be executed after each user is created
128 """
156 """
129 return HookResponse(0, '')
157 return HookResponse(0, '')
130
158
131
159
132 @has_kwargs({
160 @has_kwargs({
133 'repo_name': '',
161 'repo_name': '',
134 'repo_type': '',
162 'repo_type': '',
135 'description': '',
163 'description': '',
136 'private': '',
164 'private': '',
137 'created_on': '',
165 'created_on': '',
138 'enable_downloads': '',
166 'enable_downloads': '',
139 'repo_id': '',
167 'repo_id': '',
140 'user_id': '',
168 'user_id': '',
141 'enable_statistics': '',
169 'enable_statistics': '',
142 'clone_uri': '',
170 'clone_uri': '',
143 'fork_id': '',
171 'fork_id': '',
144 'group_id': '',
172 'group_id': '',
145 'deleted_by': '',
173 'deleted_by': '',
146 'deleted_on': '',
174 'deleted_on': '',
147 })
175 })
148 def _delete_repo_hook(*args, **kwargs):
176 def _delete_repo_hook(*args, **kwargs):
149 """
177 """
150 POST DELETE REPOSITORY HOOK, this function will be executed after
178 POST DELETE REPOSITORY HOOK, this function will be executed after
151 each repository deletion
179 each repository deletion
152 """
180 """
153 return HookResponse(0, '')
181 return HookResponse(0, '')
154
182
155
183
156 @has_kwargs({
184 @has_kwargs({
157 'username': '',
185 'username': '',
158 'full_name_or_username': '',
186 'full_name_or_username': '',
159 'full_contact': '',
187 'full_contact': '',
160 'user_id': '',
188 'user_id': '',
161 'name': '',
189 'name': '',
162 'short_contact': '',
190 'short_contact': '',
163 'admin': '',
191 'admin': '',
164 'firstname': '',
192 'firstname': '',
165 'lastname': '',
193 'lastname': '',
166 'ip_addresses': '',
194 'ip_addresses': '',
167 'email': '',
195 'email': '',
168 'api_key': '',
196 'api_key': '',
169 'last_login': '',
197 'last_login': '',
170 'full_name': '',
198 'full_name': '',
171 'active': '',
199 'active': '',
172 'password': '',
200 'password': '',
173 'emails': '',
201 'emails': '',
174 'inherit_default_permissions': '',
202 'inherit_default_permissions': '',
175 'deleted_by': '',
203 'deleted_by': '',
176 })
204 })
177 def _delete_user_hook(*args, **kwargs):
205 def _delete_user_hook(*args, **kwargs):
178 """
206 """
179 POST DELETE USER HOOK, this function will be executed after each
207 POST DELETE USER HOOK, this function will be executed after each
180 user is deleted. kwargs available:
208 user is deleted. kwargs available:
181 """
209 """
182 return HookResponse(0, '')
210 return HookResponse(0, '')
183
211
184
212
185 # =============================================================================
213 # =============================================================================
186 # PUSH/PULL RELATED HOOKS
214 # PUSH/PULL RELATED HOOKS
187 # =============================================================================
215 # =============================================================================
188 @has_kwargs({
216 @has_kwargs({
189 'server_url': 'url of instance that triggered this hook',
217 'server_url': 'url of instance that triggered this hook',
190 'config': 'path to .ini config used',
218 'config': 'path to .ini config used',
191 'scm': 'type of version control "git", "hg", "svn"',
219 'scm': 'type of version control "git", "hg", "svn"',
192 'username': 'username of actor who triggered this event',
220 'username': 'username of actor who triggered this event',
193 'ip': 'ip address of actor who triggered this hook',
221 'ip': 'ip address of actor who triggered this hook',
194 'action': '',
222 'action': '',
195 'repository': 'repository name',
223 'repository': 'repository name',
196 'repo_store_path': 'full path to where repositories are stored',
224 'repo_store_path': 'full path to where repositories are stored',
197 'commit_ids': 'pre transaction metadata for commit ids',
225 'commit_ids': 'pre transaction metadata for commit ids',
198 'hook_type': '',
226 'hook_type': '',
199 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
227 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
200 })
228 })
201 def _pre_push_hook(*args, **kwargs):
229 def _pre_push_hook(*args, **kwargs):
202 """
230 """
203 Pre push hook
231 Pre push hook
204 To stop version control from storing the transaction and send a message to user
232 To stop version control from storing the transaction and send a message to user
205 use non-zero HookResponse with a message, e.g return HookResponse(1, 'Not allowed')
233 use non-zero HookResponse with a message, e.g return HookResponse(1, 'Not allowed')
206
234
207 This message will be shown back to client during PUSH operation
235 This message will be shown back to client during PUSH operation
208
236
209 Commit ids might look like that::
237 Commit ids might look like that::
210
238
211 [{u'hg_env|git_env': ...,
239 [{u'hg_env|git_env': ...,
212 u'multiple_heads': [],
240 u'multiple_heads': [],
213 u'name': u'default',
241 u'name': u'default',
214 u'new_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
242 u'new_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
215 u'old_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
243 u'old_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
216 u'ref': u'',
244 u'ref': u'',
217 u'total_commits': 2,
245 u'total_commits': 2,
218 u'type': u'branch'}]
246 u'type': u'branch'}]
219 """
247 """
220 return HookResponse(0, '')
248 return HookResponse(0, '')
221
249
222
250
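For illustration only, a sketch of a pre-push policy built on the commit_ids metadata described in the docstring above; the protected branch name 'master' is a hypothetical choice, and HookResponse comes from the module imports::

    def _pre_push_hook_example(*args, **kwargs):
        protected_branches = ['master']  # hypothetical policy
        for ref_change in kwargs.get('commit_ids') or []:
            if ref_change.get('type') == 'branch' \
                    and ref_change.get('name') in protected_branches:
                # a non-zero status rejects the push; the message reaches the client
                return HookResponse(1, 'pushes to {} are not allowed\n'.format(
                    ref_change['name']))
        return HookResponse(0, '')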
223 @has_kwargs({
251 @has_kwargs({
224 'server_url': 'url of instance that triggered this hook',
252 'server_url': 'url of instance that triggered this hook',
225 'config': 'path to .ini config used',
253 'config': 'path to .ini config used',
226 'scm': 'type of version control "git", "hg", "svn"',
254 'scm': 'type of version control "git", "hg", "svn"',
227 'username': 'username of actor who triggered this event',
255 'username': 'username of actor who triggered this event',
228 'ip': 'ip address of actor who triggered this hook',
256 'ip': 'ip address of actor who triggered this hook',
229 'action': '',
257 'action': '',
230 'repository': 'repository name',
258 'repository': 'repository name',
231 'repo_store_path': 'full path to where repositories are stored',
259 'repo_store_path': 'full path to where repositories are stored',
232 'commit_ids': 'list of pushed commit_ids (sha1)',
260 'commit_ids': 'list of pushed commit_ids (sha1)',
233 'hook_type': '',
261 'hook_type': '',
234 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
262 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
235 })
263 })
236 def _push_hook(*args, **kwargs):
264 def _push_hook(*args, **kwargs):
237 """
265 """
238 POST PUSH HOOK, this function will be executed after each push. It runs
266 POST PUSH HOOK, this function will be executed after each push. It runs
239 after the built-in hook that RhodeCode uses for logging pushes.
267 after the built-in hook that RhodeCode uses for logging pushes.
240 """
268 """
241 return HookResponse(0, '')
269 return HookResponse(0, '')
242
270
243
271
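A minimal post-push sketch (editorial, not part of the changeset) that only logs the documented kwargs through the module-level log object defined above::

    def _push_hook_example(*args, **kwargs):
        commit_ids = kwargs.get('commit_ids') or []
        log.debug('user %s pushed %s commit(s) to %s',
                  kwargs.get('username'), len(commit_ids), kwargs.get('repository'))
        return HookResponse(0, '')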
244 @has_kwargs({
272 @has_kwargs({
245 'server_url': 'url of instance that triggered this hook',
273 'server_url': 'url of instance that triggered this hook',
246 'repo_store_path': 'full path to where repositories are stored',
274 'repo_store_path': 'full path to where repositories are stored',
247 'config': 'path to .ini config used',
275 'config': 'path to .ini config used',
248 'scm': 'type of version control "git", "hg", "svn"',
276 'scm': 'type of version control "git", "hg", "svn"',
249 'username': 'username of actor who triggered this event',
277 'username': 'username of actor who triggered this event',
250 'ip': 'ip address of actor who triggered this hook',
278 'ip': 'ip address of actor who triggered this hook',
251 'action': '',
279 'action': '',
252 'repository': 'repository name',
280 'repository': 'repository name',
253 'hook_type': '',
281 'hook_type': '',
254 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
282 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
255 })
283 })
256 def _pre_pull_hook(*args, **kwargs):
284 def _pre_pull_hook(*args, **kwargs):
257 """
285 """
258 Pre pull hook
286 Pre pull hook
259 """
287 """
260 return HookResponse(0, '')
288 return HookResponse(0, '')
261
289
262
290
263 @has_kwargs({
291 @has_kwargs({
264 'server_url': 'url of instance that triggered this hook',
292 'server_url': 'url of instance that triggered this hook',
265 'repo_store_path': 'full path to where repositories are stored',
293 'repo_store_path': 'full path to where repositories are stored',
266 'config': 'path to .ini config used',
294 'config': 'path to .ini config used',
267 'scm': 'type of version control "git", "hg", "svn"',
295 'scm': 'type of version control "git", "hg", "svn"',
268 'username': 'username of actor who triggered this event',
296 'username': 'username of actor who triggered this event',
269 'ip': 'ip address of actor who triggered this hook',
297 'ip': 'ip address of actor who triggered this hook',
270 'action': '',
298 'action': '',
271 'repository': 'repository name',
299 'repository': 'repository name',
272 'hook_type': '',
300 'hook_type': '',
273 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
301 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
274 })
302 })
275 def _pull_hook(*args, **kwargs):
303 def _pull_hook(*args, **kwargs):
276 """
304 """
277 This hook will be executed after each code pull.
305 This hook will be executed after each code pull.
278 """
306 """
279 return HookResponse(0, '')
307 return HookResponse(0, '')
280
308
281
309
282 # =============================================================================
310 # =============================================================================
283 # PULL REQUEST RELATED HOOKS
311 # PULL REQUEST RELATED HOOKS
284 # =============================================================================
312 # =============================================================================
285 @has_kwargs({
313 @has_kwargs({
286 'server_url': 'url of instance that triggered this hook',
314 'server_url': 'url of instance that triggered this hook',
287 'config': 'path to .ini config used',
315 'config': 'path to .ini config used',
288 'scm': 'type of version control "git", "hg", "svn"',
316 'scm': 'type of version control "git", "hg", "svn"',
289 'username': 'username of actor who triggered this event',
317 'username': 'username of actor who triggered this event',
290 'ip': 'ip address of actor who triggered this hook',
318 'ip': 'ip address of actor who triggered this hook',
291 'action': '',
319 'action': '',
292 'repository': 'repository name',
320 'repository': 'repository name',
293 'pull_request_id': '',
321 'pull_request_id': '',
294 'url': '',
322 'url': '',
295 'title': '',
323 'title': '',
296 'description': '',
324 'description': '',
297 'status': '',
325 'status': '',
298 'created_on': '',
326 'created_on': '',
299 'updated_on': '',
327 'updated_on': '',
300 'commit_ids': '',
328 'commit_ids': '',
301 'review_status': '',
329 'review_status': '',
302 'mergeable': '',
330 'mergeable': '',
303 'source': '',
331 'source': '',
304 'target': '',
332 'target': '',
305 'author': '',
333 'author': '',
306 'reviewers': '',
334 'reviewers': '',
307 })
335 })
308 def _create_pull_request_hook(*args, **kwargs):
336 def _create_pull_request_hook(*args, **kwargs):
309 """
337 """
310 This hook will be executed after creation of a pull request.
338 This hook will be executed after creation of a pull request.
311 """
339 """
312 return HookResponse(0, '')
340 return HookResponse(0, '')
313
341
314
342
315 @has_kwargs({
343 @has_kwargs({
316 'server_url': 'url of instance that triggered this hook',
344 'server_url': 'url of instance that triggered this hook',
317 'config': 'path to .ini config used',
345 'config': 'path to .ini config used',
318 'scm': 'type of version control "git", "hg", "svn"',
346 'scm': 'type of version control "git", "hg", "svn"',
319 'username': 'username of actor who triggered this event',
347 'username': 'username of actor who triggered this event',
320 'ip': 'ip address of actor who triggered this hook',
348 'ip': 'ip address of actor who triggered this hook',
321 'action': '',
349 'action': '',
322 'repository': 'repository name',
350 'repository': 'repository name',
323 'pull_request_id': '',
351 'pull_request_id': '',
324 'url': '',
352 'url': '',
325 'title': '',
353 'title': '',
326 'description': '',
354 'description': '',
327 'status': '',
355 'status': '',
328 'created_on': '',
356 'created_on': '',
329 'updated_on': '',
357 'updated_on': '',
330 'commit_ids': '',
358 'commit_ids': '',
331 'review_status': '',
359 'review_status': '',
332 'mergeable': '',
360 'mergeable': '',
333 'source': '',
361 'source': '',
334 'target': '',
362 'target': '',
335 'author': '',
363 'author': '',
336 'reviewers': '',
364 'reviewers': '',
337 })
365 })
338 def _review_pull_request_hook(*args, **kwargs):
366 def _review_pull_request_hook(*args, **kwargs):
339 """
367 """
340 This hook will be executed after a review action is made on a pull request.
368 This hook will be executed after a review action is made on a pull request.
341 """
369 """
342 return HookResponse(0, '')
370 return HookResponse(0, '')
343
371
344
372
345 @has_kwargs({
373 @has_kwargs({
346 'server_url': 'url of instance that triggered this hook',
374 'server_url': 'url of instance that triggered this hook',
347 'config': 'path to .ini config used',
375 'config': 'path to .ini config used',
348 'scm': 'type of version control "git", "hg", "svn"',
376 'scm': 'type of version control "git", "hg", "svn"',
349 'username': 'username of actor who triggered this event',
377 'username': 'username of actor who triggered this event',
350 'ip': 'ip address of actor who triggered this hook',
378 'ip': 'ip address of actor who triggered this hook',
379
380 'action': '',
381 'repository': 'repository name',
382 'pull_request_id': '',
383 'url': '',
384 'title': '',
385 'description': '',
386 'status': '',
387 'comment': '',
388 'created_on': '',
389 'updated_on': '',
390 'commit_ids': '',
391 'review_status': '',
392 'mergeable': '',
393 'source': '',
394 'target': '',
395 'author': '',
396 'reviewers': '',
397 })
398 def _comment_pull_request_hook(*args, **kwargs):
399 """
400 This hook will be executed after a comment is made on a pull request.
401 """
402 return HookResponse(0, '')
403
404
405 @has_kwargs({
406 'server_url': 'url of instance that triggered this hook',
407 'config': 'path to .ini config used',
408 'scm': 'type of version control "git", "hg", "svn"',
409 'username': 'username of actor who triggered this event',
410 'ip': 'ip address of actor who triggered this hook',
351 'action': '',
411 'action': '',
352 'repository': 'repository name',
412 'repository': 'repository name',
353 'pull_request_id': '',
413 'pull_request_id': '',
354 'url': '',
414 'url': '',
355 'title': '',
415 'title': '',
356 'description': '',
416 'description': '',
357 'status': '',
417 'status': '',
358 'created_on': '',
418 'created_on': '',
359 'updated_on': '',
419 'updated_on': '',
360 'commit_ids': '',
420 'commit_ids': '',
361 'review_status': '',
421 'review_status': '',
362 'mergeable': '',
422 'mergeable': '',
363 'source': '',
423 'source': '',
364 'target': '',
424 'target': '',
365 'author': '',
425 'author': '',
366 'reviewers': '',
426 'reviewers': '',
367 })
427 })
368 def _update_pull_request_hook(*args, **kwargs):
428 def _update_pull_request_hook(*args, **kwargs):
369 """
429 """
370 This hook will be executed after a pull request has been updated with new commits.
430 This hook will be executed after a pull request has been updated with new commits.
371 """
431 """
372 return HookResponse(0, '')
432 return HookResponse(0, '')
373
433
374
434
375 @has_kwargs({
435 @has_kwargs({
376 'server_url': 'url of instance that triggered this hook',
436 'server_url': 'url of instance that triggered this hook',
377 'config': 'path to .ini config used',
437 'config': 'path to .ini config used',
378 'scm': 'type of version control "git", "hg", "svn"',
438 'scm': 'type of version control "git", "hg", "svn"',
379 'username': 'username of actor who triggered this event',
439 'username': 'username of actor who triggered this event',
380 'ip': 'ip address of actor who triggered this hook',
440 'ip': 'ip address of actor who triggered this hook',
381 'action': '',
441 'action': '',
382 'repository': 'repository name',
442 'repository': 'repository name',
383 'pull_request_id': '',
443 'pull_request_id': '',
384 'url': '',
444 'url': '',
385 'title': '',
445 'title': '',
386 'description': '',
446 'description': '',
387 'status': '',
447 'status': '',
388 'created_on': '',
448 'created_on': '',
389 'updated_on': '',
449 'updated_on': '',
390 'commit_ids': '',
450 'commit_ids': '',
391 'review_status': '',
451 'review_status': '',
392 'mergeable': '',
452 'mergeable': '',
393 'source': '',
453 'source': '',
394 'target': '',
454 'target': '',
395 'author': '',
455 'author': '',
396 'reviewers': '',
456 'reviewers': '',
397 })
457 })
398 def _merge_pull_request_hook(*args, **kwargs):
458 def _merge_pull_request_hook(*args, **kwargs):
399 """
459 """
400 This hook will be executed after merge of a pull request.
460 This hook will be executed after merge of a pull request.
401 """
461 """
402 return HookResponse(0, '')
462 return HookResponse(0, '')
403
463
404
464
405 @has_kwargs({
465 @has_kwargs({
406 'server_url': 'url of instance that triggered this hook',
466 'server_url': 'url of instance that triggered this hook',
407 'config': 'path to .ini config used',
467 'config': 'path to .ini config used',
408 'scm': 'type of version control "git", "hg", "svn"',
468 'scm': 'type of version control "git", "hg", "svn"',
409 'username': 'username of actor who triggered this event',
469 'username': 'username of actor who triggered this event',
410 'ip': 'ip address of actor who triggered this hook',
470 'ip': 'ip address of actor who triggered this hook',
411 'action': '',
471 'action': '',
412 'repository': 'repository name',
472 'repository': 'repository name',
413 'pull_request_id': '',
473 'pull_request_id': '',
414 'url': '',
474 'url': '',
415 'title': '',
475 'title': '',
416 'description': '',
476 'description': '',
417 'status': '',
477 'status': '',
418 'created_on': '',
478 'created_on': '',
419 'updated_on': '',
479 'updated_on': '',
420 'commit_ids': '',
480 'commit_ids': '',
421 'review_status': '',
481 'review_status': '',
422 'mergeable': '',
482 'mergeable': '',
423 'source': '',
483 'source': '',
424 'target': '',
484 'target': '',
425 'author': '',
485 'author': '',
426 'reviewers': '',
486 'reviewers': '',
427 })
487 })
428 def _close_pull_request_hook(*args, **kwargs):
488 def _close_pull_request_hook(*args, **kwargs):
429 """
489 """
430 This hook will be executed after close of a pull request.
490 This hook will be executed after close of a pull request.
431 """
491 """
432 return HookResponse(0, '')
492 return HookResponse(0, '')
@@ -1,189 +1,199 b''
1 # Copyright (C) 2016-2019 RhodeCode GmbH
1 # Copyright (C) 2016-2019 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import os
20 import os
21 import string
21 import functools
22 import functools
22 import collections
23 import collections
24 import urllib
23
25
24 log = logging.getLogger('rhodecode.' + __name__)
26 log = logging.getLogger('rhodecode.' + __name__)
25
27
26
28
27 class HookResponse(object):
29 class HookResponse(object):
28 def __init__(self, status, output):
30 def __init__(self, status, output):
29 self.status = status
31 self.status = status
30 self.output = output
32 self.output = output
31
33
32 def __add__(self, other):
34 def __add__(self, other):
33 other_status = getattr(other, 'status', 0)
35 other_status = getattr(other, 'status', 0)
34 new_status = max(self.status, other_status)
36 new_status = max(self.status, other_status)
35 other_output = getattr(other, 'output', '')
37 other_output = getattr(other, 'output', '')
36 new_output = self.output + other_output
38 new_output = self.output + other_output
37
39
38 return HookResponse(new_status, new_output)
40 return HookResponse(new_status, new_output)
39
41
40 def __bool__(self):
42 def __bool__(self):
41 return self.status == 0
43 return self.status == 0
42
44
43
45
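Worth noting about the class above: responses can be combined with +, the highest status wins and the outputs are concatenated. A tiny illustration, not part of the changeset::

    combined = HookResponse(0, 'lint ok\n') + HookResponse(1, 'tests failed\n')
    assert combined.status == 1                        # max() of both statuses
    assert combined.output == 'lint ok\ntests failed\n'
    # under Python 3, bool(combined) is False because __bool__ checks status == 0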
44 class DotDict(dict):
46 class DotDict(dict):
45
47
46 def __contains__(self, k):
48 def __contains__(self, k):
47 try:
49 try:
48 return dict.__contains__(self, k) or hasattr(self, k)
50 return dict.__contains__(self, k) or hasattr(self, k)
49 except:
51 except:
50 return False
52 return False
51
53
52 # only called if k not found in normal places
54 # only called if k not found in normal places
53 def __getattr__(self, k):
55 def __getattr__(self, k):
54 try:
56 try:
55 return object.__getattribute__(self, k)
57 return object.__getattribute__(self, k)
56 except AttributeError:
58 except AttributeError:
57 try:
59 try:
58 return self[k]
60 return self[k]
59 except KeyError:
61 except KeyError:
60 raise AttributeError(k)
62 raise AttributeError(k)
61
63
62 def __setattr__(self, k, v):
64 def __setattr__(self, k, v):
63 try:
65 try:
64 object.__getattribute__(self, k)
66 object.__getattribute__(self, k)
65 except AttributeError:
67 except AttributeError:
66 try:
68 try:
67 self[k] = v
69 self[k] = v
68 except:
70 except:
69 raise AttributeError(k)
71 raise AttributeError(k)
70 else:
72 else:
71 object.__setattr__(self, k, v)
73 object.__setattr__(self, k, v)
72
74
73 def __delattr__(self, k):
75 def __delattr__(self, k):
74 try:
76 try:
75 object.__getattribute__(self, k)
77 object.__getattribute__(self, k)
76 except AttributeError:
78 except AttributeError:
77 try:
79 try:
78 del self[k]
80 del self[k]
79 except KeyError:
81 except KeyError:
80 raise AttributeError(k)
82 raise AttributeError(k)
81 else:
83 else:
82 object.__delattr__(self, k)
84 object.__delattr__(self, k)
83
85
84 def toDict(self):
86 def toDict(self):
85 return unserialize(self)
87 return unserialize(self)
86
88
87 def __repr__(self):
89 def __repr__(self):
88 keys = list(self.keys())
90 keys = list(self.keys())
89 keys.sort()
91 keys.sort()
90 args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
92 args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
91 return '%s(%s)' % (self.__class__.__name__, args)
93 return '%s(%s)' % (self.__class__.__name__, args)
92
94
93 @staticmethod
95 @staticmethod
94 def fromDict(d):
96 def fromDict(d):
95 return serialize(d)
97 return serialize(d)
96
98
97
99
98 def serialize(x):
100 def serialize(x):
99 if isinstance(x, dict):
101 if isinstance(x, dict):
100 return DotDict((k, serialize(v)) for k, v in x.items())
102 return DotDict((k, serialize(v)) for k, v in x.items())
101 elif isinstance(x, (list, tuple)):
103 elif isinstance(x, (list, tuple)):
102 return type(x)(serialize(v) for v in x)
104 return type(x)(serialize(v) for v in x)
103 else:
105 else:
104 return x
106 return x
105
107
106
108
107 def unserialize(x):
109 def unserialize(x):
108 if isinstance(x, dict):
110 if isinstance(x, dict):
109 return dict((k, unserialize(v)) for k, v in x.items())
111 return dict((k, unserialize(v)) for k, v in x.items())
110 elif isinstance(x, (list, tuple)):
112 elif isinstance(x, (list, tuple)):
111 return type(x)(unserialize(v) for v in x)
113 return type(x)(unserialize(v) for v in x)
112 else:
114 else:
113 return x
115 return x
114
116
115
117
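An editorial illustration of the helpers above: serialize() turns nested plain dicts into DotDict instances for attribute access, and toDict()/unserialize() convert back; the 'jenkins' keys are hypothetical::

    cfg = serialize({'jenkins': {'url': 'http://ci.example.com', 'retries': 3}})
    assert cfg.jenkins.url == 'http://ci.example.com'  # attribute-style access

    plain = cfg.toDict()                               # back to ordinary dicts
    assert isinstance(plain['jenkins'], dict)
    assert not isinstance(plain['jenkins'], DotDict)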
116 def _verify_kwargs(func_name, expected_parameters, kwargs):
118 def _verify_kwargs(func_name, expected_parameters, kwargs):
117 """
119 """
118 Verify that exactly `expected_parameters` are passed in as `kwargs`.
120 Verify that exactly `expected_parameters` are passed in as `kwargs`.
119 """
121 """
120 expected_parameters = set(expected_parameters)
122 expected_parameters = set(expected_parameters)
121 kwargs_keys = set(kwargs.keys())
123 kwargs_keys = set(kwargs.keys())
122 if kwargs_keys != expected_parameters:
124 if kwargs_keys != expected_parameters:
123 missing_kwargs = expected_parameters - kwargs_keys
125 missing_kwargs = expected_parameters - kwargs_keys
124 unexpected_kwargs = kwargs_keys - expected_parameters
126 unexpected_kwargs = kwargs_keys - expected_parameters
125 raise AssertionError(
127 raise AssertionError(
126 "func:%s: missing parameters: %r, unexpected parameters: %s" %
128 "func:%s: missing parameters: %r, unexpected parameters: %s" %
127 (func_name, missing_kwargs, unexpected_kwargs))
129 (func_name, missing_kwargs, unexpected_kwargs))
128
130
129
131
130 def has_kwargs(required_args):
132 def has_kwargs(required_args):
131 """
133 """
132 decorator to verify the arguments passed to extension calls.
134 decorator to verify the arguments passed to extension calls.
133
135
134 :param required_args:
136 :param required_args:
135 """
137 """
136 def wrap(func):
138 def wrap(func):
137 def wrapper(*args, **kwargs):
139 def wrapper(*args, **kwargs):
138 _verify_kwargs(func.func_name, required_args.keys(), kwargs)
140 _verify_kwargs(func.func_name, required_args.keys(), kwargs)
139 # in case there's `calls` defined on module we store the data
141 # in case there's `calls` defined on module we store the data
140 maybe_log_call(func.func_name, args, kwargs)
142 maybe_log_call(func.func_name, args, kwargs)
141 log.debug('Calling rcextensions function %s', func.func_name)
143 log.debug('Calling rcextensions function %s', func.func_name)
142 return func(*args, **kwargs)
144 return func(*args, **kwargs)
143 return wrapper
145 return wrapper
144 return wrap
146 return wrap
145
147
146
148
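A short sketch, matching the Python 2 style of this module, of what the has_kwargs decorator enforces: callers must pass exactly the declared keyword arguments (illustration only)::

    @has_kwargs({'repo_name': '', 'username': ''})
    def _tiny_hook(*args, **kwargs):
        return HookResponse(0, '')

    # _tiny_hook(repo_name='x', username='y')  -> runs, returns HookResponse(0, '')
    # _tiny_hook(repo_name='x')                -> AssertionError: missing 'username'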
147 def maybe_log_call(name, args, kwargs):
149 def maybe_log_call(name, args, kwargs):
148 from rhodecode.config import rcextensions
150 from rhodecode.config import rcextensions
149 if hasattr(rcextensions, 'calls'):
151 if hasattr(rcextensions, 'calls'):
150 calls = rcextensions.calls
152 calls = rcextensions.calls
151 calls[name].append((args, kwargs))
153 calls[name].append((args, kwargs))
152
154
153
155
154 def str2bool(_str):
156 def str2bool(_str):
155 """
157 """
156 returns True/False value from given string, it tries to translate the
158 returns True/False value from given string, it tries to translate the
157 string into boolean
159 string into boolean
158
160
159 :param _str: string value to translate into boolean
161 :param _str: string value to translate into boolean
160 :rtype: boolean
162 :rtype: boolean
161 :returns: boolean from given string
163 :returns: boolean from given string
162 """
164 """
163 if _str is None:
165 if _str is None:
164 return False
166 return False
165 if _str in (True, False):
167 if _str in (True, False):
166 return _str
168 return _str
167 _str = str(_str).strip().lower()
169 _str = str(_str).strip().lower()
168 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
170 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
169
171
170
172
171 def aslist(obj, sep=None, strip=True):
173 def aslist(obj, sep=None, strip=True):
172 """
174 """
173 Returns given string separated by sep as list
175 Returns given string separated by sep as list
174
176
175 :param obj:
177 :param obj:
176 :param sep:
178 :param sep:
177 :param strip:
179 :param strip:
178 """
180 """
179 if isinstance(obj, (basestring,)):
181 if isinstance(obj, (basestring,)):
180 lst = obj.split(sep)
182 lst = obj.split(sep)
181 if strip:
183 if strip:
182 lst = [v.strip() for v in lst]
184 lst = [v.strip() for v in lst]
183 return lst
185 return lst
184 elif isinstance(obj, (list, tuple)):
186 elif isinstance(obj, (list, tuple)):
185 return obj
187 return obj
186 elif obj is None:
188 elif obj is None:
187 return []
189 return []
188 else:
190 else:
189 return [obj] No newline at end of file
191 return [obj]
192
193
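For reference, a few conversions performed by the helpers above (editorial illustration, not part of the changeset)::

    assert str2bool('Yes') is True
    assert str2bool(None) is False
    assert aslist('a, b, c', sep=',') == ['a', 'b', 'c']
    assert aslist(None) == []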
194 class UrlTemplate(string.Template):
195
196 def safe_substitute(self, **kws):
197 # url encode the kw for usage in url
198 kws = {k: urllib.quote(str(v)) for k, v in kws.items()}
199 return super(UrlTemplate, self).safe_substitute(**kws)
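A small illustration of the new UrlTemplate class; the ci.example.com URL is hypothetical. Substituted values are URL-quoted first, and safe_substitute leaves unknown placeholders untouched::

    tmpl = UrlTemplate('http://ci.example.com/build?COMMIT_ID=${commit}&BRANCH=${branch}')
    url = tmpl.safe_substitute(commit='d0befe0692e7', branch='feature/new ui')
    # -> 'http://ci.example.com/build?COMMIT_ID=d0befe0692e7&BRANCH=feature/new%20ui'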
@@ -1,78 +1,78 b''
1 # Copyright (C) 2016-2019 RhodeCode GmbH
1 # Copyright (C) 2016-2019 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 from pyramid.threadlocal import get_current_registry
20 from pyramid.threadlocal import get_current_registry
21 from rhodecode.events.base import RhodeCodeIntegrationEvent
21 from rhodecode.events.base import RhodeCodeIntegrationEvent
22
22
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 def trigger(event, registry=None):
27 def trigger(event, registry=None):
28 """
28 """
29 Helper method to send an event. This wraps the pyramid logic to send an
29 Helper method to send an event. This wraps the pyramid logic to send an
30 event.
30 event.
31 """
31 """
32 # For the first step we are using pyramid's thread locals here. If the
32 # For the first step we are using pyramid's thread locals here. If the
33 # event mechanism works out as a good solution we should think about
33 # event mechanism works out as a good solution we should think about
34 # passing the registry as an argument to get rid of it.
34 # passing the registry as an argument to get rid of it.
35 event_name = event.__class__
35 event_name = event.__class__
36 log.debug('event %s sent for execution', event_name)
36 log.debug('event %s sent for execution', event_name)
37 registry = registry or get_current_registry()
37 registry = registry or get_current_registry()
38 registry.notify(event)
38 registry.notify(event)
39 log.debug('event %s triggered using registry %s', event_name, registry)
39 log.debug('event %s triggered using registry %s', event_name, registry)
40
40
41 # Send the events to integrations directly
41 # Send the events to integrations directly
42 from rhodecode.integrations import integrations_event_handler
42 from rhodecode.integrations import integrations_event_handler
43 if isinstance(event, RhodeCodeIntegrationEvent):
43 if isinstance(event, RhodeCodeIntegrationEvent):
44 integrations_event_handler(event)
44 integrations_event_handler(event)
45
45
46
46
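A hedged usage sketch for trigger() together with the event classes re-exported below; it assumes application code running where a pyramid registry is available, and the repository lookup mirrors helpers used elsewhere in this changeset::

    from rhodecode import events
    from rhodecode.model.db import Repository

    def announce_repo_created(repo_name):
        repo = Repository.get_by_repo_name(repo_name)
        # notifies pyramid subscribers and any matching integrations
        events.trigger(events.RepoCreateEvent(repo))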
47 from rhodecode.events.user import ( # pragma: no cover
47 from rhodecode.events.user import ( # pragma: no cover
48 UserPreCreate,
48 UserPreCreate,
49 UserPostCreate,
49 UserPostCreate,
50 UserPreUpdate,
50 UserPreUpdate,
51 UserRegistered,
51 UserRegistered,
52 UserPermissionsChange,
52 UserPermissionsChange,
53 )
53 )
54
54
55 from rhodecode.events.repo import ( # pragma: no cover
55 from rhodecode.events.repo import ( # pragma: no cover
56 RepoEvent,
56 RepoEvent, RepoCommitCommentEvent,
57 RepoPreCreateEvent, RepoCreateEvent,
57 RepoPreCreateEvent, RepoCreateEvent,
58 RepoPreDeleteEvent, RepoDeleteEvent,
58 RepoPreDeleteEvent, RepoDeleteEvent,
59 RepoPrePushEvent, RepoPushEvent,
59 RepoPrePushEvent, RepoPushEvent,
60 RepoPrePullEvent, RepoPullEvent,
60 RepoPrePullEvent, RepoPullEvent,
61 )
61 )
62
62
63 from rhodecode.events.repo_group import ( # pragma: no cover
63 from rhodecode.events.repo_group import ( # pragma: no cover
64 RepoGroupEvent,
64 RepoGroupEvent,
65 RepoGroupCreateEvent,
65 RepoGroupCreateEvent,
66 RepoGroupUpdateEvent,
66 RepoGroupUpdateEvent,
67 RepoGroupDeleteEvent,
67 RepoGroupDeleteEvent,
68 )
68 )
69
69
70 from rhodecode.events.pullrequest import ( # pragma: no cover
70 from rhodecode.events.pullrequest import ( # pragma: no cover
71 PullRequestEvent,
71 PullRequestEvent,
72 PullRequestCreateEvent,
72 PullRequestCreateEvent,
73 PullRequestUpdateEvent,
73 PullRequestUpdateEvent,
74 PullRequestCommentEvent,
74 PullRequestCommentEvent,
75 PullRequestReviewEvent,
75 PullRequestReviewEvent,
76 PullRequestMergeEvent,
76 PullRequestMergeEvent,
77 PullRequestCloseEvent,
77 PullRequestCloseEvent,
78 )
78 )
@@ -1,356 +1,370 b''
1 # Copyright (C) 2016-2019 RhodeCode GmbH
1 # Copyright (C) 2016-2019 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import collections
19 import collections
20 import logging
20 import logging
21 import datetime
21 import datetime
22
22
23 from rhodecode.translation import lazy_ugettext
23 from rhodecode.translation import lazy_ugettext
24 from rhodecode.model.db import User, Repository, Session
24 from rhodecode.model.db import User, Repository, Session
25 from rhodecode.events.base import RhodeCodeIntegrationEvent
25 from rhodecode.events.base import RhodeCodeIntegrationEvent
26 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
26 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
27
27
28 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
29
29
30
30
31 def _commits_as_dict(event, commit_ids, repos):
31 def _commits_as_dict(event, commit_ids, repos):
32 """
32 """
33 Helper function to serialize commit_ids
33 Helper function to serialize commit_ids
34
34
35 :param event: class calling this method
35 :param event: class calling this method
36 :param commit_ids: commits to get
36 :param commit_ids: commits to get
37 :param repos: list of repos to check
37 :param repos: list of repos to check
38 """
38 """
39 from rhodecode.lib.utils2 import extract_mentioned_users
39 from rhodecode.lib.utils2 import extract_mentioned_users
40 from rhodecode.lib.helpers import (
40 from rhodecode.lib.helpers import (
41 urlify_commit_message, process_patterns, chop_at_smart)
41 urlify_commit_message, process_patterns, chop_at_smart)
42 from rhodecode.model.repo import RepoModel
42 from rhodecode.model.repo import RepoModel
43
43
44 if not repos:
44 if not repos:
45 raise Exception('no repo defined')
45 raise Exception('no repo defined')
46
46
47 if not isinstance(repos, (tuple, list)):
47 if not isinstance(repos, (tuple, list)):
48 repos = [repos]
48 repos = [repos]
49
49
50 if not commit_ids:
50 if not commit_ids:
51 return []
51 return []
52
52
53 needed_commits = list(commit_ids)
53 needed_commits = list(commit_ids)
54
54
55 commits = []
55 commits = []
56 reviewers = []
56 reviewers = []
57 for repo in repos:
57 for repo in repos:
58 if not needed_commits:
58 if not needed_commits:
59 return commits # return early if we have the commits we need
59 return commits # return early if we have the commits we need
60
60
61 vcs_repo = repo.scm_instance(cache=False)
61 vcs_repo = repo.scm_instance(cache=False)
62
62
63 try:
63 try:
64 # use copy of needed_commits since we modify it while iterating
64 # use copy of needed_commits since we modify it while iterating
65 for commit_id in list(needed_commits):
65 for commit_id in list(needed_commits):
66 if commit_id.startswith('tag=>'):
66 if commit_id.startswith('tag=>'):
67 raw_id = commit_id[5:]
67 raw_id = commit_id[5:]
68 cs_data = {
68 cs_data = {
69 'raw_id': commit_id, 'short_id': commit_id,
69 'raw_id': commit_id, 'short_id': commit_id,
70 'branch': None,
70 'branch': None,
71 'git_ref_change': 'tag_add',
71 'git_ref_change': 'tag_add',
72 'message': 'Added new tag {}'.format(raw_id),
72 'message': 'Added new tag {}'.format(raw_id),
73 'author': event.actor.full_contact,
73 'author': event.actor.full_contact,
74 'date': datetime.datetime.now(),
74 'date': datetime.datetime.now(),
75 'refs': {
75 'refs': {
76 'branches': [],
76 'branches': [],
77 'bookmarks': [],
77 'bookmarks': [],
78 'tags': []
78 'tags': []
79 }
79 }
80 }
80 }
81 commits.append(cs_data)
81 commits.append(cs_data)
82
82
83 elif commit_id.startswith('delete_branch=>'):
83 elif commit_id.startswith('delete_branch=>'):
84 raw_id = commit_id[15:]
84 raw_id = commit_id[15:]
85 cs_data = {
85 cs_data = {
86 'raw_id': commit_id, 'short_id': commit_id,
86 'raw_id': commit_id, 'short_id': commit_id,
87 'branch': None,
87 'branch': None,
88 'git_ref_change': 'branch_delete',
88 'git_ref_change': 'branch_delete',
89 'message': 'Deleted branch {}'.format(raw_id),
89 'message': 'Deleted branch {}'.format(raw_id),
90 'author': event.actor.full_contact,
90 'author': event.actor.full_contact,
91 'date': datetime.datetime.now(),
91 'date': datetime.datetime.now(),
92 'refs': {
92 'refs': {
93 'branches': [],
93 'branches': [],
94 'bookmarks': [],
94 'bookmarks': [],
95 'tags': []
95 'tags': []
96 }
96 }
97 }
97 }
98 commits.append(cs_data)
98 commits.append(cs_data)
99
99
100 else:
100 else:
101 try:
101 try:
102 cs = vcs_repo.get_commit(commit_id)
102 cs = vcs_repo.get_commit(commit_id)
103 except CommitDoesNotExistError:
103 except CommitDoesNotExistError:
104 continue # maybe it's in the next repo
104 continue # maybe it's in the next repo
105
105
106 cs_data = cs.__json__()
106 cs_data = cs.__json__()
107 cs_data['refs'] = cs._get_refs()
107 cs_data['refs'] = cs._get_refs()
108
108
109 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
109 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
110 cs_data['reviewers'] = reviewers
110 cs_data['reviewers'] = reviewers
111 cs_data['url'] = RepoModel().get_commit_url(
111 cs_data['url'] = RepoModel().get_commit_url(
112 repo, cs_data['raw_id'], request=event.request)
112 repo, cs_data['raw_id'], request=event.request)
113 cs_data['permalink_url'] = RepoModel().get_commit_url(
113 cs_data['permalink_url'] = RepoModel().get_commit_url(
114 repo, cs_data['raw_id'], request=event.request,
114 repo, cs_data['raw_id'], request=event.request,
115 permalink=True)
115 permalink=True)
116 urlified_message, issues_data = process_patterns(
116 urlified_message, issues_data = process_patterns(
117 cs_data['message'], repo.repo_name)
117 cs_data['message'], repo.repo_name)
118 cs_data['issues'] = issues_data
118 cs_data['issues'] = issues_data
119 cs_data['message_html'] = urlify_commit_message(
119 cs_data['message_html'] = urlify_commit_message(
120 cs_data['message'], repo.repo_name)
120 cs_data['message'], repo.repo_name)
121 cs_data['message_html_title'] = chop_at_smart(
121 cs_data['message_html_title'] = chop_at_smart(
122 cs_data['message'], '\n', suffix_if_chopped='...')
122 cs_data['message'], '\n', suffix_if_chopped='...')
123 commits.append(cs_data)
123 commits.append(cs_data)
124
124
125 needed_commits.remove(commit_id)
125 needed_commits.remove(commit_id)
126
126
127 except Exception:
127 except Exception:
128 log.exception('Failed to extract commits data')
128 log.exception('Failed to extract commits data')
129 # we don't send any commits when a crash happens; only the full list
129 # we don't send any commits when a crash happens; only the full list
130 # matters, so we short-circuit then.
130 # matters, so we short-circuit then.
131 return []
131 return []
132
132
133 missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits)
133 missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits)
134 if missing_commits:
134 if missing_commits:
135 log.error('Inconsistent repository state. '
135 log.error('Inconsistent repository state. '
136 'Missing commits: %s', ', '.join(missing_commits))
136 'Missing commits: %s', ', '.join(missing_commits))
137
137
138 return commits
138 return commits
139
139
140
140
141 def _issues_as_dict(commits):
141 def _issues_as_dict(commits):
142 """ Helper function to serialize issues from commits """
142 """ Helper function to serialize issues from commits """
143 issues = {}
143 issues = {}
144 for commit in commits:
144 for commit in commits:
145 for issue in commit['issues']:
145 for issue in commit['issues']:
146 issues[issue['id']] = issue
146 issues[issue['id']] = issue
147 return issues
147 return issues
148
148
149
149
150 class RepoEvent(RhodeCodeIntegrationEvent):
150 class RepoEvent(RhodeCodeIntegrationEvent):
151 """
151 """
152 Base class for events acting on a repository.
152 Base class for events acting on a repository.
153
153
154 :param repo: a :class:`Repository` instance
154 :param repo: a :class:`Repository` instance
155 """
155 """
156
156
157 def __init__(self, repo):
157 def __init__(self, repo):
158 super(RepoEvent, self).__init__()
158 super(RepoEvent, self).__init__()
159 self.repo = repo
159 self.repo = repo
160
160
161 def as_dict(self):
161 def as_dict(self):
162 from rhodecode.model.repo import RepoModel
162 from rhodecode.model.repo import RepoModel
163 data = super(RepoEvent, self).as_dict()
163 data = super(RepoEvent, self).as_dict()
164
164
165 extra_fields = collections.OrderedDict()
165 extra_fields = collections.OrderedDict()
166 for field in self.repo.extra_fields:
166 for field in self.repo.extra_fields:
167 extra_fields[field.field_key] = field.field_value
167 extra_fields[field.field_key] = field.field_value
168
168
169 data.update({
169 data.update({
170 'repo': {
170 'repo': {
171 'repo_id': self.repo.repo_id,
171 'repo_id': self.repo.repo_id,
172 'repo_name': self.repo.repo_name,
172 'repo_name': self.repo.repo_name,
173 'repo_type': self.repo.repo_type,
173 'repo_type': self.repo.repo_type,
174 'url': RepoModel().get_url(
174 'url': RepoModel().get_url(
175 self.repo, request=self.request),
175 self.repo, request=self.request),
176 'permalink_url': RepoModel().get_url(
176 'permalink_url': RepoModel().get_url(
177 self.repo, request=self.request, permalink=True),
177 self.repo, request=self.request, permalink=True),
178 'extra_fields': extra_fields
178 'extra_fields': extra_fields
179 }
179 }
180 })
180 })
181 return data
181 return data
182
182
183
183
184 class RepoCommitCommentEvent(RepoEvent):
185 """
186 An instance of this class is emitted as an :term:`event` after a comment is made
187 on a repository commit.
188 """
189 def __init__(self, repo, commit, comment):
190 super(RepoCommitCommentEvent, self).__init__(repo)
191 self.commit = commit
192 self.comment = comment
193
194 name = 'repo-commit-comment'
195 display_name = lazy_ugettext('repository commit comment')
196
197
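A hedged sketch of how the new event above would be fired; the exact call site is not part of this changeset, and the commit and comment objects are whatever the calling code already holds::

    from rhodecode import events

    def on_commit_comment(repo, commit, comment):
        # RepoCommitCommentEvent is re-exported from rhodecode.events
        # (see the events/__init__.py hunk in this changeset)
        events.trigger(events.RepoCommitCommentEvent(repo, commit, comment))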
184 class RepoPreCreateEvent(RepoEvent):
198 class RepoPreCreateEvent(RepoEvent):
185 """
199 """
186 An instance of this class is emitted as an :term:`event` before a repo is
200 An instance of this class is emitted as an :term:`event` before a repo is
187 created.
201 created.
188 """
202 """
189 name = 'repo-pre-create'
203 name = 'repo-pre-create'
190 display_name = lazy_ugettext('repository pre create')
204 display_name = lazy_ugettext('repository pre create')
191
205
192
206
193 class RepoCreateEvent(RepoEvent):
207 class RepoCreateEvent(RepoEvent):
194 """
208 """
195 An instance of this class is emitted as an :term:`event` whenever a repo is
209 An instance of this class is emitted as an :term:`event` whenever a repo is
196 created.
210 created.
197 """
211 """
198 name = 'repo-create'
212 name = 'repo-create'
199 display_name = lazy_ugettext('repository created')
213 display_name = lazy_ugettext('repository created')
200
214
201
215
202 class RepoPreDeleteEvent(RepoEvent):
216 class RepoPreDeleteEvent(RepoEvent):
203 """
217 """
204 An instance of this class is emitted as an :term:`event` before a repo is
218 An instance of this class is emitted as an :term:`event` before a repo is
205 deleted.
219 deleted.
206 """
220 """
207 name = 'repo-pre-delete'
221 name = 'repo-pre-delete'
208 display_name = lazy_ugettext('repository pre delete')
222 display_name = lazy_ugettext('repository pre delete')
209
223
210
224
211 class RepoDeleteEvent(RepoEvent):
225 class RepoDeleteEvent(RepoEvent):
212 """
226 """
213 An instance of this class is emitted as an :term:`event` whenever a repo is
227 An instance of this class is emitted as an :term:`event` whenever a repo is
214 deleted.
228 deleted.
215 """
229 """
216 name = 'repo-delete'
230 name = 'repo-delete'
217 display_name = lazy_ugettext('repository deleted')
231 display_name = lazy_ugettext('repository deleted')
218
232
219
233
220 class RepoVCSEvent(RepoEvent):
234 class RepoVCSEvent(RepoEvent):
221 """
235 """
222 Base class for events triggered by the VCS
236 Base class for events triggered by the VCS
223 """
237 """
224 def __init__(self, repo_name, extras):
238 def __init__(self, repo_name, extras):
225 self.repo = Repository.get_by_repo_name(repo_name)
239 self.repo = Repository.get_by_repo_name(repo_name)
226 if not self.repo:
240 if not self.repo:
227 raise Exception('repo by this name %s does not exist' % repo_name)
241 raise Exception('repo by this name %s does not exist' % repo_name)
228 self.extras = extras
242 self.extras = extras
229 super(RepoVCSEvent, self).__init__(self.repo)
243 super(RepoVCSEvent, self).__init__(self.repo)
230
244
231 @property
245 @property
232 def actor(self):
246 def actor(self):
233 if self.extras.get('username'):
247 if self.extras.get('username'):
234 return User.get_by_username(self.extras['username'])
248 return User.get_by_username(self.extras['username'])
235
249
236 @property
250 @property
237 def actor_ip(self):
251 def actor_ip(self):
238 if self.extras.get('ip'):
252 if self.extras.get('ip'):
239 return self.extras['ip']
253 return self.extras['ip']
240
254
241 @property
255 @property
242 def server_url(self):
256 def server_url(self):
243 if self.extras.get('server_url'):
257 if self.extras.get('server_url'):
244 return self.extras['server_url']
258 return self.extras['server_url']
245
259
246 @property
260 @property
247 def request(self):
261 def request(self):
248 return self.extras.get('request') or self.get_request()
262 return self.extras.get('request') or self.get_request()
249
263
250
264
251 class RepoPrePullEvent(RepoVCSEvent):
265 class RepoPrePullEvent(RepoVCSEvent):
252 """
266 """
253 An instance of this class is emitted as an :term:`event` before commits
267 An instance of this class is emitted as an :term:`event` before commits
254 are pulled from a repo.
268 are pulled from a repo.
255 """
269 """
256 name = 'repo-pre-pull'
270 name = 'repo-pre-pull'
257 display_name = lazy_ugettext('repository pre pull')
271 display_name = lazy_ugettext('repository pre pull')
258
272
259
273
260 class RepoPullEvent(RepoVCSEvent):
274 class RepoPullEvent(RepoVCSEvent):
261 """
275 """
262 An instance of this class is emitted as an :term:`event` after commits
276 An instance of this class is emitted as an :term:`event` after commits
263 are pulled from a repo.
277 are pulled from a repo.
264 """
278 """
265 name = 'repo-pull'
279 name = 'repo-pull'
266 display_name = lazy_ugettext('repository pull')
280 display_name = lazy_ugettext('repository pull')
267
281
268
282
269 class RepoPrePushEvent(RepoVCSEvent):
283 class RepoPrePushEvent(RepoVCSEvent):
270 """
284 """
271 An instance of this class is emitted as an :term:`event` before commits
285 An instance of this class is emitted as an :term:`event` before commits
272 are pushed to a repo.
286 are pushed to a repo.
273 """
287 """
274 name = 'repo-pre-push'
288 name = 'repo-pre-push'
275 display_name = lazy_ugettext('repository pre push')
289 display_name = lazy_ugettext('repository pre push')
276
290
277
291
278 class RepoPushEvent(RepoVCSEvent):
292 class RepoPushEvent(RepoVCSEvent):
279 """
293 """
280 An instance of this class is emitted as an :term:`event` after commits
294 An instance of this class is emitted as an :term:`event` after commits
281 are pushed to a repo.
295 are pushed to a repo.
282
296
283 :param extras: (optional) dict of data from proxied VCS actions
297 :param extras: (optional) dict of data from proxied VCS actions
284 """
298 """
285 name = 'repo-push'
299 name = 'repo-push'
286 display_name = lazy_ugettext('repository push')
300 display_name = lazy_ugettext('repository push')
287
301
288 def __init__(self, repo_name, pushed_commit_ids, extras):
302 def __init__(self, repo_name, pushed_commit_ids, extras):
289 super(RepoPushEvent, self).__init__(repo_name, extras)
303 super(RepoPushEvent, self).__init__(repo_name, extras)
290 self.pushed_commit_ids = pushed_commit_ids
304 self.pushed_commit_ids = pushed_commit_ids
291 self.new_refs = extras.new_refs
305 self.new_refs = extras.new_refs
292
306
293 def as_dict(self):
307 def as_dict(self):
294 data = super(RepoPushEvent, self).as_dict()
308 data = super(RepoPushEvent, self).as_dict()
295
309
296 def branch_url(branch_name):
310 def branch_url(branch_name):
297 return '{}/changelog?branch={}'.format(
311 return '{}/changelog?branch={}'.format(
298 data['repo']['url'], branch_name)
312 data['repo']['url'], branch_name)
299
313
300 def tag_url(tag_name):
314 def tag_url(tag_name):
301 return '{}/files/{}/'.format(
315 return '{}/files/{}/'.format(
302 data['repo']['url'], tag_name)
316 data['repo']['url'], tag_name)
303
317
304 commits = _commits_as_dict(
318 commits = _commits_as_dict(
305 self, commit_ids=self.pushed_commit_ids, repos=[self.repo])
319 self, commit_ids=self.pushed_commit_ids, repos=[self.repo])
306
320
307 last_branch = None
321 last_branch = None
308 for commit in reversed(commits):
322 for commit in reversed(commits):
309 commit['branch'] = commit['branch'] or last_branch
323 commit['branch'] = commit['branch'] or last_branch
310 last_branch = commit['branch']
324 last_branch = commit['branch']
311 issues = _issues_as_dict(commits)
325 issues = _issues_as_dict(commits)
312
326
313 branches = set()
327 branches = set()
314 tags = set()
328 tags = set()
315 for commit in commits:
329 for commit in commits:
316 if commit['refs']['tags']:
330 if commit['refs']['tags']:
317 for tag in commit['refs']['tags']:
331 for tag in commit['refs']['tags']:
318 tags.add(tag)
332 tags.add(tag)
319 if commit['branch']:
333 if commit['branch']:
320 branches.add(commit['branch'])
334 branches.add(commit['branch'])
321
335
322 # maybe we have branches in new_refs ?
336 # maybe we have branches in new_refs ?
323 try:
337 try:
324 branches = branches.union(set(self.new_refs['branches']))
338 branches = branches.union(set(self.new_refs['branches']))
325 except Exception:
339 except Exception:
326 pass
340 pass
327
341
328 branches = [
342 branches = [
329 {
343 {
330 'name': branch,
344 'name': branch,
331 'url': branch_url(branch)
345 'url': branch_url(branch)
332 }
346 }
333 for branch in branches
347 for branch in branches
334 ]
348 ]
335
349
336 # maybe we have tags in new_refs ?
350 # maybe we have tags in new_refs ?
337 try:
351 try:
338 tags = tags.union(set(self.new_refs['tags']))
352 tags = tags.union(set(self.new_refs['tags']))
339 except Exception:
353 except Exception:
340 pass
354 pass
341
355
342 tags = [
356 tags = [
343 {
357 {
344 'name': tag,
358 'name': tag,
345 'url': tag_url(tag)
359 'url': tag_url(tag)
346 }
360 }
347 for tag in tags
361 for tag in tags
348 ]
362 ]
349
363
350 data['push'] = {
364 data['push'] = {
351 'commits': commits,
365 'commits': commits,
352 'issues': issues,
366 'issues': issues,
353 'branches': branches,
367 'branches': branches,
354 'tags': tags,
368 'tags': tags,
355 }
369 }
356 return data
370 return data
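For reference, the 'push' payload assembled in as_dict() above has roughly the following shape; this is an editorial sketch with illustrative values, not output captured from a real push.

    # editorial sketch of data['push'] as built above; values are illustrative
    # data['push'] == {
    #     'commits':  [...],   # commit dicts from _commits_as_dict()
    #     'issues':   ...,     # result of _issues_as_dict(commits)
    #     'branches': [{'name': 'default', 'url': '<repo_url>/changelog?branch=default'}],
    #     'tags':     [{'name': 'v1.0.0', 'url': '<repo_url>/files/v1.0.0/'}],
    # }
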
@@ -1,492 +1,509 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2013-2019 RhodeCode GmbH
3 # Copyright (C) 2013-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of hooks run by RhodeCode Enterprise
23 Set of hooks run by RhodeCode Enterprise
24 """
24 """
25
25
26 import os
26 import os
27 import collections
28 import logging
27 import logging
29
28
30 import rhodecode
29 import rhodecode
31 from rhodecode import events
30 from rhodecode import events
32 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
33 from rhodecode.lib import audit_logger
32 from rhodecode.lib import audit_logger
34 from rhodecode.lib.utils2 import safe_str
33 from rhodecode.lib.utils2 import safe_str
35 from rhodecode.lib.exceptions import (
34 from rhodecode.lib.exceptions import (
36 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
35 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
37 from rhodecode.model.db import Repository, User
36 from rhodecode.model.db import Repository, User
38
37
39 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
40
39
41
40
42 class HookResponse(object):
41 class HookResponse(object):
43 def __init__(self, status, output):
42 def __init__(self, status, output):
44 self.status = status
43 self.status = status
45 self.output = output
44 self.output = output
46
45
47 def __add__(self, other):
46 def __add__(self, other):
48 other_status = getattr(other, 'status', 0)
47 other_status = getattr(other, 'status', 0)
49 new_status = max(self.status, other_status)
48 new_status = max(self.status, other_status)
50 other_output = getattr(other, 'output', '')
49 other_output = getattr(other, 'output', '')
51 new_output = self.output + other_output
50 new_output = self.output + other_output
52
51
53 return HookResponse(new_status, new_output)
52 return HookResponse(new_status, new_output)
54
53
55 def __bool__(self):
54 def __bool__(self):
56 return self.status == 0
55 return self.status == 0
57
56
58
57
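A quick illustration of the __add__ semantics above (editorial sketch, not part of the changeset): combining responses keeps the highest status and concatenates the outputs, which is how the lock-check message and the rcextensions output end up in a single response further below.

    ok = HookResponse(0, 'lock check passed\n')
    rejected = HookResponse(1, 'rcextensions rejected the push\n')
    combined = ok + rejected
    assert combined.status == 1                    # max() of the two statuses
    assert combined.output.endswith('the push\n')  # outputs are concatenated
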
59 def is_shadow_repo(extras):
58 def is_shadow_repo(extras):
60 """
59 """
61 Returns ``True`` if this is an action executed against a shadow repository.
60 Returns ``True`` if this is an action executed against a shadow repository.
62 """
61 """
63 return extras['is_shadow_repo']
62 return extras['is_shadow_repo']
64
63
65
64
66 def _get_scm_size(alias, root_path):
65 def _get_scm_size(alias, root_path):
67
66
68 if not alias.startswith('.'):
67 if not alias.startswith('.'):
69 alias += '.'
68 alias += '.'
70
69
71 size_scm, size_root = 0, 0
70 size_scm, size_root = 0, 0
72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
71 for path, unused_dirs, files in os.walk(safe_str(root_path)):
73 if path.find(alias) != -1:
72 if path.find(alias) != -1:
74 for f in files:
73 for f in files:
75 try:
74 try:
76 size_scm += os.path.getsize(os.path.join(path, f))
75 size_scm += os.path.getsize(os.path.join(path, f))
77 except OSError:
76 except OSError:
78 pass
77 pass
79 else:
78 else:
80 for f in files:
79 for f in files:
81 try:
80 try:
82 size_root += os.path.getsize(os.path.join(path, f))
81 size_root += os.path.getsize(os.path.join(path, f))
83 except OSError:
82 except OSError:
84 pass
83 pass
85
84
86 size_scm_f = h.format_byte_size_binary(size_scm)
85 size_scm_f = h.format_byte_size_binary(size_scm)
87 size_root_f = h.format_byte_size_binary(size_root)
86 size_root_f = h.format_byte_size_binary(size_root)
88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
87 size_total_f = h.format_byte_size_binary(size_root + size_scm)
89
88
90 return size_scm_f, size_root_f, size_total_f
89 return size_scm_f, size_root_f, size_total_f
91
90
92
91
93 # actual hooks called by Mercurial internally, and by Git via our Python hooks
92 # actual hooks called by Mercurial internally, and by Git via our Python hooks
94 def repo_size(extras):
93 def repo_size(extras):
95 """Present size of repository after push."""
94 """Present size of repository after push."""
96 repo = Repository.get_by_repo_name(extras.repository)
95 repo = Repository.get_by_repo_name(extras.repository)
97 vcs_part = safe_str(u'.%s' % repo.repo_type)
96 vcs_part = safe_str(u'.%s' % repo.repo_type)
98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
97 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
99 repo.repo_full_path)
98 repo.repo_full_path)
100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
99 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
100 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
102 return HookResponse(0, msg)
101 return HookResponse(0, msg)
103
102
104
103
105 def pre_push(extras):
104 def pre_push(extras):
106 """
105 """
107 Hook executed before pushing code.
106 Hook executed before pushing code.
108
107
109 It bans pushing when the repository is locked.
108 It bans pushing when the repository is locked.
110 """
109 """
111
110
112 user = User.get_by_username(extras.username)
111 user = User.get_by_username(extras.username)
113 output = ''
112 output = ''
114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
113 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
115 locked_by = User.get(extras.locked_by[0]).username
114 locked_by = User.get(extras.locked_by[0]).username
116 reason = extras.locked_by[2]
115 reason = extras.locked_by[2]
117 # this exception is interpreted in git/hg middlewares and based
116 # this exception is interpreted in git/hg middlewares and based
118 # on that the proper return code is sent to the client
117 # on that the proper return code is sent to the client
119 _http_ret = HTTPLockedRC(
118 _http_ret = HTTPLockedRC(
120 _locked_by_explanation(extras.repository, locked_by, reason))
119 _locked_by_explanation(extras.repository, locked_by, reason))
121 if str(_http_ret.code).startswith('2'):
120 if str(_http_ret.code).startswith('2'):
122 # 2xx Codes don't raise exceptions
121 # 2xx Codes don't raise exceptions
123 output = _http_ret.title
122 output = _http_ret.title
124 else:
123 else:
125 raise _http_ret
124 raise _http_ret
126
125
127 hook_response = ''
126 hook_response = ''
128 if not is_shadow_repo(extras):
127 if not is_shadow_repo(extras):
129 if extras.commit_ids and extras.check_branch_perms:
128 if extras.commit_ids and extras.check_branch_perms:
130
129
131 auth_user = user.AuthUser()
130 auth_user = user.AuthUser()
132 repo = Repository.get_by_repo_name(extras.repository)
131 repo = Repository.get_by_repo_name(extras.repository)
133 affected_branches = []
132 affected_branches = []
134 if repo.repo_type == 'hg':
133 if repo.repo_type == 'hg':
135 for entry in extras.commit_ids:
134 for entry in extras.commit_ids:
136 if entry['type'] == 'branch':
135 if entry['type'] == 'branch':
137 is_forced = bool(entry['multiple_heads'])
136 is_forced = bool(entry['multiple_heads'])
138 affected_branches.append([entry['name'], is_forced])
137 affected_branches.append([entry['name'], is_forced])
139 elif repo.repo_type == 'git':
138 elif repo.repo_type == 'git':
140 for entry in extras.commit_ids:
139 for entry in extras.commit_ids:
141 if entry['type'] == 'heads':
140 if entry['type'] == 'heads':
142 is_forced = bool(entry['pruned_sha'])
141 is_forced = bool(entry['pruned_sha'])
143 affected_branches.append([entry['name'], is_forced])
142 affected_branches.append([entry['name'], is_forced])
144
143
145 for branch_name, is_forced in affected_branches:
144 for branch_name, is_forced in affected_branches:
146
145
147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
146 rule, branch_perm = auth_user.get_rule_and_branch_permission(
148 extras.repository, branch_name)
147 extras.repository, branch_name)
149 if not branch_perm:
148 if not branch_perm:
150 # no branch permission found for this branch, just keep checking
149 # no branch permission found for this branch, just keep checking
151 continue
150 continue
152
151
153 if branch_perm == 'branch.push_force':
152 if branch_perm == 'branch.push_force':
154 continue
153 continue
155 elif branch_perm == 'branch.push' and is_forced is False:
154 elif branch_perm == 'branch.push' and is_forced is False:
156 continue
155 continue
157 elif branch_perm == 'branch.push' and is_forced is True:
156 elif branch_perm == 'branch.push' and is_forced is True:
158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
157 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
158 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
160 else:
159 else:
161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
160 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
162 branch_name, rule)
161 branch_name, rule)
163
162
164 if halt_message:
163 if halt_message:
165 _http_ret = HTTPBranchProtected(halt_message)
164 _http_ret = HTTPBranchProtected(halt_message)
166 raise _http_ret
165 raise _http_ret
167
166
168 # Propagate to external components. This is done after checking the
167 # Propagate to external components. This is done after checking the
169 # lock, for consistent behavior.
168 # lock, for consistent behavior.
170 hook_response = pre_push_extension(
169 hook_response = pre_push_extension(
171 repo_store_path=Repository.base_path(), **extras)
170 repo_store_path=Repository.base_path(), **extras)
172 events.trigger(events.RepoPrePushEvent(
171 events.trigger(events.RepoPrePushEvent(
173 repo_name=extras.repository, extras=extras))
172 repo_name=extras.repository, extras=extras))
174
173
175 return HookResponse(0, output) + hook_response
174 return HookResponse(0, output) + hook_response
176
175
177
176
178 def pre_pull(extras):
177 def pre_pull(extras):
179 """
178 """
180 Hook executed before pulling the code.
179 Hook executed before pulling the code.
181
180
182 It bans pulling when the repository is locked.
181 It bans pulling when the repository is locked.
183 """
182 """
184
183
185 output = ''
184 output = ''
186 if extras.locked_by[0]:
185 if extras.locked_by[0]:
187 locked_by = User.get(extras.locked_by[0]).username
186 locked_by = User.get(extras.locked_by[0]).username
188 reason = extras.locked_by[2]
187 reason = extras.locked_by[2]
189 # this exception is interpreted in git/hg middlewares and based
188 # this exception is interpreted in git/hg middlewares and based
190 # on that the proper return code is sent to the client
189 # on that the proper return code is sent to the client
191 _http_ret = HTTPLockedRC(
190 _http_ret = HTTPLockedRC(
192 _locked_by_explanation(extras.repository, locked_by, reason))
191 _locked_by_explanation(extras.repository, locked_by, reason))
193 if str(_http_ret.code).startswith('2'):
192 if str(_http_ret.code).startswith('2'):
194 # 2xx Codes don't raise exceptions
193 # 2xx Codes don't raise exceptions
195 output = _http_ret.title
194 output = _http_ret.title
196 else:
195 else:
197 raise _http_ret
196 raise _http_ret
198
197
199 # Propagate to external components. This is done after checking the
198 # Propagate to external components. This is done after checking the
200 # lock, for consistent behavior.
199 # lock, for consistent behavior.
201 hook_response = ''
200 hook_response = ''
202 if not is_shadow_repo(extras):
201 if not is_shadow_repo(extras):
203 extras.hook_type = extras.hook_type or 'pre_pull'
202 extras.hook_type = extras.hook_type or 'pre_pull'
204 hook_response = pre_pull_extension(
203 hook_response = pre_pull_extension(
205 repo_store_path=Repository.base_path(), **extras)
204 repo_store_path=Repository.base_path(), **extras)
206 events.trigger(events.RepoPrePullEvent(
205 events.trigger(events.RepoPrePullEvent(
207 repo_name=extras.repository, extras=extras))
206 repo_name=extras.repository, extras=extras))
208
207
209 return HookResponse(0, output) + hook_response
208 return HookResponse(0, output) + hook_response
210
209
211
210
212 def post_pull(extras):
211 def post_pull(extras):
213 """Hook executed after client pulls the code."""
212 """Hook executed after client pulls the code."""
214
213
215 audit_user = audit_logger.UserWrap(
214 audit_user = audit_logger.UserWrap(
216 username=extras.username,
215 username=extras.username,
217 ip_addr=extras.ip)
216 ip_addr=extras.ip)
218 repo = audit_logger.RepoWrap(repo_name=extras.repository)
217 repo = audit_logger.RepoWrap(repo_name=extras.repository)
219 audit_logger.store(
218 audit_logger.store(
220 'user.pull', action_data={'user_agent': extras.user_agent},
219 'user.pull', action_data={'user_agent': extras.user_agent},
221 user=audit_user, repo=repo, commit=True)
220 user=audit_user, repo=repo, commit=True)
222
221
223 output = ''
222 output = ''
224 # make_lock is a tri-state value: False, True, None. We only set a lock on True
223 # make_lock is a tri-state value: False, True, None. We only set a lock on True
225 if extras.make_lock is True and not is_shadow_repo(extras):
224 if extras.make_lock is True and not is_shadow_repo(extras):
226 user = User.get_by_username(extras.username)
225 user = User.get_by_username(extras.username)
227 Repository.lock(Repository.get_by_repo_name(extras.repository),
226 Repository.lock(Repository.get_by_repo_name(extras.repository),
228 user.user_id,
227 user.user_id,
229 lock_reason=Repository.LOCK_PULL)
228 lock_reason=Repository.LOCK_PULL)
230 msg = 'Made lock on repo `%s`' % (extras.repository,)
229 msg = 'Made lock on repo `%s`' % (extras.repository,)
231 output += msg
230 output += msg
232
231
233 if extras.locked_by[0]:
232 if extras.locked_by[0]:
234 locked_by = User.get(extras.locked_by[0]).username
233 locked_by = User.get(extras.locked_by[0]).username
235 reason = extras.locked_by[2]
234 reason = extras.locked_by[2]
236 _http_ret = HTTPLockedRC(
235 _http_ret = HTTPLockedRC(
237 _locked_by_explanation(extras.repository, locked_by, reason))
236 _locked_by_explanation(extras.repository, locked_by, reason))
238 if str(_http_ret.code).startswith('2'):
237 if str(_http_ret.code).startswith('2'):
239 # 2xx Codes don't raise exceptions
238 # 2xx Codes don't raise exceptions
240 output += _http_ret.title
239 output += _http_ret.title
241
240
242 # Propagate to external components.
241 # Propagate to external components.
243 hook_response = ''
242 hook_response = ''
244 if not is_shadow_repo(extras):
243 if not is_shadow_repo(extras):
245 extras.hook_type = extras.hook_type or 'post_pull'
244 extras.hook_type = extras.hook_type or 'post_pull'
246 hook_response = post_pull_extension(
245 hook_response = post_pull_extension(
247 repo_store_path=Repository.base_path(), **extras)
246 repo_store_path=Repository.base_path(), **extras)
248 events.trigger(events.RepoPullEvent(
247 events.trigger(events.RepoPullEvent(
249 repo_name=extras.repository, extras=extras))
248 repo_name=extras.repository, extras=extras))
250
249
251 return HookResponse(0, output) + hook_response
250 return HookResponse(0, output) + hook_response
252
251
253
252
254 def post_push(extras):
253 def post_push(extras):
255 """Hook executed after user pushes to the repository."""
254 """Hook executed after user pushes to the repository."""
256 commit_ids = extras.commit_ids
255 commit_ids = extras.commit_ids
257
256
258 # log the push call
257 # log the push call
259 audit_user = audit_logger.UserWrap(
258 audit_user = audit_logger.UserWrap(
260 username=extras.username, ip_addr=extras.ip)
259 username=extras.username, ip_addr=extras.ip)
261 repo = audit_logger.RepoWrap(repo_name=extras.repository)
260 repo = audit_logger.RepoWrap(repo_name=extras.repository)
262 audit_logger.store(
261 audit_logger.store(
263 'user.push', action_data={
262 'user.push', action_data={
264 'user_agent': extras.user_agent,
263 'user_agent': extras.user_agent,
265 'commit_ids': commit_ids[:400]},
264 'commit_ids': commit_ids[:400]},
266 user=audit_user, repo=repo, commit=True)
265 user=audit_user, repo=repo, commit=True)
267
266
268 # Propagate to external components.
267 # Propagate to external components.
269 output = ''
268 output = ''
270 # make_lock is a tri-state value: False, True, None. We only release the lock on False
269 # make_lock is a tri-state value: False, True, None. We only release the lock on False
271 if extras.make_lock is False and not is_shadow_repo(extras):
270 if extras.make_lock is False and not is_shadow_repo(extras):
272 Repository.unlock(Repository.get_by_repo_name(extras.repository))
271 Repository.unlock(Repository.get_by_repo_name(extras.repository))
273 msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository))
272 msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository))
274 output += msg
273 output += msg
275
274
276 if extras.locked_by[0]:
275 if extras.locked_by[0]:
277 locked_by = User.get(extras.locked_by[0]).username
276 locked_by = User.get(extras.locked_by[0]).username
278 reason = extras.locked_by[2]
277 reason = extras.locked_by[2]
279 _http_ret = HTTPLockedRC(
278 _http_ret = HTTPLockedRC(
280 _locked_by_explanation(extras.repository, locked_by, reason))
279 _locked_by_explanation(extras.repository, locked_by, reason))
281 # TODO: johbo: if not?
280 # TODO: johbo: if not?
282 if str(_http_ret.code).startswith('2'):
281 if str(_http_ret.code).startswith('2'):
283 # 2xx Codes don't raise exceptions
282 # 2xx Codes don't raise exceptions
284 output += _http_ret.title
283 output += _http_ret.title
285
284
286 if extras.new_refs:
285 if extras.new_refs:
287 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
286 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
288 safe_str(extras.server_url), safe_str(extras.repository))
287 safe_str(extras.server_url), safe_str(extras.repository))
289
288
290 for branch_name in extras.new_refs['branches']:
289 for branch_name in extras.new_refs['branches']:
291 output += 'RhodeCode: open pull request link: {}\n'.format(
290 output += 'RhodeCode: open pull request link: {}\n'.format(
292 tmpl.format(ref_type='branch', ref_name=safe_str(branch_name)))
291 tmpl.format(ref_type='branch', ref_name=safe_str(branch_name)))
293
292
294 for book_name in extras.new_refs['bookmarks']:
293 for book_name in extras.new_refs['bookmarks']:
295 output += 'RhodeCode: open pull request link: {}\n'.format(
294 output += 'RhodeCode: open pull request link: {}\n'.format(
296 tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name)))
295 tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name)))
297
296
298 hook_response = ''
297 hook_response = ''
299 if not is_shadow_repo(extras):
298 if not is_shadow_repo(extras):
300 hook_response = post_push_extension(
299 hook_response = post_push_extension(
301 repo_store_path=Repository.base_path(),
300 repo_store_path=Repository.base_path(),
302 **extras)
301 **extras)
303 events.trigger(events.RepoPushEvent(
302 events.trigger(events.RepoPushEvent(
304 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
303 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
305
304
306 output += 'RhodeCode: push completed\n'
305 output += 'RhodeCode: push completed\n'
307 return HookResponse(0, output) + hook_response
306 return HookResponse(0, output) + hook_response
308
307
309
308
310 def _locked_by_explanation(repo_name, user_name, reason):
309 def _locked_by_explanation(repo_name, user_name, reason):
311 message = (
310 message = (
312 'Repository `%s` locked by user `%s`. Reason:`%s`'
311 'Repository `%s` locked by user `%s`. Reason:`%s`'
313 % (repo_name, user_name, reason))
312 % (repo_name, user_name, reason))
314 return message
313 return message
315
314
316
315
317 def check_allowed_create_user(user_dict, created_by, **kwargs):
316 def check_allowed_create_user(user_dict, created_by, **kwargs):
318 # pre create hooks
317 # pre create hooks
319 if pre_create_user.is_active():
318 if pre_create_user.is_active():
320 hook_result = pre_create_user(created_by=created_by, **user_dict)
319 hook_result = pre_create_user(created_by=created_by, **user_dict)
321 allowed = hook_result.status == 0
320 allowed = hook_result.status == 0
322 if not allowed:
321 if not allowed:
323 reason = hook_result.output
322 reason = hook_result.output
324 raise UserCreationError(reason)
323 raise UserCreationError(reason)
325
324
326
325
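To make the flow above concrete: if an rcextensions PRE_CREATE_USER_HOOK returns a non-zero status, check_allowed_create_user() raises UserCreationError with the hook output as the reason. A minimal sketch follows; the function name and the domain policy are invented for illustration and are not part of this changeset.

    def _pre_create_user_hook(*args, **kwargs):
        # kwargs carry the keys listed for pre_create_user below
        # (username, password, email, firstname, lastname, active, admin, created_by)
        email = kwargs.get('email') or ''
        if not email.endswith('@example.com'):  # hypothetical policy
            return HookResponse(1, 'only @example.com accounts may be created')
        return HookResponse(0, '')
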
327 class ExtensionCallback(object):
326 class ExtensionCallback(object):
328 """
327 """
329 Forwards a given call to rcextensions, sanitizes keyword arguments.
328 Forwards a given call to rcextensions, sanitizes keyword arguments.
330
329
331 Checks whether an extension is active for that hook. If it is,
330 Checks whether an extension is active for that hook. If it is,
332 it will forward all `kwargs_keys` keyword arguments to the
331 it will forward all `kwargs_keys` keyword arguments to the
333 extension callback.
332 extension callback.
334 """
333 """
335
334
336 def __init__(self, hook_name, kwargs_keys):
335 def __init__(self, hook_name, kwargs_keys):
337 self._hook_name = hook_name
336 self._hook_name = hook_name
338 self._kwargs_keys = set(kwargs_keys)
337 self._kwargs_keys = set(kwargs_keys)
339
338
340 def __call__(self, *args, **kwargs):
339 def __call__(self, *args, **kwargs):
341 log.debug('Calling extension callback for `%s`', self._hook_name)
340 log.debug('Calling extension callback for `%s`', self._hook_name)
342 callback = self._get_callback()
341 callback = self._get_callback()
343 if not callback:
342 if not callback:
344 log.debug('extension callback `%s` not found, skipping...', self._hook_name)
343 log.debug('extension callback `%s` not found, skipping...', self._hook_name)
345 return
344 return
346
345
347 kwargs_to_pass = {}
346 kwargs_to_pass = {}
348 for key in self._kwargs_keys:
347 for key in self._kwargs_keys:
349 try:
348 try:
350 kwargs_to_pass[key] = kwargs[key]
349 kwargs_to_pass[key] = kwargs[key]
351 except KeyError:
350 except KeyError:
352 log.error('Failed to fetch %s key. Expected keys: %s',
351 log.error('Failed to fetch %s key from given kwargs. '
353 key, self._kwargs_keys)
352 'Expected keys: %s', key, self._kwargs_keys)
354 raise
353 raise
355
354
356 # backward compat for removed api_key for old hooks. This is how it works
355 # backward compat for removed api_key for old hooks. This is how it works
357 # with older rcextensions that require api_key present
356 # with older rcextensions that require api_key present
358 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
357 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
359 kwargs_to_pass['api_key'] = '_DEPRECATED_'
358 kwargs_to_pass['api_key'] = '_DEPRECATED_'
360 return callback(**kwargs_to_pass)
359 return callback(**kwargs_to_pass)
361
360
362 def is_active(self):
361 def is_active(self):
363 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
362 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
364
363
365 def _get_callback(self):
364 def _get_callback(self):
366 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
365 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
367
366
368
367
369 pre_pull_extension = ExtensionCallback(
368 pre_pull_extension = ExtensionCallback(
370 hook_name='PRE_PULL_HOOK',
369 hook_name='PRE_PULL_HOOK',
371 kwargs_keys=(
370 kwargs_keys=(
372 'server_url', 'config', 'scm', 'username', 'ip', 'action',
371 'server_url', 'config', 'scm', 'username', 'ip', 'action',
373 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
372 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
374
373
375
374
376 post_pull_extension = ExtensionCallback(
375 post_pull_extension = ExtensionCallback(
377 hook_name='PULL_HOOK',
376 hook_name='PULL_HOOK',
378 kwargs_keys=(
377 kwargs_keys=(
379 'server_url', 'config', 'scm', 'username', 'ip', 'action',
378 'server_url', 'config', 'scm', 'username', 'ip', 'action',
380 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
379 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
381
380
382
381
383 pre_push_extension = ExtensionCallback(
382 pre_push_extension = ExtensionCallback(
384 hook_name='PRE_PUSH_HOOK',
383 hook_name='PRE_PUSH_HOOK',
385 kwargs_keys=(
384 kwargs_keys=(
386 'server_url', 'config', 'scm', 'username', 'ip', 'action',
385 'server_url', 'config', 'scm', 'username', 'ip', 'action',
387 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
386 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
388
387
389
388
390 post_push_extension = ExtensionCallback(
389 post_push_extension = ExtensionCallback(
391 hook_name='PUSH_HOOK',
390 hook_name='PUSH_HOOK',
392 kwargs_keys=(
391 kwargs_keys=(
393 'server_url', 'config', 'scm', 'username', 'ip', 'action',
392 'server_url', 'config', 'scm', 'username', 'ip', 'action',
394 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
393 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
395
394
396
395
397 pre_create_user = ExtensionCallback(
396 pre_create_user = ExtensionCallback(
398 hook_name='PRE_CREATE_USER_HOOK',
397 hook_name='PRE_CREATE_USER_HOOK',
399 kwargs_keys=(
398 kwargs_keys=(
400 'username', 'password', 'email', 'firstname', 'lastname', 'active',
399 'username', 'password', 'email', 'firstname', 'lastname', 'active',
401 'admin', 'created_by'))
400 'admin', 'created_by'))
402
401
403
402
404 log_create_pull_request = ExtensionCallback(
403 log_create_pull_request = ExtensionCallback(
405 hook_name='CREATE_PULL_REQUEST',
404 hook_name='CREATE_PULL_REQUEST',
406 kwargs_keys=(
405 kwargs_keys=(
407 'server_url', 'config', 'scm', 'username', 'ip', 'action',
406 'server_url', 'config', 'scm', 'username', 'ip', 'action',
408 'repository', 'pull_request_id', 'url', 'title', 'description',
407 'repository', 'pull_request_id', 'url', 'title', 'description',
409 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
408 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
410 'mergeable', 'source', 'target', 'author', 'reviewers'))
409 'mergeable', 'source', 'target', 'author', 'reviewers'))
411
410
412
411
413 log_merge_pull_request = ExtensionCallback(
412 log_merge_pull_request = ExtensionCallback(
414 hook_name='MERGE_PULL_REQUEST',
413 hook_name='MERGE_PULL_REQUEST',
415 kwargs_keys=(
414 kwargs_keys=(
416 'server_url', 'config', 'scm', 'username', 'ip', 'action',
415 'server_url', 'config', 'scm', 'username', 'ip', 'action',
417 'repository', 'pull_request_id', 'url', 'title', 'description',
416 'repository', 'pull_request_id', 'url', 'title', 'description',
418 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
417 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
419 'mergeable', 'source', 'target', 'author', 'reviewers'))
418 'mergeable', 'source', 'target', 'author', 'reviewers'))
420
419
421
420
422 log_close_pull_request = ExtensionCallback(
421 log_close_pull_request = ExtensionCallback(
423 hook_name='CLOSE_PULL_REQUEST',
422 hook_name='CLOSE_PULL_REQUEST',
424 kwargs_keys=(
423 kwargs_keys=(
425 'server_url', 'config', 'scm', 'username', 'ip', 'action',
424 'server_url', 'config', 'scm', 'username', 'ip', 'action',
426 'repository', 'pull_request_id', 'url', 'title', 'description',
425 'repository', 'pull_request_id', 'url', 'title', 'description',
427 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
426 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
428 'mergeable', 'source', 'target', 'author', 'reviewers'))
427 'mergeable', 'source', 'target', 'author', 'reviewers'))
429
428
430
429
431 log_review_pull_request = ExtensionCallback(
430 log_review_pull_request = ExtensionCallback(
432 hook_name='REVIEW_PULL_REQUEST',
431 hook_name='REVIEW_PULL_REQUEST',
433 kwargs_keys=(
432 kwargs_keys=(
434 'server_url', 'config', 'scm', 'username', 'ip', 'action',
433 'server_url', 'config', 'scm', 'username', 'ip', 'action',
435 'repository', 'pull_request_id', 'url', 'title', 'description',
434 'repository', 'pull_request_id', 'url', 'title', 'description',
436 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
435 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
437 'mergeable', 'source', 'target', 'author', 'reviewers'))
436 'mergeable', 'source', 'target', 'author', 'reviewers'))
438
437
439
438
439 log_comment_pull_request = ExtensionCallback(
440 hook_name='COMMENT_PULL_REQUEST',
441 kwargs_keys=(
442 'server_url', 'config', 'scm', 'username', 'ip', 'action',
443 'repository', 'pull_request_id', 'url', 'title', 'description',
444 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
445 'mergeable', 'source', 'target', 'author', 'reviewers'))
446
447
440 log_update_pull_request = ExtensionCallback(
448 log_update_pull_request = ExtensionCallback(
441 hook_name='UPDATE_PULL_REQUEST',
449 hook_name='UPDATE_PULL_REQUEST',
442 kwargs_keys=(
450 kwargs_keys=(
443 'server_url', 'config', 'scm', 'username', 'ip', 'action',
451 'server_url', 'config', 'scm', 'username', 'ip', 'action',
444 'repository', 'pull_request_id', 'url', 'title', 'description',
452 'repository', 'pull_request_id', 'url', 'title', 'description',
445 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
453 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
446 'mergeable', 'source', 'target', 'author', 'reviewers'))
454 'mergeable', 'source', 'target', 'author', 'reviewers'))
447
455
448
456
449 log_create_user = ExtensionCallback(
457 log_create_user = ExtensionCallback(
450 hook_name='CREATE_USER_HOOK',
458 hook_name='CREATE_USER_HOOK',
451 kwargs_keys=(
459 kwargs_keys=(
452 'username', 'full_name_or_username', 'full_contact', 'user_id',
460 'username', 'full_name_or_username', 'full_contact', 'user_id',
453 'name', 'firstname', 'short_contact', 'admin', 'lastname',
461 'name', 'firstname', 'short_contact', 'admin', 'lastname',
454 'ip_addresses', 'extern_type', 'extern_name',
462 'ip_addresses', 'extern_type', 'extern_name',
455 'email', 'api_keys', 'last_login',
463 'email', 'api_keys', 'last_login',
456 'full_name', 'active', 'password', 'emails',
464 'full_name', 'active', 'password', 'emails',
457 'inherit_default_permissions', 'created_by', 'created_on'))
465 'inherit_default_permissions', 'created_by', 'created_on'))
458
466
459
467
460 log_delete_user = ExtensionCallback(
468 log_delete_user = ExtensionCallback(
461 hook_name='DELETE_USER_HOOK',
469 hook_name='DELETE_USER_HOOK',
462 kwargs_keys=(
470 kwargs_keys=(
463 'username', 'full_name_or_username', 'full_contact', 'user_id',
471 'username', 'full_name_or_username', 'full_contact', 'user_id',
464 'name', 'firstname', 'short_contact', 'admin', 'lastname',
472 'name', 'firstname', 'short_contact', 'admin', 'lastname',
465 'ip_addresses',
473 'ip_addresses',
466 'email', 'last_login',
474 'email', 'last_login',
467 'full_name', 'active', 'password', 'emails',
475 'full_name', 'active', 'password', 'emails',
468 'inherit_default_permissions', 'deleted_by'))
476 'inherit_default_permissions', 'deleted_by'))
469
477
470
478
471 log_create_repository = ExtensionCallback(
479 log_create_repository = ExtensionCallback(
472 hook_name='CREATE_REPO_HOOK',
480 hook_name='CREATE_REPO_HOOK',
473 kwargs_keys=(
481 kwargs_keys=(
474 'repo_name', 'repo_type', 'description', 'private', 'created_on',
482 'repo_name', 'repo_type', 'description', 'private', 'created_on',
475 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
483 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
476 'clone_uri', 'fork_id', 'group_id', 'created_by'))
484 'clone_uri', 'fork_id', 'group_id', 'created_by'))
477
485
478
486
479 log_delete_repository = ExtensionCallback(
487 log_delete_repository = ExtensionCallback(
480 hook_name='DELETE_REPO_HOOK',
488 hook_name='DELETE_REPO_HOOK',
481 kwargs_keys=(
489 kwargs_keys=(
482 'repo_name', 'repo_type', 'description', 'private', 'created_on',
490 'repo_name', 'repo_type', 'description', 'private', 'created_on',
483 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
491 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
484 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
492 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
485
493
486
494
495 log_comment_commit_repository = ExtensionCallback(
496 hook_name='COMMENT_COMMIT_REPO_HOOK',
497 kwargs_keys=(
498 'repo_name', 'repo_type', 'description', 'private', 'created_on',
499 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
500 'clone_uri', 'fork_id', 'group_id',
501 'repository', 'created_by', 'comment', 'commit'))
502
503
487 log_create_repository_group = ExtensionCallback(
504 log_create_repository_group = ExtensionCallback(
488 hook_name='CREATE_REPO_GROUP_HOOK',
505 hook_name='CREATE_REPO_GROUP_HOOK',
489 kwargs_keys=(
506 kwargs_keys=(
490 'group_name', 'group_parent_id', 'group_description',
507 'group_name', 'group_parent_id', 'group_description',
491 'group_id', 'user_id', 'created_by', 'created_on',
508 'group_id', 'user_id', 'created_by', 'created_on',
492 'enable_locking'))
509 'enable_locking'))
@@ -1,169 +1,215 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import webob
21 import webob
22 from pyramid.threadlocal import get_current_request
22 from pyramid.threadlocal import get_current_request
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.lib import hooks_base
25 from rhodecode.lib import hooks_base
26 from rhodecode.lib import utils2
26 from rhodecode.lib import utils2
27
27
28
28
29 def _get_rc_scm_extras(username, repo_name, repo_alias, action):
29 def _supports_repo_type(repo_type):
30 # TODO: johbo: Replace by vcs_operation_context and remove fully
30 if repo_type in ('hg', 'git'):
31 return True
32 return False
33
34
35 def _get_vcs_operation_context(username, repo_name, repo_type, action):
36 # NOTE(dan): import loop
31 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.base import vcs_operation_context
38
32 check_locking = action in ('pull', 'push')
39 check_locking = action in ('pull', 'push')
33
40
34 request = get_current_request()
41 request = get_current_request()
35
42
36 # default
37 dummy_environ = webob.Request.blank('').environ
38 try:
43 try:
39 environ = request.environ or dummy_environ
44 environ = request.environ
40 except TypeError:
45 except TypeError:
41 # we might use this outside of request context
46 # we might use this outside of request context
42 environ = dummy_environ
47 environ = {}
43
48
44 extras = vcs_operation_context(
49 if not environ:
45 environ, repo_name, username, action, repo_alias, check_locking)
50 environ = webob.Request.blank('').environ
51
52 extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking)
46 return utils2.AttributeDict(extras)
53 return utils2.AttributeDict(extras)
47
54
48
55
49 def trigger_post_push_hook(
56 def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids):
50 username, action, hook_type, repo_name, repo_alias, commit_ids):
51 """
57 """
52 Triggers push action hooks
58 Triggers push action hooks
53
59
54 :param username: username who pushes
60 :param username: username who pushes
55 :param action: push/push_local/push_remote
61 :param action: push/push_local/push_remote
62 :param hook_type: type of hook executed
56 :param repo_name: name of repo
63 :param repo_name: name of repo
57 :param repo_alias: the type of SCM repo
64 :param repo_type: the type of SCM repo
58 :param commit_ids: list of commit ids that we pushed
65 :param commit_ids: list of commit ids that we pushed
59 """
66 """
60 extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
67 extras = _get_vcs_operation_context(username, repo_name, repo_type, action)
61 extras.commit_ids = commit_ids
68 extras.commit_ids = commit_ids
62 extras.hook_type = hook_type
69 extras.hook_type = hook_type
63 hooks_base.post_push(extras)
70 hooks_base.post_push(extras)
64
71
65
72
66 def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
73 def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None):
67 pull_request, data=None):
74 """
75 Triggers when a comment is made on a commit
76
77 :param username: username who creates the comment
78 :param repo_name: name of target repo
79 :param repo_type: the type of SCM target repo
80 :param repo: the repo object we trigger the event for
81 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
82 """
83 if not _supports_repo_type(repo_type):
84 return
85
86 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
87
88 comment = data['comment']
89 commit = data['commit']
90
91 events.trigger(events.RepoCommitCommentEvent(repo, commit, comment))
92 extras.update(repo.get_dict())
93
94 extras.commit = commit.serialize()
95 extras.comment = comment.get_api_data()
96 extras.created_by = username
97 hooks_base.log_comment_commit_repository(**extras)
98
99
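A hypothetical call site for the new trigger; the comment, commit and repo names below are illustrative placeholders and not lines from this changeset.

    # editorial sketch: invoked after a commit comment has been saved
    trigger_comment_commit_hooks(
        username=comment_author_name,
        repo_name=repo.repo_name,
        repo_type=repo.repo_type,
        repo=repo,
        data={'comment': comment, 'commit': commit})
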
100 def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
68 """
101 """
69 Triggers create pull request action hooks
102 Triggers create pull request action hooks
70
103
71 :param username: username who creates the pull request
104 :param username: username who creates the pull request
72 :param repo_name: name of target repo
105 :param repo_name: name of target repo
73 :param repo_alias: the type of SCM target repo
106 :param repo_type: the type of SCM target repo
74 :param pull_request: the pull request that was created
107 :param pull_request: the pull request that was created
75 :param data: extra data for specific events e.g {'comment': comment_obj}
108 :param data: extra data for specific events e.g {'comment': comment_obj}
76 """
109 """
77 if repo_alias not in ('hg', 'git'):
110 if not _supports_repo_type(repo_type):
78 return
111 return
79
112
80 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
113 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request')
81 'create_pull_request')
82 events.trigger(events.PullRequestCreateEvent(pull_request))
114 events.trigger(events.PullRequestCreateEvent(pull_request))
83 extras.update(pull_request.get_api_data(with_merge_state=False))
115 extras.update(pull_request.get_api_data(with_merge_state=False))
84 hooks_base.log_create_pull_request(**extras)
116 hooks_base.log_create_pull_request(**extras)
85
117
86
118
87 def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
119 def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
88 pull_request, data=None):
89 """
120 """
90 Triggers merge pull request action hooks
121 Triggers merge pull request action hooks
91
122
92 :param username: username who merges the pull request
123 :param username: username who merges the pull request
93 :param repo_name: name of target repo
124 :param repo_name: name of target repo
94 :param repo_alias: the type of SCM target repo
125 :param repo_type: the type of SCM target repo
95 :param pull_request: the pull request that was merged
126 :param pull_request: the pull request that was merged
96 :param data: extra data for specific events e.g {'comment': comment_obj}
127 :param data: extra data for specific events e.g {'comment': comment_obj}
97 """
128 """
98 if repo_alias not in ('hg', 'git'):
129 if not _supports_repo_type(repo_type):
99 return
130 return
100
131
101 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
132 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request')
102 'merge_pull_request')
103 events.trigger(events.PullRequestMergeEvent(pull_request))
133 events.trigger(events.PullRequestMergeEvent(pull_request))
104 extras.update(pull_request.get_api_data())
134 extras.update(pull_request.get_api_data())
105 hooks_base.log_merge_pull_request(**extras)
135 hooks_base.log_merge_pull_request(**extras)
106
136
107
137
108 def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
138 def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
109 pull_request, data=None):
110 """
139 """
111 Triggers close pull request action hooks
140 Triggers close pull request action hooks
112
141
113 :param username: username who closes the pull request
142 :param username: username who closes the pull request
114 :param repo_name: name of target repo
143 :param repo_name: name of target repo
115 :param repo_alias: the type of SCM target repo
144 :param repo_type: the type of SCM target repo
116 :param pull_request: the pull request that was closed
145 :param pull_request: the pull request that was closed
117 :param data: extra data for specific events e.g {'comment': comment_obj}
146 :param data: extra data for specific events e.g {'comment': comment_obj}
118 """
147 """
119 if repo_alias not in ('hg', 'git'):
148 if not _supports_repo_type(repo_type):
120 return
149 return
121
150
122 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
151 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request')
123 'close_pull_request')
124 events.trigger(events.PullRequestCloseEvent(pull_request))
152 events.trigger(events.PullRequestCloseEvent(pull_request))
125 extras.update(pull_request.get_api_data())
153 extras.update(pull_request.get_api_data())
126 hooks_base.log_close_pull_request(**extras)
154 hooks_base.log_close_pull_request(**extras)
127
155
128
156
129 def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
157 def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
130 pull_request, data=None):
131 """
158 """
132 Triggers review status change pull request action hooks
159 Triggers review status change pull request action hooks
133
160
134 :param username: username who reviews the pull request
161 :param username: username who reviews the pull request
135 :param repo_name: name of target repo
162 :param repo_name: name of target repo
136 :param repo_alias: the type of SCM target repo
163 :param repo_type: the type of SCM target repo
137 :param pull_request: the pull request that review status changed
164 :param pull_request: the pull request that review status changed
138 :param data: extra data for specific events e.g {'comment': comment_obj}
165 :param data: extra data for specific events e.g {'comment': comment_obj}
139 """
166 """
140 if repo_alias not in ('hg', 'git'):
167 if not _supports_repo_type(repo_type):
141 return
168 return
142
169
143 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
170 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request')
144 'review_pull_request')
145 status = data.get('status')
171 status = data.get('status')
146 events.trigger(events.PullRequestReviewEvent(pull_request, status))
172 events.trigger(events.PullRequestReviewEvent(pull_request, status))
147 extras.update(pull_request.get_api_data())
173 extras.update(pull_request.get_api_data())
148 hooks_base.log_review_pull_request(**extras)
174 hooks_base.log_review_pull_request(**extras)
149
175
150
176
151 def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
177 def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
152 pull_request, data=None):
178 """
179 Triggers when a comment is made on a pull request
180
181 :param username: username who comments on the pull request
182 :param repo_name: name of target repo
183 :param repo_type: the type of SCM target repo
184 :param pull_request: the pull request that comment was made on
185 :param data: extra data for specific events e.g {'comment': comment_obj}
186 """
187 if not _supports_repo_type(repo_type):
188 return
189
190 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
191
192 comment = data['comment']
193 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
194 extras.update(pull_request.get_api_data())
195 extras.comment = comment.get_api_data()
196 hooks_base.log_comment_pull_request(**extras)
197
198
199 def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
153 """
200 """
154 Triggers update pull request action hooks
201 Triggers update pull request action hooks
155
202
156 :param username: username who updates the pull request
203 :param username: username who updates the pull request
157 :param repo_name: name of target repo
204 :param repo_name: name of target repo
158 :param repo_alias: the type of SCM target repo
205 :param repo_type: the type of SCM target repo
159 :param pull_request: the pull request that was updated
206 :param pull_request: the pull request that was updated
160 :param data: extra data for specific events e.g {'comment': comment_obj}
207 :param data: extra data for specific events e.g {'comment': comment_obj}
161 """
208 """
162 if repo_alias not in ('hg', 'git'):
209 if not _supports_repo_type(repo_type):
163 return
210 return
164
211
165 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
212 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request')
166 'update_pull_request')
167 events.trigger(events.PullRequestUpdateEvent(pull_request))
213 events.trigger(events.PullRequestUpdateEvent(pull_request))
168 extras.update(pull_request.get_api_data())
214 extras.update(pull_request.get_api_data())
169 hooks_base.log_update_pull_request(**extras)
215 hooks_base.log_update_pull_request(**extras)
@@ -1,1901 +1,1904 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
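# A quick illustration of the Reference tuple defined above; the values are
# made up and only show the expected shape of the three fields.
example_ref = Reference(type='branch', name='master', commit_id='a' * 40)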
63 class MergeFailureReason(object):
63 class MergeFailureReason(object):
64 """
64 """
65 Enumeration with all the reasons why the server side merge could fail.
65 Enumeration with all the reasons why the server side merge could fail.
66
66
67 DO NOT change the number of the reasons, as they may be stored in the
67 DO NOT change the number of the reasons, as they may be stored in the
68 database.
68 database.
69
69
70 Changing the name of a reason is acceptable and encouraged to deprecate old
70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 reasons.
71 reasons.
72 """
72 """
73
73
74 # Everything went well.
74 # Everything went well.
75 NONE = 0
75 NONE = 0
76
76
77 # An unexpected exception was raised. Check the logs for more details.
77 # An unexpected exception was raised. Check the logs for more details.
78 UNKNOWN = 1
78 UNKNOWN = 1
79
79
80 # The merge was not successful, there are conflicts.
80 # The merge was not successful, there are conflicts.
81 MERGE_FAILED = 2
81 MERGE_FAILED = 2
82
82
83 # The merge succeeded but we could not push it to the target repository.
83 # The merge succeeded but we could not push it to the target repository.
84 PUSH_FAILED = 3
84 PUSH_FAILED = 3
85
85
86 # The specified target is not a head in the target repository.
86 # The specified target is not a head in the target repository.
87 TARGET_IS_NOT_HEAD = 4
87 TARGET_IS_NOT_HEAD = 4
88
88
89 # The source repository contains more branches than the target. Pushing
89 # The source repository contains more branches than the target. Pushing
90 # the merge will create additional branches in the target.
90 # the merge will create additional branches in the target.
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92
92
93 # The target reference has multiple heads, which does not allow us to
93 # The target reference has multiple heads, which does not allow us to
94 # correctly identify the target location. This can only happen for mercurial
94 # correctly identify the target location. This can only happen for mercurial
95 # branches.
95 # branches.
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97
97
98 # The target repository is locked
98 # The target repository is locked
99 TARGET_IS_LOCKED = 7
99 TARGET_IS_LOCKED = 7
100
100
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 # An involved commit could not be found.
102 # An involved commit could not be found.
103 _DEPRECATED_MISSING_COMMIT = 8
103 _DEPRECATED_MISSING_COMMIT = 8
104
104
105 # The target repo reference is missing.
105 # The target repo reference is missing.
106 MISSING_TARGET_REF = 9
106 MISSING_TARGET_REF = 9
107
107
108 # The source repo reference is missing.
108 # The source repo reference is missing.
109 MISSING_SOURCE_REF = 10
109 MISSING_SOURCE_REF = 10
110
110
111 # The merge was not successful, there are conflicts related to sub
111 # The merge was not successful, there are conflicts related to sub
112 # repositories.
112 # repositories.
113 SUBREPO_MERGE_FAILED = 11
113 SUBREPO_MERGE_FAILED = 11
114
114
115
115
116 class UpdateFailureReason(object):
116 class UpdateFailureReason(object):
117 """
117 """
118 Enumeration with all the reasons why the pull request update could fail.
118 Enumeration with all the reasons why the pull request update could fail.
119
119
120 DO NOT change the number of the reasons, as they may be stored in the
120 DO NOT change the number of the reasons, as they may be stored in the
121 database.
121 database.
122
122
123 Changing the name of a reason is acceptable and encouraged to deprecate old
123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 reasons.
124 reasons.
125 """
125 """
126
126
127 # Everything went well.
127 # Everything went well.
128 NONE = 0
128 NONE = 0
129
129
130 # An unexpected exception was raised. Check the logs for more details.
130 # An unexpected exception was raised. Check the logs for more details.
131 UNKNOWN = 1
131 UNKNOWN = 1
132
132
133 # The pull request is up to date.
133 # The pull request is up to date.
134 NO_CHANGE = 2
134 NO_CHANGE = 2
135
135
136 # The pull request has a reference type that is not supported for update.
136 # The pull request has a reference type that is not supported for update.
137 WRONG_REF_TYPE = 3
137 WRONG_REF_TYPE = 3
138
138
139 # Update failed because the target reference is missing.
139 # Update failed because the target reference is missing.
140 MISSING_TARGET_REF = 4
140 MISSING_TARGET_REF = 4
141
141
142 # Update failed because the source reference is missing.
142 # Update failed because the source reference is missing.
143 MISSING_SOURCE_REF = 5
143 MISSING_SOURCE_REF = 5
144
144
145
145
146 class MergeResponse(object):
146 class MergeResponse(object):
147
147
148 # uses .format(**metadata) for variables
148 # uses .format(**metadata) for variables
149 MERGE_STATUS_MESSAGES = {
149 MERGE_STATUS_MESSAGES = {
150 MergeFailureReason.NONE: lazy_ugettext(
150 MergeFailureReason.NONE: lazy_ugettext(
151 u'This pull request can be automatically merged.'),
151 u'This pull request can be automatically merged.'),
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 u'This pull request cannot be merged because of an unhandled exception. '
153 u'This pull request cannot be merged because of an unhandled exception. '
154 u'{exception}'),
154 u'{exception}'),
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 u'This pull request could not be merged because push to '
158 u'This pull request could not be merged because push to '
159 u'target:`{target}@{merge_commit}` failed.'),
159 u'target:`{target}@{merge_commit}` failed.'),
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 u'This pull request cannot be merged because the target '
161 u'This pull request cannot be merged because the target '
162 u'`{target_ref.name}` is not a head.'),
162 u'`{target_ref.name}` is not a head.'),
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 u'This pull request cannot be merged because the source contains '
164 u'This pull request cannot be merged because the source contains '
165 u'more branches than the target.'),
165 u'more branches than the target.'),
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 u'has multiple heads: `{heads}`.'),
168 u'has multiple heads: `{heads}`.'),
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 u'This pull request cannot be merged because the target repository is '
170 u'This pull request cannot be merged because the target repository is '
171 u'locked by {locked_by}.'),
171 u'locked by {locked_by}.'),
172
172
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 u'This pull request cannot be merged because the target '
174 u'This pull request cannot be merged because the target '
175 u'reference `{target_ref.name}` is missing.'),
175 u'reference `{target_ref.name}` is missing.'),
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 u'This pull request cannot be merged because the source '
177 u'This pull request cannot be merged because the source '
178 u'reference `{source_ref.name}` is missing.'),
178 u'reference `{source_ref.name}` is missing.'),
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 u'This pull request cannot be merged because of conflicts related '
180 u'This pull request cannot be merged because of conflicts related '
181 u'to sub repositories.'),
181 u'to sub repositories.'),
182
182
183 # Deprecations
183 # Deprecations
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 u'This pull request cannot be merged because the target or the '
185 u'This pull request cannot be merged because the target or the '
186 u'source reference is missing.'),
186 u'source reference is missing.'),
187
187
188 }
188 }
189
189
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 self.possible = possible
191 self.possible = possible
192 self.executed = executed
192 self.executed = executed
193 self.merge_ref = merge_ref
193 self.merge_ref = merge_ref
194 self.failure_reason = failure_reason
194 self.failure_reason = failure_reason
195 self.metadata = metadata or {}
195 self.metadata = metadata or {}
196
196
197 def __repr__(self):
197 def __repr__(self):
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199
199
200 def __eq__(self, other):
200 def __eq__(self, other):
201 same_instance = isinstance(other, self.__class__)
201 same_instance = isinstance(other, self.__class__)
202 return same_instance \
202 return same_instance \
203 and self.possible == other.possible \
203 and self.possible == other.possible \
204 and self.executed == other.executed \
204 and self.executed == other.executed \
205 and self.failure_reason == other.failure_reason
205 and self.failure_reason == other.failure_reason
206
206
207 @property
207 @property
208 def label(self):
208 def label(self):
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 not k.startswith('_'))
210 not k.startswith('_'))
211 return label_dict.get(self.failure_reason)
211 return label_dict.get(self.failure_reason)
212
212
213 @property
213 @property
214 def merge_status_message(self):
214 def merge_status_message(self):
215 """
215 """
216 Return a human friendly error message for the given merge status code.
216 Return a human friendly error message for the given merge status code.
217 """
217 """
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219
219
220 try:
220 try:
221 return msg.format(**self.metadata)
221 return msg.format(**self.metadata)
222 except Exception:
222 except Exception:
223 log.exception('Failed to format %s message', self)
223 log.exception('Failed to format %s message', self)
224 return msg
224 return msg
225
225
226 def asdict(self):
226 def asdict(self):
227 data = {}
227 data = {}
228 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
229 'merge_status_message']:
229 'merge_status_message']:
230 data[k] = getattr(self, k)
230 data[k] = getattr(self, k)
231 return data
231 return data
232
232
233
233
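# Illustration of how the message templates above are rendered: metadata is
# applied with str.format(**metadata), so a failure response carries the
# context needed for a readable status string. (Sketch only; values are made up.)
locked_response = MergeResponse(
    False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
    metadata={'locked_by': 'admin'})
status_text = locked_response.merge_status_message
# -> u'This pull request cannot be merged because the target repository is
#     locked by admin.'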
234 class BaseRepository(object):
234 class BaseRepository(object):
235 """
235 """
236 Base Repository for final backends
236 Base Repository for final backends
237
237
238 .. attribute:: DEFAULT_BRANCH_NAME
238 .. attribute:: DEFAULT_BRANCH_NAME
239
239
240 name of default branch (i.e. "trunk" for svn, "master" for git etc.)
240 name of default branch (i.e. "trunk" for svn, "master" for git etc.)
241
241
242 .. attribute:: commit_ids
242 .. attribute:: commit_ids
243
243
244 list of all available commit ids, in ascending order
244 list of all available commit ids, in ascending order
245
245
246 .. attribute:: path
246 .. attribute:: path
247
247
248 absolute path to the repository
248 absolute path to the repository
249
249
250 .. attribute:: bookmarks
250 .. attribute:: bookmarks
251
251
252 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
253 there are no bookmarks or the backend implementation does not support
253 there are no bookmarks or the backend implementation does not support
254 bookmarks.
254 bookmarks.
255
255
256 .. attribute:: tags
256 .. attribute:: tags
257
257
258 Mapping from name to :term:`Commit ID` of the tag.
258 Mapping from name to :term:`Commit ID` of the tag.
259
259
260 """
260 """
261
261
262 DEFAULT_BRANCH_NAME = None
262 DEFAULT_BRANCH_NAME = None
263 DEFAULT_CONTACT = u"Unknown"
263 DEFAULT_CONTACT = u"Unknown"
264 DEFAULT_DESCRIPTION = u"unknown"
264 DEFAULT_DESCRIPTION = u"unknown"
265 EMPTY_COMMIT_ID = '0' * 40
265 EMPTY_COMMIT_ID = '0' * 40
266
266
267 path = None
267 path = None
268
268
269 _is_empty = None
269 _is_empty = None
270 _commit_ids = {}
270 _commit_ids = {}
271
271
272 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 def __init__(self, repo_path, config=None, create=False, **kwargs):
273 """
273 """
274 Initializes repository. Raises RepositoryError if the repository could
274 Initializes repository. Raises RepositoryError if the repository could
275 not be found at the given ``repo_path``, or if a directory already
275 not be found at the given ``repo_path``, or if a directory already
276 exists at ``repo_path`` and ``create`` is set to True.
276 exists at ``repo_path`` and ``create`` is set to True.
277
277
278 :param repo_path: local path of the repository
278 :param repo_path: local path of the repository
279 :param config: repository configuration
279 :param config: repository configuration
280 :param create=False: if set to True, would try to create repository.
280 :param create=False: if set to True, would try to create repository.
281 :param src_url=None: if set, should be proper url from which repository
281 :param src_url=None: if set, should be proper url from which repository
282 would be cloned; requires ``create`` parameter to be set to True -
282 would be cloned; requires ``create`` parameter to be set to True -
283 raises RepositoryError if src_url is set and create evaluates to
283 raises RepositoryError if src_url is set and create evaluates to
284 False
284 False
285 """
285 """
286 raise NotImplementedError
286 raise NotImplementedError
287
287
288 def __repr__(self):
288 def __repr__(self):
289 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 return '<%s at %s>' % (self.__class__.__name__, self.path)
290
290
291 def __len__(self):
291 def __len__(self):
292 return self.count()
292 return self.count()
293
293
294 def __eq__(self, other):
294 def __eq__(self, other):
295 same_instance = isinstance(other, self.__class__)
295 same_instance = isinstance(other, self.__class__)
296 return same_instance and other.path == self.path
296 return same_instance and other.path == self.path
297
297
298 def __ne__(self, other):
298 def __ne__(self, other):
299 return not self.__eq__(other)
299 return not self.__eq__(other)
300
300
301 def get_create_shadow_cache_pr_path(self, db_repo):
301 def get_create_shadow_cache_pr_path(self, db_repo):
302 path = db_repo.cached_diffs_dir
302 path = db_repo.cached_diffs_dir
303 if not os.path.exists(path):
303 if not os.path.exists(path):
304 os.makedirs(path, 0o755)
304 os.makedirs(path, 0o755)
305 return path
305 return path
306
306
307 @classmethod
307 @classmethod
308 def get_default_config(cls, default=None):
308 def get_default_config(cls, default=None):
309 config = Config()
309 config = Config()
310 if default and isinstance(default, list):
310 if default and isinstance(default, list):
311 for section, key, val in default:
311 for section, key, val in default:
312 config.set(section, key, val)
312 config.set(section, key, val)
313 return config
313 return config
314
314
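# Usage sketch for get_default_config (client-side; names are illustrative
# only): `default` is a list of (section, key, value) triples applied to a
# fresh Config object before it is returned.
def _example_default_config(backend_cls):
    # `backend_cls` is assumed to be a concrete subclass of BaseRepository
    return backend_cls.get_default_config(
        default=[('example_section', 'example_key', 'example_value')])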
315 @LazyProperty
315 @LazyProperty
316 def _remote(self):
316 def _remote(self):
317 raise NotImplementedError
317 raise NotImplementedError
318
318
319 def _heads(self, branch=None):
319 def _heads(self, branch=None):
320 return []
320 return []
321
321
322 @LazyProperty
322 @LazyProperty
323 def EMPTY_COMMIT(self):
323 def EMPTY_COMMIT(self):
324 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 return EmptyCommit(self.EMPTY_COMMIT_ID)
325
325
326 @LazyProperty
326 @LazyProperty
327 def alias(self):
327 def alias(self):
328 for k, v in settings.BACKENDS.items():
328 for k, v in settings.BACKENDS.items():
329 if v.split('.')[-1] == str(self.__class__.__name__):
329 if v.split('.')[-1] == str(self.__class__.__name__):
330 return k
330 return k
331
331
332 @LazyProperty
332 @LazyProperty
333 def name(self):
333 def name(self):
334 return safe_unicode(os.path.basename(self.path))
334 return safe_unicode(os.path.basename(self.path))
335
335
336 @LazyProperty
336 @LazyProperty
337 def description(self):
337 def description(self):
338 raise NotImplementedError
338 raise NotImplementedError
339
339
340 def refs(self):
340 def refs(self):
341 """
341 """
342 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 returns a `dict` with branches, bookmarks, tags, and closed_branches
343 for this repository
343 for this repository
344 """
344 """
345 return dict(
345 return dict(
346 branches=self.branches,
346 branches=self.branches,
347 branches_closed=self.branches_closed,
347 branches_closed=self.branches_closed,
348 tags=self.tags,
348 tags=self.tags,
349 bookmarks=self.bookmarks
349 bookmarks=self.bookmarks
350 )
350 )
351
351
352 @LazyProperty
352 @LazyProperty
353 def branches(self):
353 def branches(self):
354 """
354 """
355 A `dict` which maps branch names to commit ids.
355 A `dict` which maps branch names to commit ids.
356 """
356 """
357 raise NotImplementedError
357 raise NotImplementedError
358
358
359 @LazyProperty
359 @LazyProperty
360 def branches_closed(self):
360 def branches_closed(self):
361 """
361 """
362 A `dict` which maps closed branch names to commit ids.
362 A `dict` which maps closed branch names to commit ids.
363 """
363 """
364 raise NotImplementedError
364 raise NotImplementedError
365
365
366 @LazyProperty
366 @LazyProperty
367 def bookmarks(self):
367 def bookmarks(self):
368 """
368 """
369 A `dict` which maps bookmark names to commit ids.
369 A `dict` which maps bookmark names to commit ids.
370 """
370 """
371 raise NotImplementedError
371 raise NotImplementedError
372
372
373 @LazyProperty
373 @LazyProperty
374 def tags(self):
374 def tags(self):
375 """
375 """
376 A `dict` which maps tag names to commit ids.
376 A `dict` which maps tag names to commit ids.
377 """
377 """
378 raise NotImplementedError
378 raise NotImplementedError
379
379
380 @LazyProperty
380 @LazyProperty
381 def size(self):
381 def size(self):
382 """
382 """
383 Returns combined size in bytes for all repository files
383 Returns combined size in bytes for all repository files
384 """
384 """
385 tip = self.get_commit()
385 tip = self.get_commit()
386 return tip.size
386 return tip.size
387
387
388 def size_at_commit(self, commit_id):
388 def size_at_commit(self, commit_id):
389 commit = self.get_commit(commit_id)
389 commit = self.get_commit(commit_id)
390 return commit.size
390 return commit.size
391
391
392 def _check_for_empty(self):
392 def _check_for_empty(self):
393 no_commits = len(self._commit_ids) == 0
393 no_commits = len(self._commit_ids) == 0
394 if no_commits:
394 if no_commits:
395 # check on remote to be sure
395 # check on remote to be sure
396 return self._remote.is_empty()
396 return self._remote.is_empty()
397 else:
397 else:
398 return False
398 return False
399
399
400 def is_empty(self):
400 def is_empty(self):
401 if rhodecode.is_test:
401 if rhodecode.is_test:
402 return self._check_for_empty()
402 return self._check_for_empty()
403
403
404 if self._is_empty is None:
404 if self._is_empty is None:
405 # cache empty for production, but not tests
405 # cache empty for production, but not tests
406 self._is_empty = self._check_for_empty()
406 self._is_empty = self._check_for_empty()
407
407
408 return self._is_empty
408 return self._is_empty
409
409
410 @staticmethod
410 @staticmethod
411 def check_url(url, config):
411 def check_url(url, config):
412 """
412 """
413 Function will check given url and try to verify if it's a valid
413 Function will check given url and try to verify if it's a valid
414 link.
414 link.
415 """
415 """
416 raise NotImplementedError
416 raise NotImplementedError
417
417
418 @staticmethod
418 @staticmethod
419 def is_valid_repository(path):
419 def is_valid_repository(path):
420 """
420 """
421 Check if given `path` contains a valid repository of this backend
421 Check if given `path` contains a valid repository of this backend
422 """
422 """
423 raise NotImplementedError
423 raise NotImplementedError
424
424
425 # ==========================================================================
425 # ==========================================================================
426 # COMMITS
426 # COMMITS
427 # ==========================================================================
427 # ==========================================================================
428
428
429 @CachedProperty
429 @CachedProperty
430 def commit_ids(self):
430 def commit_ids(self):
431 raise NotImplementedError
431 raise NotImplementedError
432
432
433 def append_commit_id(self, commit_id):
433 def append_commit_id(self, commit_id):
434 if commit_id not in self.commit_ids:
434 if commit_id not in self.commit_ids:
435 self._rebuild_cache(self.commit_ids + [commit_id])
435 self._rebuild_cache(self.commit_ids + [commit_id])
436
436
437 # clear cache
437 # clear cache
438 self._invalidate_prop_cache('commit_ids')
438 self._invalidate_prop_cache('commit_ids')
439 self._is_empty = False
439 self._is_empty = False
440
440
441 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
441 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
442 translate_tag=None, maybe_unreachable=False):
442 translate_tag=None, maybe_unreachable=False):
443 """
443 """
444 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
444 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
445 are both None, most recent commit is returned.
445 are both None, most recent commit is returned.
446
446
447 :param pre_load: Optional. List of commit attributes to load.
447 :param pre_load: Optional. List of commit attributes to load.
448
448
449 :raises ``EmptyRepositoryError``: if there are no commits
449 :raises ``EmptyRepositoryError``: if there are no commits
450 """
450 """
451 raise NotImplementedError
451 raise NotImplementedError
452
452
453 def __iter__(self):
453 def __iter__(self):
454 for commit_id in self.commit_ids:
454 for commit_id in self.commit_ids:
455 yield self.get_commit(commit_id=commit_id)
455 yield self.get_commit(commit_id=commit_id)
456
456
457 def get_commits(
457 def get_commits(
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
460 """
460 """
461 Returns an iterator of `BaseCommit` objects from start to end,
461 Returns an iterator of `BaseCommit` objects from start to end,
462 end not inclusive. This should behave just like a list, i.e. end is
462 end not inclusive. This should behave just like a list, i.e. end is
463 not inclusive.
463 not inclusive.
464
464
465 :param start_id: None or str, must be a valid commit id
465 :param start_id: None or str, must be a valid commit id
466 :param end_id: None or str, must be a valid commit id
466 :param end_id: None or str, must be a valid commit id
467 :param start_date:
467 :param start_date:
468 :param end_date:
468 :param end_date:
469 :param branch_name:
469 :param branch_name:
470 :param show_hidden:
470 :param show_hidden:
471 :param pre_load:
471 :param pre_load:
472 :param translate_tags:
472 :param translate_tags:
473 """
473 """
474 raise NotImplementedError
474 raise NotImplementedError
475
475
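# Usage sketch for get_commits: filtering by branch and date range. `repo` is
# assumed to be a concrete backend instance and 'default' a branch that
# exists in it.
def _example_recent_branch_commits(repo):
    import datetime
    since = datetime.datetime.now() - datetime.timedelta(days=7)
    return list(repo.get_commits(
        branch_name='default', start_date=since,
        pre_load=['author', 'message']))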
476 def __getitem__(self, key):
476 def __getitem__(self, key):
477 """
477 """
478 Allows index based access to the commit objects of this repository.
478 Allows index based access to the commit objects of this repository.
479 """
479 """
480 pre_load = ["author", "branch", "date", "message", "parents"]
480 pre_load = ["author", "branch", "date", "message", "parents"]
481 if isinstance(key, slice):
481 if isinstance(key, slice):
482 return self._get_range(key, pre_load)
482 return self._get_range(key, pre_load)
483 return self.get_commit(commit_idx=key, pre_load=pre_load)
483 return self.get_commit(commit_idx=key, pre_load=pre_load)
484
484
485 def _get_range(self, slice_obj, pre_load):
485 def _get_range(self, slice_obj, pre_load):
486 for commit_id in self.commit_ids.__getitem__(slice_obj):
486 for commit_id in self.commit_ids.__getitem__(slice_obj):
487 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
487 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
488
488
489 def count(self):
489 def count(self):
490 return len(self.commit_ids)
490 return len(self.commit_ids)
491
491
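# Usage sketch: __getitem__/_get_range make a repository behave like a list of
# commits, so index and slice access both work (`repo` is assumed non-empty).
def _example_index_access(repo):
    first_commit = repo[0]       # single commit by index
    last_five = list(repo[-5:])  # slices return a generator of commits
    total = len(repo)            # __len__ delegates to count()
    return first_commit, last_five, total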
492 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
492 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
493 """
493 """
494 Creates and returns a tag for the given ``commit_id``.
494 Creates and returns a tag for the given ``commit_id``.
495
495
496 :param name: name for new tag
496 :param name: name for new tag
497 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
497 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
498 :param commit_id: commit id for which new tag would be created
498 :param commit_id: commit id for which new tag would be created
499 :param message: message of the tag's commit
499 :param message: message of the tag's commit
500 :param date: date of tag's commit
500 :param date: date of tag's commit
501
501
502 :raises TagAlreadyExistError: if tag with same name already exists
502 :raises TagAlreadyExistError: if tag with same name already exists
503 """
503 """
504 raise NotImplementedError
504 raise NotImplementedError
505
505
506 def remove_tag(self, name, user, message=None, date=None):
506 def remove_tag(self, name, user, message=None, date=None):
507 """
507 """
508 Removes tag with the given ``name``.
508 Removes tag with the given ``name``.
509
509
510 :param name: name of the tag to be removed
510 :param name: name of the tag to be removed
511 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
511 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
512 :param message: message of the tag's removal commit
512 :param message: message of the tag's removal commit
513 :param date: date of tag's removal commit
513 :param date: date of tag's removal commit
514
514
515 :raises TagDoesNotExistError: if tag with given name does not exist
515 :raises TagDoesNotExistError: if tag with given name does not exist
516 """
516 """
517 raise NotImplementedError
517 raise NotImplementedError
518
518
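# Usage sketch for the tag API above; `repo` is a concrete backend instance
# and the commit id, tag name and user string are placeholders.
def _example_tag_and_untag(repo, commit_id):
    full_user = 'Joe Doe <joe.doe@example.com>'
    repo.tag('v1.0.0', user=full_user, commit_id=commit_id,
             message='Tagging release 1.0.0')
    # ...and later, removing it again:
    repo.remove_tag('v1.0.0', user=full_user, message='Dropping tag 1.0.0')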
519 def get_diff(
519 def get_diff(
520 self, commit1, commit2, path=None, ignore_whitespace=False,
520 self, commit1, commit2, path=None, ignore_whitespace=False,
521 context=3, path1=None):
521 context=3, path1=None):
522 """
522 """
523 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 Returns (git like) *diff*, as plain text. Shows changes introduced by
524 `commit2` since `commit1`.
524 `commit2` since `commit1`.
525
525
526 :param commit1: Entry point from which diff is shown. Can be
526 :param commit1: Entry point from which diff is shown. Can be
527 ``self.EMPTY_COMMIT`` - in this case, patch showing all
527 ``self.EMPTY_COMMIT`` - in this case, patch showing all
528 the changes since empty state of the repository until `commit2`
528 the changes since empty state of the repository until `commit2`
529 :param commit2: Until which commit changes should be shown.
529 :param commit2: Until which commit changes should be shown.
530 :param path: Can be set to a path of a file to create a diff of that
530 :param path: Can be set to a path of a file to create a diff of that
531 file. If `path1` is also set, this value is only associated to
531 file. If `path1` is also set, this value is only associated to
532 `commit2`.
532 `commit2`.
533 :param ignore_whitespace: If set to ``True``, would not show whitespace
533 :param ignore_whitespace: If set to ``True``, would not show whitespace
534 changes. Defaults to ``False``.
534 changes. Defaults to ``False``.
535 :param context: How many lines before/after changed lines should be
535 :param context: How many lines before/after changed lines should be
536 shown. Defaults to ``3``.
536 shown. Defaults to ``3``.
537 :param path1: Can be set to a path to associate with `commit1`. This
537 :param path1: Can be set to a path to associate with `commit1`. This
538 parameter works only for backends which support diff generation for
538 parameter works only for backends which support diff generation for
539 different paths. Other backends will raise a `ValueError` if `path1`
539 different paths. Other backends will raise a `ValueError` if `path1`
540 is set and has a different value than `path`.
540 is set and has a different value than `path`.
541 :param file_path: filter this diff by given path pattern
541 :param file_path: filter this diff by given path pattern
542 """
542 """
543 raise NotImplementedError
543 raise NotImplementedError
544
544
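# Usage sketch for get_diff: diffing a single (assumed) file between two
# commits, and diffing everything since the empty state. `repo` is a backend
# instance and the commit ids are assumed to be valid.
def _example_diffs(repo, old_id, new_id):
    commit1 = repo.get_commit(old_id)
    commit2 = repo.get_commit(new_id)
    file_diff = repo.get_diff(commit1, commit2, path='README.rst', context=5)
    full_history_diff = repo.get_diff(repo.EMPTY_COMMIT, commit2)
    return file_diff, full_history_diff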
545 def strip(self, commit_id, branch=None):
545 def strip(self, commit_id, branch=None):
546 """
546 """
547 Strip given commit_id from the repository
547 Strip given commit_id from the repository
548 """
548 """
549 raise NotImplementedError
549 raise NotImplementedError
550
550
551 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
551 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
552 """
552 """
553 Return a latest common ancestor commit if one exists for this repo
553 Return a latest common ancestor commit if one exists for this repo
554 `commit_id1` vs `commit_id2` from `repo2`.
554 `commit_id1` vs `commit_id2` from `repo2`.
555
555
556 :param commit_id1: Commit id from this repository to use as a
556 :param commit_id1: Commit id from this repository to use as a
557 target for the comparison.
557 target for the comparison.
558 :param commit_id2: Source commit id to use for comparison.
558 :param commit_id2: Source commit id to use for comparison.
559 :param repo2: Source repository to use for comparison.
559 :param repo2: Source repository to use for comparison.
560 """
560 """
561 raise NotImplementedError
561 raise NotImplementedError
562
562
563 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
563 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
564 """
564 """
565 Compare this repository's revision `commit_id1` with `commit_id2`.
565 Compare this repository's revision `commit_id1` with `commit_id2`.
566
566
567 Returns a tuple(commits, ancestor) that would be merged from
567 Returns a tuple(commits, ancestor) that would be merged from
568 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
568 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
569 will be returned as ancestor.
569 will be returned as ancestor.
570
570
571 :param commit_id1: Commit id from this repository to use as a
571 :param commit_id1: Commit id from this repository to use as a
572 target for the comparison.
572 target for the comparison.
573 :param commit_id2: Source commit id to use for comparison.
573 :param commit_id2: Source commit id to use for comparison.
574 :param repo2: Source repository to use for comparison.
574 :param repo2: Source repository to use for comparison.
575 :param merge: If set to ``True`` will do a merge compare which also
575 :param merge: If set to ``True`` will do a merge compare which also
576 returns the common ancestor.
576 returns the common ancestor.
577 :param pre_load: Optional. List of commit attributes to load.
577 :param pre_load: Optional. List of commit attributes to load.
578 """
578 """
579 raise NotImplementedError
579 raise NotImplementedError
580
580
581 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
581 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
582 user_name='', user_email='', message='', dry_run=False,
582 user_name='', user_email='', message='', dry_run=False,
583 use_rebase=False, close_branch=False):
583 use_rebase=False, close_branch=False):
584 """
584 """
585 Merge the revisions specified in `source_ref` from `source_repo`
585 Merge the revisions specified in `source_ref` from `source_repo`
586 onto the `target_ref` of this repository.
586 onto the `target_ref` of this repository.
587
587
588 `source_ref` and `target_ref` are named tuples with the following
588 `source_ref` and `target_ref` are named tuples with the following
589 fields `type`, `name` and `commit_id`.
589 fields `type`, `name` and `commit_id`.
590
590
591 Returns a MergeResponse named tuple with the following fields
591 Returns a MergeResponse named tuple with the following fields
592 'possible', 'executed', 'source_commit', 'target_commit',
592 'possible', 'executed', 'source_commit', 'target_commit',
593 'merge_commit'.
593 'merge_commit'.
594
594
595 :param repo_id: `repo_id` target repo id.
595 :param repo_id: `repo_id` target repo id.
596 :param workspace_id: `workspace_id` unique identifier.
596 :param workspace_id: `workspace_id` unique identifier.
597 :param target_ref: `target_ref` points to the commit on top of which
597 :param target_ref: `target_ref` points to the commit on top of which
598 the `source_ref` should be merged.
598 the `source_ref` should be merged.
599 :param source_repo: The repository that contains the commits to be
599 :param source_repo: The repository that contains the commits to be
600 merged.
600 merged.
601 :param source_ref: `source_ref` points to the topmost commit from
601 :param source_ref: `source_ref` points to the topmost commit from
602 the `source_repo` which should be merged.
602 the `source_repo` which should be merged.
603 :param user_name: Merge commit `user_name`.
603 :param user_name: Merge commit `user_name`.
604 :param user_email: Merge commit `user_email`.
604 :param user_email: Merge commit `user_email`.
605 :param message: Merge commit `message`.
605 :param message: Merge commit `message`.
606 :param dry_run: If `True` the merge will not take place.
606 :param dry_run: If `True` the merge will not take place.
607 :param use_rebase: If `True` commits from the source will be rebased
607 :param use_rebase: If `True` commits from the source will be rebased
608 on top of the target instead of being merged.
608 on top of the target instead of being merged.
609 :param close_branch: If `True` the branch will be closed before merging it
609 :param close_branch: If `True` the branch will be closed before merging it
610 """
610 """
611 if dry_run:
611 if dry_run:
612 message = message or settings.MERGE_DRY_RUN_MESSAGE
612 message = message or settings.MERGE_DRY_RUN_MESSAGE
613 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
613 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
614 user_name = user_name or settings.MERGE_DRY_RUN_USER
614 user_name = user_name or settings.MERGE_DRY_RUN_USER
615 else:
615 else:
616 if not user_name:
616 if not user_name:
617 raise ValueError('user_name cannot be empty')
617 raise ValueError('user_name cannot be empty')
618 if not user_email:
618 if not user_email:
619 raise ValueError('user_email cannot be empty')
619 raise ValueError('user_email cannot be empty')
620 if not message:
620 if not message:
621 raise ValueError('message cannot be empty')
621 raise ValueError('message cannot be empty')
622
622
623 try:
623 try:
624 return self._merge_repo(
624 return self._merge_repo(
625 repo_id, workspace_id, target_ref, source_repo,
625 repo_id, workspace_id, target_ref, source_repo,
626 source_ref, message, user_name, user_email, dry_run=dry_run,
626 source_ref, message, user_name, user_email, dry_run=dry_run,
627 use_rebase=use_rebase, close_branch=close_branch)
627 use_rebase=use_rebase, close_branch=close_branch)
628 except RepositoryError as exc:
628 except RepositoryError as exc:
629 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
629 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
630 return MergeResponse(
630 return MergeResponse(
631 False, False, None, MergeFailureReason.UNKNOWN,
631 False, False, None, MergeFailureReason.UNKNOWN,
632 metadata={'exception': str(exc)})
632 metadata={'exception': str(exc)})
633
633
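# Usage sketch for merge(): a dry-run merge of a source branch into a target
# branch, using the Reference tuple from the top of this module. Repository
# objects, branch names and the workspace id are placeholders.
def _example_dry_run_merge(target_repo, source_repo, repo_id):
    target_ref = Reference('branch', 'master',
                           target_repo.branches['master'])
    source_ref = Reference('branch', 'feature-x',
                           source_repo.branches['feature-x'])
    response = target_repo.merge(
        repo_id, 'example-workspace', target_ref, source_repo, source_ref,
        dry_run=True)  # dry_run fills in the MERGE_DRY_RUN_* defaults
    return response.possible, response.merge_status_message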
634 def _merge_repo(self, repo_id, workspace_id, target_ref,
634 def _merge_repo(self, repo_id, workspace_id, target_ref,
635 source_repo, source_ref, merge_message,
635 source_repo, source_ref, merge_message,
636 merger_name, merger_email, dry_run=False,
636 merger_name, merger_email, dry_run=False,
637 use_rebase=False, close_branch=False):
637 use_rebase=False, close_branch=False):
638 """Internal implementation of merge."""
638 """Internal implementation of merge."""
639 raise NotImplementedError
639 raise NotImplementedError
640
640
641 def _maybe_prepare_merge_workspace(
641 def _maybe_prepare_merge_workspace(
642 self, repo_id, workspace_id, target_ref, source_ref):
642 self, repo_id, workspace_id, target_ref, source_ref):
643 """
643 """
644 Create the merge workspace.
644 Create the merge workspace.
645
645
646 :param workspace_id: `workspace_id` unique identifier.
646 :param workspace_id: `workspace_id` unique identifier.
647 """
647 """
648 raise NotImplementedError
648 raise NotImplementedError
649
649
650 @classmethod
650 @classmethod
651 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
651 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
652 """
652 """
653 Legacy version that was used before. We still need it for
653 Legacy version that was used before. We still need it for
654 backward compat
654 backward compat
655 """
655 """
656 return os.path.join(
656 return os.path.join(
657 os.path.dirname(repo_path),
657 os.path.dirname(repo_path),
658 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
658 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
659
659
660 @classmethod
660 @classmethod
661 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
661 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
662 # The name of the shadow repository must start with '.', so it is
662 # The name of the shadow repository must start with '.', so it is
663 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
663 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
664 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
664 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
665 if os.path.exists(legacy_repository_path):
665 if os.path.exists(legacy_repository_path):
666 return legacy_repository_path
666 return legacy_repository_path
667 else:
667 else:
668 return os.path.join(
668 return os.path.join(
669 os.path.dirname(repo_path),
669 os.path.dirname(repo_path),
670 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
670 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
671
671
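# Illustration of the path layout produced above; the repo path, repo id and
# workspace id are made-up values.
def _example_shadow_path():
    return BaseRepository._get_shadow_repository_path(
        '/repos/project', repo_id=42, workspace_id='pr-7')
    # -> '/repos/.__shadow_repo_42_pr-7', unless a legacy
    #    '/repos/.__shadow_project_pr-7' directory already exists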
672 def cleanup_merge_workspace(self, repo_id, workspace_id):
672 def cleanup_merge_workspace(self, repo_id, workspace_id):
673 """
673 """
674 Remove merge workspace.
674 Remove merge workspace.
675
675
676 This function MUST not fail in case there is no workspace associated to
676 This function MUST not fail in case there is no workspace associated to
677 the given `workspace_id`.
677 the given `workspace_id`.
678
678
679 :param workspace_id: `workspace_id` unique identifier.
679 :param workspace_id: `workspace_id` unique identifier.
680 """
680 """
681 shadow_repository_path = self._get_shadow_repository_path(
681 shadow_repository_path = self._get_shadow_repository_path(
682 self.path, repo_id, workspace_id)
682 self.path, repo_id, workspace_id)
683 shadow_repository_path_del = '{}.{}.delete'.format(
683 shadow_repository_path_del = '{}.{}.delete'.format(
684 shadow_repository_path, time.time())
684 shadow_repository_path, time.time())
685
685
686 # move the shadow repo, so it never conflicts with the one used.
686 # move the shadow repo, so it never conflicts with the one used.
687 # we use this method because shutil.rmtree had some edge case problems
687 # we use this method because shutil.rmtree had some edge case problems
688 # removing symlinked repositories
688 # removing symlinked repositories
689 if not os.path.isdir(shadow_repository_path):
689 if not os.path.isdir(shadow_repository_path):
690 return
690 return
691
691
692 shutil.move(shadow_repository_path, shadow_repository_path_del)
692 shutil.move(shadow_repository_path, shadow_repository_path_del)
693 try:
693 try:
694 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
694 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
695 except Exception:
695 except Exception:
696 log.exception('Failed to gracefully remove shadow repo under %s',
696 log.exception('Failed to gracefully remove shadow repo under %s',
697 shadow_repository_path_del)
697 shadow_repository_path_del)
698 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
698 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
699
699
700 # ========== #
700 # ========== #
701 # COMMIT API #
701 # COMMIT API #
702 # ========== #
702 # ========== #
703
703
704 @LazyProperty
704 @LazyProperty
705 def in_memory_commit(self):
705 def in_memory_commit(self):
706 """
706 """
707 Returns :class:`InMemoryCommit` object for this repository.
707 Returns :class:`InMemoryCommit` object for this repository.
708 """
708 """
709 raise NotImplementedError
709 raise NotImplementedError
710
710
711 # ======================== #
711 # ======================== #
712 # UTILITIES FOR SUBCLASSES #
712 # UTILITIES FOR SUBCLASSES #
713 # ======================== #
713 # ======================== #
714
714
715 def _validate_diff_commits(self, commit1, commit2):
715 def _validate_diff_commits(self, commit1, commit2):
716 """
716 """
717 Validates that the given commits are related to this repository.
717 Validates that the given commits are related to this repository.
718
718
719 Intended as a utility for subclasses to have a consistent validation
719 Intended as a utility for subclasses to have a consistent validation
720 of input parameters in methods like :meth:`get_diff`.
720 of input parameters in methods like :meth:`get_diff`.
721 """
721 """
722 self._validate_commit(commit1)
722 self._validate_commit(commit1)
723 self._validate_commit(commit2)
723 self._validate_commit(commit2)
724 if (isinstance(commit1, EmptyCommit) and
724 if (isinstance(commit1, EmptyCommit) and
725 isinstance(commit2, EmptyCommit)):
725 isinstance(commit2, EmptyCommit)):
726 raise ValueError("Cannot compare two empty commits")
726 raise ValueError("Cannot compare two empty commits")
727
727
728 def _validate_commit(self, commit):
728 def _validate_commit(self, commit):
729 if not isinstance(commit, BaseCommit):
729 if not isinstance(commit, BaseCommit):
730 raise TypeError(
730 raise TypeError(
731 "%s is not of type BaseCommit" % repr(commit))
731 "%s is not of type BaseCommit" % repr(commit))
732 if commit.repository != self and not isinstance(commit, EmptyCommit):
732 if commit.repository != self and not isinstance(commit, EmptyCommit):
733 raise ValueError(
733 raise ValueError(
734 "Commit %s must be a valid commit from this repository %s, "
734 "Commit %s must be a valid commit from this repository %s, "
735 "related to this repository instead %s." %
735 "related to this repository instead %s." %
736 (commit, self, commit.repository))
736 (commit, self, commit.repository))
737
737
738 def _validate_commit_id(self, commit_id):
738 def _validate_commit_id(self, commit_id):
739 if not isinstance(commit_id, compat.string_types):
739 if not isinstance(commit_id, compat.string_types):
740 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
740 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
741
741
742 def _validate_commit_idx(self, commit_idx):
742 def _validate_commit_idx(self, commit_idx):
743 if not isinstance(commit_idx, (int, long)):
743 if not isinstance(commit_idx, (int, long)):
744 raise TypeError("commit_idx must be a numeric value")
744 raise TypeError("commit_idx must be a numeric value")
745
745
746 def _validate_branch_name(self, branch_name):
746 def _validate_branch_name(self, branch_name):
747 if branch_name and branch_name not in self.branches_all:
747 if branch_name and branch_name not in self.branches_all:
748 msg = ("Branch %s not found in %s" % (branch_name, self))
748 msg = ("Branch %s not found in %s" % (branch_name, self))
749 raise BranchDoesNotExistError(msg)
749 raise BranchDoesNotExistError(msg)
750
750
751 #
751 #
752 # Supporting deprecated API parts
752 # Supporting deprecated API parts
753 # TODO: johbo: consider to move this into a mixin
753 # TODO: johbo: consider to move this into a mixin
754 #
754 #
755
755
756 @property
756 @property
757 def EMPTY_CHANGESET(self):
757 def EMPTY_CHANGESET(self):
758 warnings.warn(
758 warnings.warn(
759 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
759 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
760 return self.EMPTY_COMMIT_ID
760 return self.EMPTY_COMMIT_ID
761
761
762 @property
762 @property
763 def revisions(self):
763 def revisions(self):
764 warnings.warn("Use commits attribute instead", DeprecationWarning)
764 warnings.warn("Use commits attribute instead", DeprecationWarning)
765 return self.commit_ids
765 return self.commit_ids
766
766
767 @revisions.setter
767 @revisions.setter
768 def revisions(self, value):
768 def revisions(self, value):
769 warnings.warn("Use commits attribute instead", DeprecationWarning)
769 warnings.warn("Use commits attribute instead", DeprecationWarning)
770 self.commit_ids = value
770 self.commit_ids = value
771
771
772 def get_changeset(self, revision=None, pre_load=None):
772 def get_changeset(self, revision=None, pre_load=None):
773 warnings.warn("Use get_commit instead", DeprecationWarning)
773 warnings.warn("Use get_commit instead", DeprecationWarning)
774 commit_id = None
774 commit_id = None
775 commit_idx = None
775 commit_idx = None
776 if isinstance(revision, compat.string_types):
776 if isinstance(revision, compat.string_types):
777 commit_id = revision
777 commit_id = revision
778 else:
778 else:
779 commit_idx = revision
779 commit_idx = revision
780 return self.get_commit(
780 return self.get_commit(
781 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
781 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
782
782
783 def get_changesets(
783 def get_changesets(
784 self, start=None, end=None, start_date=None, end_date=None,
784 self, start=None, end=None, start_date=None, end_date=None,
785 branch_name=None, pre_load=None):
785 branch_name=None, pre_load=None):
786 warnings.warn("Use get_commits instead", DeprecationWarning)
786 warnings.warn("Use get_commits instead", DeprecationWarning)
787 start_id = self._revision_to_commit(start)
787 start_id = self._revision_to_commit(start)
788 end_id = self._revision_to_commit(end)
788 end_id = self._revision_to_commit(end)
789 return self.get_commits(
789 return self.get_commits(
790 start_id=start_id, end_id=end_id, start_date=start_date,
790 start_id=start_id, end_id=end_id, start_date=start_date,
791 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
791 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
792
792
793 def _revision_to_commit(self, revision):
793 def _revision_to_commit(self, revision):
794 """
794 """
795 Translates a revision to a commit_id
795 Translates a revision to a commit_id
796
796
797 Helps to support the old changeset based API, which allows the use of
797 Helps to support the old changeset based API, which allows the use of
798 commit ids and commit indices interchangeably.
798 commit ids and commit indices interchangeably.
799 """
799 """
800 if revision is None:
800 if revision is None:
801 return revision
801 return revision
802
802
803 if isinstance(revision, compat.string_types):
803 if isinstance(revision, compat.string_types):
804 commit_id = revision
804 commit_id = revision
805 else:
805 else:
806 commit_id = self.commit_ids[revision]
806 commit_id = self.commit_ids[revision]
807 return commit_id
807 return commit_id
808
808
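# Sketch of how the deprecated changeset API maps onto the commit API: a
# string "revision" is treated as a commit id, an integer as a commit index.
# The hash below is made up.
def _example_revision_lookup(repo):
    by_id = repo.get_changeset('deadbeef' * 5)  # string -> commit_id
    by_index = repo.get_changeset(0)            # int -> commit_idx
    return by_id, by_index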
809 @property
809 @property
810 def in_memory_changeset(self):
810 def in_memory_changeset(self):
811 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
811 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
812 return self.in_memory_commit
812 return self.in_memory_commit
813
813
814 def get_path_permissions(self, username):
814 def get_path_permissions(self, username):
815 """
815 """
816 Returns a path permission checker or None if not supported
816 Returns a path permission checker or None if not supported
817
817
818 :param username: session user name
818 :param username: session user name
819 :return: an instance of BasePathPermissionChecker or None
819 :return: an instance of BasePathPermissionChecker or None
820 """
820 """
821 return None
821 return None
822
822
823 def install_hooks(self, force=False):
823 def install_hooks(self, force=False):
824 return self._remote.install_hooks(force)
824 return self._remote.install_hooks(force)
825
825
826 def get_hooks_info(self):
826 def get_hooks_info(self):
827 return self._remote.get_hooks_info()
827 return self._remote.get_hooks_info()
828
828
829
829
830 class BaseCommit(object):
830 class BaseCommit(object):
831 """
831 """
832 Each backend should implement its commit representation.
832 Each backend should implement its commit representation.
833
833
834 **Attributes**
834 **Attributes**
835
835
836 ``repository``
836 ``repository``
837 repository object within which commit exists
837 repository object within which commit exists
838
838
839 ``id``
839 ``id``
840 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
840 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
841 just ``tip``.
841 just ``tip``.
842
842
843 ``raw_id``
843 ``raw_id``
844 raw commit representation (i.e. full 40 length sha for git
844 raw commit representation (i.e. full 40 length sha for git
845 backend)
845 backend)
846
846
847 ``short_id``
847 ``short_id``
848 shortened (if applicable) version of ``raw_id``; it would be a simple
848 shortened (if applicable) version of ``raw_id``; it would be a simple
849 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
849 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
850 as ``raw_id`` for subversion
850 as ``raw_id`` for subversion
851
851
852 ``idx``
852 ``idx``
853 commit index
853 commit index
854
854
855 ``files``
855 ``files``
856 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
856 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
857
857
858 ``dirs``
858 ``dirs``
859 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
859 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
860
860
861 ``nodes``
861 ``nodes``
862 combined list of ``Node`` objects
862 combined list of ``Node`` objects
863
863
864 ``author``
864 ``author``
865 author of the commit, as unicode
865 author of the commit, as unicode
866
866
867 ``message``
867 ``message``
868 message of the commit, as unicode
868 message of the commit, as unicode
869
869
870 ``parents``
870 ``parents``
871 list of parent commits
871 list of parent commits
872
872
873 """
873 """
874
874
875 branch = None
875 branch = None
876 """
876 """
877 Depending on the backend this should be set to the branch name of the
877 Depending on the backend this should be set to the branch name of the
878 commit. Backends not supporting branches on commits should leave this
878 commit. Backends not supporting branches on commits should leave this
879 value as ``None``.
879 value as ``None``.
880 """
880 """
881
881
882 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
882 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
883 """
883 """
884 This template is used to generate a default prefix for repository archives
884 This template is used to generate a default prefix for repository archives
885 if no prefix has been specified.
885 if no prefix has been specified.
886 """
886 """
887
887
888 def __str__(self):
888 def __str__(self):
889 return '<%s at %s:%s>' % (
889 return '<%s at %s:%s>' % (
890 self.__class__.__name__, self.idx, self.short_id)
890 self.__class__.__name__, self.idx, self.short_id)
891
891
892 def __repr__(self):
892 def __repr__(self):
893 return self.__str__()
893 return self.__str__()
894
894
895 def __unicode__(self):
895 def __unicode__(self):
896 return u'%s:%s' % (self.idx, self.short_id)
896 return u'%s:%s' % (self.idx, self.short_id)
897
897
898 def __eq__(self, other):
898 def __eq__(self, other):
899 same_instance = isinstance(other, self.__class__)
899 same_instance = isinstance(other, self.__class__)
900 return same_instance and self.raw_id == other.raw_id
900 return same_instance and self.raw_id == other.raw_id
901
901
902 def __json__(self):
902 def __json__(self):
903 parents = []
903 parents = []
904 try:
904 try:
905 for parent in self.parents:
905 for parent in self.parents:
906 parents.append({'raw_id': parent.raw_id})
906 parents.append({'raw_id': parent.raw_id})
907 except NotImplementedError:
907 except NotImplementedError:
908 # empty commit doesn't have parents implemented
908 # empty commit doesn't have parents implemented
909 pass
909 pass
910
910
911 return {
911 return {
912 'short_id': self.short_id,
912 'short_id': self.short_id,
913 'raw_id': self.raw_id,
913 'raw_id': self.raw_id,
914 'revision': self.idx,
914 'revision': self.idx,
915 'message': self.message,
915 'message': self.message,
916 'date': self.date,
916 'date': self.date,
917 'author': self.author,
917 'author': self.author,
918 'parents': parents,
918 'parents': parents,
919 'branch': self.branch
919 'branch': self.branch
920 }
920 }
921
921
922 def __getstate__(self):
922 def __getstate__(self):
923 d = self.__dict__.copy()
923 d = self.__dict__.copy()
924 d.pop('_remote', None)
924 d.pop('_remote', None)
925 d.pop('repository', None)
925 d.pop('repository', None)
926 return d
926 return d
927
927
928 def serialize(self):
929 return self.__json__()
930
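
A minimal usage sketch of the serialization helpers above (``serialize()`` simply delegates to ``__json__()``), assuming ``commit`` is a concrete backend commit obtained elsewhere; the ``date`` field is a ``datetime``, so a fallback serializer is needed when dumping to JSON:

import json

def commit_to_json(commit):
    # serialize() returns a plain dict: short_id, raw_id, revision, message,
    # date, author, parents and branch
    data = commit.serialize()
    # 'date' is a datetime instance, so give json a string fallback
    return json.dumps(data, default=str)
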
928 def _get_refs(self):
931 def _get_refs(self):
929 return {
932 return {
930 'branches': [self.branch] if self.branch else [],
933 'branches': [self.branch] if self.branch else [],
931 'bookmarks': getattr(self, 'bookmarks', []),
934 'bookmarks': getattr(self, 'bookmarks', []),
932 'tags': self.tags
935 'tags': self.tags
933 }
936 }
934
937
935 @LazyProperty
938 @LazyProperty
936 def last(self):
939 def last(self):
937 """
940 """
938 ``True`` if this is the last commit in the repository, ``False``
941 ``True`` if this is the last commit in the repository, ``False``
939 otherwise; accessing this attribute when there are no
942 otherwise; accessing this attribute when there are no
940 commits raises `EmptyRepositoryError`
943 commits raises `EmptyRepositoryError`
941 """
944 """
942 if self.repository is None:
945 if self.repository is None:
943 raise CommitError("Cannot check if it's most recent commit")
946 raise CommitError("Cannot check if it's most recent commit")
944 return self.raw_id == self.repository.commit_ids[-1]
947 return self.raw_id == self.repository.commit_ids[-1]
945
948
946 @LazyProperty
949 @LazyProperty
947 def parents(self):
950 def parents(self):
948 """
951 """
949 Returns list of parent commits.
952 Returns list of parent commits.
950 """
953 """
951 raise NotImplementedError
954 raise NotImplementedError
952
955
953 @LazyProperty
956 @LazyProperty
954 def first_parent(self):
957 def first_parent(self):
955 """
958 """
956 Returns the first parent commit, or an ``EmptyCommit`` if there is none.
959 Returns the first parent commit, or an ``EmptyCommit`` if there is none.
957 """
960 """
958 return self.parents[0] if self.parents else EmptyCommit()
961 return self.parents[0] if self.parents else EmptyCommit()
959
962
960 @property
963 @property
961 def merge(self):
964 def merge(self):
962 """
965 """
963 Returns ``True`` if the commit is a merge (has more than one parent).
966 Returns ``True`` if the commit is a merge (has more than one parent).
964 """
967 """
965 return len(self.parents) > 1
968 return len(self.parents) > 1
966
969
967 @LazyProperty
970 @LazyProperty
968 def children(self):
971 def children(self):
969 """
972 """
970 Returns list of child commits.
973 Returns list of child commits.
971 """
974 """
972 raise NotImplementedError
975 raise NotImplementedError
973
976
974 @LazyProperty
977 @LazyProperty
975 def id(self):
978 def id(self):
976 """
979 """
977 Returns string identifying this commit.
980 Returns string identifying this commit.
978 """
981 """
979 raise NotImplementedError
982 raise NotImplementedError
980
983
981 @LazyProperty
984 @LazyProperty
982 def raw_id(self):
985 def raw_id(self):
983 """
986 """
984 Returns raw string identifying this commit.
987 Returns raw string identifying this commit.
985 """
988 """
986 raise NotImplementedError
989 raise NotImplementedError
987
990
988 @LazyProperty
991 @LazyProperty
989 def short_id(self):
992 def short_id(self):
990 """
993 """
991 Returns shortened version of ``raw_id`` attribute, as string,
994 Returns shortened version of ``raw_id`` attribute, as string,
992 identifying this commit, useful for presentation to users.
995 identifying this commit, useful for presentation to users.
993 """
996 """
994 raise NotImplementedError
997 raise NotImplementedError
995
998
996 @LazyProperty
999 @LazyProperty
997 def idx(self):
1000 def idx(self):
998 """
1001 """
999 Returns integer identifying this commit.
1002 Returns integer identifying this commit.
1000 """
1003 """
1001 raise NotImplementedError
1004 raise NotImplementedError
1002
1005
1003 @LazyProperty
1006 @LazyProperty
1004 def committer(self):
1007 def committer(self):
1005 """
1008 """
1006 Returns committer for this commit
1009 Returns committer for this commit
1007 """
1010 """
1008 raise NotImplementedError
1011 raise NotImplementedError
1009
1012
1010 @LazyProperty
1013 @LazyProperty
1011 def committer_name(self):
1014 def committer_name(self):
1012 """
1015 """
1013 Returns committer name for this commit
1016 Returns committer name for this commit
1014 """
1017 """
1015
1018
1016 return author_name(self.committer)
1019 return author_name(self.committer)
1017
1020
1018 @LazyProperty
1021 @LazyProperty
1019 def committer_email(self):
1022 def committer_email(self):
1020 """
1023 """
1021 Returns committer email address for this commit
1024 Returns committer email address for this commit
1022 """
1025 """
1023
1026
1024 return author_email(self.committer)
1027 return author_email(self.committer)
1025
1028
1026 @LazyProperty
1029 @LazyProperty
1027 def author(self):
1030 def author(self):
1028 """
1031 """
1029 Returns author for this commit
1032 Returns author for this commit
1030 """
1033 """
1031
1034
1032 raise NotImplementedError
1035 raise NotImplementedError
1033
1036
1034 @LazyProperty
1037 @LazyProperty
1035 def author_name(self):
1038 def author_name(self):
1036 """
1039 """
1037 Returns author name for this commit
1040 Returns author name for this commit
1038 """
1041 """
1039
1042
1040 return author_name(self.author)
1043 return author_name(self.author)
1041
1044
1042 @LazyProperty
1045 @LazyProperty
1043 def author_email(self):
1046 def author_email(self):
1044 """
1047 """
1045 Returns author email address for this commit
1048 Returns author email address for this commit
1046 """
1049 """
1047
1050
1048 return author_email(self.author)
1051 return author_email(self.author)
1049
1052
1050 def get_file_mode(self, path):
1053 def get_file_mode(self, path):
1051 """
1054 """
1052 Returns stat mode of the file at `path`.
1055 Returns stat mode of the file at `path`.
1053 """
1056 """
1054 raise NotImplementedError
1057 raise NotImplementedError
1055
1058
1056 def is_link(self, path):
1059 def is_link(self, path):
1057 """
1060 """
1058 Returns ``True`` if given `path` is a symlink
1061 Returns ``True`` if given `path` is a symlink
1059 """
1062 """
1060 raise NotImplementedError
1063 raise NotImplementedError
1061
1064
1062 def is_node_binary(self, path):
1065 def is_node_binary(self, path):
1063 """
1066 """
1064 Returns ``True`` if the given path is a binary file
1067 Returns ``True`` if the given path is a binary file
1065 """
1068 """
1066 raise NotImplementedError
1069 raise NotImplementedError
1067
1070
1068 def get_file_content(self, path):
1071 def get_file_content(self, path):
1069 """
1072 """
1070 Returns content of the file at the given `path`.
1073 Returns content of the file at the given `path`.
1071 """
1074 """
1072 raise NotImplementedError
1075 raise NotImplementedError
1073
1076
1074 def get_file_content_streamed(self, path):
1077 def get_file_content_streamed(self, path):
1075 """
1078 """
1076 returns a streaming response from vcsserver with file content
1079 returns a streaming response from vcsserver with file content
1077 """
1080 """
1078 raise NotImplementedError
1081 raise NotImplementedError
1079
1082
1080 def get_file_size(self, path):
1083 def get_file_size(self, path):
1081 """
1084 """
1082 Returns size of the file at the given `path`.
1085 Returns size of the file at the given `path`.
1083 """
1086 """
1084 raise NotImplementedError
1087 raise NotImplementedError
1085
1088
1086 def get_path_commit(self, path, pre_load=None):
1089 def get_path_commit(self, path, pre_load=None):
1087 """
1090 """
1088 Returns last commit of the file at the given `path`.
1091 Returns last commit of the file at the given `path`.
1089
1092
1090 :param pre_load: Optional. List of commit attributes to load.
1093 :param pre_load: Optional. List of commit attributes to load.
1091 """
1094 """
1092 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1095 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1093 if not commits:
1096 if not commits:
1094 raise RepositoryError(
1097 raise RepositoryError(
1095 'Failed to fetch history for path {}. '
1098 'Failed to fetch history for path {}. '
1096 'Please check if such path exists in your repository'.format(
1099 'Please check if such path exists in your repository'.format(
1097 path))
1100 path))
1098 return commits[0]
1101 return commits[0]
1099
1102
1100 def get_path_history(self, path, limit=None, pre_load=None):
1103 def get_path_history(self, path, limit=None, pre_load=None):
1101 """
1104 """
1102 Returns history of file as reversed list of :class:`BaseCommit`
1105 Returns history of file as reversed list of :class:`BaseCommit`
1103 objects for which file at given `path` has been modified.
1106 objects for which file at given `path` has been modified.
1104
1107
1105 :param limit: Optional. Allows limiting the size of the returned
1108 :param limit: Optional. Allows limiting the size of the returned
1106 history. This is intended as a hint to the underlying backend, so
1109 history. This is intended as a hint to the underlying backend, so
1107 that it can apply optimizations depending on the limit.
1110 that it can apply optimizations depending on the limit.
1108 :param pre_load: Optional. List of commit attributes to load.
1111 :param pre_load: Optional. List of commit attributes to load.
1109 """
1112 """
1110 raise NotImplementedError
1113 raise NotImplementedError
1111
1114
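
A short sketch of how ``get_path_commit`` and ``get_path_history`` relate, assuming ``commit`` is a concrete backend commit and the path exists in the repository (the file name is illustrative):

# Last commit that touched the file, with a couple of attributes pre-loaded.
last = commit.get_path_commit('README.rst', pre_load=['author', 'date'])

# The ten most recent commits that modified the same file, newest first.
for entry in commit.get_path_history('README.rst', limit=10):
    print('%s %s' % (entry.short_id, entry.author_name))
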
1112 def get_file_annotate(self, path, pre_load=None):
1115 def get_file_annotate(self, path, pre_load=None):
1113 """
1116 """
1114 Returns a generator of four element tuples with
1117 Returns a generator of four element tuples with
1115 lineno, sha, commit lazy loader and line
1118 lineno, sha, commit lazy loader and line
1116
1119
1117 :param pre_load: Optional. List of commit attributes to load.
1120 :param pre_load: Optional. List of commit attributes to load.
1118 """
1121 """
1119 raise NotImplementedError
1122 raise NotImplementedError
1120
1123
1121 def get_nodes(self, path):
1124 def get_nodes(self, path):
1122 """
1125 """
1123 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1126 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1124 state of commit at the given ``path``.
1127 state of commit at the given ``path``.
1125
1128
1126 :raises ``CommitError``: if node at the given ``path`` is not
1129 :raises ``CommitError``: if node at the given ``path`` is not
1127 an instance of ``DirNode``
1130 an instance of ``DirNode``
1128 """
1131 """
1129 raise NotImplementedError
1132 raise NotImplementedError
1130
1133
1131 def get_node(self, path):
1134 def get_node(self, path):
1132 """
1135 """
1133 Returns ``Node`` object from the given ``path``.
1136 Returns ``Node`` object from the given ``path``.
1134
1137
1135 :raises ``NodeDoesNotExistError``: if there is no node at the given
1138 :raises ``NodeDoesNotExistError``: if there is no node at the given
1136 ``path``
1139 ``path``
1137 """
1140 """
1138 raise NotImplementedError
1141 raise NotImplementedError
1139
1142
1140 def get_largefile_node(self, path):
1143 def get_largefile_node(self, path):
1141 """
1144 """
1142 Returns the path to the largefile from Mercurial/Git-LFS storage,
1145 Returns the path to the largefile from Mercurial/Git-LFS storage,
1143 or None if it's not a largefile node
1146 or None if it's not a largefile node
1144 """
1147 """
1145 return None
1148 return None
1146
1149
1147 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1150 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1148 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1151 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1149 """
1152 """
1150 Creates an archive containing the contents of the repository.
1153 Creates an archive containing the contents of the repository.
1151
1154
1152 :param archive_dest_path: path to the file in which to create the archive.
1155 :param archive_dest_path: path to the file in which to create the archive.
1153 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1156 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1154 :param prefix: name of root directory in archive.
1157 :param prefix: name of root directory in archive.
1155 Default is repository name and commit's short_id joined with dash:
1158 Default is repository name and commit's short_id joined with dash:
1156 ``"{repo_name}-{short_id}"``.
1159 ``"{repo_name}-{short_id}"``.
1157 :param write_metadata: write a metadata file into archive.
1160 :param write_metadata: write a metadata file into archive.
1158 :param mtime: custom modification time for archive creation, defaults
1161 :param mtime: custom modification time for archive creation, defaults
1159 to the commit date if not given.
1162 to the commit date if not given.
1160 :param archive_at_path: pack files at this path (default '/')
1163 :param archive_at_path: pack files at this path (default '/')
1161
1164
1162 :raise VCSError: If prefix has a problem.
1165 :raise VCSError: If prefix has a problem.
1163 """
1166 """
1164 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1167 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1165 if kind not in allowed_kinds:
1168 if kind not in allowed_kinds:
1166 raise ImproperArchiveTypeError(
1169 raise ImproperArchiveTypeError(
1167 'Archive kind (%s) not supported, use one of %s' %
1170 'Archive kind (%s) not supported, use one of %s' %
1168 (kind, allowed_kinds))
1171 (kind, allowed_kinds))
1169
1172
1170 prefix = self._validate_archive_prefix(prefix)
1173 prefix = self._validate_archive_prefix(prefix)
1171
1174
1172 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1175 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1173
1176
1174 file_info = []
1177 file_info = []
1175 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1178 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1176 for _r, _d, files in cur_rev.walk(archive_at_path):
1179 for _r, _d, files in cur_rev.walk(archive_at_path):
1177 for f in files:
1180 for f in files:
1178 f_path = os.path.join(prefix, f.path)
1181 f_path = os.path.join(prefix, f.path)
1179 file_info.append(
1182 file_info.append(
1180 (f_path, f.mode, f.is_link(), f.raw_bytes))
1183 (f_path, f.mode, f.is_link(), f.raw_bytes))
1181
1184
1182 if write_metadata:
1185 if write_metadata:
1183 metadata = [
1186 metadata = [
1184 ('repo_name', self.repository.name),
1187 ('repo_name', self.repository.name),
1185 ('commit_id', self.raw_id),
1188 ('commit_id', self.raw_id),
1186 ('mtime', mtime),
1189 ('mtime', mtime),
1187 ('branch', self.branch),
1190 ('branch', self.branch),
1188 ('tags', ','.join(self.tags)),
1191 ('tags', ','.join(self.tags)),
1189 ]
1192 ]
1190 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1193 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1191 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1194 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1192
1195
1193 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1196 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1194
1197
1195 def _validate_archive_prefix(self, prefix):
1198 def _validate_archive_prefix(self, prefix):
1196 if prefix is None:
1199 if prefix is None:
1197 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1200 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1198 repo_name=safe_str(self.repository.name),
1201 repo_name=safe_str(self.repository.name),
1199 short_id=self.short_id)
1202 short_id=self.short_id)
1200 elif not isinstance(prefix, str):
1203 elif not isinstance(prefix, str):
1201 raise ValueError("prefix not a str object: %s" % repr(prefix))
1204 raise ValueError("prefix not a str object: %s" % repr(prefix))
1202 elif prefix.startswith('/'):
1205 elif prefix.startswith('/'):
1203 raise VCSError("Prefix cannot start with leading slash")
1206 raise VCSError("Prefix cannot start with leading slash")
1204 elif prefix.strip() == '':
1207 elif prefix.strip() == '':
1205 raise VCSError("Prefix cannot be empty")
1208 raise VCSError("Prefix cannot be empty")
1206 return prefix
1209 return prefix
1207
1210
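
A hedged sketch of calling ``archive_repo`` with its defaults spelled out; the destination path is illustrative and the archive kind must be one of ``settings.ARCHIVE_SPECS``:

# Create a gzipped tarball of this commit's tree.
commit.archive_repo(
    '/tmp/myrepo-archive.tgz', kind='tgz',
    prefix=None,             # falls back to "{repo_name}-{short_id}"
    write_metadata=True,     # appends a .archival.txt entry to the archive
    archive_at_path='/')     # pack the whole tree, not just a subdirectory
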
1208 @LazyProperty
1211 @LazyProperty
1209 def root(self):
1212 def root(self):
1210 """
1213 """
1211 Returns ``RootNode`` object for this commit.
1214 Returns ``RootNode`` object for this commit.
1212 """
1215 """
1213 return self.get_node('')
1216 return self.get_node('')
1214
1217
1215 def next(self, branch=None):
1218 def next(self, branch=None):
1216 """
1219 """
1217 Returns the next commit after the current one; if ``branch`` is given,
1220 Returns the next commit after the current one; if ``branch`` is given,
1218 it will return the next commit belonging to that branch
1221 it will return the next commit belonging to that branch
1219
1222
1220 :param branch: show commits within the given named branch
1223 :param branch: show commits within the given named branch
1221 """
1224 """
1222 indexes = xrange(self.idx + 1, self.repository.count())
1225 indexes = xrange(self.idx + 1, self.repository.count())
1223 return self._find_next(indexes, branch)
1226 return self._find_next(indexes, branch)
1224
1227
1225 def prev(self, branch=None):
1228 def prev(self, branch=None):
1226 """
1229 """
1227 Returns the previous commit before the current one; if ``branch`` is given,
1230 Returns the previous commit before the current one; if ``branch`` is given,
1228 it will return the previous commit belonging to that branch
1231 it will return the previous commit belonging to that branch
1229
1232
1230 :param branch: show commit within the given named branch
1233 :param branch: show commit within the given named branch
1231 """
1234 """
1232 indexes = xrange(self.idx - 1, -1, -1)
1235 indexes = xrange(self.idx - 1, -1, -1)
1233 return self._find_next(indexes, branch)
1236 return self._find_next(indexes, branch)
1234
1237
1235 def _find_next(self, indexes, branch=None):
1238 def _find_next(self, indexes, branch=None):
1236 if branch and self.branch != branch:
1239 if branch and self.branch != branch:
1237 raise VCSError('Branch option used on commit not belonging '
1240 raise VCSError('Branch option used on commit not belonging '
1238 'to that branch')
1241 'to that branch')
1239
1242
1240 for next_idx in indexes:
1243 for next_idx in indexes:
1241 commit = self.repository.get_commit(commit_idx=next_idx)
1244 commit = self.repository.get_commit(commit_idx=next_idx)
1242 if branch and branch != commit.branch:
1245 if branch and branch != commit.branch:
1243 continue
1246 continue
1244 return commit
1247 return commit
1245 raise CommitDoesNotExistError
1248 raise CommitDoesNotExistError
1246
1249
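
A sketch of stepping forward through history with ``next()`` while staying on the current branch; the exception import path is an assumption based on the names used in this module:

from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError  # assumed path

current = commit
while True:
    try:
        current = current.next(branch=current.branch)
    except CommitDoesNotExistError:
        break  # reached the newest commit on this branch
    print(current.short_id)
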
1247 def diff(self, ignore_whitespace=True, context=3):
1250 def diff(self, ignore_whitespace=True, context=3):
1248 """
1251 """
1249 Returns a `Diff` object representing the change made by this commit.
1252 Returns a `Diff` object representing the change made by this commit.
1250 """
1253 """
1251 parent = self.first_parent
1254 parent = self.first_parent
1252 diff = self.repository.get_diff(
1255 diff = self.repository.get_diff(
1253 parent, self,
1256 parent, self,
1254 ignore_whitespace=ignore_whitespace,
1257 ignore_whitespace=ignore_whitespace,
1255 context=context)
1258 context=context)
1256 return diff
1259 return diff
1257
1260
1258 @LazyProperty
1261 @LazyProperty
1259 def added(self):
1262 def added(self):
1260 """
1263 """
1261 Returns list of added ``FileNode`` objects.
1264 Returns list of added ``FileNode`` objects.
1262 """
1265 """
1263 raise NotImplementedError
1266 raise NotImplementedError
1264
1267
1265 @LazyProperty
1268 @LazyProperty
1266 def changed(self):
1269 def changed(self):
1267 """
1270 """
1268 Returns list of modified ``FileNode`` objects.
1271 Returns list of modified ``FileNode`` objects.
1269 """
1272 """
1270 raise NotImplementedError
1273 raise NotImplementedError
1271
1274
1272 @LazyProperty
1275 @LazyProperty
1273 def removed(self):
1276 def removed(self):
1274 """
1277 """
1275 Returns list of removed ``FileNode`` objects.
1278 Returns list of removed ``FileNode`` objects.
1276 """
1279 """
1277 raise NotImplementedError
1280 raise NotImplementedError
1278
1281
1279 @LazyProperty
1282 @LazyProperty
1280 def size(self):
1283 def size(self):
1281 """
1284 """
1282 Returns total number of bytes from contents of all filenodes.
1285 Returns total number of bytes from contents of all filenodes.
1283 """
1286 """
1284 return sum((node.size for node in self.get_filenodes_generator()))
1287 return sum((node.size for node in self.get_filenodes_generator()))
1285
1288
1286 def walk(self, topurl=''):
1289 def walk(self, topurl=''):
1287 """
1290 """
1288 Similar to the os.walk method. Instead of a filesystem, it walks through
1291 Similar to the os.walk method. Instead of a filesystem, it walks through
1289 the commit starting at the given ``topurl``. Returns a generator of tuples
1292 the commit starting at the given ``topurl``. Returns a generator of tuples
1290 (topnode, dirnodes, filenodes).
1293 (topnode, dirnodes, filenodes).
1291 """
1294 """
1292 topnode = self.get_node(topurl)
1295 topnode = self.get_node(topurl)
1293 if not topnode.is_dir():
1296 if not topnode.is_dir():
1294 return
1297 return
1295 yield (topnode, topnode.dirs, topnode.files)
1298 yield (topnode, topnode.dirs, topnode.files)
1296 for dirnode in topnode.dirs:
1299 for dirnode in topnode.dirs:
1297 for tup in self.walk(dirnode.path):
1300 for tup in self.walk(dirnode.path):
1298 yield tup
1301 yield tup
1299
1302
1300 def get_filenodes_generator(self):
1303 def get_filenodes_generator(self):
1301 """
1304 """
1302 Returns generator that yields *all* file nodes.
1305 Returns generator that yields *all* file nodes.
1303 """
1306 """
1304 for topnode, dirs, files in self.walk():
1307 for topnode, dirs, files in self.walk():
1305 for node in files:
1308 for node in files:
1306 yield node
1309 yield node
1307
1310
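
A sketch that mirrors how ``size`` and ``get_filenodes_generator`` use ``walk``, listing every file under a subdirectory (the path is illustrative):

total = 0
for topnode, dirs, files in commit.walk('docs'):
    for filenode in files:
        total += filenode.size
        print('%s (%d bytes)' % (filenode.path, filenode.size))
print('total: %d bytes' % total)
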
1308 #
1311 #
1309 # Utilities for sub classes to support consistent behavior
1312 # Utilities for sub classes to support consistent behavior
1310 #
1313 #
1311
1314
1312 def no_node_at_path(self, path):
1315 def no_node_at_path(self, path):
1313 return NodeDoesNotExistError(
1316 return NodeDoesNotExistError(
1314 u"There is no file nor directory at the given path: "
1317 u"There is no file nor directory at the given path: "
1315 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1318 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1316
1319
1317 def _fix_path(self, path):
1320 def _fix_path(self, path):
1318 """
1321 """
1319 Paths are stored without a trailing slash, so we need to get rid of it if
1322 Paths are stored without a trailing slash, so we need to get rid of it if
1320 needed.
1323 needed.
1321 """
1324 """
1322 return path.rstrip('/')
1325 return path.rstrip('/')
1323
1326
1324 #
1327 #
1325 # Deprecated API based on changesets
1328 # Deprecated API based on changesets
1326 #
1329 #
1327
1330
1328 @property
1331 @property
1329 def revision(self):
1332 def revision(self):
1330 warnings.warn("Use idx instead", DeprecationWarning)
1333 warnings.warn("Use idx instead", DeprecationWarning)
1331 return self.idx
1334 return self.idx
1332
1335
1333 @revision.setter
1336 @revision.setter
1334 def revision(self, value):
1337 def revision(self, value):
1335 warnings.warn("Use idx instead", DeprecationWarning)
1338 warnings.warn("Use idx instead", DeprecationWarning)
1336 self.idx = value
1339 self.idx = value
1337
1340
1338 def get_file_changeset(self, path):
1341 def get_file_changeset(self, path):
1339 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1342 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1340 return self.get_path_commit(path)
1343 return self.get_path_commit(path)
1341
1344
1342
1345
1343 class BaseChangesetClass(type):
1346 class BaseChangesetClass(type):
1344
1347
1345 def __instancecheck__(self, instance):
1348 def __instancecheck__(self, instance):
1346 return isinstance(instance, BaseCommit)
1349 return isinstance(instance, BaseCommit)
1347
1350
1348
1351
1349 class BaseChangeset(BaseCommit):
1352 class BaseChangeset(BaseCommit):
1350
1353
1351 __metaclass__ = BaseChangesetClass
1354 __metaclass__ = BaseChangesetClass
1352
1355
1353 def __new__(cls, *args, **kwargs):
1356 def __new__(cls, *args, **kwargs):
1354 warnings.warn(
1357 warnings.warn(
1355 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1358 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1356 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1359 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1357
1360
1358
1361
1359 class BaseInMemoryCommit(object):
1362 class BaseInMemoryCommit(object):
1360 """
1363 """
1361 Represents differences between repository's state (most recent head) and
1364 Represents differences between repository's state (most recent head) and
1362 changes made *in place*.
1365 changes made *in place*.
1363
1366
1364 **Attributes**
1367 **Attributes**
1365
1368
1366 ``repository``
1369 ``repository``
1367 repository object for this in-memory-commit
1370 repository object for this in-memory-commit
1368
1371
1369 ``added``
1372 ``added``
1370 list of ``FileNode`` objects marked as *added*
1373 list of ``FileNode`` objects marked as *added*
1371
1374
1372 ``changed``
1375 ``changed``
1373 list of ``FileNode`` objects marked as *changed*
1376 list of ``FileNode`` objects marked as *changed*
1374
1377
1375 ``removed``
1378 ``removed``
1376 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1379 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1377 *removed*
1380 *removed*
1378
1381
1379 ``parents``
1382 ``parents``
1380 list of :class:`BaseCommit` instances representing parents of
1383 list of :class:`BaseCommit` instances representing parents of
1381 in-memory commit. Should always be 2-element sequence.
1384 in-memory commit. Should always be 2-element sequence.
1382
1385
1383 """
1386 """
1384
1387
1385 def __init__(self, repository):
1388 def __init__(self, repository):
1386 self.repository = repository
1389 self.repository = repository
1387 self.added = []
1390 self.added = []
1388 self.changed = []
1391 self.changed = []
1389 self.removed = []
1392 self.removed = []
1390 self.parents = []
1393 self.parents = []
1391
1394
1392 def add(self, *filenodes):
1395 def add(self, *filenodes):
1393 """
1396 """
1394 Marks given ``FileNode`` objects as *to be committed*.
1397 Marks given ``FileNode`` objects as *to be committed*.
1395
1398
1396 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1399 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1397 latest commit
1400 latest commit
1398 :raises ``NodeAlreadyAddedError``: if node with same path is already
1401 :raises ``NodeAlreadyAddedError``: if node with same path is already
1399 marked as *added*
1402 marked as *added*
1400 """
1403 """
1401 # Check if not already marked as *added* first
1404 # Check if not already marked as *added* first
1402 for node in filenodes:
1405 for node in filenodes:
1403 if node.path in (n.path for n in self.added):
1406 if node.path in (n.path for n in self.added):
1404 raise NodeAlreadyAddedError(
1407 raise NodeAlreadyAddedError(
1405 "Such FileNode %s is already marked for addition"
1408 "Such FileNode %s is already marked for addition"
1406 % node.path)
1409 % node.path)
1407 for node in filenodes:
1410 for node in filenodes:
1408 self.added.append(node)
1411 self.added.append(node)
1409
1412
1410 def change(self, *filenodes):
1413 def change(self, *filenodes):
1411 """
1414 """
1412 Marks given ``FileNode`` objects to be *changed* in next commit.
1415 Marks given ``FileNode`` objects to be *changed* in next commit.
1413
1416
1414 :raises ``EmptyRepositoryError``: if there are no commits yet
1417 :raises ``EmptyRepositoryError``: if there are no commits yet
1415 :raises ``NodeAlreadyChangedError``: if node with same path is already
1418 :raises ``NodeAlreadyChangedError``: if node with same path is already
1416 marked to be *changed*
1419 marked to be *changed*
1417 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1420 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1418 marked to be *removed*
1421 marked to be *removed*
1419 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1422 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1420 commit
1423 commit
1421 :raises ``NodeNotChangedError``: if node hasn't really been changed
1424 :raises ``NodeNotChangedError``: if node hasn't really been changed
1422 """
1425 """
1423 for node in filenodes:
1426 for node in filenodes:
1424 if node.path in (n.path for n in self.removed):
1427 if node.path in (n.path for n in self.removed):
1425 raise NodeAlreadyRemovedError(
1428 raise NodeAlreadyRemovedError(
1426 "Node at %s is already marked as removed" % node.path)
1429 "Node at %s is already marked as removed" % node.path)
1427 try:
1430 try:
1428 self.repository.get_commit()
1431 self.repository.get_commit()
1429 except EmptyRepositoryError:
1432 except EmptyRepositoryError:
1430 raise EmptyRepositoryError(
1433 raise EmptyRepositoryError(
1431 "Nothing to change - try to *add* new nodes rather than "
1434 "Nothing to change - try to *add* new nodes rather than "
1432 "changing them")
1435 "changing them")
1433 for node in filenodes:
1436 for node in filenodes:
1434 if node.path in (n.path for n in self.changed):
1437 if node.path in (n.path for n in self.changed):
1435 raise NodeAlreadyChangedError(
1438 raise NodeAlreadyChangedError(
1436 "Node at '%s' is already marked as changed" % node.path)
1439 "Node at '%s' is already marked as changed" % node.path)
1437 self.changed.append(node)
1440 self.changed.append(node)
1438
1441
1439 def remove(self, *filenodes):
1442 def remove(self, *filenodes):
1440 """
1443 """
1441 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1444 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1442 *removed* in next commit.
1445 *removed* in next commit.
1443
1446
1444 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1447 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1445 be *removed*
1448 be *removed*
1446 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1449 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1447 be *changed*
1450 be *changed*
1448 """
1451 """
1449 for node in filenodes:
1452 for node in filenodes:
1450 if node.path in (n.path for n in self.removed):
1453 if node.path in (n.path for n in self.removed):
1451 raise NodeAlreadyRemovedError(
1454 raise NodeAlreadyRemovedError(
1452 "Node is already marked to for removal at %s" % node.path)
1455 "Node is already marked to for removal at %s" % node.path)
1453 if node.path in (n.path for n in self.changed):
1456 if node.path in (n.path for n in self.changed):
1454 raise NodeAlreadyChangedError(
1457 raise NodeAlreadyChangedError(
1455 "Node is already marked to be changed at %s" % node.path)
1458 "Node is already marked to be changed at %s" % node.path)
1456 # We only mark node as *removed* - real removal is done by
1459 # We only mark node as *removed* - real removal is done by
1457 # commit method
1460 # commit method
1458 self.removed.append(node)
1461 self.removed.append(node)
1459
1462
1460 def reset(self):
1463 def reset(self):
1461 """
1464 """
1462 Resets this instance to initial state (cleans ``added``, ``changed``
1465 Resets this instance to initial state (cleans ``added``, ``changed``
1463 and ``removed`` lists).
1466 and ``removed`` lists).
1464 """
1467 """
1465 self.added = []
1468 self.added = []
1466 self.changed = []
1469 self.changed = []
1467 self.removed = []
1470 self.removed = []
1468 self.parents = []
1471 self.parents = []
1469
1472
1470 def get_ipaths(self):
1473 def get_ipaths(self):
1471 """
1474 """
1472 Returns generator of paths from nodes marked as added, changed or
1475 Returns generator of paths from nodes marked as added, changed or
1473 removed.
1476 removed.
1474 """
1477 """
1475 for node in itertools.chain(self.added, self.changed, self.removed):
1478 for node in itertools.chain(self.added, self.changed, self.removed):
1476 yield node.path
1479 yield node.path
1477
1480
1478 def get_paths(self):
1481 def get_paths(self):
1479 """
1482 """
1480 Returns list of paths from nodes marked as added, changed or removed.
1483 Returns list of paths from nodes marked as added, changed or removed.
1481 """
1484 """
1482 return list(self.get_ipaths())
1485 return list(self.get_ipaths())
1483
1486
1484 def check_integrity(self, parents=None):
1487 def check_integrity(self, parents=None):
1485 """
1488 """
1486 Checks in-memory commit's integrity. Also, sets parents if not
1489 Checks in-memory commit's integrity. Also, sets parents if not
1487 already set.
1490 already set.
1488
1491
1489 :raises CommitError: if any error occurs (i.e.
1492 :raises CommitError: if any error occurs (i.e.
1490 ``NodeDoesNotExistError``).
1493 ``NodeDoesNotExistError``).
1491 """
1494 """
1492 if not self.parents:
1495 if not self.parents:
1493 parents = parents or []
1496 parents = parents or []
1494 if len(parents) == 0:
1497 if len(parents) == 0:
1495 try:
1498 try:
1496 parents = [self.repository.get_commit(), None]
1499 parents = [self.repository.get_commit(), None]
1497 except EmptyRepositoryError:
1500 except EmptyRepositoryError:
1498 parents = [None, None]
1501 parents = [None, None]
1499 elif len(parents) == 1:
1502 elif len(parents) == 1:
1500 parents += [None]
1503 parents += [None]
1501 self.parents = parents
1504 self.parents = parents
1502
1505
1503 # Local parents, only if not None
1506 # Local parents, only if not None
1504 parents = [p for p in self.parents if p]
1507 parents = [p for p in self.parents if p]
1505
1508
1506 # Check nodes marked as added
1509 # Check nodes marked as added
1507 for p in parents:
1510 for p in parents:
1508 for node in self.added:
1511 for node in self.added:
1509 try:
1512 try:
1510 p.get_node(node.path)
1513 p.get_node(node.path)
1511 except NodeDoesNotExistError:
1514 except NodeDoesNotExistError:
1512 pass
1515 pass
1513 else:
1516 else:
1514 raise NodeAlreadyExistsError(
1517 raise NodeAlreadyExistsError(
1515 "Node `%s` already exists at %s" % (node.path, p))
1518 "Node `%s` already exists at %s" % (node.path, p))
1516
1519
1517 # Check nodes marked as changed
1520 # Check nodes marked as changed
1518 missing = set(self.changed)
1521 missing = set(self.changed)
1519 not_changed = set(self.changed)
1522 not_changed = set(self.changed)
1520 if self.changed and not parents:
1523 if self.changed and not parents:
1521 raise NodeDoesNotExistError(str(self.changed[0].path))
1524 raise NodeDoesNotExistError(str(self.changed[0].path))
1522 for p in parents:
1525 for p in parents:
1523 for node in self.changed:
1526 for node in self.changed:
1524 try:
1527 try:
1525 old = p.get_node(node.path)
1528 old = p.get_node(node.path)
1526 missing.remove(node)
1529 missing.remove(node)
1527 # if content actually changed, remove node from not_changed
1530 # if content actually changed, remove node from not_changed
1528 if old.content != node.content:
1531 if old.content != node.content:
1529 not_changed.remove(node)
1532 not_changed.remove(node)
1530 except NodeDoesNotExistError:
1533 except NodeDoesNotExistError:
1531 pass
1534 pass
1532 if self.changed and missing:
1535 if self.changed and missing:
1533 raise NodeDoesNotExistError(
1536 raise NodeDoesNotExistError(
1534 "Node `%s` marked as modified but missing in parents: %s"
1537 "Node `%s` marked as modified but missing in parents: %s"
1535 % (node.path, parents))
1538 % (node.path, parents))
1536
1539
1537 if self.changed and not_changed:
1540 if self.changed and not_changed:
1538 raise NodeNotChangedError(
1541 raise NodeNotChangedError(
1539 "Node `%s` wasn't actually changed (parents: %s)"
1542 "Node `%s` wasn't actually changed (parents: %s)"
1540 % (not_changed.pop().path, parents))
1543 % (not_changed.pop().path, parents))
1541
1544
1542 # Check nodes marked as removed
1545 # Check nodes marked as removed
1543 if self.removed and not parents:
1546 if self.removed and not parents:
1544 raise NodeDoesNotExistError(
1547 raise NodeDoesNotExistError(
1545 "Cannot remove node at %s as there "
1548 "Cannot remove node at %s as there "
1546 "were no parents specified" % self.removed[0].path)
1549 "were no parents specified" % self.removed[0].path)
1547 really_removed = set()
1550 really_removed = set()
1548 for p in parents:
1551 for p in parents:
1549 for node in self.removed:
1552 for node in self.removed:
1550 try:
1553 try:
1551 p.get_node(node.path)
1554 p.get_node(node.path)
1552 really_removed.add(node)
1555 really_removed.add(node)
1553 except CommitError:
1556 except CommitError:
1554 pass
1557 pass
1555 not_removed = set(self.removed) - really_removed
1558 not_removed = set(self.removed) - really_removed
1556 if not_removed:
1559 if not_removed:
1557 # TODO: johbo: This code branch does not seem to be covered
1560 # TODO: johbo: This code branch does not seem to be covered
1558 raise NodeDoesNotExistError(
1561 raise NodeDoesNotExistError(
1559 "Cannot remove node at %s from "
1562 "Cannot remove node at %s from "
1560 "following parents: %s" % (not_removed, parents))
1563 "following parents: %s" % (not_removed, parents))
1561
1564
1562 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1565 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1563 """
1566 """
1564 Performs in-memory commit (doesn't check workdir in any way) and
1567 Performs in-memory commit (doesn't check workdir in any way) and
1565 returns newly created :class:`BaseCommit`. Updates repository's
1568 returns newly created :class:`BaseCommit`. Updates repository's
1566 attribute `commits`.
1569 attribute `commits`.
1567
1570
1568 .. note::
1571 .. note::
1569
1572
1570 When overriding this method, each backend should call
1573 When overriding this method, each backend should call
1571 ``self.check_integrity(parents)`` first.
1574 ``self.check_integrity(parents)`` first.
1572
1575
1573 :param message: message of the commit
1576 :param message: message of the commit
1574 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1577 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1575 :param parents: single parent or sequence of parents from which commit
1578 :param parents: single parent or sequence of parents from which commit
1576 would be derived
1579 would be derived
1577 :param date: ``datetime.datetime`` instance. Defaults to
1580 :param date: ``datetime.datetime`` instance. Defaults to
1578 ``datetime.datetime.now()``.
1581 ``datetime.datetime.now()``.
1579 :param branch: branch name, as string. If none given, default backend's
1582 :param branch: branch name, as string. If none given, default backend's
1580 branch would be used.
1583 branch would be used.
1581
1584
1582 :raises ``CommitError``: if any error occurs while committing
1585 :raises ``CommitError``: if any error occurs while committing
1583 """
1586 """
1584 raise NotImplementedError
1587 raise NotImplementedError
1585
1588
1586
1589
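
A minimal sketch of the intended add-then-commit flow, assuming the backend exposes its in-memory commit object as ``repo.in_memory_commit`` and that ``FileNode`` lives in ``rhodecode.lib.vcs.nodes`` (both are assumptions to verify against the concrete backend):

from rhodecode.lib.vcs.nodes import FileNode  # assumed import path

imc = repo.in_memory_commit  # assumed accessor on the repository object
imc.add(FileNode('docs/intro.rst', content='Hello'))
new_commit = imc.commit(
    message=u'Add intro document',
    author=u'Joe Doe <joe.doe@example.com>')
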
1587 class BaseInMemoryChangesetClass(type):
1590 class BaseInMemoryChangesetClass(type):
1588
1591
1589 def __instancecheck__(self, instance):
1592 def __instancecheck__(self, instance):
1590 return isinstance(instance, BaseInMemoryCommit)
1593 return isinstance(instance, BaseInMemoryCommit)
1591
1594
1592
1595
1593 class BaseInMemoryChangeset(BaseInMemoryCommit):
1596 class BaseInMemoryChangeset(BaseInMemoryCommit):
1594
1597
1595 __metaclass__ = BaseInMemoryChangesetClass
1598 __metaclass__ = BaseInMemoryChangesetClass
1596
1599
1597 def __new__(cls, *args, **kwargs):
1600 def __new__(cls, *args, **kwargs):
1598 warnings.warn(
1601 warnings.warn(
1599 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1602 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1600 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1603 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1601
1604
1602
1605
1603 class EmptyCommit(BaseCommit):
1606 class EmptyCommit(BaseCommit):
1604 """
1607 """
1605 A dummy empty commit. It's possible to pass a hash when creating
1608 A dummy empty commit. It's possible to pass a hash when creating
1606 an EmptyCommit
1609 an EmptyCommit
1607 """
1610 """
1608
1611
1609 def __init__(
1612 def __init__(
1610 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1613 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1611 message='', author='', date=None):
1614 message='', author='', date=None):
1612 self._empty_commit_id = commit_id
1615 self._empty_commit_id = commit_id
1613 # TODO: johbo: Solve idx parameter, default value does not make
1616 # TODO: johbo: Solve idx parameter, default value does not make
1614 # too much sense
1617 # too much sense
1615 self.idx = idx
1618 self.idx = idx
1616 self.message = message
1619 self.message = message
1617 self.author = author
1620 self.author = author
1618 self.date = date or datetime.datetime.fromtimestamp(0)
1621 self.date = date or datetime.datetime.fromtimestamp(0)
1619 self.repository = repo
1622 self.repository = repo
1620 self.alias = alias
1623 self.alias = alias
1621
1624
1622 @LazyProperty
1625 @LazyProperty
1623 def raw_id(self):
1626 def raw_id(self):
1624 """
1627 """
1625 Returns raw string identifying this commit, useful for web
1628 Returns raw string identifying this commit, useful for web
1626 representation.
1629 representation.
1627 """
1630 """
1628
1631
1629 return self._empty_commit_id
1632 return self._empty_commit_id
1630
1633
1631 @LazyProperty
1634 @LazyProperty
1632 def branch(self):
1635 def branch(self):
1633 if self.alias:
1636 if self.alias:
1634 from rhodecode.lib.vcs.backends import get_backend
1637 from rhodecode.lib.vcs.backends import get_backend
1635 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1638 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1636
1639
1637 @LazyProperty
1640 @LazyProperty
1638 def short_id(self):
1641 def short_id(self):
1639 return self.raw_id[:12]
1642 return self.raw_id[:12]
1640
1643
1641 @LazyProperty
1644 @LazyProperty
1642 def id(self):
1645 def id(self):
1643 return self.raw_id
1646 return self.raw_id
1644
1647
1645 def get_path_commit(self, path):
1648 def get_path_commit(self, path):
1646 return self
1649 return self
1647
1650
1648 def get_file_content(self, path):
1651 def get_file_content(self, path):
1649 return u''
1652 return u''
1650
1653
1651 def get_file_content_streamed(self, path):
1654 def get_file_content_streamed(self, path):
1652 yield self.get_file_content()
1655 yield self.get_file_content()
1653
1656
1654 def get_file_size(self, path):
1657 def get_file_size(self, path):
1655 return 0
1658 return 0
1656
1659
1657
1660
1658 class EmptyChangesetClass(type):
1661 class EmptyChangesetClass(type):
1659
1662
1660 def __instancecheck__(self, instance):
1663 def __instancecheck__(self, instance):
1661 return isinstance(instance, EmptyCommit)
1664 return isinstance(instance, EmptyCommit)
1662
1665
1663
1666
1664 class EmptyChangeset(EmptyCommit):
1667 class EmptyChangeset(EmptyCommit):
1665
1668
1666 __metaclass__ = EmptyChangesetClass
1669 __metaclass__ = EmptyChangesetClass
1667
1670
1668 def __new__(cls, *args, **kwargs):
1671 def __new__(cls, *args, **kwargs):
1669 warnings.warn(
1672 warnings.warn(
1670 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1673 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1671 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1674 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1672
1675
1673 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1676 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1674 alias=None, revision=-1, message='', author='', date=None):
1677 alias=None, revision=-1, message='', author='', date=None):
1675 if requested_revision is not None:
1678 if requested_revision is not None:
1676 warnings.warn(
1679 warnings.warn(
1677 "Parameter requested_revision not supported anymore",
1680 "Parameter requested_revision not supported anymore",
1678 DeprecationWarning)
1681 DeprecationWarning)
1679 super(EmptyChangeset, self).__init__(
1682 super(EmptyChangeset, self).__init__(
1680 commit_id=cs, repo=repo, alias=alias, idx=revision,
1683 commit_id=cs, repo=repo, alias=alias, idx=revision,
1681 message=message, author=author, date=date)
1684 message=message, author=author, date=date)
1682
1685
1683 @property
1686 @property
1684 def revision(self):
1687 def revision(self):
1685 warnings.warn("Use idx instead", DeprecationWarning)
1688 warnings.warn("Use idx instead", DeprecationWarning)
1686 return self.idx
1689 return self.idx
1687
1690
1688 @revision.setter
1691 @revision.setter
1689 def revision(self, value):
1692 def revision(self, value):
1690 warnings.warn("Use idx instead", DeprecationWarning)
1693 warnings.warn("Use idx instead", DeprecationWarning)
1691 self.idx = value
1694 self.idx = value
1692
1695
1693
1696
1694 class EmptyRepository(BaseRepository):
1697 class EmptyRepository(BaseRepository):
1695 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1698 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1696 pass
1699 pass
1697
1700
1698 def get_diff(self, *args, **kwargs):
1701 def get_diff(self, *args, **kwargs):
1699 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1702 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1700 return GitDiff('')
1703 return GitDiff('')
1701
1704
1702
1705
1703 class CollectionGenerator(object):
1706 class CollectionGenerator(object):
1704
1707
1705 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1708 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1706 self.repo = repo
1709 self.repo = repo
1707 self.commit_ids = commit_ids
1710 self.commit_ids = commit_ids
1708 # TODO: (oliver) this isn't currently hooked up
1711 # TODO: (oliver) this isn't currently hooked up
1709 self.collection_size = None
1712 self.collection_size = None
1710 self.pre_load = pre_load
1713 self.pre_load = pre_load
1711 self.translate_tag = translate_tag
1714 self.translate_tag = translate_tag
1712
1715
1713 def __len__(self):
1716 def __len__(self):
1714 if self.collection_size is not None:
1717 if self.collection_size is not None:
1715 return self.collection_size
1718 return self.collection_size
1716 return self.commit_ids.__len__()
1719 return self.commit_ids.__len__()
1717
1720
1718 def __iter__(self):
1721 def __iter__(self):
1719 for commit_id in self.commit_ids:
1722 for commit_id in self.commit_ids:
1720 # TODO: johbo: Mercurial passes in commit indices or commit ids
1723 # TODO: johbo: Mercurial passes in commit indices or commit ids
1721 yield self._commit_factory(commit_id)
1724 yield self._commit_factory(commit_id)
1722
1725
1723 def _commit_factory(self, commit_id):
1726 def _commit_factory(self, commit_id):
1724 """
1727 """
1725 Allows backends to override the way commits are generated.
1728 Allows backends to override the way commits are generated.
1726 """
1729 """
1727 return self.repo.get_commit(
1730 return self.repo.get_commit(
1728 commit_id=commit_id, pre_load=self.pre_load,
1731 commit_id=commit_id, pre_load=self.pre_load,
1729 translate_tag=self.translate_tag)
1732 translate_tag=self.translate_tag)
1730
1733
1731 def __getslice__(self, i, j):
1734 def __getslice__(self, i, j):
1732 """
1735 """
1733 Returns a sliced ``CollectionGenerator`` over the selected commit ids
1736 Returns a sliced ``CollectionGenerator`` over the selected commit ids
1734 """
1737 """
1735 commit_ids = self.commit_ids[i:j]
1738 commit_ids = self.commit_ids[i:j]
1736 return self.__class__(
1739 return self.__class__(
1737 self.repo, commit_ids, pre_load=self.pre_load,
1740 self.repo, commit_ids, pre_load=self.pre_load,
1738 translate_tag=self.translate_tag)
1741 translate_tag=self.translate_tag)
1739
1742
1740 def __repr__(self):
1743 def __repr__(self):
1741 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1744 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1742
1745
1743
1746
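
Assuming the repository's ``get_commits()`` returns a ``CollectionGenerator`` (as the concrete backends are expected to do), slicing keeps the collection lazy; a sketch:

commits = repo.get_commits()   # assumed to return a CollectionGenerator
page = commits[0:10]           # __getslice__ builds another lazy generator
for commit in page:
    print('%s %s' % (commit.short_id, commit.branch))
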
1744 class Config(object):
1747 class Config(object):
1745 """
1748 """
1746 Represents the configuration for a repository.
1749 Represents the configuration for a repository.
1747
1750
1748 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1751 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1749 standard library. It implements only the needed subset.
1752 standard library. It implements only the needed subset.
1750 """
1753 """
1751
1754
1752 def __init__(self):
1755 def __init__(self):
1753 self._values = {}
1756 self._values = {}
1754
1757
1755 def copy(self):
1758 def copy(self):
1756 clone = Config()
1759 clone = Config()
1757 for section, values in self._values.items():
1760 for section, values in self._values.items():
1758 clone._values[section] = values.copy()
1761 clone._values[section] = values.copy()
1759 return clone
1762 return clone
1760
1763
1761 def __repr__(self):
1764 def __repr__(self):
1762 return '<Config(%s sections) at %s>' % (
1765 return '<Config(%s sections) at %s>' % (
1763 len(self._values), hex(id(self)))
1766 len(self._values), hex(id(self)))
1764
1767
1765 def items(self, section):
1768 def items(self, section):
1766 return self._values.get(section, {}).iteritems()
1769 return self._values.get(section, {}).iteritems()
1767
1770
1768 def get(self, section, option):
1771 def get(self, section, option):
1769 return self._values.get(section, {}).get(option)
1772 return self._values.get(section, {}).get(option)
1770
1773
1771 def set(self, section, option, value):
1774 def set(self, section, option, value):
1772 section_values = self._values.setdefault(section, {})
1775 section_values = self._values.setdefault(section, {})
1773 section_values[option] = value
1776 section_values[option] = value
1774
1777
1775 def clear_section(self, section):
1778 def clear_section(self, section):
1776 self._values[section] = {}
1779 self._values[section] = {}
1777
1780
1778 def serialize(self):
1781 def serialize(self):
1779 """
1782 """
1780 Creates a list of (section, key, value) three-element tuples representing
1783 Creates a list of (section, key, value) three-element tuples representing
1781 this config object.
1784 this config object.
1782 """
1785 """
1783 items = []
1786 items = []
1784 for section in self._values:
1787 for section in self._values:
1785 for option, value in self._values[section].items():
1788 for option, value in self._values[section].items():
1786 items.append(
1789 items.append(
1787 (safe_str(section), safe_str(option), safe_str(value)))
1790 (safe_str(section), safe_str(option), safe_str(value)))
1788 return items
1791 return items
1789
1792
1790
1793
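
A small sketch of the ConfigParser-like subset implemented above; the section and option names are illustrative only:

config = Config()
config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
print(config.get('ui', 'username'))

# serialize() flattens everything into (section, option, value) tuples
for section, option, value in config.serialize():
    print('%s.%s = %s' % (section, option, value))

clone = config.copy()          # per-section dicts are copied as well
clone.clear_section('ui')      # does not affect the original config
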
1791 class Diff(object):
1794 class Diff(object):
1792 """
1795 """
1793 Represents a diff result from a repository backend.
1796 Represents a diff result from a repository backend.
1794
1797
1795 Subclasses have to provide a backend specific value for
1798 Subclasses have to provide a backend specific value for
1796 :attr:`_header_re` and :attr:`_meta_re`.
1799 :attr:`_header_re` and :attr:`_meta_re`.
1797 """
1800 """
1798 _meta_re = None
1801 _meta_re = None
1799 _header_re = None
1802 _header_re = None
1800
1803
1801 def __init__(self, raw_diff):
1804 def __init__(self, raw_diff):
1802 self.raw = raw_diff
1805 self.raw = raw_diff
1803
1806
1804 def chunks(self):
1807 def chunks(self):
1805 """
1808 """
1806 split the diff into separate per-file 'diff --git a/file b/file' chunks;
1809 split the diff into separate per-file 'diff --git a/file b/file' chunks;
1807 to make the diffs consistent we must prepend a '\n', and make sure
1810 to make the diffs consistent we must prepend a '\n', and make sure
1808 we can detect the last chunk, as it also has a special rule
1811 we can detect the last chunk, as it also has a special rule
1809 """
1812 """
1810
1813
1811 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1814 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1812 header = diff_parts[0]
1815 header = diff_parts[0]
1813
1816
1814 if self._meta_re:
1817 if self._meta_re:
1815 match = self._meta_re.match(header)
1818 match = self._meta_re.match(header)
1816
1819
1817 chunks = diff_parts[1:]
1820 chunks = diff_parts[1:]
1818 total_chunks = len(chunks)
1821 total_chunks = len(chunks)
1819
1822
1820 return (
1823 return (
1821 DiffChunk(chunk, self, cur_chunk == total_chunks)
1824 DiffChunk(chunk, self, cur_chunk == total_chunks)
1822 for cur_chunk, chunk in enumerate(chunks, start=1))
1825 for cur_chunk, chunk in enumerate(chunks, start=1))
1823
1826
1824
1827
1825 class DiffChunk(object):
1828 class DiffChunk(object):
1826
1829
1827 def __init__(self, chunk, diff, last_chunk):
1830 def __init__(self, chunk, diff, last_chunk):
1828 self._diff = diff
1831 self._diff = diff
1829
1832
1830 # since we split by '\ndiff --git', that part is lost from the original diff
1833 # since we split by '\ndiff --git', that part is lost from the original diff
1831 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1834 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1832 if not last_chunk:
1835 if not last_chunk:
1833 chunk += '\n'
1836 chunk += '\n'
1834
1837
1835 match = self._diff._header_re.match(chunk)
1838 match = self._diff._header_re.match(chunk)
1836 self.header = match.groupdict()
1839 self.header = match.groupdict()
1837 self.diff = chunk[match.end():]
1840 self.diff = chunk[match.end():]
1838 self.raw = chunk
1841 self.raw = chunk
1839
1842
1840
1843
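
A hedged sketch of consuming a backend diff through the ``Diff``/``DiffChunk`` API above; it assumes ``repo.get_diff()`` returns a backend-specific ``Diff`` subclass that defines ``_header_re``, and the header group names depend on that backend:

diff = repo.get_diff(commit.first_parent, commit)  # backend-specific Diff
for chunk in diff.chunks():
    # header is the groupdict() of the backend's _header_re match
    print(sorted(chunk.header.keys()))
    # chunk.diff is the body after the matched header, chunk.raw the whole chunk
    print(chunk.diff[:200])
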
1841 class BasePathPermissionChecker(object):
1844 class BasePathPermissionChecker(object):
1842
1845
1843 @staticmethod
1846 @staticmethod
1844 def create_from_patterns(includes, excludes):
1847 def create_from_patterns(includes, excludes):
1845 if includes and '*' in includes and not excludes:
1848 if includes and '*' in includes and not excludes:
1846 return AllPathPermissionChecker()
1849 return AllPathPermissionChecker()
1847 elif excludes and '*' in excludes:
1850 elif excludes and '*' in excludes:
1848 return NonePathPermissionChecker()
1851 return NonePathPermissionChecker()
1849 else:
1852 else:
1850 return PatternPathPermissionChecker(includes, excludes)
1853 return PatternPathPermissionChecker(includes, excludes)
1851
1854
1852 @property
1855 @property
1853 def has_full_access(self):
1856 def has_full_access(self):
1854 raise NotImplementedError()
1857 raise NotImplementedError()
1855
1858
1856 def has_access(self, path):
1859 def has_access(self, path):
1857 raise NotImplementedError()
1860 raise NotImplementedError()
1858
1861
1859
1862
1860 class AllPathPermissionChecker(BasePathPermissionChecker):
1863 class AllPathPermissionChecker(BasePathPermissionChecker):
1861
1864
1862 @property
1865 @property
1863 def has_full_access(self):
1866 def has_full_access(self):
1864 return True
1867 return True
1865
1868
1866 def has_access(self, path):
1869 def has_access(self, path):
1867 return True
1870 return True
1868
1871
1869
1872
1870 class NonePathPermissionChecker(BasePathPermissionChecker):
1873 class NonePathPermissionChecker(BasePathPermissionChecker):
1871
1874
1872 @property
1875 @property
1873 def has_full_access(self):
1876 def has_full_access(self):
1874 return False
1877 return False
1875
1878
1876 def has_access(self, path):
1879 def has_access(self, path):
1877 return False
1880 return False
1878
1881
1879
1882
1880 class PatternPathPermissionChecker(BasePathPermissionChecker):
1883 class PatternPathPermissionChecker(BasePathPermissionChecker):
1881
1884
1882 def __init__(self, includes, excludes):
1885 def __init__(self, includes, excludes):
1883 self.includes = includes
1886 self.includes = includes
1884 self.excludes = excludes
1887 self.excludes = excludes
1885 self.includes_re = [] if not includes else [
1888 self.includes_re = [] if not includes else [
1886 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1889 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1887 self.excludes_re = [] if not excludes else [
1890 self.excludes_re = [] if not excludes else [
1888 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1891 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1889
1892
1890 @property
1893 @property
1891 def has_full_access(self):
1894 def has_full_access(self):
1892 return '*' in self.includes and not self.excludes
1895 return '*' in self.includes and not self.excludes
1893
1896
1894 def has_access(self, path):
1897 def has_access(self, path):
1895 for regex in self.excludes_re:
1898 for regex in self.excludes_re:
1896 if regex.match(path):
1899 if regex.match(path):
1897 return False
1900 return False
1898 for regex in self.includes_re:
1901 for regex in self.includes_re:
1899 if regex.match(path):
1902 if regex.match(path):
1900 return True
1903 return True
1901 return False
1904 return False
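A short usage sketch of the path permission checkers above; the patterns and paths are illustrative only:

# no literal '*' include, so this builds a PatternPathPermissionChecker
checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'README.rst'], excludes=['*.secret'])

print(checker.has_full_access)                # False
print(checker.has_access('docs/index.rst'))   # True, matches 'docs/*'
print(checker.has_access('deploy.secret'))    # False, excludes win first
print(checker.has_access('setup.py'))         # False, matches no include

# a bare '*' include with no excludes short-circuits to AllPathPermissionChecker
print(BasePathPermissionChecker.create_from_patterns(['*'], None).has_full_access)  # True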
@@ -1,754 +1,774 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 comments model for RhodeCode
22 comments model for RhodeCode
23 """
23 """
24
24
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import collections
27 import collections
28
28
29 from pyramid.threadlocal import get_current_registry, get_current_request
29 from pyramid.threadlocal import get_current_registry, get_current_request
30 from sqlalchemy.sql.expression import null
30 from sqlalchemy.sql.expression import null
31 from sqlalchemy.sql.functions import coalesce
31 from sqlalchemy.sql.functions import coalesce
32
32
33 from rhodecode.lib import helpers as h, diffs, channelstream
33 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
34 from rhodecode.lib import audit_logger
34 from rhodecode.lib import audit_logger
35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str
35 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str
36 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
37 from rhodecode.model.db import (
37 from rhodecode.model.db import (
38 ChangesetComment, User, Notification, PullRequest, AttributeDict)
38 ChangesetComment, User, Notification, PullRequest, AttributeDict)
39 from rhodecode.model.notification import NotificationModel
39 from rhodecode.model.notification import NotificationModel
40 from rhodecode.model.meta import Session
40 from rhodecode.model.meta import Session
41 from rhodecode.model.settings import VcsSettingsModel
41 from rhodecode.model.settings import VcsSettingsModel
42 from rhodecode.model.notification import EmailNotificationModel
42 from rhodecode.model.notification import EmailNotificationModel
43 from rhodecode.model.validation_schema.schemas import comment_schema
43 from rhodecode.model.validation_schema.schemas import comment_schema
44
44
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 class CommentsModel(BaseModel):
49 class CommentsModel(BaseModel):
50
50
51 cls = ChangesetComment
51 cls = ChangesetComment
52
52
53 DIFF_CONTEXT_BEFORE = 3
53 DIFF_CONTEXT_BEFORE = 3
54 DIFF_CONTEXT_AFTER = 3
54 DIFF_CONTEXT_AFTER = 3
55
55
56 def __get_commit_comment(self, changeset_comment):
56 def __get_commit_comment(self, changeset_comment):
57 return self._get_instance(ChangesetComment, changeset_comment)
57 return self._get_instance(ChangesetComment, changeset_comment)
58
58
59 def __get_pull_request(self, pull_request):
59 def __get_pull_request(self, pull_request):
60 return self._get_instance(PullRequest, pull_request)
60 return self._get_instance(PullRequest, pull_request)
61
61
62 def _extract_mentions(self, s):
62 def _extract_mentions(self, s):
63 user_objects = []
63 user_objects = []
64 for username in extract_mentioned_users(s):
64 for username in extract_mentioned_users(s):
65 user_obj = User.get_by_username(username, case_insensitive=True)
65 user_obj = User.get_by_username(username, case_insensitive=True)
66 if user_obj:
66 if user_obj:
67 user_objects.append(user_obj)
67 user_objects.append(user_obj)
68 return user_objects
68 return user_objects
69
69
70 def _get_renderer(self, global_renderer='rst', request=None):
70 def _get_renderer(self, global_renderer='rst', request=None):
71 request = request or get_current_request()
71 request = request or get_current_request()
72
72
73 try:
73 try:
74 global_renderer = request.call_context.visual.default_renderer
74 global_renderer = request.call_context.visual.default_renderer
75 except AttributeError:
75 except AttributeError:
76 log.debug("Renderer not set, falling back "
76 log.debug("Renderer not set, falling back "
77 "to default renderer '%s'", global_renderer)
77 "to default renderer '%s'", global_renderer)
78 except Exception:
78 except Exception:
79 log.error(traceback.format_exc())
79 log.error(traceback.format_exc())
80 return global_renderer
80 return global_renderer
81
81
82 def aggregate_comments(self, comments, versions, show_version, inline=False):
82 def aggregate_comments(self, comments, versions, show_version, inline=False):
83 # group comments by version; track 'at', cumulative 'until', and 'display'/'outdated' sets
83 # group comments by version; track 'at', cumulative 'until', and 'display'/'outdated' sets
84
84
85 comment_groups = collections.defaultdict(list)
85 comment_groups = collections.defaultdict(list)
86 [comment_groups[
86 [comment_groups[
87 _co.pull_request_version_id].append(_co) for _co in comments]
87 _co.pull_request_version_id].append(_co) for _co in comments]
88
88
89 def yield_comments(pos):
89 def yield_comments(pos):
90 for co in comment_groups[pos]:
90 for co in comment_groups[pos]:
91 yield co
91 yield co
92
92
93 comment_versions = collections.defaultdict(
93 comment_versions = collections.defaultdict(
94 lambda: collections.defaultdict(list))
94 lambda: collections.defaultdict(list))
95 prev_prvid = -1
95 prev_prvid = -1
96 # fake last entry with None, to aggregate on "latest" version which
96 # fake last entry with None, to aggregate on "latest" version which
97 # doesn't have a pull_request_version_id
97 # doesn't have a pull_request_version_id
98 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
98 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
99 prvid = ver.pull_request_version_id
99 prvid = ver.pull_request_version_id
100 if prev_prvid == -1:
100 if prev_prvid == -1:
101 prev_prvid = prvid
101 prev_prvid = prvid
102
102
103 for co in yield_comments(prvid):
103 for co in yield_comments(prvid):
104 comment_versions[prvid]['at'].append(co)
104 comment_versions[prvid]['at'].append(co)
105
105
106 # save until
106 # save until
107 current = comment_versions[prvid]['at']
107 current = comment_versions[prvid]['at']
108 prev_until = comment_versions[prev_prvid]['until']
108 prev_until = comment_versions[prev_prvid]['until']
109 cur_until = prev_until + current
109 cur_until = prev_until + current
110 comment_versions[prvid]['until'].extend(cur_until)
110 comment_versions[prvid]['until'].extend(cur_until)
111
111
112 # save outdated
112 # save outdated
113 if inline:
113 if inline:
114 outdated = [x for x in cur_until
114 outdated = [x for x in cur_until
115 if x.outdated_at_version(show_version)]
115 if x.outdated_at_version(show_version)]
116 else:
116 else:
117 outdated = [x for x in cur_until
117 outdated = [x for x in cur_until
118 if x.older_than_version(show_version)]
118 if x.older_than_version(show_version)]
119 display = [x for x in cur_until if x not in outdated]
119 display = [x for x in cur_until if x not in outdated]
120
120
121 comment_versions[prvid]['outdated'] = outdated
121 comment_versions[prvid]['outdated'] = outdated
122 comment_versions[prvid]['display'] = display
122 comment_versions[prvid]['display'] = display
123
123
124 prev_prvid = prvid
124 prev_prvid = prvid
125
125
126 return comment_versions
126 return comment_versions
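A simplified sketch of the bucketing done above, with plain strings standing in for comment objects (illustrative only):

import collections

comments_by_version = {1: ['c1', 'c2'], 2: ['c3'], None: ['c4']}  # None = latest

buckets = collections.defaultdict(lambda: collections.defaultdict(list))
previous = None
for version in [1, 2, None]:
    # comments made exactly at this version
    buckets[version]['at'] = list(comments_by_version.get(version, []))
    # everything accumulated up to and including this version
    prev_until = buckets[previous]['until'] if previous is not None else []
    buckets[version]['until'] = prev_until + buckets[version]['at']
    previous = version

print(buckets[2]['until'])     # ['c1', 'c2', 'c3']
print(buckets[None]['until'])  # ['c1', 'c2', 'c3', 'c4']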
127
127
128 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
128 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
129 qry = Session().query(ChangesetComment) \
129 qry = Session().query(ChangesetComment) \
130 .filter(ChangesetComment.repo == repo)
130 .filter(ChangesetComment.repo == repo)
131
131
132 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
132 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
133 qry = qry.filter(ChangesetComment.comment_type == comment_type)
133 qry = qry.filter(ChangesetComment.comment_type == comment_type)
134
134
135 if user:
135 if user:
136 user = self._get_user(user)
136 user = self._get_user(user)
137 if user:
137 if user:
138 qry = qry.filter(ChangesetComment.user_id == user.user_id)
138 qry = qry.filter(ChangesetComment.user_id == user.user_id)
139
139
140 if commit_id:
140 if commit_id:
141 qry = qry.filter(ChangesetComment.revision == commit_id)
141 qry = qry.filter(ChangesetComment.revision == commit_id)
142
142
143 qry = qry.order_by(ChangesetComment.created_on)
143 qry = qry.order_by(ChangesetComment.created_on)
144 return qry.all()
144 return qry.all()
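Typical use of the query helper above (arguments are illustrative):

# illustrative: all TODO comments a given user left on one commit;
# 'repo' is assumed to be a Repository instance already in scope
todos_on_commit = CommentsModel().get_repository_comments(
    repo,
    comment_type=ChangesetComment.COMMENT_TYPE_TODO,
    user='some-user',
    commit_id='abcdef0123456789abcdef0123456789abcdef01')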
145
145
146 def get_repository_unresolved_todos(self, repo):
146 def get_repository_unresolved_todos(self, repo):
147 todos = Session().query(ChangesetComment) \
147 todos = Session().query(ChangesetComment) \
148 .filter(ChangesetComment.repo == repo) \
148 .filter(ChangesetComment.repo == repo) \
149 .filter(ChangesetComment.resolved_by == None) \
149 .filter(ChangesetComment.resolved_by == None) \
150 .filter(ChangesetComment.comment_type
150 .filter(ChangesetComment.comment_type
151 == ChangesetComment.COMMENT_TYPE_TODO)
151 == ChangesetComment.COMMENT_TYPE_TODO)
152 todos = todos.all()
152 todos = todos.all()
153
153
154 return todos
154 return todos
155
155
156 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True):
156 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True):
157
157
158 todos = Session().query(ChangesetComment) \
158 todos = Session().query(ChangesetComment) \
159 .filter(ChangesetComment.pull_request == pull_request) \
159 .filter(ChangesetComment.pull_request == pull_request) \
160 .filter(ChangesetComment.resolved_by == None) \
160 .filter(ChangesetComment.resolved_by == None) \
161 .filter(ChangesetComment.comment_type
161 .filter(ChangesetComment.comment_type
162 == ChangesetComment.COMMENT_TYPE_TODO)
162 == ChangesetComment.COMMENT_TYPE_TODO)
163
163
164 if not show_outdated:
164 if not show_outdated:
165 todos = todos.filter(
165 todos = todos.filter(
166 coalesce(ChangesetComment.display_state, '') !=
166 coalesce(ChangesetComment.display_state, '') !=
167 ChangesetComment.COMMENT_OUTDATED)
167 ChangesetComment.COMMENT_OUTDATED)
168
168
169 todos = todos.all()
169 todos = todos.all()
170
170
171 return todos
171 return todos
172
172
173 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True):
173 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True):
174
174
175 todos = Session().query(ChangesetComment) \
175 todos = Session().query(ChangesetComment) \
176 .filter(ChangesetComment.pull_request == pull_request) \
176 .filter(ChangesetComment.pull_request == pull_request) \
177 .filter(ChangesetComment.resolved_by != None) \
177 .filter(ChangesetComment.resolved_by != None) \
178 .filter(ChangesetComment.comment_type
178 .filter(ChangesetComment.comment_type
179 == ChangesetComment.COMMENT_TYPE_TODO)
179 == ChangesetComment.COMMENT_TYPE_TODO)
180
180
181 if not show_outdated:
181 if not show_outdated:
182 todos = todos.filter(
182 todos = todos.filter(
183 coalesce(ChangesetComment.display_state, '') !=
183 coalesce(ChangesetComment.display_state, '') !=
184 ChangesetComment.COMMENT_OUTDATED)
184 ChangesetComment.COMMENT_OUTDATED)
185
185
186 todos = todos.all()
186 todos = todos.all()
187
187
188 return todos
188 return todos
189
189
190 def get_commit_unresolved_todos(self, commit_id, show_outdated=True):
190 def get_commit_unresolved_todos(self, commit_id, show_outdated=True):
191
191
192 todos = Session().query(ChangesetComment) \
192 todos = Session().query(ChangesetComment) \
193 .filter(ChangesetComment.revision == commit_id) \
193 .filter(ChangesetComment.revision == commit_id) \
194 .filter(ChangesetComment.resolved_by == None) \
194 .filter(ChangesetComment.resolved_by == None) \
195 .filter(ChangesetComment.comment_type
195 .filter(ChangesetComment.comment_type
196 == ChangesetComment.COMMENT_TYPE_TODO)
196 == ChangesetComment.COMMENT_TYPE_TODO)
197
197
198 if not show_outdated:
198 if not show_outdated:
199 todos = todos.filter(
199 todos = todos.filter(
200 coalesce(ChangesetComment.display_state, '') !=
200 coalesce(ChangesetComment.display_state, '') !=
201 ChangesetComment.COMMENT_OUTDATED)
201 ChangesetComment.COMMENT_OUTDATED)
202
202
203 todos = todos.all()
203 todos = todos.all()
204
204
205 return todos
205 return todos
206
206
207 def get_commit_resolved_todos(self, commit_id, show_outdated=True):
207 def get_commit_resolved_todos(self, commit_id, show_outdated=True):
208
208
209 todos = Session().query(ChangesetComment) \
209 todos = Session().query(ChangesetComment) \
210 .filter(ChangesetComment.revision == commit_id) \
210 .filter(ChangesetComment.revision == commit_id) \
211 .filter(ChangesetComment.resolved_by != None) \
211 .filter(ChangesetComment.resolved_by != None) \
212 .filter(ChangesetComment.comment_type
212 .filter(ChangesetComment.comment_type
213 == ChangesetComment.COMMENT_TYPE_TODO)
213 == ChangesetComment.COMMENT_TYPE_TODO)
214
214
215 if not show_outdated:
215 if not show_outdated:
216 todos = todos.filter(
216 todos = todos.filter(
217 coalesce(ChangesetComment.display_state, '') !=
217 coalesce(ChangesetComment.display_state, '') !=
218 ChangesetComment.COMMENT_OUTDATED)
218 ChangesetComment.COMMENT_OUTDATED)
219
219
220 todos = todos.all()
220 todos = todos.all()
221
221
222 return todos
222 return todos
223
223
224 def _log_audit_action(self, action, action_data, auth_user, comment):
224 def _log_audit_action(self, action, action_data, auth_user, comment):
225 audit_logger.store(
225 audit_logger.store(
226 action=action,
226 action=action,
227 action_data=action_data,
227 action_data=action_data,
228 user=auth_user,
228 user=auth_user,
229 repo=comment.repo)
229 repo=comment.repo)
230
230
231 def create(self, text, repo, user, commit_id=None, pull_request=None,
231 def create(self, text, repo, user, commit_id=None, pull_request=None,
232 f_path=None, line_no=None, status_change=None,
232 f_path=None, line_no=None, status_change=None,
233 status_change_type=None, comment_type=None,
233 status_change_type=None, comment_type=None,
234 resolves_comment_id=None, closing_pr=False, send_email=True,
234 resolves_comment_id=None, closing_pr=False, send_email=True,
235 renderer=None, auth_user=None, extra_recipients=None):
235 renderer=None, auth_user=None, extra_recipients=None):
236 """
236 """
237 Creates a new comment for a commit or pull request.
237 Creates a new comment for a commit or pull request.
238 If status_change is not None, this comment is associated with a
238 If status_change is not None, this comment is associated with a
239 status change of the commit, or of a commit associated with the pull request.
239 status change of the commit, or of a commit associated with the pull request.
240
240
241 :param text:
241 :param text:
242 :param repo:
242 :param repo:
243 :param user:
243 :param user:
244 :param commit_id:
244 :param commit_id:
245 :param pull_request:
245 :param pull_request:
246 :param f_path:
246 :param f_path:
247 :param line_no:
247 :param line_no:
248 :param status_change: Label for status change
248 :param status_change: Label for status change
249 :param comment_type: Type of comment
249 :param comment_type: Type of comment
250 :param resolves_comment_id: id of comment which this one will resolve
250 :param resolves_comment_id: id of comment which this one will resolve
251 :param status_change_type: type of status change
251 :param status_change_type: type of status change
252 :param closing_pr:
252 :param closing_pr:
253 :param send_email:
253 :param send_email:
254 :param renderer: pick renderer for this comment
254 :param renderer: pick renderer for this comment
255 :param auth_user: current authenticated user calling this method
255 :param auth_user: current authenticated user calling this method
256 :param extra_recipients: list of extra users to be added to recipients
256 :param extra_recipients: list of extra users to be added to recipients
257 """
257 """
258
258
259 if not text:
259 if not text:
260 log.warning('Missing text for comment, skipping...')
260 log.warning('Missing text for comment, skipping...')
261 return
261 return
262 request = get_current_request()
262 request = get_current_request()
263 _ = request.translate
263 _ = request.translate
264
264
265 if not renderer:
265 if not renderer:
266 renderer = self._get_renderer(request=request)
266 renderer = self._get_renderer(request=request)
267
267
268 repo = self._get_repo(repo)
268 repo = self._get_repo(repo)
269 user = self._get_user(user)
269 user = self._get_user(user)
270 auth_user = auth_user or user
270 auth_user = auth_user or user
271
271
272 schema = comment_schema.CommentSchema()
272 schema = comment_schema.CommentSchema()
273 validated_kwargs = schema.deserialize(dict(
273 validated_kwargs = schema.deserialize(dict(
274 comment_body=text,
274 comment_body=text,
275 comment_type=comment_type,
275 comment_type=comment_type,
276 comment_file=f_path,
276 comment_file=f_path,
277 comment_line=line_no,
277 comment_line=line_no,
278 renderer_type=renderer,
278 renderer_type=renderer,
279 status_change=status_change_type,
279 status_change=status_change_type,
280 resolves_comment_id=resolves_comment_id,
280 resolves_comment_id=resolves_comment_id,
281 repo=repo.repo_id,
281 repo=repo.repo_id,
282 user=user.user_id,
282 user=user.user_id,
283 ))
283 ))
284
284
285 comment = ChangesetComment()
285 comment = ChangesetComment()
286 comment.renderer = validated_kwargs['renderer_type']
286 comment.renderer = validated_kwargs['renderer_type']
287 comment.text = validated_kwargs['comment_body']
287 comment.text = validated_kwargs['comment_body']
288 comment.f_path = validated_kwargs['comment_file']
288 comment.f_path = validated_kwargs['comment_file']
289 comment.line_no = validated_kwargs['comment_line']
289 comment.line_no = validated_kwargs['comment_line']
290 comment.comment_type = validated_kwargs['comment_type']
290 comment.comment_type = validated_kwargs['comment_type']
291
291
292 comment.repo = repo
292 comment.repo = repo
293 comment.author = user
293 comment.author = user
294 resolved_comment = self.__get_commit_comment(
294 resolved_comment = self.__get_commit_comment(
295 validated_kwargs['resolves_comment_id'])
295 validated_kwargs['resolves_comment_id'])
296 # check if the comment actually belongs to this PR
296 # check if the comment actually belongs to this PR
297 if resolved_comment and resolved_comment.pull_request and \
297 if resolved_comment and resolved_comment.pull_request and \
298 resolved_comment.pull_request != pull_request:
298 resolved_comment.pull_request != pull_request:
299 log.warning('Comment tried to resolve unrelated todo comment: %s',
299 log.warning('Comment tried to resolve unrelated todo comment: %s',
300 resolved_comment)
300 resolved_comment)
301 # comment not bound to this pull request, forbid
301 # comment not bound to this pull request, forbid
302 resolved_comment = None
302 resolved_comment = None
303
303
304 elif resolved_comment and resolved_comment.repo and \
304 elif resolved_comment and resolved_comment.repo and \
305 resolved_comment.repo != repo:
305 resolved_comment.repo != repo:
306 log.warning('Comment tried to resolve unrelated todo comment: %s',
306 log.warning('Comment tried to resolve unrelated todo comment: %s',
307 resolved_comment)
307 resolved_comment)
308 # comment not bound to this repo, forbid
308 # comment not bound to this repo, forbid
309 resolved_comment = None
309 resolved_comment = None
310
310
311 comment.resolved_comment = resolved_comment
311 comment.resolved_comment = resolved_comment
312
312
313 pull_request_id = pull_request
313 pull_request_id = pull_request
314
314
315 commit_obj = None
315 commit_obj = None
316 pull_request_obj = None
316 pull_request_obj = None
317
317
318 if commit_id:
318 if commit_id:
319 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
319 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
320 # do a lookup, so we don't pass something bad here
320 # do a lookup, so we don't pass something bad here
321 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
321 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
322 comment.revision = commit_obj.raw_id
322 comment.revision = commit_obj.raw_id
323
323
324 elif pull_request_id:
324 elif pull_request_id:
325 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
325 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
326 pull_request_obj = self.__get_pull_request(pull_request_id)
326 pull_request_obj = self.__get_pull_request(pull_request_id)
327 comment.pull_request = pull_request_obj
327 comment.pull_request = pull_request_obj
328 else:
328 else:
329 raise Exception('Please specify commit or pull_request_id')
329 raise Exception('Please specify commit or pull_request_id')
330
330
331 Session().add(comment)
331 Session().add(comment)
332 Session().flush()
332 Session().flush()
333 kwargs = {
333 kwargs = {
334 'user': user,
334 'user': user,
335 'renderer_type': renderer,
335 'renderer_type': renderer,
336 'repo_name': repo.repo_name,
336 'repo_name': repo.repo_name,
337 'status_change': status_change,
337 'status_change': status_change,
338 'status_change_type': status_change_type,
338 'status_change_type': status_change_type,
339 'comment_body': text,
339 'comment_body': text,
340 'comment_file': f_path,
340 'comment_file': f_path,
341 'comment_line': line_no,
341 'comment_line': line_no,
342 'comment_type': comment_type or 'note',
342 'comment_type': comment_type or 'note',
343 'comment_id': comment.comment_id
343 'comment_id': comment.comment_id
344 }
344 }
345
345
346 if commit_obj:
346 if commit_obj:
347 recipients = ChangesetComment.get_users(
347 recipients = ChangesetComment.get_users(
348 revision=commit_obj.raw_id)
348 revision=commit_obj.raw_id)
349 # add commit author if it's in RhodeCode system
349 # add commit author if it's in RhodeCode system
350 cs_author = User.get_from_cs_author(commit_obj.author)
350 cs_author = User.get_from_cs_author(commit_obj.author)
351 if not cs_author:
351 if not cs_author:
352 # use repo owner if we cannot extract the author correctly
352 # use repo owner if we cannot extract the author correctly
353 cs_author = repo.user
353 cs_author = repo.user
354 recipients += [cs_author]
354 recipients += [cs_author]
355
355
356 commit_comment_url = self.get_url(comment, request=request)
356 commit_comment_url = self.get_url(comment, request=request)
357 commit_comment_reply_url = self.get_url(
357 commit_comment_reply_url = self.get_url(
358 comment, request=request,
358 comment, request=request,
359 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
359 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
360
360
361 target_repo_url = h.link_to(
361 target_repo_url = h.link_to(
362 repo.repo_name,
362 repo.repo_name,
363 h.route_url('repo_summary', repo_name=repo.repo_name))
363 h.route_url('repo_summary', repo_name=repo.repo_name))
364
364
365 # commit specifics
365 # commit specifics
366 kwargs.update({
366 kwargs.update({
367 'commit': commit_obj,
367 'commit': commit_obj,
368 'commit_message': commit_obj.message,
368 'commit_message': commit_obj.message,
369 'commit_target_repo_url': target_repo_url,
369 'commit_target_repo_url': target_repo_url,
370 'commit_comment_url': commit_comment_url,
370 'commit_comment_url': commit_comment_url,
371 'commit_comment_reply_url': commit_comment_reply_url
371 'commit_comment_reply_url': commit_comment_reply_url
372 })
372 })
373
373
374 elif pull_request_obj:
374 elif pull_request_obj:
375 # get the current participants of this pull request
375 # get the current participants of this pull request
376 recipients = ChangesetComment.get_users(
376 recipients = ChangesetComment.get_users(
377 pull_request_id=pull_request_obj.pull_request_id)
377 pull_request_id=pull_request_obj.pull_request_id)
378 # add pull request author
378 # add pull request author
379 recipients += [pull_request_obj.author]
379 recipients += [pull_request_obj.author]
380
380
381 # add the reviewers to notification
381 # add the reviewers to notification
382 recipients += [x.user for x in pull_request_obj.reviewers]
382 recipients += [x.user for x in pull_request_obj.reviewers]
383
383
384 pr_target_repo = pull_request_obj.target_repo
384 pr_target_repo = pull_request_obj.target_repo
385 pr_source_repo = pull_request_obj.source_repo
385 pr_source_repo = pull_request_obj.source_repo
386
386
387 pr_comment_url = self.get_url(comment, request=request)
387 pr_comment_url = self.get_url(comment, request=request)
388 pr_comment_reply_url = self.get_url(
388 pr_comment_reply_url = self.get_url(
389 comment, request=request,
389 comment, request=request,
390 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
390 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
391
391
392 pr_url = h.route_url(
392 pr_url = h.route_url(
393 'pullrequest_show',
393 'pullrequest_show',
394 repo_name=pr_target_repo.repo_name,
394 repo_name=pr_target_repo.repo_name,
395 pull_request_id=pull_request_obj.pull_request_id, )
395 pull_request_id=pull_request_obj.pull_request_id, )
396
396
397 # set some variables for email notification
397 # set some variables for email notification
398 pr_target_repo_url = h.route_url(
398 pr_target_repo_url = h.route_url(
399 'repo_summary', repo_name=pr_target_repo.repo_name)
399 'repo_summary', repo_name=pr_target_repo.repo_name)
400
400
401 pr_source_repo_url = h.route_url(
401 pr_source_repo_url = h.route_url(
402 'repo_summary', repo_name=pr_source_repo.repo_name)
402 'repo_summary', repo_name=pr_source_repo.repo_name)
403
403
404 # pull request specifics
404 # pull request specifics
405 kwargs.update({
405 kwargs.update({
406 'pull_request': pull_request_obj,
406 'pull_request': pull_request_obj,
407 'pr_id': pull_request_obj.pull_request_id,
407 'pr_id': pull_request_obj.pull_request_id,
408 'pull_request_url': pr_url,
408 'pull_request_url': pr_url,
409 'pull_request_target_repo': pr_target_repo,
409 'pull_request_target_repo': pr_target_repo,
410 'pull_request_target_repo_url': pr_target_repo_url,
410 'pull_request_target_repo_url': pr_target_repo_url,
411 'pull_request_source_repo': pr_source_repo,
411 'pull_request_source_repo': pr_source_repo,
412 'pull_request_source_repo_url': pr_source_repo_url,
412 'pull_request_source_repo_url': pr_source_repo_url,
413 'pr_comment_url': pr_comment_url,
413 'pr_comment_url': pr_comment_url,
414 'pr_comment_reply_url': pr_comment_reply_url,
414 'pr_comment_reply_url': pr_comment_reply_url,
415 'pr_closing': closing_pr,
415 'pr_closing': closing_pr,
416 })
416 })
417
417
418 recipients += [self._get_user(u) for u in (extra_recipients or [])]
418 recipients += [self._get_user(u) for u in (extra_recipients or [])]
419
419
420 if send_email:
420 if send_email:
421 # pre-generate the subject for notification itself
421 # pre-generate the subject for notification itself
422 (subject,
422 (subject,
423 _h, _e, # we don't care about those
423 _h, _e, # we don't care about those
424 body_plaintext) = EmailNotificationModel().render_email(
424 body_plaintext) = EmailNotificationModel().render_email(
425 notification_type, **kwargs)
425 notification_type, **kwargs)
426
426
427 mention_recipients = set(
427 mention_recipients = set(
428 self._extract_mentions(text)).difference(recipients)
428 self._extract_mentions(text)).difference(recipients)
429
429
430 # create notification objects, and emails
430 # create notification objects, and emails
431 NotificationModel().create(
431 NotificationModel().create(
432 created_by=user,
432 created_by=user,
433 notification_subject=subject,
433 notification_subject=subject,
434 notification_body=body_plaintext,
434 notification_body=body_plaintext,
435 notification_type=notification_type,
435 notification_type=notification_type,
436 recipients=recipients,
436 recipients=recipients,
437 mention_recipients=mention_recipients,
437 mention_recipients=mention_recipients,
438 email_kwargs=kwargs,
438 email_kwargs=kwargs,
439 )
439 )
440
440
441 Session().flush()
441 Session().flush()
442 if comment.pull_request:
442 if comment.pull_request:
443 action = 'repo.pull_request.comment.create'
443 action = 'repo.pull_request.comment.create'
444 else:
444 else:
445 action = 'repo.commit.comment.create'
445 action = 'repo.commit.comment.create'
446
446
447 comment_data = comment.get_api_data()
447 comment_data = comment.get_api_data()
448 self._log_audit_action(
448 self._log_audit_action(
449 action, {'data': comment_data}, auth_user, comment)
449 action, {'data': comment_data}, auth_user, comment)
450
450
451 msg_url = ''
451 msg_url = ''
452 channel = None
452 channel = None
453 if commit_obj:
453 if commit_obj:
454 msg_url = commit_comment_url
454 msg_url = commit_comment_url
455 repo_name = repo.repo_name
455 repo_name = repo.repo_name
456 channel = u'/repo${}$/commit/{}'.format(
456 channel = u'/repo${}$/commit/{}'.format(
457 repo_name,
457 repo_name,
458 commit_obj.raw_id
458 commit_obj.raw_id
459 )
459 )
460 elif pull_request_obj:
460 elif pull_request_obj:
461 msg_url = pr_comment_url
461 msg_url = pr_comment_url
462 repo_name = pr_target_repo.repo_name
462 repo_name = pr_target_repo.repo_name
463 channel = u'/repo${}$/pr/{}'.format(
463 channel = u'/repo${}$/pr/{}'.format(
464 repo_name,
464 repo_name,
465 pull_request_id
465 pull_request_id
466 )
466 )
467
467
468 message = '<strong>{}</strong> {} - ' \
468 message = '<strong>{}</strong> {} - ' \
469 '<a onclick="window.location=\'{}\';' \
469 '<a onclick="window.location=\'{}\';' \
470 'window.location.reload()">' \
470 'window.location.reload()">' \
471 '<strong>{}</strong></a>'
471 '<strong>{}</strong></a>'
472 message = message.format(
472 message = message.format(
473 user.username, _('made a comment'), msg_url,
473 user.username, _('made a comment'), msg_url,
474 _('Show it now'))
474 _('Show it now'))
475
475
476 channelstream.post_message(
476 channelstream.post_message(
477 channel, message, user.username,
477 channel, message, user.username,
478 registry=get_current_registry())
478 registry=get_current_registry())
479
479
480 return comment
480 return comment
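A hedged usage sketch of create(); the argument values are made up and the call assumes an active request and database session:

# illustrative only: leave a plain note on a commit, without sending email
comment = CommentsModel().create(
    text='Looks good, please also add a changelog entry.',
    repo='some-repo',        # resolved internally via _get_repo()
    user='reviewer',         # resolved internally via _get_user()
    commit_id='abcdef0123456789abcdef0123456789abcdef01',
    comment_type='note',
    send_email=False)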
481
481
482 def delete(self, comment, auth_user):
482 def delete(self, comment, auth_user):
483 """
483 """
484 Deletes given comment
484 Deletes given comment
485 """
485 """
486 comment = self.__get_commit_comment(comment)
486 comment = self.__get_commit_comment(comment)
487 old_data = comment.get_api_data()
487 old_data = comment.get_api_data()
488 Session().delete(comment)
488 Session().delete(comment)
489
489
490 if comment.pull_request:
490 if comment.pull_request:
491 action = 'repo.pull_request.comment.delete'
491 action = 'repo.pull_request.comment.delete'
492 else:
492 else:
493 action = 'repo.commit.comment.delete'
493 action = 'repo.commit.comment.delete'
494
494
495 self._log_audit_action(
495 self._log_audit_action(
496 action, {'old_data': old_data}, auth_user, comment)
496 action, {'old_data': old_data}, auth_user, comment)
497
497
498 return comment
498 return comment
499
499
500 def get_all_comments(self, repo_id, revision=None, pull_request=None):
500 def get_all_comments(self, repo_id, revision=None, pull_request=None):
501 q = ChangesetComment.query()\
501 q = ChangesetComment.query()\
502 .filter(ChangesetComment.repo_id == repo_id)
502 .filter(ChangesetComment.repo_id == repo_id)
503 if revision:
503 if revision:
504 q = q.filter(ChangesetComment.revision == revision)
504 q = q.filter(ChangesetComment.revision == revision)
505 elif pull_request:
505 elif pull_request:
506 pull_request = self.__get_pull_request(pull_request)
506 pull_request = self.__get_pull_request(pull_request)
507 q = q.filter(ChangesetComment.pull_request == pull_request)
507 q = q.filter(ChangesetComment.pull_request == pull_request)
508 else:
508 else:
509 raise Exception('Please specify commit or pull_request')
509 raise Exception('Please specify commit or pull_request')
510 q = q.order_by(ChangesetComment.created_on)
510 q = q.order_by(ChangesetComment.created_on)
511 return q.all()
511 return q.all()
512
512
513 def get_url(self, comment, request=None, permalink=False, anchor=None):
513 def get_url(self, comment, request=None, permalink=False, anchor=None):
514 if not request:
514 if not request:
515 request = get_current_request()
515 request = get_current_request()
516
516
517 comment = self.__get_commit_comment(comment)
517 comment = self.__get_commit_comment(comment)
518 if anchor is None:
518 if anchor is None:
519 anchor = 'comment-{}'.format(comment.comment_id)
519 anchor = 'comment-{}'.format(comment.comment_id)
520
520
521 if comment.pull_request:
521 if comment.pull_request:
522 pull_request = comment.pull_request
522 pull_request = comment.pull_request
523 if permalink:
523 if permalink:
524 return request.route_url(
524 return request.route_url(
525 'pull_requests_global',
525 'pull_requests_global',
526 pull_request_id=pull_request.pull_request_id,
526 pull_request_id=pull_request.pull_request_id,
527 _anchor=anchor)
527 _anchor=anchor)
528 else:
528 else:
529 return request.route_url(
529 return request.route_url(
530 'pullrequest_show',
530 'pullrequest_show',
531 repo_name=safe_str(pull_request.target_repo.repo_name),
531 repo_name=safe_str(pull_request.target_repo.repo_name),
532 pull_request_id=pull_request.pull_request_id,
532 pull_request_id=pull_request.pull_request_id,
533 _anchor=anchor)
533 _anchor=anchor)
534
534
535 else:
535 else:
536 repo = comment.repo
536 repo = comment.repo
537 commit_id = comment.revision
537 commit_id = comment.revision
538
538
539 if permalink:
539 if permalink:
540 return request.route_url(
540 return request.route_url(
541 'repo_commit', repo_name=safe_str(repo.repo_id),
541 'repo_commit', repo_name=safe_str(repo.repo_id),
542 commit_id=commit_id,
542 commit_id=commit_id,
543 _anchor=anchor)
543 _anchor=anchor)
544
544
545 else:
545 else:
546 return request.route_url(
546 return request.route_url(
547 'repo_commit', repo_name=safe_str(repo.repo_name),
547 'repo_commit', repo_name=safe_str(repo.repo_name),
548 commit_id=commit_id,
548 commit_id=commit_id,
549 _anchor=anchor)
549 _anchor=anchor)
550
550
551 def get_comments(self, repo_id, revision=None, pull_request=None):
551 def get_comments(self, repo_id, revision=None, pull_request=None):
552 """
552 """
553 Gets main comments based on revision or pull_request_id
553 Gets main comments based on revision or pull_request_id
554
554
555 :param repo_id:
555 :param repo_id:
556 :param revision:
556 :param revision:
557 :param pull_request:
557 :param pull_request:
558 """
558 """
559
559
560 q = ChangesetComment.query()\
560 q = ChangesetComment.query()\
561 .filter(ChangesetComment.repo_id == repo_id)\
561 .filter(ChangesetComment.repo_id == repo_id)\
562 .filter(ChangesetComment.line_no == None)\
562 .filter(ChangesetComment.line_no == None)\
563 .filter(ChangesetComment.f_path == None)
563 .filter(ChangesetComment.f_path == None)
564 if revision:
564 if revision:
565 q = q.filter(ChangesetComment.revision == revision)
565 q = q.filter(ChangesetComment.revision == revision)
566 elif pull_request:
566 elif pull_request:
567 pull_request = self.__get_pull_request(pull_request)
567 pull_request = self.__get_pull_request(pull_request)
568 q = q.filter(ChangesetComment.pull_request == pull_request)
568 q = q.filter(ChangesetComment.pull_request == pull_request)
569 else:
569 else:
570 raise Exception('Please specify commit or pull_request')
570 raise Exception('Please specify commit or pull_request')
571 q = q.order_by(ChangesetComment.created_on)
571 q = q.order_by(ChangesetComment.created_on)
572 return q.all()
572 return q.all()
573
573
574 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
574 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
575 q = self._get_inline_comments_query(repo_id, revision, pull_request)
575 q = self._get_inline_comments_query(repo_id, revision, pull_request)
576 return self._group_comments_by_path_and_line_number(q)
576 return self._group_comments_by_path_and_line_number(q)
577
577
578 def get_inline_comments_count(self, inline_comments, skip_outdated=True,
578 def get_inline_comments_count(self, inline_comments, skip_outdated=True,
579 version=None):
579 version=None):
580 inline_cnt = 0
580 inline_cnt = 0
581 for fname, per_line_comments in inline_comments.iteritems():
581 for fname, per_line_comments in inline_comments.iteritems():
582 for lno, comments in per_line_comments.iteritems():
582 for lno, comments in per_line_comments.iteritems():
583 for comm in comments:
583 for comm in comments:
584 if not comm.outdated_at_version(version) and skip_outdated:
584 if not comm.outdated_at_version(version) and skip_outdated:
585 inline_cnt += 1
585 inline_cnt += 1
586
586
587 return inline_cnt
587 return inline_cnt
588
588
589 def get_outdated_comments(self, repo_id, pull_request):
589 def get_outdated_comments(self, repo_id, pull_request):
590 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
590 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
591 # of a pull request.
591 # of a pull request.
592 q = self._all_inline_comments_of_pull_request(pull_request)
592 q = self._all_inline_comments_of_pull_request(pull_request)
593 q = q.filter(
593 q = q.filter(
594 ChangesetComment.display_state ==
594 ChangesetComment.display_state ==
595 ChangesetComment.COMMENT_OUTDATED
595 ChangesetComment.COMMENT_OUTDATED
596 ).order_by(ChangesetComment.comment_id.asc())
596 ).order_by(ChangesetComment.comment_id.asc())
597
597
598 return self._group_comments_by_path_and_line_number(q)
598 return self._group_comments_by_path_and_line_number(q)
599
599
600 def _get_inline_comments_query(self, repo_id, revision, pull_request):
600 def _get_inline_comments_query(self, repo_id, revision, pull_request):
601 # TODO: johbo: Split this into two methods: One for PR and one for
601 # TODO: johbo: Split this into two methods: One for PR and one for
602 # commit.
602 # commit.
603 if revision:
603 if revision:
604 q = Session().query(ChangesetComment).filter(
604 q = Session().query(ChangesetComment).filter(
605 ChangesetComment.repo_id == repo_id,
605 ChangesetComment.repo_id == repo_id,
606 ChangesetComment.line_no != null(),
606 ChangesetComment.line_no != null(),
607 ChangesetComment.f_path != null(),
607 ChangesetComment.f_path != null(),
608 ChangesetComment.revision == revision)
608 ChangesetComment.revision == revision)
609
609
610 elif pull_request:
610 elif pull_request:
611 pull_request = self.__get_pull_request(pull_request)
611 pull_request = self.__get_pull_request(pull_request)
612 if not CommentsModel.use_outdated_comments(pull_request):
612 if not CommentsModel.use_outdated_comments(pull_request):
613 q = self._visible_inline_comments_of_pull_request(pull_request)
613 q = self._visible_inline_comments_of_pull_request(pull_request)
614 else:
614 else:
615 q = self._all_inline_comments_of_pull_request(pull_request)
615 q = self._all_inline_comments_of_pull_request(pull_request)
616
616
617 else:
617 else:
618 raise Exception('Please specify commit or pull_request_id')
618 raise Exception('Please specify commit or pull_request_id')
619 q = q.order_by(ChangesetComment.comment_id.asc())
619 q = q.order_by(ChangesetComment.comment_id.asc())
620 return q
620 return q
621
621
622 def _group_comments_by_path_and_line_number(self, q):
622 def _group_comments_by_path_and_line_number(self, q):
623 comments = q.all()
623 comments = q.all()
624 paths = collections.defaultdict(lambda: collections.defaultdict(list))
624 paths = collections.defaultdict(lambda: collections.defaultdict(list))
625 for co in comments:
625 for co in comments:
626 paths[co.f_path][co.line_no].append(co)
626 paths[co.f_path][co.line_no].append(co)
627 return paths
627 return paths
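The value returned above is a nested mapping of file path -> line number -> comments; a toy illustration of its shape:

import collections

paths = collections.defaultdict(lambda: collections.defaultdict(list))
paths['setup.py']['n12'].append('comment-1')    # stand-ins for ChangesetComment objects
paths['setup.py']['n12'].append('comment-2')
paths['README.rst']['o3'].append('comment-3')
print(dict(paths['setup.py']))   # {'n12': ['comment-1', 'comment-2']}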
628
628
629 @classmethod
629 @classmethod
630 def needed_extra_diff_context(cls):
630 def needed_extra_diff_context(cls):
631 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
631 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
632
632
633 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
633 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
634 if not CommentsModel.use_outdated_comments(pull_request):
634 if not CommentsModel.use_outdated_comments(pull_request):
635 return
635 return
636
636
637 comments = self._visible_inline_comments_of_pull_request(pull_request)
637 comments = self._visible_inline_comments_of_pull_request(pull_request)
638 comments_to_outdate = comments.all()
638 comments_to_outdate = comments.all()
639
639
640 for comment in comments_to_outdate:
640 for comment in comments_to_outdate:
641 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
641 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
642
642
643 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
643 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
644 diff_line = _parse_comment_line_number(comment.line_no)
644 diff_line = _parse_comment_line_number(comment.line_no)
645
645
646 try:
646 try:
647 old_context = old_diff_proc.get_context_of_line(
647 old_context = old_diff_proc.get_context_of_line(
648 path=comment.f_path, diff_line=diff_line)
648 path=comment.f_path, diff_line=diff_line)
649 new_context = new_diff_proc.get_context_of_line(
649 new_context = new_diff_proc.get_context_of_line(
650 path=comment.f_path, diff_line=diff_line)
650 path=comment.f_path, diff_line=diff_line)
651 except (diffs.LineNotInDiffException,
651 except (diffs.LineNotInDiffException,
652 diffs.FileNotInDiffException):
652 diffs.FileNotInDiffException):
653 comment.display_state = ChangesetComment.COMMENT_OUTDATED
653 comment.display_state = ChangesetComment.COMMENT_OUTDATED
654 return
654 return
655
655
656 if old_context == new_context:
656 if old_context == new_context:
657 return
657 return
658
658
659 if self._should_relocate_diff_line(diff_line):
659 if self._should_relocate_diff_line(diff_line):
660 new_diff_lines = new_diff_proc.find_context(
660 new_diff_lines = new_diff_proc.find_context(
661 path=comment.f_path, context=old_context,
661 path=comment.f_path, context=old_context,
662 offset=self.DIFF_CONTEXT_BEFORE)
662 offset=self.DIFF_CONTEXT_BEFORE)
663 if not new_diff_lines:
663 if not new_diff_lines:
664 comment.display_state = ChangesetComment.COMMENT_OUTDATED
664 comment.display_state = ChangesetComment.COMMENT_OUTDATED
665 else:
665 else:
666 new_diff_line = self._choose_closest_diff_line(
666 new_diff_line = self._choose_closest_diff_line(
667 diff_line, new_diff_lines)
667 diff_line, new_diff_lines)
668 comment.line_no = _diff_to_comment_line_number(new_diff_line)
668 comment.line_no = _diff_to_comment_line_number(new_diff_line)
669 else:
669 else:
670 comment.display_state = ChangesetComment.COMMENT_OUTDATED
670 comment.display_state = ChangesetComment.COMMENT_OUTDATED
671
671
672 def _should_relocate_diff_line(self, diff_line):
672 def _should_relocate_diff_line(self, diff_line):
673 """
673 """
674 Checks if relocation shall be tried for the given `diff_line`.
674 Checks if relocation shall be tried for the given `diff_line`.
675
675
676 If a comment points into the first lines, then we can have a situation
676 If a comment points into the first lines, then we can have a situation
677 that after an update another line has been added on top. In this case
677 that after an update another line has been added on top. In this case
678 we would find the context still and move the comment around. This
678 we would find the context still and move the comment around. This
679 would be wrong.
679 would be wrong.
680 """
680 """
681 should_relocate = (
681 should_relocate = (
682 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
682 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
683 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
683 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
684 return should_relocate
684 return should_relocate
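For instance, with DIFF_CONTEXT_BEFORE = 3 as defined on this class (hypothetical line numbers):

# comments anchored within the first DIFF_CONTEXT_BEFORE lines are left
# alone, since their context could match spuriously after an update
model = CommentsModel()
print(model._should_relocate_diff_line(diffs.DiffLineNumber(old=None, new=2)))   # False
print(model._should_relocate_diff_line(diffs.DiffLineNumber(old=None, new=10)))  # True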
685
685
686 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
686 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
687 candidate = new_diff_lines[0]
687 candidate = new_diff_lines[0]
688 best_delta = _diff_line_delta(diff_line, candidate)
688 best_delta = _diff_line_delta(diff_line, candidate)
689 for new_diff_line in new_diff_lines[1:]:
689 for new_diff_line in new_diff_lines[1:]:
690 delta = _diff_line_delta(diff_line, new_diff_line)
690 delta = _diff_line_delta(diff_line, new_diff_line)
691 if delta < best_delta:
691 if delta < best_delta:
692 candidate = new_diff_line
692 candidate = new_diff_line
693 best_delta = delta
693 best_delta = delta
694 return candidate
694 return candidate
695
695
696 def _visible_inline_comments_of_pull_request(self, pull_request):
696 def _visible_inline_comments_of_pull_request(self, pull_request):
697 comments = self._all_inline_comments_of_pull_request(pull_request)
697 comments = self._all_inline_comments_of_pull_request(pull_request)
698 comments = comments.filter(
698 comments = comments.filter(
699 coalesce(ChangesetComment.display_state, '') !=
699 coalesce(ChangesetComment.display_state, '') !=
700 ChangesetComment.COMMENT_OUTDATED)
700 ChangesetComment.COMMENT_OUTDATED)
701 return comments
701 return comments
702
702
703 def _all_inline_comments_of_pull_request(self, pull_request):
703 def _all_inline_comments_of_pull_request(self, pull_request):
704 comments = Session().query(ChangesetComment)\
704 comments = Session().query(ChangesetComment)\
705 .filter(ChangesetComment.line_no != None)\
705 .filter(ChangesetComment.line_no != None)\
706 .filter(ChangesetComment.f_path != None)\
706 .filter(ChangesetComment.f_path != None)\
707 .filter(ChangesetComment.pull_request == pull_request)
707 .filter(ChangesetComment.pull_request == pull_request)
708 return comments
708 return comments
709
709
710 def _all_general_comments_of_pull_request(self, pull_request):
710 def _all_general_comments_of_pull_request(self, pull_request):
711 comments = Session().query(ChangesetComment)\
711 comments = Session().query(ChangesetComment)\
712 .filter(ChangesetComment.line_no == None)\
712 .filter(ChangesetComment.line_no == None)\
713 .filter(ChangesetComment.f_path == None)\
713 .filter(ChangesetComment.f_path == None)\
714 .filter(ChangesetComment.pull_request == pull_request)
714 .filter(ChangesetComment.pull_request == pull_request)
715 return comments
715 return comments
716
716
717 @staticmethod
717 @staticmethod
718 def use_outdated_comments(pull_request):
718 def use_outdated_comments(pull_request):
719 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
719 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
720 settings = settings_model.get_general_settings()
720 settings = settings_model.get_general_settings()
721 return settings.get('rhodecode_use_outdated_comments', False)
721 return settings.get('rhodecode_use_outdated_comments', False)
722
722
723 def trigger_commit_comment_hook(self, repo, user, action, data=None):
724 repo = self._get_repo(repo)
725 target_scm = repo.scm_instance()
726 if action == 'create':
727 trigger_hook = hooks_utils.trigger_comment_commit_hooks
728 elif action == 'edit':
729 # TODO(dan): when this is supported we trigger edit hook too
730 return
731 else:
732 return
733
734 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
735 repo, action, trigger_hook)
736 trigger_hook(
737 username=user.username,
738 repo_name=repo.repo_name,
739 repo_type=target_scm.alias,
740 repo=repo,
741 data=data)
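A sketch of how a caller might wire this in after creating a commit comment; the exact call site and payload shape are not part of this diff, so the data dict below is an assumption:

# assumed caller-side wiring: text, repo, auth_user and commit_id come from
# the enclosing view code
comment = CommentsModel().create(
    text=text, repo=repo, user=auth_user, commit_id=commit_id,
    auth_user=auth_user)
if comment:
    CommentsModel().trigger_commit_comment_hook(
        repo, auth_user, 'create',
        data={'comment': comment.get_api_data(), 'commit': commit_id})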
742
723
743
724 def _parse_comment_line_number(line_no):
744 def _parse_comment_line_number(line_no):
725 """
745 """
726 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
746 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
727 """
747 """
728 old_line = None
748 old_line = None
729 new_line = None
749 new_line = None
730 if line_no.startswith('o'):
750 if line_no.startswith('o'):
731 old_line = int(line_no[1:])
751 old_line = int(line_no[1:])
732 elif line_no.startswith('n'):
752 elif line_no.startswith('n'):
733 new_line = int(line_no[1:])
753 new_line = int(line_no[1:])
734 else:
754 else:
735 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
755 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
736 return diffs.DiffLineNumber(old_line, new_line)
756 return diffs.DiffLineNumber(old_line, new_line)
737
757
738
758
739 def _diff_to_comment_line_number(diff_line):
759 def _diff_to_comment_line_number(diff_line):
740 if diff_line.new is not None:
760 if diff_line.new is not None:
741 return u'n{}'.format(diff_line.new)
761 return u'n{}'.format(diff_line.new)
742 elif diff_line.old is not None:
762 elif diff_line.old is not None:
743 return u'o{}'.format(diff_line.old)
763 return u'o{}'.format(diff_line.old)
744 return u''
764 return u''
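A quick round trip through the two helpers above (values are made up):

line = _parse_comment_line_number(u'n42')
assert (line.old, line.new) == (None, 42)
assert _diff_to_comment_line_number(line) == u'n42'

line = _parse_comment_line_number(u'o7')
assert (line.old, line.new) == (7, None)
assert _diff_to_comment_line_number(line) == u'o7'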
745
765
746
766
747 def _diff_line_delta(a, b):
767 def _diff_line_delta(a, b):
748 if None not in (a.new, b.new):
768 if None not in (a.new, b.new):
749 return abs(a.new - b.new)
769 return abs(a.new - b.new)
750 elif None not in (a.old, b.old):
770 elif None not in (a.old, b.old):
751 return abs(a.old - b.old)
771 return abs(a.old - b.old)
752 else:
772 else:
753 raise ValueError(
773 raise ValueError(
754 "Cannot compute delta between {} and {}".format(a, b))
774 "Cannot compute delta between {} and {}".format(a, b))
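For example, _diff_line_delta compares on the 'new' side when both lines have one, otherwise it falls back to the 'old' side (made-up values):

a = diffs.DiffLineNumber(old=None, new=10)
b = diffs.DiffLineNumber(old=None, new=14)
print(_diff_line_delta(a, b))   # 4

c = diffs.DiffLineNumber(old=5, new=7)
d = diffs.DiffLineNumber(old=9, new=None)
print(_diff_line_delta(c, d))   # 4, falls back to the 'old' side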
@@ -1,1893 +1,1890 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib
32 import urllib
33 import collections
33 import collections
34
34
35 from pyramid import compat
35 from pyramid import compat
36 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
37
37
38 from rhodecode import events
38 from rhodecode import events
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 from rhodecode.lib import audit_logger
41 from rhodecode.lib import audit_logger
42 from rhodecode.lib.compat import OrderedDict
42 from rhodecode.lib.compat import OrderedDict
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.markup_renderer import (
44 from rhodecode.lib.markup_renderer import (
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
46 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
47 from rhodecode.lib.vcs.backends.base import (
47 from rhodecode.lib.vcs.backends.base import (
48 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
48 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
49 from rhodecode.lib.vcs.conf import settings as vcs_settings
49 from rhodecode.lib.vcs.conf import settings as vcs_settings
50 from rhodecode.lib.vcs.exceptions import (
50 from rhodecode.lib.vcs.exceptions import (
51 CommitDoesNotExistError, EmptyRepositoryError)
51 CommitDoesNotExistError, EmptyRepositoryError)
52 from rhodecode.model import BaseModel
52 from rhodecode.model import BaseModel
53 from rhodecode.model.changeset_status import ChangesetStatusModel
53 from rhodecode.model.changeset_status import ChangesetStatusModel
54 from rhodecode.model.comment import CommentsModel
54 from rhodecode.model.comment import CommentsModel
55 from rhodecode.model.db import (
55 from rhodecode.model.db import (
56 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
56 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
57 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
57 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
58 from rhodecode.model.meta import Session
58 from rhodecode.model.meta import Session
59 from rhodecode.model.notification import NotificationModel, \
59 from rhodecode.model.notification import NotificationModel, \
60 EmailNotificationModel
60 EmailNotificationModel
61 from rhodecode.model.scm import ScmModel
61 from rhodecode.model.scm import ScmModel
62 from rhodecode.model.settings import VcsSettingsModel
62 from rhodecode.model.settings import VcsSettingsModel
63
63
64
64
65 log = logging.getLogger(__name__)
65 log = logging.getLogger(__name__)
66
66
67
67
68 # Data structure to hold the response data when updating commits during a pull
68 # Data structure to hold the response data when updating commits during a pull
69 # request update.
69 # request update.
70 class UpdateResponse(object):
70 class UpdateResponse(object):
71
71
72 def __init__(self, executed, reason, new, old, common_ancestor_id,
72 def __init__(self, executed, reason, new, old, common_ancestor_id,
73 commit_changes, source_changed, target_changed):
73 commit_changes, source_changed, target_changed):
74
74
75 self.executed = executed
75 self.executed = executed
76 self.reason = reason
76 self.reason = reason
77 self.new = new
77 self.new = new
78 self.old = old
78 self.old = old
79 self.common_ancestor_id = common_ancestor_id
79 self.common_ancestor_id = common_ancestor_id
80 self.changes = commit_changes
80 self.changes = commit_changes
81 self.source_changed = source_changed
81 self.source_changed = source_changed
82 self.target_changed = target_changed
82 self.target_changed = target_changed
83
83
84
84
85 class PullRequestModel(BaseModel):
85 class PullRequestModel(BaseModel):
86
86
87 cls = PullRequest
87 cls = PullRequest
88
88
89 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
89 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
90
90
91 UPDATE_STATUS_MESSAGES = {
91 UPDATE_STATUS_MESSAGES = {
92 UpdateFailureReason.NONE: lazy_ugettext(
92 UpdateFailureReason.NONE: lazy_ugettext(
93 'Pull request update successful.'),
93 'Pull request update successful.'),
94 UpdateFailureReason.UNKNOWN: lazy_ugettext(
94 UpdateFailureReason.UNKNOWN: lazy_ugettext(
95 'Pull request update failed because of an unknown error.'),
95 'Pull request update failed because of an unknown error.'),
96 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
96 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
97 'No update needed because the source and target have not changed.'),
97 'No update needed because the source and target have not changed.'),
98 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
98 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
99 'Pull request cannot be updated because the reference type is '
99 'Pull request cannot be updated because the reference type is '
100 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
100 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
101 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
101 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 'This pull request cannot be updated because the target '
102 'This pull request cannot be updated because the target '
103 'reference is missing.'),
103 'reference is missing.'),
104 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
104 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 'This pull request cannot be updated because the source '
105 'This pull request cannot be updated because the source '
106 'reference is missing.'),
106 'reference is missing.'),
107 }
107 }
108 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
108 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
109 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
109 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
110
110
111 def __get_pull_request(self, pull_request):
111 def __get_pull_request(self, pull_request):
112 return self._get_instance((
112 return self._get_instance((
113 PullRequest, PullRequestVersion), pull_request)
113 PullRequest, PullRequestVersion), pull_request)
114
114
115 def _check_perms(self, perms, pull_request, user, api=False):
115 def _check_perms(self, perms, pull_request, user, api=False):
116 if not api:
116 if not api:
117 return h.HasRepoPermissionAny(*perms)(
117 return h.HasRepoPermissionAny(*perms)(
118 user=user, repo_name=pull_request.target_repo.repo_name)
118 user=user, repo_name=pull_request.target_repo.repo_name)
119 else:
119 else:
120 return h.HasRepoPermissionAnyApi(*perms)(
120 return h.HasRepoPermissionAnyApi(*perms)(
121 user=user, repo_name=pull_request.target_repo.repo_name)
121 user=user, repo_name=pull_request.target_repo.repo_name)
122
122
123 def check_user_read(self, pull_request, user, api=False):
123 def check_user_read(self, pull_request, user, api=False):
124 _perms = ('repository.admin', 'repository.write', 'repository.read',)
124 _perms = ('repository.admin', 'repository.write', 'repository.read',)
125 return self._check_perms(_perms, pull_request, user, api)
125 return self._check_perms(_perms, pull_request, user, api)
126
126
127 def check_user_merge(self, pull_request, user, api=False):
127 def check_user_merge(self, pull_request, user, api=False):
128 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
128 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
129 return self._check_perms(_perms, pull_request, user, api)
129 return self._check_perms(_perms, pull_request, user, api)
130
130
131 def check_user_update(self, pull_request, user, api=False):
131 def check_user_update(self, pull_request, user, api=False):
132 owner = user.user_id == pull_request.user_id
132 owner = user.user_id == pull_request.user_id
133 return self.check_user_merge(pull_request, user, api) or owner
133 return self.check_user_merge(pull_request, user, api) or owner
134
134
135 def check_user_delete(self, pull_request, user):
135 def check_user_delete(self, pull_request, user):
136 owner = user.user_id == pull_request.user_id
136 owner = user.user_id == pull_request.user_id
137 _perms = ('repository.admin',)
137 _perms = ('repository.admin',)
138 return self._check_perms(_perms, pull_request, user) or owner
138 return self._check_perms(_perms, pull_request, user) or owner
139
139
140 def check_user_change_status(self, pull_request, user, api=False):
140 def check_user_change_status(self, pull_request, user, api=False):
141 reviewer = user.user_id in [x.user_id for x in
141 reviewer = user.user_id in [x.user_id for x in
142 pull_request.reviewers]
142 pull_request.reviewers]
143 return self.check_user_update(pull_request, user, api) or reviewer
143 return self.check_user_update(pull_request, user, api) or reviewer
144
144
145 def check_user_comment(self, pull_request, user):
145 def check_user_comment(self, pull_request, user):
146 owner = user.user_id == pull_request.user_id
146 owner = user.user_id == pull_request.user_id
147 return self.check_user_read(pull_request, user) or owner
147 return self.check_user_read(pull_request, user) or owner
148
148
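# A minimal sketch, assuming a Pyramid view as the caller, of gating an action
# on the permission helpers above; only check_user_change_status() comes from
# this module, the rest is hypothetical.
def _example_guard_status_change(model, pull_request, auth_user):
    from pyramid.httpexceptions import HTTPForbidden
    if not model.check_user_change_status(pull_request, auth_user):
        # neither owner, admin/writer on the target repo, nor a reviewer
        raise HTTPForbidden()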
149 def get(self, pull_request):
149 def get(self, pull_request):
150 return self.__get_pull_request(pull_request)
150 return self.__get_pull_request(pull_request)
151
151
152 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
152 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
153 statuses=None, opened_by=None, order_by=None,
153 statuses=None, opened_by=None, order_by=None,
154 order_dir='desc', only_created=False):
154 order_dir='desc', only_created=False):
155 repo = None
155 repo = None
156 if repo_name:
156 if repo_name:
157 repo = self._get_repo(repo_name)
157 repo = self._get_repo(repo_name)
158
158
159 q = PullRequest.query()
159 q = PullRequest.query()
160
160
161 if search_q:
161 if search_q:
162 like_expression = u'%{}%'.format(safe_unicode(search_q))
162 like_expression = u'%{}%'.format(safe_unicode(search_q))
163 q = q.filter(or_(
163 q = q.filter(or_(
164 cast(PullRequest.pull_request_id, String).ilike(like_expression),
164 cast(PullRequest.pull_request_id, String).ilike(like_expression),
165 PullRequest.title.ilike(like_expression),
165 PullRequest.title.ilike(like_expression),
166 PullRequest.description.ilike(like_expression),
166 PullRequest.description.ilike(like_expression),
167 ))
167 ))
168
168
169 # source or target
169 # source or target
170 if repo and source:
170 if repo and source:
171 q = q.filter(PullRequest.source_repo == repo)
171 q = q.filter(PullRequest.source_repo == repo)
172 elif repo:
172 elif repo:
173 q = q.filter(PullRequest.target_repo == repo)
173 q = q.filter(PullRequest.target_repo == repo)
174
174
175 # closed,opened
175 # closed,opened
176 if statuses:
176 if statuses:
177 q = q.filter(PullRequest.status.in_(statuses))
177 q = q.filter(PullRequest.status.in_(statuses))
178
178
179 # opened by filter
179 # opened by filter
180 if opened_by:
180 if opened_by:
181 q = q.filter(PullRequest.user_id.in_(opened_by))
181 q = q.filter(PullRequest.user_id.in_(opened_by))
182
182
183 # only get those that are in "created" state
183 # only get those that are in "created" state
184 if only_created:
184 if only_created:
185 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
185 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
186
186
187 if order_by:
187 if order_by:
188 order_map = {
188 order_map = {
189 'name_raw': PullRequest.pull_request_id,
189 'name_raw': PullRequest.pull_request_id,
190 'id': PullRequest.pull_request_id,
190 'id': PullRequest.pull_request_id,
191 'title': PullRequest.title,
191 'title': PullRequest.title,
192 'updated_on_raw': PullRequest.updated_on,
192 'updated_on_raw': PullRequest.updated_on,
193 'target_repo': PullRequest.target_repo_id
193 'target_repo': PullRequest.target_repo_id
194 }
194 }
195 if order_dir == 'asc':
195 if order_dir == 'asc':
196 q = q.order_by(order_map[order_by].asc())
196 q = q.order_by(order_map[order_by].asc())
197 else:
197 else:
198 q = q.order_by(order_map[order_by].desc())
198 q = q.order_by(order_map[order_by].desc())
199
199
200 return q
200 return q
201
201
202 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
202 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
203 opened_by=None):
203 opened_by=None):
204 """
204 """
205 Count the number of pull requests for a specific repository.
205 Count the number of pull requests for a specific repository.
206
206
207 :param repo_name: target or source repo
207 :param repo_name: target or source repo
208 :param search_q: filter by text
208 :param search_q: filter by text
209 :param source: boolean flag to specify if repo_name refers to source
209 :param source: boolean flag to specify if repo_name refers to source
210 :param statuses: list of pull request statuses
210 :param statuses: list of pull request statuses
211 :param opened_by: author user of the pull request
211 :param opened_by: author user of the pull request
212 :returns: int number of pull requests
212 :returns: int number of pull requests
213 """
213 """
214 q = self._prepare_get_all_query(
214 q = self._prepare_get_all_query(
215 repo_name, search_q=search_q, source=source, statuses=statuses,
215 repo_name, search_q=search_q, source=source, statuses=statuses,
216 opened_by=opened_by)
216 opened_by=opened_by)
217
217
218 return q.count()
218 return q.count()
219
219
220 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
220 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
221 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
221 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
222 """
222 """
223 Get all pull requests for a specific repository.
223 Get all pull requests for a specific repository.
224
224
225 :param repo_name: target or source repo
225 :param repo_name: target or source repo
226 :param search_q: filter by text
226 :param search_q: filter by text
227 :param source: boolean flag to specify if repo_name refers to source
227 :param source: boolean flag to specify if repo_name refers to source
228 :param statuses: list of pull request statuses
228 :param statuses: list of pull request statuses
229 :param opened_by: author user of the pull request
229 :param opened_by: author user of the pull request
230 :param offset: pagination offset
230 :param offset: pagination offset
231 :param length: length of returned list
231 :param length: length of returned list
232 :param order_by: order of the returned list
232 :param order_by: order of the returned list
233 :param order_dir: 'asc' or 'desc' ordering direction
233 :param order_dir: 'asc' or 'desc' ordering direction
234 :returns: list of pull requests
234 :returns: list of pull requests
235 """
235 """
236 q = self._prepare_get_all_query(
236 q = self._prepare_get_all_query(
237 repo_name, search_q=search_q, source=source, statuses=statuses,
237 repo_name, search_q=search_q, source=source, statuses=statuses,
238 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
238 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
239
239
240 if length:
240 if length:
241 pull_requests = q.limit(length).offset(offset).all()
241 pull_requests = q.limit(length).offset(offset).all()
242 else:
242 else:
243 pull_requests = q.all()
243 pull_requests = q.all()
244
244
245 return pull_requests
245 return pull_requests
246
246
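# A minimal usage sketch of count_all()/get_all() as documented above; the
# repository name, search text and the 'new' status value are hypothetical
# examples, not values taken from this module.
def _example_list_pull_requests(model):
    total = model.count_all('some/repo', search_q='fix', statuses=['new'])
    # one page of 20, newest first, using an order_by key from the order_map
    page = model.get_all(
        'some/repo', search_q='fix', statuses=['new'],
        offset=0, length=20, order_by='updated_on_raw', order_dir='desc')
    return total, page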
247 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
247 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
248 opened_by=None):
248 opened_by=None):
249 """
249 """
250 Count the number of pull requests for a specific repository that are
250 Count the number of pull requests for a specific repository that are
251 awaiting review.
251 awaiting review.
252
252
253 :param repo_name: target or source repo
253 :param repo_name: target or source repo
254 :param search_q: filter by text
254 :param search_q: filter by text
255 :param source: boolean flag to specify if repo_name refers to source
255 :param source: boolean flag to specify if repo_name refers to source
256 :param statuses: list of pull request statuses
256 :param statuses: list of pull request statuses
257 :param opened_by: author user of the pull request
257 :param opened_by: author user of the pull request
258 :returns: int number of pull requests
258 :returns: int number of pull requests
259 """
259 """
260 pull_requests = self.get_awaiting_review(
260 pull_requests = self.get_awaiting_review(
261 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
261 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
262
262
263 return len(pull_requests)
263 return len(pull_requests)
264
264
265 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
265 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
266 opened_by=None, offset=0, length=None,
266 opened_by=None, offset=0, length=None,
267 order_by=None, order_dir='desc'):
267 order_by=None, order_dir='desc'):
268 """
268 """
269 Get all pull requests for a specific repository that are awaiting
269 Get all pull requests for a specific repository that are awaiting
270 review.
270 review.
271
271
272 :param repo_name: target or source repo
272 :param repo_name: target or source repo
273 :param search_q: filter by text
273 :param search_q: filter by text
274 :param source: boolean flag to specify if repo_name refers to source
274 :param source: boolean flag to specify if repo_name refers to source
275 :param statuses: list of pull request statuses
275 :param statuses: list of pull request statuses
276 :param opened_by: author user of the pull request
276 :param opened_by: author user of the pull request
277 :param offset: pagination offset
277 :param offset: pagination offset
278 :param length: length of returned list
278 :param length: length of returned list
279 :param order_by: order of the returned list
279 :param order_by: order of the returned list
280 :param order_dir: 'asc' or 'desc' ordering direction
280 :param order_dir: 'asc' or 'desc' ordering direction
281 :returns: list of pull requests
281 :returns: list of pull requests
282 """
282 """
283 pull_requests = self.get_all(
283 pull_requests = self.get_all(
284 repo_name, search_q=search_q, source=source, statuses=statuses,
284 repo_name, search_q=search_q, source=source, statuses=statuses,
285 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
285 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
286
286
287 _filtered_pull_requests = []
287 _filtered_pull_requests = []
288 for pr in pull_requests:
288 for pr in pull_requests:
289 status = pr.calculated_review_status()
289 status = pr.calculated_review_status()
290 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
290 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
291 ChangesetStatus.STATUS_UNDER_REVIEW]:
291 ChangesetStatus.STATUS_UNDER_REVIEW]:
292 _filtered_pull_requests.append(pr)
292 _filtered_pull_requests.append(pr)
293 if length:
293 if length:
294 return _filtered_pull_requests[offset:offset+length]
294 return _filtered_pull_requests[offset:offset+length]
295 else:
295 else:
296 return _filtered_pull_requests
296 return _filtered_pull_requests
297
297
298 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
298 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
299 opened_by=None, user_id=None):
299 opened_by=None, user_id=None):
300 """
300 """
301 Count the number of pull requests for a specific repository that are
301 Count the number of pull requests for a specific repository that are
302 awaiting review from a specific user.
302 awaiting review from a specific user.
303
303
304 :param repo_name: target or source repo
304 :param repo_name: target or source repo
305 :param search_q: filter by text
305 :param search_q: filter by text
306 :param source: boolean flag to specify if repo_name refers to source
306 :param source: boolean flag to specify if repo_name refers to source
307 :param statuses: list of pull request statuses
307 :param statuses: list of pull request statuses
308 :param opened_by: author user of the pull request
308 :param opened_by: author user of the pull request
309 :param user_id: reviewer user of the pull request
309 :param user_id: reviewer user of the pull request
310 :returns: int number of pull requests
310 :returns: int number of pull requests
311 """
311 """
312 pull_requests = self.get_awaiting_my_review(
312 pull_requests = self.get_awaiting_my_review(
313 repo_name, search_q=search_q, source=source, statuses=statuses,
313 repo_name, search_q=search_q, source=source, statuses=statuses,
314 opened_by=opened_by, user_id=user_id)
314 opened_by=opened_by, user_id=user_id)
315
315
316 return len(pull_requests)
316 return len(pull_requests)
317
317
318 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
318 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
319 opened_by=None, user_id=None, offset=0,
319 opened_by=None, user_id=None, offset=0,
320 length=None, order_by=None, order_dir='desc'):
320 length=None, order_by=None, order_dir='desc'):
321 """
321 """
322 Get all pull requests for a specific repository that are awaiting
322 Get all pull requests for a specific repository that are awaiting
323 review from a specific user.
323 review from a specific user.
324
324
325 :param repo_name: target or source repo
325 :param repo_name: target or source repo
326 :param search_q: filter by text
326 :param search_q: filter by text
327 :param source: boolean flag to specify if repo_name refers to source
327 :param source: boolean flag to specify if repo_name refers to source
328 :param statuses: list of pull request statuses
328 :param statuses: list of pull request statuses
329 :param opened_by: author user of the pull request
329 :param opened_by: author user of the pull request
330 :param user_id: reviewer user of the pull request
330 :param user_id: reviewer user of the pull request
331 :param offset: pagination offset
331 :param offset: pagination offset
332 :param length: length of returned list
332 :param length: length of returned list
333 :param order_by: order of the returned list
333 :param order_by: order of the returned list
334 :param order_dir: 'asc' or 'desc' ordering direction
334 :param order_dir: 'asc' or 'desc' ordering direction
335 :returns: list of pull requests
335 :returns: list of pull requests
336 """
336 """
337 pull_requests = self.get_all(
337 pull_requests = self.get_all(
338 repo_name, search_q=search_q, source=source, statuses=statuses,
338 repo_name, search_q=search_q, source=source, statuses=statuses,
339 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
339 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
340
340
341 _my = PullRequestModel().get_not_reviewed(user_id)
341 _my = PullRequestModel().get_not_reviewed(user_id)
342 my_participation = []
342 my_participation = []
343 for pr in pull_requests:
343 for pr in pull_requests:
344 if pr in _my:
344 if pr in _my:
345 my_participation.append(pr)
345 my_participation.append(pr)
346 _filtered_pull_requests = my_participation
346 _filtered_pull_requests = my_participation
347 if length:
347 if length:
348 return _filtered_pull_requests[offset:offset+length]
348 return _filtered_pull_requests[offset:offset+length]
349 else:
349 else:
350 return _filtered_pull_requests
350 return _filtered_pull_requests
351
351
352 def get_not_reviewed(self, user_id):
352 def get_not_reviewed(self, user_id):
353 return [
353 return [
354 x.pull_request for x in PullRequestReviewers.query().filter(
354 x.pull_request for x in PullRequestReviewers.query().filter(
355 PullRequestReviewers.user_id == user_id).all()
355 PullRequestReviewers.user_id == user_id).all()
356 ]
356 ]
357
357
358 def _prepare_participating_query(self, user_id=None, statuses=None,
358 def _prepare_participating_query(self, user_id=None, statuses=None,
359 order_by=None, order_dir='desc'):
359 order_by=None, order_dir='desc'):
360 q = PullRequest.query()
360 q = PullRequest.query()
361 if user_id:
361 if user_id:
362 reviewers_subquery = Session().query(
362 reviewers_subquery = Session().query(
363 PullRequestReviewers.pull_request_id).filter(
363 PullRequestReviewers.pull_request_id).filter(
364 PullRequestReviewers.user_id == user_id).subquery()
364 PullRequestReviewers.user_id == user_id).subquery()
365 user_filter = or_(
365 user_filter = or_(
366 PullRequest.user_id == user_id,
366 PullRequest.user_id == user_id,
367 PullRequest.pull_request_id.in_(reviewers_subquery)
367 PullRequest.pull_request_id.in_(reviewers_subquery)
368 )
368 )
369 q = PullRequest.query().filter(user_filter)
369 q = PullRequest.query().filter(user_filter)
370
370
371 # closed,opened
371 # closed,opened
372 if statuses:
372 if statuses:
373 q = q.filter(PullRequest.status.in_(statuses))
373 q = q.filter(PullRequest.status.in_(statuses))
374
374
375 if order_by:
375 if order_by:
376 order_map = {
376 order_map = {
377 'name_raw': PullRequest.pull_request_id,
377 'name_raw': PullRequest.pull_request_id,
378 'title': PullRequest.title,
378 'title': PullRequest.title,
379 'updated_on_raw': PullRequest.updated_on,
379 'updated_on_raw': PullRequest.updated_on,
380 'target_repo': PullRequest.target_repo_id
380 'target_repo': PullRequest.target_repo_id
381 }
381 }
382 if order_dir == 'asc':
382 if order_dir == 'asc':
383 q = q.order_by(order_map[order_by].asc())
383 q = q.order_by(order_map[order_by].asc())
384 else:
384 else:
385 q = q.order_by(order_map[order_by].desc())
385 q = q.order_by(order_map[order_by].desc())
386
386
387 return q
387 return q
388
388
389 def count_im_participating_in(self, user_id=None, statuses=None):
389 def count_im_participating_in(self, user_id=None, statuses=None):
390 q = self._prepare_participating_query(user_id, statuses=statuses)
390 q = self._prepare_participating_query(user_id, statuses=statuses)
391 return q.count()
391 return q.count()
392
392
393 def get_im_participating_in(
393 def get_im_participating_in(
394 self, user_id=None, statuses=None, offset=0,
394 self, user_id=None, statuses=None, offset=0,
395 length=None, order_by=None, order_dir='desc'):
395 length=None, order_by=None, order_dir='desc'):
396 """
396 """
397 Get all pull requests that I'm participating in or have opened
397 Get all pull requests that I'm participating in or have opened
398 """
398 """
399
399
400 q = self._prepare_participating_query(
400 q = self._prepare_participating_query(
401 user_id, statuses=statuses, order_by=order_by,
401 user_id, statuses=statuses, order_by=order_by,
402 order_dir=order_dir)
402 order_dir=order_dir)
403
403
404 if length:
404 if length:
405 pull_requests = q.limit(length).offset(offset).all()
405 pull_requests = q.limit(length).offset(offset).all()
406 else:
406 else:
407 pull_requests = q.all()
407 pull_requests = q.all()
408
408
409 return pull_requests
409 return pull_requests
410
410
411 def get_versions(self, pull_request):
411 def get_versions(self, pull_request):
412 """
412 """
413 returns versions of the pull request sorted by ID ascending
413 returns versions of the pull request sorted by ID ascending
414 """
414 """
415 return PullRequestVersion.query()\
415 return PullRequestVersion.query()\
416 .filter(PullRequestVersion.pull_request == pull_request)\
416 .filter(PullRequestVersion.pull_request == pull_request)\
417 .order_by(PullRequestVersion.pull_request_version_id.asc())\
417 .order_by(PullRequestVersion.pull_request_version_id.asc())\
418 .all()
418 .all()
419
419
420 def get_pr_version(self, pull_request_id, version=None):
420 def get_pr_version(self, pull_request_id, version=None):
421 at_version = None
421 at_version = None
422
422
423 if version and version == 'latest':
423 if version and version == 'latest':
424 pull_request_ver = PullRequest.get(pull_request_id)
424 pull_request_ver = PullRequest.get(pull_request_id)
425 pull_request_obj = pull_request_ver
425 pull_request_obj = pull_request_ver
426 _org_pull_request_obj = pull_request_obj
426 _org_pull_request_obj = pull_request_obj
427 at_version = 'latest'
427 at_version = 'latest'
428 elif version:
428 elif version:
429 pull_request_ver = PullRequestVersion.get_or_404(version)
429 pull_request_ver = PullRequestVersion.get_or_404(version)
430 pull_request_obj = pull_request_ver
430 pull_request_obj = pull_request_ver
431 _org_pull_request_obj = pull_request_ver.pull_request
431 _org_pull_request_obj = pull_request_ver.pull_request
432 at_version = pull_request_ver.pull_request_version_id
432 at_version = pull_request_ver.pull_request_version_id
433 else:
433 else:
434 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
434 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
435 pull_request_id)
435 pull_request_id)
436
436
437 pull_request_display_obj = PullRequest.get_pr_display_object(
437 pull_request_display_obj = PullRequest.get_pr_display_object(
438 pull_request_obj, _org_pull_request_obj)
438 pull_request_obj, _org_pull_request_obj)
439
439
440 return _org_pull_request_obj, pull_request_obj, \
440 return _org_pull_request_obj, pull_request_obj, \
441 pull_request_display_obj, at_version
441 pull_request_display_obj, at_version
442
442
443 def create(self, created_by, source_repo, source_ref, target_repo,
443 def create(self, created_by, source_repo, source_ref, target_repo,
444 target_ref, revisions, reviewers, title, description=None,
444 target_ref, revisions, reviewers, title, description=None,
445 description_renderer=None,
445 description_renderer=None,
446 reviewer_data=None, translator=None, auth_user=None):
446 reviewer_data=None, translator=None, auth_user=None):
447 translator = translator or get_current_request().translate
447 translator = translator or get_current_request().translate
448
448
449 created_by_user = self._get_user(created_by)
449 created_by_user = self._get_user(created_by)
450 auth_user = auth_user or created_by_user.AuthUser()
450 auth_user = auth_user or created_by_user.AuthUser()
451 source_repo = self._get_repo(source_repo)
451 source_repo = self._get_repo(source_repo)
452 target_repo = self._get_repo(target_repo)
452 target_repo = self._get_repo(target_repo)
453
453
454 pull_request = PullRequest()
454 pull_request = PullRequest()
455 pull_request.source_repo = source_repo
455 pull_request.source_repo = source_repo
456 pull_request.source_ref = source_ref
456 pull_request.source_ref = source_ref
457 pull_request.target_repo = target_repo
457 pull_request.target_repo = target_repo
458 pull_request.target_ref = target_ref
458 pull_request.target_ref = target_ref
459 pull_request.revisions = revisions
459 pull_request.revisions = revisions
460 pull_request.title = title
460 pull_request.title = title
461 pull_request.description = description
461 pull_request.description = description
462 pull_request.description_renderer = description_renderer
462 pull_request.description_renderer = description_renderer
463 pull_request.author = created_by_user
463 pull_request.author = created_by_user
464 pull_request.reviewer_data = reviewer_data
464 pull_request.reviewer_data = reviewer_data
465 pull_request.pull_request_state = pull_request.STATE_CREATING
465 pull_request.pull_request_state = pull_request.STATE_CREATING
466 Session().add(pull_request)
466 Session().add(pull_request)
467 Session().flush()
467 Session().flush()
468
468
469 reviewer_ids = set()
469 reviewer_ids = set()
470 # members / reviewers
470 # members / reviewers
471 for reviewer_object in reviewers:
471 for reviewer_object in reviewers:
472 user_id, reasons, mandatory, rules = reviewer_object
472 user_id, reasons, mandatory, rules = reviewer_object
473 user = self._get_user(user_id)
473 user = self._get_user(user_id)
474
474
475 # skip duplicates
475 # skip duplicates
476 if user.user_id in reviewer_ids:
476 if user.user_id in reviewer_ids:
477 continue
477 continue
478
478
479 reviewer_ids.add(user.user_id)
479 reviewer_ids.add(user.user_id)
480
480
481 reviewer = PullRequestReviewers()
481 reviewer = PullRequestReviewers()
482 reviewer.user = user
482 reviewer.user = user
483 reviewer.pull_request = pull_request
483 reviewer.pull_request = pull_request
484 reviewer.reasons = reasons
484 reviewer.reasons = reasons
485 reviewer.mandatory = mandatory
485 reviewer.mandatory = mandatory
486
486
487 # NOTE(marcink): pick only first rule for now
487 # NOTE(marcink): pick only first rule for now
488 rule_id = list(rules)[0] if rules else None
488 rule_id = list(rules)[0] if rules else None
489 rule = RepoReviewRule.get(rule_id) if rule_id else None
489 rule = RepoReviewRule.get(rule_id) if rule_id else None
490 if rule:
490 if rule:
491 review_group = rule.user_group_vote_rule(user_id)
491 review_group = rule.user_group_vote_rule(user_id)
492 # we check if this particular reviewer is a member of a voting group
492 # we check if this particular reviewer is a member of a voting group
493 if review_group:
493 if review_group:
494 # NOTE(marcink):
494 # NOTE(marcink):
495 # the user can be a member of more than one group, but we pick the first one,
495 # the user can be a member of more than one group, but we pick the first one,
496 # the same as the default reviewers algorithm
496 # the same as the default reviewers algorithm
497 review_group = review_group[0]
497 review_group = review_group[0]
498
498
499 rule_data = {
499 rule_data = {
500 'rule_name':
500 'rule_name':
501 rule.review_rule_name,
501 rule.review_rule_name,
502 'rule_user_group_entry_id':
502 'rule_user_group_entry_id':
503 review_group.repo_review_rule_users_group_id,
503 review_group.repo_review_rule_users_group_id,
504 'rule_user_group_name':
504 'rule_user_group_name':
505 review_group.users_group.users_group_name,
505 review_group.users_group.users_group_name,
506 'rule_user_group_members':
506 'rule_user_group_members':
507 [x.user.username for x in review_group.users_group.members],
507 [x.user.username for x in review_group.users_group.members],
508 'rule_user_group_members_id':
508 'rule_user_group_members_id':
509 [x.user.user_id for x in review_group.users_group.members],
509 [x.user.user_id for x in review_group.users_group.members],
510 }
510 }
511 # e.g. {'vote_rule': -1, 'mandatory': True}
511 # e.g. {'vote_rule': -1, 'mandatory': True}
512 rule_data.update(review_group.rule_data())
512 rule_data.update(review_group.rule_data())
513
513
514 reviewer.rule_data = rule_data
514 reviewer.rule_data = rule_data
515
515
516 Session().add(reviewer)
516 Session().add(reviewer)
517 Session().flush()
517 Session().flush()
518
518
519 # Set approval status to "Under Review" for all commits which are
519 # Set approval status to "Under Review" for all commits which are
520 # part of this pull request.
520 # part of this pull request.
521 ChangesetStatusModel().set_status(
521 ChangesetStatusModel().set_status(
522 repo=target_repo,
522 repo=target_repo,
523 status=ChangesetStatus.STATUS_UNDER_REVIEW,
523 status=ChangesetStatus.STATUS_UNDER_REVIEW,
524 user=created_by_user,
524 user=created_by_user,
525 pull_request=pull_request
525 pull_request=pull_request
526 )
526 )
527 # we commit early at this point because the queries above take row locks,
527 # we commit early at this point because the queries above take row locks,
528 # so we need to commit and finish the transaction before the validate call
528 # so we need to commit and finish the transaction before the validate call
529 # below, which for large repos can take a long time and would otherwise
529 # below, which for large repos can take a long time and would otherwise
530 # keep those rows locked for just as long
530 # keep those rows locked for just as long
531 Session().commit()
531 Session().commit()
532
532
533 # prepare workspace, and run initial merge simulation. Set state during that
533 # prepare workspace, and run initial merge simulation. Set state during that
534 # operation
534 # operation
535 pull_request = PullRequest.get(pull_request.pull_request_id)
535 pull_request = PullRequest.get(pull_request.pull_request_id)
536
536
537 # set state to merging for the merge simulation, and to created once it
537 # set state to merging for the merge simulation, and to created once it
538 # finishes, to mark that the simulation worked fine
538 # finishes, to mark that the simulation worked fine
539 with pull_request.set_state(PullRequest.STATE_MERGING,
539 with pull_request.set_state(PullRequest.STATE_MERGING,
540 final_state=PullRequest.STATE_CREATED) as state_obj:
540 final_state=PullRequest.STATE_CREATED) as state_obj:
541 MergeCheck.validate(
541 MergeCheck.validate(
542 pull_request, auth_user=auth_user, translator=translator)
542 pull_request, auth_user=auth_user, translator=translator)
543
543
544 self.notify_reviewers(pull_request, reviewer_ids)
544 self.notify_reviewers(pull_request, reviewer_ids)
545 self.trigger_pull_request_hook(
545 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
546 pull_request, created_by_user, 'create')
547
546
548 creation_data = pull_request.get_api_data(with_merge_state=False)
547 creation_data = pull_request.get_api_data(with_merge_state=False)
549 self._log_audit_action(
548 self._log_audit_action(
550 'repo.pull_request.create', {'data': creation_data},
549 'repo.pull_request.create', {'data': creation_data},
551 auth_user, pull_request)
550 auth_user, pull_request)
552
551
553 return pull_request
552 return pull_request
554
553
555 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
554 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
556 pull_request = self.__get_pull_request(pull_request)
555 pull_request = self.__get_pull_request(pull_request)
557 target_scm = pull_request.target_repo.scm_instance()
556 target_scm = pull_request.target_repo.scm_instance()
558 if action == 'create':
557 if action == 'create':
559 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
558 trigger_hook = hooks_utils.trigger_create_pull_request_hook
560 elif action == 'merge':
559 elif action == 'merge':
561 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
560 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
562 elif action == 'close':
561 elif action == 'close':
563 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
562 trigger_hook = hooks_utils.trigger_close_pull_request_hook
564 elif action == 'review_status_change':
563 elif action == 'review_status_change':
565 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
564 trigger_hook = hooks_utils.trigger_review_pull_request_hook
566 elif action == 'update':
565 elif action == 'update':
567 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
566 trigger_hook = hooks_utils.trigger_update_pull_request_hook
568 elif action == 'comment':
567 elif action == 'comment':
569 # dummy hook ! for comment. We want this function to handle all cases
568 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
570 def trigger_hook(*args, **kwargs):
571 pass
572 comment = data['comment']
573 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
574 else:
569 else:
575 return
570 return
576
571
572 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
573 pull_request, action, trigger_hook)
577 trigger_hook(
574 trigger_hook(
578 username=user.username,
575 username=user.username,
579 repo_name=pull_request.target_repo.repo_name,
576 repo_name=pull_request.target_repo.repo_name,
580 repo_alias=target_scm.alias,
577 repo_type=target_scm.alias,
581 pull_request=pull_request,
578 pull_request=pull_request,
582 data=data)
579 data=data)
583
580
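# A minimal sketch of dispatching the new 'comment' action handled above;
# passing the comment object under the 'comment' key of ``data`` is an
# assumption about the caller, not code taken from this change.
def _example_fire_comment_hook(model, pull_request, user, comment):
    model.trigger_pull_request_hook(
        pull_request, user, 'comment', data={'comment': comment})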
584 def _get_commit_ids(self, pull_request):
581 def _get_commit_ids(self, pull_request):
585 """
582 """
586 Return the commit ids of the merged pull request.
583 Return the commit ids of the merged pull request.
587
584
588 This method does not yet deal correctly with the lack of autoupdates
585 This method does not yet deal correctly with the lack of autoupdates
589 nor with implicit target updates.
586 nor with implicit target updates.
590 For example: if a commit in the source repo is already in the target, it
587 For example: if a commit in the source repo is already in the target, it
591 will still be reported anyway.
588 will still be reported anyway.
592 """
589 """
593 merge_rev = pull_request.merge_rev
590 merge_rev = pull_request.merge_rev
594 if merge_rev is None:
591 if merge_rev is None:
595 raise ValueError('This pull request was not merged yet')
592 raise ValueError('This pull request was not merged yet')
596
593
597 commit_ids = list(pull_request.revisions)
594 commit_ids = list(pull_request.revisions)
598 if merge_rev not in commit_ids:
595 if merge_rev not in commit_ids:
599 commit_ids.append(merge_rev)
596 commit_ids.append(merge_rev)
600
597
601 return commit_ids
598 return commit_ids
602
599
603 def merge_repo(self, pull_request, user, extras):
600 def merge_repo(self, pull_request, user, extras):
604 log.debug("Merging pull request %s", pull_request.pull_request_id)
601 log.debug("Merging pull request %s", pull_request.pull_request_id)
605 extras['user_agent'] = 'internal-merge'
602 extras['user_agent'] = 'internal-merge'
606 merge_state = self._merge_pull_request(pull_request, user, extras)
603 merge_state = self._merge_pull_request(pull_request, user, extras)
607 if merge_state.executed:
604 if merge_state.executed:
608 log.debug("Merge was successful, updating the pull request comments.")
605 log.debug("Merge was successful, updating the pull request comments.")
609 self._comment_and_close_pr(pull_request, user, merge_state)
606 self._comment_and_close_pr(pull_request, user, merge_state)
610
607
611 self._log_audit_action(
608 self._log_audit_action(
612 'repo.pull_request.merge',
609 'repo.pull_request.merge',
613 {'merge_state': merge_state.__dict__},
610 {'merge_state': merge_state.__dict__},
614 user, pull_request)
611 user, pull_request)
615
612
616 else:
613 else:
617 log.warn("Merge failed, not updating the pull request.")
614 log.warn("Merge failed, not updating the pull request.")
618 return merge_state
615 return merge_state
619
616
620 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
617 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
621 target_vcs = pull_request.target_repo.scm_instance()
618 target_vcs = pull_request.target_repo.scm_instance()
622 source_vcs = pull_request.source_repo.scm_instance()
619 source_vcs = pull_request.source_repo.scm_instance()
623
620
624 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
621 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
625 pr_id=pull_request.pull_request_id,
622 pr_id=pull_request.pull_request_id,
626 pr_title=pull_request.title,
623 pr_title=pull_request.title,
627 source_repo=source_vcs.name,
624 source_repo=source_vcs.name,
628 source_ref_name=pull_request.source_ref_parts.name,
625 source_ref_name=pull_request.source_ref_parts.name,
629 target_repo=target_vcs.name,
626 target_repo=target_vcs.name,
630 target_ref_name=pull_request.target_ref_parts.name,
627 target_ref_name=pull_request.target_ref_parts.name,
631 )
628 )
632
629
633 workspace_id = self._workspace_id(pull_request)
630 workspace_id = self._workspace_id(pull_request)
634 repo_id = pull_request.target_repo.repo_id
631 repo_id = pull_request.target_repo.repo_id
635 use_rebase = self._use_rebase_for_merging(pull_request)
632 use_rebase = self._use_rebase_for_merging(pull_request)
636 close_branch = self._close_branch_before_merging(pull_request)
633 close_branch = self._close_branch_before_merging(pull_request)
637 user_name = self._user_name_for_merging(pull_request, user)
634 user_name = self._user_name_for_merging(pull_request, user)
638
635
639 target_ref = self._refresh_reference(
636 target_ref = self._refresh_reference(
640 pull_request.target_ref_parts, target_vcs)
637 pull_request.target_ref_parts, target_vcs)
641
638
642 callback_daemon, extras = prepare_callback_daemon(
639 callback_daemon, extras = prepare_callback_daemon(
643 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
640 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
644 host=vcs_settings.HOOKS_HOST,
641 host=vcs_settings.HOOKS_HOST,
645 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
642 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
646
643
647 with callback_daemon:
644 with callback_daemon:
648 # TODO: johbo: Implement a clean way to run a config_override
645 # TODO: johbo: Implement a clean way to run a config_override
649 # for a single call.
646 # for a single call.
650 target_vcs.config.set(
647 target_vcs.config.set(
651 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
648 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
652
649
653 merge_state = target_vcs.merge(
650 merge_state = target_vcs.merge(
654 repo_id, workspace_id, target_ref, source_vcs,
651 repo_id, workspace_id, target_ref, source_vcs,
655 pull_request.source_ref_parts,
652 pull_request.source_ref_parts,
656 user_name=user_name, user_email=user.email,
653 user_name=user_name, user_email=user.email,
657 message=message, use_rebase=use_rebase,
654 message=message, use_rebase=use_rebase,
658 close_branch=close_branch)
655 close_branch=close_branch)
659 return merge_state
656 return merge_state
660
657
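# A sketch of how the merge message template used above expands: .format()
# receives exactly the keys passed in _merge_pull_request(). The template text
# and values below are hypothetical, not the shipped MERGE_MESSAGE_TMPL.
_example_tmpl = (
    u'Merged PR #{pr_id} ({pr_title}) from {source_repo}@{source_ref_name} '
    u'into {target_repo}@{target_ref_name}')
_example_message = _example_tmpl.format(
    pr_id=7, pr_title=u'Fix login redirect',
    source_repo=u'some/fork', source_ref_name=u'feature/login',
    target_repo=u'some/repo', target_ref_name=u'default')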
661 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
658 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
662 pull_request.merge_rev = merge_state.merge_ref.commit_id
659 pull_request.merge_rev = merge_state.merge_ref.commit_id
663 pull_request.updated_on = datetime.datetime.now()
660 pull_request.updated_on = datetime.datetime.now()
664 close_msg = close_msg or 'Pull request merged and closed'
661 close_msg = close_msg or 'Pull request merged and closed'
665
662
666 CommentsModel().create(
663 CommentsModel().create(
667 text=safe_unicode(close_msg),
664 text=safe_unicode(close_msg),
668 repo=pull_request.target_repo.repo_id,
665 repo=pull_request.target_repo.repo_id,
669 user=user.user_id,
666 user=user.user_id,
670 pull_request=pull_request.pull_request_id,
667 pull_request=pull_request.pull_request_id,
671 f_path=None,
668 f_path=None,
672 line_no=None,
669 line_no=None,
673 closing_pr=True
670 closing_pr=True
674 )
671 )
675
672
676 Session().add(pull_request)
673 Session().add(pull_request)
677 Session().flush()
674 Session().flush()
678 # TODO: paris: replace invalidation with less radical solution
675 # TODO: paris: replace invalidation with less radical solution
679 ScmModel().mark_for_invalidation(
676 ScmModel().mark_for_invalidation(
680 pull_request.target_repo.repo_name)
677 pull_request.target_repo.repo_name)
681 self.trigger_pull_request_hook(pull_request, user, 'merge')
678 self.trigger_pull_request_hook(pull_request, user, 'merge')
682
679
683 def has_valid_update_type(self, pull_request):
680 def has_valid_update_type(self, pull_request):
684 source_ref_type = pull_request.source_ref_parts.type
681 source_ref_type = pull_request.source_ref_parts.type
685 return source_ref_type in self.REF_TYPES
682 return source_ref_type in self.REF_TYPES
686
683
687 def update_commits(self, pull_request, updating_user):
684 def update_commits(self, pull_request, updating_user):
688 """
685 """
689 Get the updated list of commits for the pull request
686 Get the updated list of commits for the pull request
690 and return the new pull request version and the list
687 and return the new pull request version and the list
691 of commits processed by this update action
688 of commits processed by this update action
692
689
693 updating_user is the user_object who triggered the update
690 updating_user is the user_object who triggered the update
694 """
691 """
695 pull_request = self.__get_pull_request(pull_request)
692 pull_request = self.__get_pull_request(pull_request)
696 source_ref_type = pull_request.source_ref_parts.type
693 source_ref_type = pull_request.source_ref_parts.type
697 source_ref_name = pull_request.source_ref_parts.name
694 source_ref_name = pull_request.source_ref_parts.name
698 source_ref_id = pull_request.source_ref_parts.commit_id
695 source_ref_id = pull_request.source_ref_parts.commit_id
699
696
700 target_ref_type = pull_request.target_ref_parts.type
697 target_ref_type = pull_request.target_ref_parts.type
701 target_ref_name = pull_request.target_ref_parts.name
698 target_ref_name = pull_request.target_ref_parts.name
702 target_ref_id = pull_request.target_ref_parts.commit_id
699 target_ref_id = pull_request.target_ref_parts.commit_id
703
700
704 if not self.has_valid_update_type(pull_request):
701 if not self.has_valid_update_type(pull_request):
705 log.debug("Skipping update of pull request %s due to ref type: %s",
702 log.debug("Skipping update of pull request %s due to ref type: %s",
706 pull_request, source_ref_type)
703 pull_request, source_ref_type)
707 return UpdateResponse(
704 return UpdateResponse(
708 executed=False,
705 executed=False,
709 reason=UpdateFailureReason.WRONG_REF_TYPE,
706 reason=UpdateFailureReason.WRONG_REF_TYPE,
710 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
707 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
711 source_changed=False, target_changed=False)
708 source_changed=False, target_changed=False)
712
709
713 # source repo
710 # source repo
714 source_repo = pull_request.source_repo.scm_instance()
711 source_repo = pull_request.source_repo.scm_instance()
715
712
716 try:
713 try:
717 source_commit = source_repo.get_commit(commit_id=source_ref_name)
714 source_commit = source_repo.get_commit(commit_id=source_ref_name)
718 except CommitDoesNotExistError:
715 except CommitDoesNotExistError:
719 return UpdateResponse(
716 return UpdateResponse(
720 executed=False,
717 executed=False,
721 reason=UpdateFailureReason.MISSING_SOURCE_REF,
718 reason=UpdateFailureReason.MISSING_SOURCE_REF,
722 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
719 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
723 source_changed=False, target_changed=False)
720 source_changed=False, target_changed=False)
724
721
725 source_changed = source_ref_id != source_commit.raw_id
722 source_changed = source_ref_id != source_commit.raw_id
726
723
727 # target repo
724 # target repo
728 target_repo = pull_request.target_repo.scm_instance()
725 target_repo = pull_request.target_repo.scm_instance()
729
726
730 try:
727 try:
731 target_commit = target_repo.get_commit(commit_id=target_ref_name)
728 target_commit = target_repo.get_commit(commit_id=target_ref_name)
732 except CommitDoesNotExistError:
729 except CommitDoesNotExistError:
733 return UpdateResponse(
730 return UpdateResponse(
734 executed=False,
731 executed=False,
735 reason=UpdateFailureReason.MISSING_TARGET_REF,
732 reason=UpdateFailureReason.MISSING_TARGET_REF,
736 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
733 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
737 source_changed=False, target_changed=False)
734 source_changed=False, target_changed=False)
738 target_changed = target_ref_id != target_commit.raw_id
735 target_changed = target_ref_id != target_commit.raw_id
739
736
740 if not (source_changed or target_changed):
737 if not (source_changed or target_changed):
741 log.debug("Nothing changed in pull request %s", pull_request)
738 log.debug("Nothing changed in pull request %s", pull_request)
742 return UpdateResponse(
739 return UpdateResponse(
743 executed=False,
740 executed=False,
744 reason=UpdateFailureReason.NO_CHANGE,
741 reason=UpdateFailureReason.NO_CHANGE,
745 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
742 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
746 source_changed=target_changed, target_changed=source_changed)
743 source_changed=target_changed, target_changed=source_changed)
747
744
748 change_in_found = 'target repo' if target_changed else 'source repo'
745 change_in_found = 'target repo' if target_changed else 'source repo'
749 log.debug('Updating pull request because of change in %s detected',
746 log.debug('Updating pull request because of change in %s detected',
750 change_in_found)
747 change_in_found)
751
748
752 # Finally, an update is needed; in case of a source change we create
749 # Finally, an update is needed; in case of a source change we create
753 # a new version, otherwise it is just an update
750 # a new version, otherwise it is just an update
754 if source_changed:
751 if source_changed:
755 pull_request_version = self._create_version_from_snapshot(pull_request)
752 pull_request_version = self._create_version_from_snapshot(pull_request)
756 self._link_comments_to_version(pull_request_version)
753 self._link_comments_to_version(pull_request_version)
757 else:
754 else:
758 try:
755 try:
759 ver = pull_request.versions[-1]
756 ver = pull_request.versions[-1]
760 except IndexError:
757 except IndexError:
761 ver = None
758 ver = None
762
759
763 pull_request.pull_request_version_id = \
760 pull_request.pull_request_version_id = \
764 ver.pull_request_version_id if ver else None
761 ver.pull_request_version_id if ver else None
765 pull_request_version = pull_request
762 pull_request_version = pull_request
766
763
767 try:
764 try:
768 if target_ref_type in self.REF_TYPES:
765 if target_ref_type in self.REF_TYPES:
769 target_commit = target_repo.get_commit(target_ref_name)
766 target_commit = target_repo.get_commit(target_ref_name)
770 else:
767 else:
771 target_commit = target_repo.get_commit(target_ref_id)
768 target_commit = target_repo.get_commit(target_ref_id)
772 except CommitDoesNotExistError:
769 except CommitDoesNotExistError:
773 return UpdateResponse(
770 return UpdateResponse(
774 executed=False,
771 executed=False,
775 reason=UpdateFailureReason.MISSING_TARGET_REF,
772 reason=UpdateFailureReason.MISSING_TARGET_REF,
776 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
773 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
777 source_changed=source_changed, target_changed=target_changed)
774 source_changed=source_changed, target_changed=target_changed)
778
775
779 # re-compute commit ids
776 # re-compute commit ids
780 old_commit_ids = pull_request.revisions
777 old_commit_ids = pull_request.revisions
781 pre_load = ["author", "date", "message", "branch"]
778 pre_load = ["author", "date", "message", "branch"]
782 commit_ranges = target_repo.compare(
779 commit_ranges = target_repo.compare(
783 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
780 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
784 pre_load=pre_load)
781 pre_load=pre_load)
785
782
786 ancestor_commit_id = source_repo.get_common_ancestor(
783 ancestor_commit_id = source_repo.get_common_ancestor(
787 source_commit.raw_id, target_commit.raw_id, target_repo)
784 source_commit.raw_id, target_commit.raw_id, target_repo)
788
785
789 pull_request.source_ref = '%s:%s:%s' % (
786 pull_request.source_ref = '%s:%s:%s' % (
790 source_ref_type, source_ref_name, source_commit.raw_id)
787 source_ref_type, source_ref_name, source_commit.raw_id)
791 pull_request.target_ref = '%s:%s:%s' % (
788 pull_request.target_ref = '%s:%s:%s' % (
792 target_ref_type, target_ref_name, ancestor_commit_id)
789 target_ref_type, target_ref_name, ancestor_commit_id)
793
790
794 pull_request.revisions = [
791 pull_request.revisions = [
795 commit.raw_id for commit in reversed(commit_ranges)]
792 commit.raw_id for commit in reversed(commit_ranges)]
796 pull_request.updated_on = datetime.datetime.now()
793 pull_request.updated_on = datetime.datetime.now()
797 Session().add(pull_request)
794 Session().add(pull_request)
798 new_commit_ids = pull_request.revisions
795 new_commit_ids = pull_request.revisions
799
796
800 old_diff_data, new_diff_data = self._generate_update_diffs(
797 old_diff_data, new_diff_data = self._generate_update_diffs(
801 pull_request, pull_request_version)
798 pull_request, pull_request_version)
802
799
803 # calculate commit and file changes
800 # calculate commit and file changes
804 commit_changes = self._calculate_commit_id_changes(
801 commit_changes = self._calculate_commit_id_changes(
805 old_commit_ids, new_commit_ids)
802 old_commit_ids, new_commit_ids)
806 file_changes = self._calculate_file_changes(
803 file_changes = self._calculate_file_changes(
807 old_diff_data, new_diff_data)
804 old_diff_data, new_diff_data)
808
805
809 # set comments as outdated if DIFFS changed
806 # set comments as outdated if DIFFS changed
810 CommentsModel().outdate_comments(
807 CommentsModel().outdate_comments(
811 pull_request, old_diff_data=old_diff_data,
808 pull_request, old_diff_data=old_diff_data,
812 new_diff_data=new_diff_data)
809 new_diff_data=new_diff_data)
813
810
814 valid_commit_changes = (commit_changes.added or commit_changes.removed)
811 valid_commit_changes = (commit_changes.added or commit_changes.removed)
815 file_node_changes = (
812 file_node_changes = (
816 file_changes.added or file_changes.modified or file_changes.removed)
813 file_changes.added or file_changes.modified or file_changes.removed)
817 pr_has_changes = valid_commit_changes or file_node_changes
814 pr_has_changes = valid_commit_changes or file_node_changes
818
815
819 # Add an automatic comment to the pull request, in case
816 # Add an automatic comment to the pull request, in case
820 # anything has changed
817 # anything has changed
821 if pr_has_changes:
818 if pr_has_changes:
822 update_comment = CommentsModel().create(
819 update_comment = CommentsModel().create(
823 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
820 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
824 repo=pull_request.target_repo,
821 repo=pull_request.target_repo,
825 user=pull_request.author,
822 user=pull_request.author,
826 pull_request=pull_request,
823 pull_request=pull_request,
827 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
824 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
828
825
829 # Update status to "Under Review" for added commits
826 # Update status to "Under Review" for added commits
830 for commit_id in commit_changes.added:
827 for commit_id in commit_changes.added:
831 ChangesetStatusModel().set_status(
828 ChangesetStatusModel().set_status(
832 repo=pull_request.source_repo,
829 repo=pull_request.source_repo,
833 status=ChangesetStatus.STATUS_UNDER_REVIEW,
830 status=ChangesetStatus.STATUS_UNDER_REVIEW,
834 comment=update_comment,
831 comment=update_comment,
835 user=pull_request.author,
832 user=pull_request.author,
836 pull_request=pull_request,
833 pull_request=pull_request,
837 revision=commit_id)
834 revision=commit_id)
838
835
839 # send update email to users
836 # send update email to users
840 try:
837 try:
841 self.notify_users(pull_request=pull_request, updating_user=updating_user,
838 self.notify_users(pull_request=pull_request, updating_user=updating_user,
842 ancestor_commit_id=ancestor_commit_id,
839 ancestor_commit_id=ancestor_commit_id,
843 commit_changes=commit_changes,
840 commit_changes=commit_changes,
844 file_changes=file_changes)
841 file_changes=file_changes)
845 except Exception:
842 except Exception:
846 log.exception('Failed to send email notification to users')
843 log.exception('Failed to send email notification to users')
847
844
848 log.debug(
845 log.debug(
849 'Updated pull request %s, added_ids: %s, common_ids: %s, '
846 'Updated pull request %s, added_ids: %s, common_ids: %s, '
850 'removed_ids: %s', pull_request.pull_request_id,
847 'removed_ids: %s', pull_request.pull_request_id,
851 commit_changes.added, commit_changes.common, commit_changes.removed)
848 commit_changes.added, commit_changes.common, commit_changes.removed)
852 log.debug(
849 log.debug(
853 'Updated pull request with the following file changes: %s',
850 'Updated pull request with the following file changes: %s',
854 file_changes)
851 file_changes)
855
852
856 log.info(
853 log.info(
857 "Updated pull request %s from commit %s to commit %s, "
854 "Updated pull request %s from commit %s to commit %s, "
858 "stored new version %s of this pull request.",
855 "stored new version %s of this pull request.",
859 pull_request.pull_request_id, source_ref_id,
856 pull_request.pull_request_id, source_ref_id,
860 pull_request.source_ref_parts.commit_id,
857 pull_request.source_ref_parts.commit_id,
861 pull_request_version.pull_request_version_id)
858 pull_request_version.pull_request_version_id)
862 Session().commit()
859 Session().commit()
863 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
860 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
864
861
865 return UpdateResponse(
862 return UpdateResponse(
866 executed=True, reason=UpdateFailureReason.NONE,
863 executed=True, reason=UpdateFailureReason.NONE,
867 old=pull_request, new=pull_request_version,
864 old=pull_request, new=pull_request_version,
868 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
865 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
869 source_changed=source_changed, target_changed=target_changed)
866 source_changed=source_changed, target_changed=target_changed)
870
867
871 def _create_version_from_snapshot(self, pull_request):
868 def _create_version_from_snapshot(self, pull_request):
872 version = PullRequestVersion()
869 version = PullRequestVersion()
873 version.title = pull_request.title
870 version.title = pull_request.title
874 version.description = pull_request.description
871 version.description = pull_request.description
875 version.status = pull_request.status
872 version.status = pull_request.status
876 version.pull_request_state = pull_request.pull_request_state
873 version.pull_request_state = pull_request.pull_request_state
877 version.created_on = datetime.datetime.now()
874 version.created_on = datetime.datetime.now()
878 version.updated_on = pull_request.updated_on
875 version.updated_on = pull_request.updated_on
879 version.user_id = pull_request.user_id
876 version.user_id = pull_request.user_id
880 version.source_repo = pull_request.source_repo
877 version.source_repo = pull_request.source_repo
881 version.source_ref = pull_request.source_ref
878 version.source_ref = pull_request.source_ref
882 version.target_repo = pull_request.target_repo
879 version.target_repo = pull_request.target_repo
883 version.target_ref = pull_request.target_ref
880 version.target_ref = pull_request.target_ref
884
881
885 version._last_merge_source_rev = pull_request._last_merge_source_rev
882 version._last_merge_source_rev = pull_request._last_merge_source_rev
886 version._last_merge_target_rev = pull_request._last_merge_target_rev
883 version._last_merge_target_rev = pull_request._last_merge_target_rev
887 version.last_merge_status = pull_request.last_merge_status
884 version.last_merge_status = pull_request.last_merge_status
888 version.last_merge_metadata = pull_request.last_merge_metadata
885 version.last_merge_metadata = pull_request.last_merge_metadata
889 version.shadow_merge_ref = pull_request.shadow_merge_ref
886 version.shadow_merge_ref = pull_request.shadow_merge_ref
890 version.merge_rev = pull_request.merge_rev
887 version.merge_rev = pull_request.merge_rev
891 version.reviewer_data = pull_request.reviewer_data
888 version.reviewer_data = pull_request.reviewer_data
892
889
893 version.revisions = pull_request.revisions
890 version.revisions = pull_request.revisions
894 version.pull_request = pull_request
891 version.pull_request = pull_request
895 Session().add(version)
892 Session().add(version)
896 Session().flush()
893 Session().flush()
897
894
898 return version
895 return version
899
896
900 def _generate_update_diffs(self, pull_request, pull_request_version):
897 def _generate_update_diffs(self, pull_request, pull_request_version):
901
898
902 diff_context = (
899 diff_context = (
903 self.DIFF_CONTEXT +
900 self.DIFF_CONTEXT +
904 CommentsModel.needed_extra_diff_context())
901 CommentsModel.needed_extra_diff_context())
905 hide_whitespace_changes = False
902 hide_whitespace_changes = False
906 source_repo = pull_request_version.source_repo
903 source_repo = pull_request_version.source_repo
907 source_ref_id = pull_request_version.source_ref_parts.commit_id
904 source_ref_id = pull_request_version.source_ref_parts.commit_id
908 target_ref_id = pull_request_version.target_ref_parts.commit_id
905 target_ref_id = pull_request_version.target_ref_parts.commit_id
909 old_diff = self._get_diff_from_pr_or_version(
906 old_diff = self._get_diff_from_pr_or_version(
910 source_repo, source_ref_id, target_ref_id,
907 source_repo, source_ref_id, target_ref_id,
911 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
908 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
912
909
913 source_repo = pull_request.source_repo
910 source_repo = pull_request.source_repo
914 source_ref_id = pull_request.source_ref_parts.commit_id
911 source_ref_id = pull_request.source_ref_parts.commit_id
915 target_ref_id = pull_request.target_ref_parts.commit_id
912 target_ref_id = pull_request.target_ref_parts.commit_id
916
913
917 new_diff = self._get_diff_from_pr_or_version(
914 new_diff = self._get_diff_from_pr_or_version(
918 source_repo, source_ref_id, target_ref_id,
915 source_repo, source_ref_id, target_ref_id,
919 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
916 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
920
917
921 old_diff_data = diffs.DiffProcessor(old_diff)
918 old_diff_data = diffs.DiffProcessor(old_diff)
922 old_diff_data.prepare()
919 old_diff_data.prepare()
923 new_diff_data = diffs.DiffProcessor(new_diff)
920 new_diff_data = diffs.DiffProcessor(new_diff)
924 new_diff_data.prepare()
921 new_diff_data.prepare()
925
922
926 return old_diff_data, new_diff_data
923 return old_diff_data, new_diff_data
927
924
928 def _link_comments_to_version(self, pull_request_version):
925 def _link_comments_to_version(self, pull_request_version):
929 """
926 """
930 Link all unlinked comments of this pull request to the given version.
927 Link all unlinked comments of this pull request to the given version.
931
928
932 :param pull_request_version: The `PullRequestVersion` to which
929 :param pull_request_version: The `PullRequestVersion` to which
933 the comments shall be linked.
930 the comments shall be linked.
934
931
935 """
932 """
936 pull_request = pull_request_version.pull_request
933 pull_request = pull_request_version.pull_request
937 comments = ChangesetComment.query()\
934 comments = ChangesetComment.query()\
938 .filter(
935 .filter(
939 # TODO: johbo: Should we query for the repo at all here?
936 # TODO: johbo: Should we query for the repo at all here?
940 # Pending decision on how comments of PRs are to be related
937 # Pending decision on how comments of PRs are to be related
941 # to either the source repo, the target repo or no repo at all.
938 # to either the source repo, the target repo or no repo at all.
942 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
939 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
943 ChangesetComment.pull_request == pull_request,
940 ChangesetComment.pull_request == pull_request,
944 ChangesetComment.pull_request_version == None)\
941 ChangesetComment.pull_request_version == None)\
945 .order_by(ChangesetComment.comment_id.asc())
942 .order_by(ChangesetComment.comment_id.asc())
946
943
947 # TODO: johbo: Find out why this breaks if it is done in a bulk
944 # TODO: johbo: Find out why this breaks if it is done in a bulk
948 # operation.
945 # operation.
949 for comment in comments:
946 for comment in comments:
950 comment.pull_request_version_id = (
947 comment.pull_request_version_id = (
951 pull_request_version.pull_request_version_id)
948 pull_request_version.pull_request_version_id)
952 Session().add(comment)
949 Session().add(comment)
953
950
954 def _calculate_commit_id_changes(self, old_ids, new_ids):
951 def _calculate_commit_id_changes(self, old_ids, new_ids):
955 added = [x for x in new_ids if x not in old_ids]
952 added = [x for x in new_ids if x not in old_ids]
956 common = [x for x in new_ids if x in old_ids]
953 common = [x for x in new_ids if x in old_ids]
957 removed = [x for x in old_ids if x not in new_ids]
954 removed = [x for x in old_ids if x not in new_ids]
958 total = new_ids
955 total = new_ids
959 return ChangeTuple(added, common, removed, total)
956 return ChangeTuple(added, common, removed, total)
960
957
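As a rough illustration of the bookkeeping above (commit ids invented for this sketch), the three buckets fall out of plain list comprehensions over the old and new revision lists, and total is simply the new list:

# illustrative only -- invented ids, mirrors _calculate_commit_id_changes
old_ids = ['a1b2c3', 'd4e5f6']
new_ids = ['d4e5f6', '778899', 'aabbcc']

added = [x for x in new_ids if x not in old_ids]    # ['778899', 'aabbcc']
common = [x for x in new_ids if x in old_ids]       # ['d4e5f6']
removed = [x for x in old_ids if x not in new_ids]  # ['a1b2c3']
total = new_ids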
961 def _calculate_file_changes(self, old_diff_data, new_diff_data):
958 def _calculate_file_changes(self, old_diff_data, new_diff_data):
962
959
963 old_files = OrderedDict()
960 old_files = OrderedDict()
964 for diff_data in old_diff_data.parsed_diff:
961 for diff_data in old_diff_data.parsed_diff:
965 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
962 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
966
963
967 added_files = []
964 added_files = []
968 modified_files = []
965 modified_files = []
969 removed_files = []
966 removed_files = []
970 for diff_data in new_diff_data.parsed_diff:
967 for diff_data in new_diff_data.parsed_diff:
971 new_filename = diff_data['filename']
968 new_filename = diff_data['filename']
972 new_hash = md5_safe(diff_data['raw_diff'])
969 new_hash = md5_safe(diff_data['raw_diff'])
973
970
974 old_hash = old_files.get(new_filename)
971 old_hash = old_files.get(new_filename)
975 if not old_hash:
972 if not old_hash:
976 # file is not present in the old diff; we have to figure out the
973 # file is not present in the old diff; we have to figure out the
977 # ADD/REMOVE operation from the parsed diff
974 # ADD/REMOVE operation from the parsed diff
978 operations_dict = diff_data['stats']['ops']
975 operations_dict = diff_data['stats']['ops']
979 if diffs.DEL_FILENODE in operations_dict:
976 if diffs.DEL_FILENODE in operations_dict:
980 removed_files.append(new_filename)
977 removed_files.append(new_filename)
981 else:
978 else:
982 added_files.append(new_filename)
979 added_files.append(new_filename)
983 else:
980 else:
984 if new_hash != old_hash:
981 if new_hash != old_hash:
985 modified_files.append(new_filename)
982 modified_files.append(new_filename)
986 # now remove a file from old, since we have seen it already
983 # now remove a file from old, since we have seen it already
987 del old_files[new_filename]
984 del old_files[new_filename]
988
985
989 # removed files are those present in old but not in NEW;
986 # removed files are those present in old but not in NEW;
990 # since we removed every old file that also appears in the new diff,
987 # since we removed every old file that also appears in the new diff,
991 # whatever is left over must be the removed files
988 # whatever is left over must be the removed files
992 removed_files.extend(old_files.keys())
989 removed_files.extend(old_files.keys())
993
990
994 return FileChangeTuple(added_files, modified_files, removed_files)
991 return FileChangeTuple(added_files, modified_files, removed_files)
995
992
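A minimal, self-contained sketch of the same classification, with invented file names and hashes (the real method works on DiffProcessor output and md5_safe of each raw diff):

# illustrative only -- invented data, same bookkeeping as _calculate_file_changes
old_files = {'setup.py': 'hash-1', 'docs/index.rst': 'hash-9'}
new_diff = [
    ('setup.py', 'hash-2', False),    # known file, hash changed -> modified
    ('README.rst', 'hash-3', False),  # unknown file, not a delete op -> added
]

added, modified, removed = [], [], []
for name, new_hash, is_delete_op in new_diff:
    old_hash = old_files.get(name)
    if not old_hash:
        (removed if is_delete_op else added).append(name)
    else:
        if new_hash != old_hash:
            modified.append(name)
        del old_files[name]
removed.extend(old_files.keys())  # only left in the old diff -> removed
# added == ['README.rst'], modified == ['setup.py'], removed == ['docs/index.rst']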
996 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
993 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
997 """
994 """
998 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
995 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
999 so it always looks the same regardless of which default
996 so it always looks the same regardless of which default
1000 renderer the system is using.
997 renderer the system is using.
1001
998
1002 :param ancestor_commit_id: ancestor raw_id
999 :param ancestor_commit_id: ancestor raw_id
1003 :param changes: changes named tuple
1000 :param changes: changes named tuple
1004 :param file_changes: file changes named tuple
1001 :param file_changes: file changes named tuple
1005
1002
1006 """
1003 """
1007 new_status = ChangesetStatus.get_status_lbl(
1004 new_status = ChangesetStatus.get_status_lbl(
1008 ChangesetStatus.STATUS_UNDER_REVIEW)
1005 ChangesetStatus.STATUS_UNDER_REVIEW)
1009
1006
1010 changed_files = (
1007 changed_files = (
1011 file_changes.added + file_changes.modified + file_changes.removed)
1008 file_changes.added + file_changes.modified + file_changes.removed)
1012
1009
1013 params = {
1010 params = {
1014 'under_review_label': new_status,
1011 'under_review_label': new_status,
1015 'added_commits': changes.added,
1012 'added_commits': changes.added,
1016 'removed_commits': changes.removed,
1013 'removed_commits': changes.removed,
1017 'changed_files': changed_files,
1014 'changed_files': changed_files,
1018 'added_files': file_changes.added,
1015 'added_files': file_changes.added,
1019 'modified_files': file_changes.modified,
1016 'modified_files': file_changes.modified,
1020 'removed_files': file_changes.removed,
1017 'removed_files': file_changes.removed,
1021 'ancestor_commit_id': ancestor_commit_id
1018 'ancestor_commit_id': ancestor_commit_id
1022 }
1019 }
1023 renderer = RstTemplateRenderer()
1020 renderer = RstTemplateRenderer()
1024 return renderer.render('pull_request_update.mako', **params)
1021 return renderer.render('pull_request_update.mako', **params)
1025
1022
1026 def edit(self, pull_request, title, description, description_renderer, user):
1023 def edit(self, pull_request, title, description, description_renderer, user):
1027 pull_request = self.__get_pull_request(pull_request)
1024 pull_request = self.__get_pull_request(pull_request)
1028 old_data = pull_request.get_api_data(with_merge_state=False)
1025 old_data = pull_request.get_api_data(with_merge_state=False)
1029 if pull_request.is_closed():
1026 if pull_request.is_closed():
1030 raise ValueError('This pull request is closed')
1027 raise ValueError('This pull request is closed')
1031 if title:
1028 if title:
1032 pull_request.title = title
1029 pull_request.title = title
1033 pull_request.description = description
1030 pull_request.description = description
1034 pull_request.updated_on = datetime.datetime.now()
1031 pull_request.updated_on = datetime.datetime.now()
1035 pull_request.description_renderer = description_renderer
1032 pull_request.description_renderer = description_renderer
1036 Session().add(pull_request)
1033 Session().add(pull_request)
1037 self._log_audit_action(
1034 self._log_audit_action(
1038 'repo.pull_request.edit', {'old_data': old_data},
1035 'repo.pull_request.edit', {'old_data': old_data},
1039 user, pull_request)
1036 user, pull_request)
1040
1037
1041 def update_reviewers(self, pull_request, reviewer_data, user):
1038 def update_reviewers(self, pull_request, reviewer_data, user):
1042 """
1039 """
1043 Update the reviewers in the pull request
1040 Update the reviewers in the pull request
1044
1041
1045 :param pull_request: the pr to update
1042 :param pull_request: the pr to update
1046 :param reviewer_data: list of tuples
1043 :param reviewer_data: list of tuples
1047 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1044 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1048 """
1045 """
1049 pull_request = self.__get_pull_request(pull_request)
1046 pull_request = self.__get_pull_request(pull_request)
1050 if pull_request.is_closed():
1047 if pull_request.is_closed():
1051 raise ValueError('This pull request is closed')
1048 raise ValueError('This pull request is closed')
1052
1049
1053 reviewers = {}
1050 reviewers = {}
1054 for user_id, reasons, mandatory, rules in reviewer_data:
1051 for user_id, reasons, mandatory, rules in reviewer_data:
1055 if isinstance(user_id, (int, compat.string_types)):
1052 if isinstance(user_id, (int, compat.string_types)):
1056 user_id = self._get_user(user_id).user_id
1053 user_id = self._get_user(user_id).user_id
1057 reviewers[user_id] = {
1054 reviewers[user_id] = {
1058 'reasons': reasons, 'mandatory': mandatory}
1055 'reasons': reasons, 'mandatory': mandatory}
1059
1056
1060 reviewers_ids = set(reviewers.keys())
1057 reviewers_ids = set(reviewers.keys())
1061 current_reviewers = PullRequestReviewers.query()\
1058 current_reviewers = PullRequestReviewers.query()\
1062 .filter(PullRequestReviewers.pull_request ==
1059 .filter(PullRequestReviewers.pull_request ==
1063 pull_request).all()
1060 pull_request).all()
1064 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1061 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1065
1062
1066 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1063 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1067 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1064 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1068
1065
1069 log.debug("Adding %s reviewers", ids_to_add)
1066 log.debug("Adding %s reviewers", ids_to_add)
1070 log.debug("Removing %s reviewers", ids_to_remove)
1067 log.debug("Removing %s reviewers", ids_to_remove)
1071 changed = False
1068 changed = False
1072 added_audit_reviewers = []
1069 added_audit_reviewers = []
1073 removed_audit_reviewers = []
1070 removed_audit_reviewers = []
1074
1071
1075 for uid in ids_to_add:
1072 for uid in ids_to_add:
1076 changed = True
1073 changed = True
1077 _usr = self._get_user(uid)
1074 _usr = self._get_user(uid)
1078 reviewer = PullRequestReviewers()
1075 reviewer = PullRequestReviewers()
1079 reviewer.user = _usr
1076 reviewer.user = _usr
1080 reviewer.pull_request = pull_request
1077 reviewer.pull_request = pull_request
1081 reviewer.reasons = reviewers[uid]['reasons']
1078 reviewer.reasons = reviewers[uid]['reasons']
1082 # NOTE(marcink): mandatory shouldn't be changed now
1079 # NOTE(marcink): mandatory shouldn't be changed now
1083 # reviewer.mandatory = reviewers[uid]['reasons']
1080 # reviewer.mandatory = reviewers[uid]['reasons']
1084 Session().add(reviewer)
1081 Session().add(reviewer)
1085 added_audit_reviewers.append(reviewer.get_dict())
1082 added_audit_reviewers.append(reviewer.get_dict())
1086
1083
1087 for uid in ids_to_remove:
1084 for uid in ids_to_remove:
1088 changed = True
1085 changed = True
1089 # NOTE(marcink): we fetch "ALL" matching reviewer rows using .all(). This is an
1086 # NOTE(marcink): we fetch "ALL" matching reviewer rows using .all(). This is an
1090 # edge case that handles and cleans up situations where the same reviewer was
1087 # edge case that handles and cleans up situations where the same reviewer was
1091 # added twice; this CAN happen due to the lack of DB-level checks
1088 # added twice; this CAN happen due to the lack of DB-level checks
1092 reviewers = PullRequestReviewers.query()\
1089 reviewers = PullRequestReviewers.query()\
1093 .filter(PullRequestReviewers.user_id == uid,
1090 .filter(PullRequestReviewers.user_id == uid,
1094 PullRequestReviewers.pull_request == pull_request)\
1091 PullRequestReviewers.pull_request == pull_request)\
1095 .all()
1092 .all()
1096
1093
1097 for obj in reviewers:
1094 for obj in reviewers:
1098 removed_audit_reviewers.append(obj.get_dict())
1095 removed_audit_reviewers.append(obj.get_dict())
1099 Session().delete(obj)
1096 Session().delete(obj)
1100
1097
1101 if changed:
1098 if changed:
1102 Session().expire_all()
1099 Session().expire_all()
1103 pull_request.updated_on = datetime.datetime.now()
1100 pull_request.updated_on = datetime.datetime.now()
1104 Session().add(pull_request)
1101 Session().add(pull_request)
1105
1102
1106 # finally store audit logs
1103 # finally store audit logs
1107 for user_data in added_audit_reviewers:
1104 for user_data in added_audit_reviewers:
1108 self._log_audit_action(
1105 self._log_audit_action(
1109 'repo.pull_request.reviewer.add', {'data': user_data},
1106 'repo.pull_request.reviewer.add', {'data': user_data},
1110 user, pull_request)
1107 user, pull_request)
1111 for user_data in removed_audit_reviewers:
1108 for user_data in removed_audit_reviewers:
1112 self._log_audit_action(
1109 self._log_audit_action(
1113 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1110 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1114 user, pull_request)
1111 user, pull_request)
1115
1112
1116 self.notify_reviewers(pull_request, ids_to_add)
1113 self.notify_reviewers(pull_request, ids_to_add)
1117 return ids_to_add, ids_to_remove
1114 return ids_to_add, ids_to_remove
1118
1115
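The add/remove decision above boils down to two set differences; a tiny sketch with invented user ids:

# illustrative only -- invented user ids
current_reviewer_ids = {2, 5, 7}   # already on the pull request
wanted_reviewer_ids = {2, 7, 9}    # from the submitted reviewer_data

ids_to_add = wanted_reviewer_ids.difference(current_reviewer_ids)     # {9}
ids_to_remove = current_reviewer_ids.difference(wanted_reviewer_ids)  # {5}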
1119 def get_url(self, pull_request, request=None, permalink=False):
1116 def get_url(self, pull_request, request=None, permalink=False):
1120 if not request:
1117 if not request:
1121 request = get_current_request()
1118 request = get_current_request()
1122
1119
1123 if permalink:
1120 if permalink:
1124 return request.route_url(
1121 return request.route_url(
1125 'pull_requests_global',
1122 'pull_requests_global',
1126 pull_request_id=pull_request.pull_request_id,)
1123 pull_request_id=pull_request.pull_request_id,)
1127 else:
1124 else:
1128 return request.route_url('pullrequest_show',
1125 return request.route_url('pullrequest_show',
1129 repo_name=safe_str(pull_request.target_repo.repo_name),
1126 repo_name=safe_str(pull_request.target_repo.repo_name),
1130 pull_request_id=pull_request.pull_request_id,)
1127 pull_request_id=pull_request.pull_request_id,)
1131
1128
1132 def get_shadow_clone_url(self, pull_request, request=None):
1129 def get_shadow_clone_url(self, pull_request, request=None):
1133 """
1130 """
1134 Returns qualified url pointing to the shadow repository. If this pull
1131 Returns qualified url pointing to the shadow repository. If this pull
1135 request is closed there is no shadow repository and ``None`` will be
1132 request is closed there is no shadow repository and ``None`` will be
1136 returned.
1133 returned.
1137 """
1134 """
1138 if pull_request.is_closed():
1135 if pull_request.is_closed():
1139 return None
1136 return None
1140 else:
1137 else:
1141 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1138 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1142 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1139 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1143
1140
1144 def notify_reviewers(self, pull_request, reviewers_ids):
1141 def notify_reviewers(self, pull_request, reviewers_ids):
1145 # notification to reviewers
1142 # notification to reviewers
1146 if not reviewers_ids:
1143 if not reviewers_ids:
1147 return
1144 return
1148
1145
1149 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1146 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1150
1147
1151 pull_request_obj = pull_request
1148 pull_request_obj = pull_request
1152 # get the current participants of this pull request
1149 # get the current participants of this pull request
1153 recipients = reviewers_ids
1150 recipients = reviewers_ids
1154 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1151 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1155
1152
1156 pr_source_repo = pull_request_obj.source_repo
1153 pr_source_repo = pull_request_obj.source_repo
1157 pr_target_repo = pull_request_obj.target_repo
1154 pr_target_repo = pull_request_obj.target_repo
1158
1155
1159 pr_url = h.route_url('pullrequest_show',
1156 pr_url = h.route_url('pullrequest_show',
1160 repo_name=pr_target_repo.repo_name,
1157 repo_name=pr_target_repo.repo_name,
1161 pull_request_id=pull_request_obj.pull_request_id,)
1158 pull_request_id=pull_request_obj.pull_request_id,)
1162
1159
1163 # set some variables for email notification
1160 # set some variables for email notification
1164 pr_target_repo_url = h.route_url(
1161 pr_target_repo_url = h.route_url(
1165 'repo_summary', repo_name=pr_target_repo.repo_name)
1162 'repo_summary', repo_name=pr_target_repo.repo_name)
1166
1163
1167 pr_source_repo_url = h.route_url(
1164 pr_source_repo_url = h.route_url(
1168 'repo_summary', repo_name=pr_source_repo.repo_name)
1165 'repo_summary', repo_name=pr_source_repo.repo_name)
1169
1166
1170 # pull request specifics
1167 # pull request specifics
1171 pull_request_commits = [
1168 pull_request_commits = [
1172 (x.raw_id, x.message)
1169 (x.raw_id, x.message)
1173 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1170 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1174
1171
1175 kwargs = {
1172 kwargs = {
1176 'user': pull_request.author,
1173 'user': pull_request.author,
1177 'pull_request': pull_request_obj,
1174 'pull_request': pull_request_obj,
1178 'pull_request_commits': pull_request_commits,
1175 'pull_request_commits': pull_request_commits,
1179
1176
1180 'pull_request_target_repo': pr_target_repo,
1177 'pull_request_target_repo': pr_target_repo,
1181 'pull_request_target_repo_url': pr_target_repo_url,
1178 'pull_request_target_repo_url': pr_target_repo_url,
1182
1179
1183 'pull_request_source_repo': pr_source_repo,
1180 'pull_request_source_repo': pr_source_repo,
1184 'pull_request_source_repo_url': pr_source_repo_url,
1181 'pull_request_source_repo_url': pr_source_repo_url,
1185
1182
1186 'pull_request_url': pr_url,
1183 'pull_request_url': pr_url,
1187 }
1184 }
1188
1185
1189 # pre-generate the subject for notification itself
1186 # pre-generate the subject for notification itself
1190 (subject,
1187 (subject,
1191 _h, _e, # we don't care about those
1188 _h, _e, # we don't care about those
1192 body_plaintext) = EmailNotificationModel().render_email(
1189 body_plaintext) = EmailNotificationModel().render_email(
1193 notification_type, **kwargs)
1190 notification_type, **kwargs)
1194
1191
1195 # create notification objects, and emails
1192 # create notification objects, and emails
1196 NotificationModel().create(
1193 NotificationModel().create(
1197 created_by=pull_request.author,
1194 created_by=pull_request.author,
1198 notification_subject=subject,
1195 notification_subject=subject,
1199 notification_body=body_plaintext,
1196 notification_body=body_plaintext,
1200 notification_type=notification_type,
1197 notification_type=notification_type,
1201 recipients=recipients,
1198 recipients=recipients,
1202 email_kwargs=kwargs,
1199 email_kwargs=kwargs,
1203 )
1200 )
1204
1201
1205 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1202 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1206 commit_changes, file_changes):
1203 commit_changes, file_changes):
1207
1204
1208 updating_user_id = updating_user.user_id
1205 updating_user_id = updating_user.user_id
1209 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1206 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1210 # NOTE(marcink): send notification to all other users except the
1207 # NOTE(marcink): send notification to all other users except the
1211 # person who updated the PR
1208 # person who updated the PR
1212 recipients = reviewers.difference(set([updating_user_id]))
1209 recipients = reviewers.difference(set([updating_user_id]))
1213
1210
1214 log.debug('Notify following recipients about pull-request update %s', recipients)
1211 log.debug('Notify following recipients about pull-request update %s', recipients)
1215
1212
1216 pull_request_obj = pull_request
1213 pull_request_obj = pull_request
1217
1214
1218 # send email about the update
1215 # send email about the update
1219 changed_files = (
1216 changed_files = (
1220 file_changes.added + file_changes.modified + file_changes.removed)
1217 file_changes.added + file_changes.modified + file_changes.removed)
1221
1218
1222 pr_source_repo = pull_request_obj.source_repo
1219 pr_source_repo = pull_request_obj.source_repo
1223 pr_target_repo = pull_request_obj.target_repo
1220 pr_target_repo = pull_request_obj.target_repo
1224
1221
1225 pr_url = h.route_url('pullrequest_show',
1222 pr_url = h.route_url('pullrequest_show',
1226 repo_name=pr_target_repo.repo_name,
1223 repo_name=pr_target_repo.repo_name,
1227 pull_request_id=pull_request_obj.pull_request_id,)
1224 pull_request_id=pull_request_obj.pull_request_id,)
1228
1225
1229 # set some variables for email notification
1226 # set some variables for email notification
1230 pr_target_repo_url = h.route_url(
1227 pr_target_repo_url = h.route_url(
1231 'repo_summary', repo_name=pr_target_repo.repo_name)
1228 'repo_summary', repo_name=pr_target_repo.repo_name)
1232
1229
1233 pr_source_repo_url = h.route_url(
1230 pr_source_repo_url = h.route_url(
1234 'repo_summary', repo_name=pr_source_repo.repo_name)
1231 'repo_summary', repo_name=pr_source_repo.repo_name)
1235
1232
1236 email_kwargs = {
1233 email_kwargs = {
1237 'date': datetime.datetime.now(),
1234 'date': datetime.datetime.now(),
1238 'updating_user': updating_user,
1235 'updating_user': updating_user,
1239
1236
1240 'pull_request': pull_request_obj,
1237 'pull_request': pull_request_obj,
1241
1238
1242 'pull_request_target_repo': pr_target_repo,
1239 'pull_request_target_repo': pr_target_repo,
1243 'pull_request_target_repo_url': pr_target_repo_url,
1240 'pull_request_target_repo_url': pr_target_repo_url,
1244
1241
1245 'pull_request_source_repo': pr_source_repo,
1242 'pull_request_source_repo': pr_source_repo,
1246 'pull_request_source_repo_url': pr_source_repo_url,
1243 'pull_request_source_repo_url': pr_source_repo_url,
1247
1244
1248 'pull_request_url': pr_url,
1245 'pull_request_url': pr_url,
1249
1246
1250 'ancestor_commit_id': ancestor_commit_id,
1247 'ancestor_commit_id': ancestor_commit_id,
1251 'added_commits': commit_changes.added,
1248 'added_commits': commit_changes.added,
1252 'removed_commits': commit_changes.removed,
1249 'removed_commits': commit_changes.removed,
1253 'changed_files': changed_files,
1250 'changed_files': changed_files,
1254 'added_files': file_changes.added,
1251 'added_files': file_changes.added,
1255 'modified_files': file_changes.modified,
1252 'modified_files': file_changes.modified,
1256 'removed_files': file_changes.removed,
1253 'removed_files': file_changes.removed,
1257 }
1254 }
1258
1255
1259 (subject,
1256 (subject,
1260 _h, _e, # we don't care about those
1257 _h, _e, # we don't care about those
1261 body_plaintext) = EmailNotificationModel().render_email(
1258 body_plaintext) = EmailNotificationModel().render_email(
1262 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1259 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1263
1260
1264 # create notification objects, and emails
1261 # create notification objects, and emails
1265 NotificationModel().create(
1262 NotificationModel().create(
1266 created_by=updating_user,
1263 created_by=updating_user,
1267 notification_subject=subject,
1264 notification_subject=subject,
1268 notification_body=body_plaintext,
1265 notification_body=body_plaintext,
1269 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1266 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1270 recipients=recipients,
1267 recipients=recipients,
1271 email_kwargs=email_kwargs,
1268 email_kwargs=email_kwargs,
1272 )
1269 )
1273
1270
1274 def delete(self, pull_request, user):
1271 def delete(self, pull_request, user):
1275 pull_request = self.__get_pull_request(pull_request)
1272 pull_request = self.__get_pull_request(pull_request)
1276 old_data = pull_request.get_api_data(with_merge_state=False)
1273 old_data = pull_request.get_api_data(with_merge_state=False)
1277 self._cleanup_merge_workspace(pull_request)
1274 self._cleanup_merge_workspace(pull_request)
1278 self._log_audit_action(
1275 self._log_audit_action(
1279 'repo.pull_request.delete', {'old_data': old_data},
1276 'repo.pull_request.delete', {'old_data': old_data},
1280 user, pull_request)
1277 user, pull_request)
1281 Session().delete(pull_request)
1278 Session().delete(pull_request)
1282
1279
1283 def close_pull_request(self, pull_request, user):
1280 def close_pull_request(self, pull_request, user):
1284 pull_request = self.__get_pull_request(pull_request)
1281 pull_request = self.__get_pull_request(pull_request)
1285 self._cleanup_merge_workspace(pull_request)
1282 self._cleanup_merge_workspace(pull_request)
1286 pull_request.status = PullRequest.STATUS_CLOSED
1283 pull_request.status = PullRequest.STATUS_CLOSED
1287 pull_request.updated_on = datetime.datetime.now()
1284 pull_request.updated_on = datetime.datetime.now()
1288 Session().add(pull_request)
1285 Session().add(pull_request)
1289 self.trigger_pull_request_hook(
1286 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1290 pull_request, pull_request.author, 'close')
1291
1287
1292 pr_data = pull_request.get_api_data(with_merge_state=False)
1288 pr_data = pull_request.get_api_data(with_merge_state=False)
1293 self._log_audit_action(
1289 self._log_audit_action(
1294 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1290 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1295
1291
1296 def close_pull_request_with_comment(
1292 def close_pull_request_with_comment(
1297 self, pull_request, user, repo, message=None, auth_user=None):
1293 self, pull_request, user, repo, message=None, auth_user=None):
1298
1294
1299 pull_request_review_status = pull_request.calculated_review_status()
1295 pull_request_review_status = pull_request.calculated_review_status()
1300
1296
1301 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1297 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1302 # approved only if we have voting consent
1298 # approved only if we have voting consent
1303 status = ChangesetStatus.STATUS_APPROVED
1299 status = ChangesetStatus.STATUS_APPROVED
1304 else:
1300 else:
1305 status = ChangesetStatus.STATUS_REJECTED
1301 status = ChangesetStatus.STATUS_REJECTED
1306 status_lbl = ChangesetStatus.get_status_lbl(status)
1302 status_lbl = ChangesetStatus.get_status_lbl(status)
1307
1303
1308 default_message = (
1304 default_message = (
1309 'Closing with status change {transition_icon} {status}.'
1305 'Closing with status change {transition_icon} {status}.'
1310 ).format(transition_icon='>', status=status_lbl)
1306 ).format(transition_icon='>', status=status_lbl)
1311 text = message or default_message
1307 text = message or default_message
1312
1308
1313 # create a comment, and link it to new status
1309 # create a comment, and link it to new status
1314 comment = CommentsModel().create(
1310 comment = CommentsModel().create(
1315 text=text,
1311 text=text,
1316 repo=repo.repo_id,
1312 repo=repo.repo_id,
1317 user=user.user_id,
1313 user=user.user_id,
1318 pull_request=pull_request.pull_request_id,
1314 pull_request=pull_request.pull_request_id,
1319 status_change=status_lbl,
1315 status_change=status_lbl,
1320 status_change_type=status,
1316 status_change_type=status,
1321 closing_pr=True,
1317 closing_pr=True,
1322 auth_user=auth_user,
1318 auth_user=auth_user,
1323 )
1319 )
1324
1320
1325 # calculate old status before we change it
1321 # calculate old status before we change it
1326 old_calculated_status = pull_request.calculated_review_status()
1322 old_calculated_status = pull_request.calculated_review_status()
1327 ChangesetStatusModel().set_status(
1323 ChangesetStatusModel().set_status(
1328 repo.repo_id,
1324 repo.repo_id,
1329 status,
1325 status,
1330 user.user_id,
1326 user.user_id,
1331 comment=comment,
1327 comment=comment,
1332 pull_request=pull_request.pull_request_id
1328 pull_request=pull_request.pull_request_id
1333 )
1329 )
1334
1330
1335 Session().flush()
1331 Session().flush()
1336 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1332
1333 self.trigger_pull_request_hook(pull_request, user, 'comment',
1334 data={'comment': comment})
1335
1337 # we now calculate the status of the pull request again, and based on that
1336 # we now calculate the status of the pull request again, and based on that
1338 # calculation trigger a status change. This matters in cases where a
1337 # calculation trigger a status change. This matters in cases where a
1339 # non-reviewer admin closes a pr: their vote doesn't change the status,
1338 # non-reviewer admin closes a pr: their vote doesn't change the status,
1340 # while a reviewer's close might.
1339 # while a reviewer's close might.
1341 calculated_status = pull_request.calculated_review_status()
1340 calculated_status = pull_request.calculated_review_status()
1342 if old_calculated_status != calculated_status:
1341 if old_calculated_status != calculated_status:
1343 self.trigger_pull_request_hook(
1342 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1344 pull_request, user, 'review_status_change',
1343 data={'status': calculated_status})
1345 data={'status': calculated_status})
1346
1344
1347 # finally close the PR
1345 # finally close the PR
1348 PullRequestModel().close_pull_request(
1346 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1349 pull_request.pull_request_id, user)
1350
1347
1351 return comment, status
1348 return comment, status
1352
1349
1353 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1350 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1354 _ = translator or get_current_request().translate
1351 _ = translator or get_current_request().translate
1355
1352
1356 if not self._is_merge_enabled(pull_request):
1353 if not self._is_merge_enabled(pull_request):
1357 return None, False, _('Server-side pull request merging is disabled.')
1354 return None, False, _('Server-side pull request merging is disabled.')
1358
1355
1359 if pull_request.is_closed():
1356 if pull_request.is_closed():
1360 return None, False, _('This pull request is closed.')
1357 return None, False, _('This pull request is closed.')
1361
1358
1362 merge_possible, msg = self._check_repo_requirements(
1359 merge_possible, msg = self._check_repo_requirements(
1363 target=pull_request.target_repo, source=pull_request.source_repo,
1360 target=pull_request.target_repo, source=pull_request.source_repo,
1364 translator=_)
1361 translator=_)
1365 if not merge_possible:
1362 if not merge_possible:
1366 return None, merge_possible, msg
1363 return None, merge_possible, msg
1367
1364
1368 try:
1365 try:
1369 merge_response = self._try_merge(
1366 merge_response = self._try_merge(
1370 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1367 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1371 log.debug("Merge response: %s", merge_response)
1368 log.debug("Merge response: %s", merge_response)
1372 return merge_response, merge_response.possible, merge_response.merge_status_message
1369 return merge_response, merge_response.possible, merge_response.merge_status_message
1373 except NotImplementedError:
1370 except NotImplementedError:
1374 return None, False, _('Pull request merging is not supported.')
1371 return None, False, _('Pull request merging is not supported.')
1375
1372
1376 def _check_repo_requirements(self, target, source, translator):
1373 def _check_repo_requirements(self, target, source, translator):
1377 """
1374 """
1378 Check if `target` and `source` have compatible requirements.
1375 Check if `target` and `source` have compatible requirements.
1379
1376
1380 Currently this is just checking for largefiles.
1377 Currently this is just checking for largefiles.
1381 """
1378 """
1382 _ = translator
1379 _ = translator
1383 target_has_largefiles = self._has_largefiles(target)
1380 target_has_largefiles = self._has_largefiles(target)
1384 source_has_largefiles = self._has_largefiles(source)
1381 source_has_largefiles = self._has_largefiles(source)
1385 merge_possible = True
1382 merge_possible = True
1386 message = u''
1383 message = u''
1387
1384
1388 if target_has_largefiles != source_has_largefiles:
1385 if target_has_largefiles != source_has_largefiles:
1389 merge_possible = False
1386 merge_possible = False
1390 if source_has_largefiles:
1387 if source_has_largefiles:
1391 message = _(
1388 message = _(
1392 'Target repository large files support is disabled.')
1389 'Target repository large files support is disabled.')
1393 else:
1390 else:
1394 message = _(
1391 message = _(
1395 'Source repository large files support is disabled.')
1392 'Source repository large files support is disabled.')
1396
1393
1397 return merge_possible, message
1394 return merge_possible, message
1398
1395
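A small truth-table sketch of the largefiles compatibility check above (messages copied from the method; the helper name is invented):

# illustrative only -- condensed version of _check_repo_requirements
def largefiles_compatible(target_has_largefiles, source_has_largefiles):
    if target_has_largefiles == source_has_largefiles:
        return True, u''
    if source_has_largefiles:
        return False, u'Target repository large files support is disabled.'
    return False, u'Source repository large files support is disabled.'

assert largefiles_compatible(True, True) == (True, u'')
assert largefiles_compatible(False, True) == (
    False, u'Target repository large files support is disabled.')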
1399 def _has_largefiles(self, repo):
1396 def _has_largefiles(self, repo):
1400 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1397 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1401 'extensions', 'largefiles')
1398 'extensions', 'largefiles')
1402 return largefiles_ui and largefiles_ui[0].active
1399 return largefiles_ui and largefiles_ui[0].active
1403
1400
1404 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1401 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1405 """
1402 """
1406 Try to merge the pull request and return the merge status.
1403 Try to merge the pull request and return the merge status.
1407 """
1404 """
1408 log.debug(
1405 log.debug(
1409 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1406 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1410 pull_request.pull_request_id, force_shadow_repo_refresh)
1407 pull_request.pull_request_id, force_shadow_repo_refresh)
1411 target_vcs = pull_request.target_repo.scm_instance()
1408 target_vcs = pull_request.target_repo.scm_instance()
1412 # Refresh the target reference.
1409 # Refresh the target reference.
1413 try:
1410 try:
1414 target_ref = self._refresh_reference(
1411 target_ref = self._refresh_reference(
1415 pull_request.target_ref_parts, target_vcs)
1412 pull_request.target_ref_parts, target_vcs)
1416 except CommitDoesNotExistError:
1413 except CommitDoesNotExistError:
1417 merge_state = MergeResponse(
1414 merge_state = MergeResponse(
1418 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1415 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1419 metadata={'target_ref': pull_request.target_ref_parts})
1416 metadata={'target_ref': pull_request.target_ref_parts})
1420 return merge_state
1417 return merge_state
1421
1418
1422 target_locked = pull_request.target_repo.locked
1419 target_locked = pull_request.target_repo.locked
1423 if target_locked and target_locked[0]:
1420 if target_locked and target_locked[0]:
1424 locked_by = 'user:{}'.format(target_locked[0])
1421 locked_by = 'user:{}'.format(target_locked[0])
1425 log.debug("The target repository is locked by %s.", locked_by)
1422 log.debug("The target repository is locked by %s.", locked_by)
1426 merge_state = MergeResponse(
1423 merge_state = MergeResponse(
1427 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1424 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1428 metadata={'locked_by': locked_by})
1425 metadata={'locked_by': locked_by})
1429 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1426 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1430 pull_request, target_ref):
1427 pull_request, target_ref):
1431 log.debug("Refreshing the merge status of the repository.")
1428 log.debug("Refreshing the merge status of the repository.")
1432 merge_state = self._refresh_merge_state(
1429 merge_state = self._refresh_merge_state(
1433 pull_request, target_vcs, target_ref)
1430 pull_request, target_vcs, target_ref)
1434 else:
1431 else:
1435 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1432 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1436 metadata = {
1433 metadata = {
1437 'unresolved_files': '',
1434 'unresolved_files': '',
1438 'target_ref': pull_request.target_ref_parts,
1435 'target_ref': pull_request.target_ref_parts,
1439 'source_ref': pull_request.source_ref_parts,
1436 'source_ref': pull_request.source_ref_parts,
1440 }
1437 }
1441 if pull_request.last_merge_metadata:
1438 if pull_request.last_merge_metadata:
1442 metadata.update(pull_request.last_merge_metadata)
1439 metadata.update(pull_request.last_merge_metadata)
1443
1440
1444 if not possible and target_ref.type == 'branch':
1441 if not possible and target_ref.type == 'branch':
1445 # NOTE(marcink): case for mercurial multiple heads on branch
1442 # NOTE(marcink): case for mercurial multiple heads on branch
1446 heads = target_vcs._heads(target_ref.name)
1443 heads = target_vcs._heads(target_ref.name)
1447 if len(heads) != 1:
1444 if len(heads) != 1:
1448 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1445 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1449 metadata.update({
1446 metadata.update({
1450 'heads': heads
1447 'heads': heads
1451 })
1448 })
1452
1449
1453 merge_state = MergeResponse(
1450 merge_state = MergeResponse(
1454 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1451 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1455
1452
1456 return merge_state
1453 return merge_state
1457
1454
1458 def _refresh_reference(self, reference, vcs_repository):
1455 def _refresh_reference(self, reference, vcs_repository):
1459 if reference.type in self.UPDATABLE_REF_TYPES:
1456 if reference.type in self.UPDATABLE_REF_TYPES:
1460 name_or_id = reference.name
1457 name_or_id = reference.name
1461 else:
1458 else:
1462 name_or_id = reference.commit_id
1459 name_or_id = reference.commit_id
1463
1460
1464 refreshed_commit = vcs_repository.get_commit(name_or_id)
1461 refreshed_commit = vcs_repository.get_commit(name_or_id)
1465 refreshed_reference = Reference(
1462 refreshed_reference = Reference(
1466 reference.type, reference.name, refreshed_commit.raw_id)
1463 reference.type, reference.name, refreshed_commit.raw_id)
1467 return refreshed_reference
1464 return refreshed_reference
1468
1465
1469 def _needs_merge_state_refresh(self, pull_request, target_reference):
1466 def _needs_merge_state_refresh(self, pull_request, target_reference):
1470 return not(
1467 return not(
1471 pull_request.revisions and
1468 pull_request.revisions and
1472 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1469 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1473 target_reference.commit_id == pull_request._last_merge_target_rev)
1470 target_reference.commit_id == pull_request._last_merge_target_rev)
1474
1471
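Put differently, the cached merge state is only reused when both tips are unchanged; a sketch with invented commit ids:

# illustrative only -- invented ids, same condition as _needs_merge_state_refresh
revisions = ['feedbeef', 'cafe1234']   # pull request revisions, tip first
last_merge_source_rev = 'feedbeef'     # source tip at the last merge check
last_merge_target_rev = 'abc99999'     # target tip at the last merge check
target_commit_id = 'abc99999'          # freshly resolved target reference

needs_refresh = not (
    revisions and
    revisions[0] == last_merge_source_rev and
    target_commit_id == last_merge_target_rev)
# False here; any new commit on either side flips it to True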
1475 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1472 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1476 workspace_id = self._workspace_id(pull_request)
1473 workspace_id = self._workspace_id(pull_request)
1477 source_vcs = pull_request.source_repo.scm_instance()
1474 source_vcs = pull_request.source_repo.scm_instance()
1478 repo_id = pull_request.target_repo.repo_id
1475 repo_id = pull_request.target_repo.repo_id
1479 use_rebase = self._use_rebase_for_merging(pull_request)
1476 use_rebase = self._use_rebase_for_merging(pull_request)
1480 close_branch = self._close_branch_before_merging(pull_request)
1477 close_branch = self._close_branch_before_merging(pull_request)
1481 merge_state = target_vcs.merge(
1478 merge_state = target_vcs.merge(
1482 repo_id, workspace_id,
1479 repo_id, workspace_id,
1483 target_reference, source_vcs, pull_request.source_ref_parts,
1480 target_reference, source_vcs, pull_request.source_ref_parts,
1484 dry_run=True, use_rebase=use_rebase,
1481 dry_run=True, use_rebase=use_rebase,
1485 close_branch=close_branch)
1482 close_branch=close_branch)
1486
1483
1487 # Do not store the response if there was an unknown error.
1484 # Do not store the response if there was an unknown error.
1488 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1485 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1489 pull_request._last_merge_source_rev = \
1486 pull_request._last_merge_source_rev = \
1490 pull_request.source_ref_parts.commit_id
1487 pull_request.source_ref_parts.commit_id
1491 pull_request._last_merge_target_rev = target_reference.commit_id
1488 pull_request._last_merge_target_rev = target_reference.commit_id
1492 pull_request.last_merge_status = merge_state.failure_reason
1489 pull_request.last_merge_status = merge_state.failure_reason
1493 pull_request.last_merge_metadata = merge_state.metadata
1490 pull_request.last_merge_metadata = merge_state.metadata
1494
1491
1495 pull_request.shadow_merge_ref = merge_state.merge_ref
1492 pull_request.shadow_merge_ref = merge_state.merge_ref
1496 Session().add(pull_request)
1493 Session().add(pull_request)
1497 Session().commit()
1494 Session().commit()
1498
1495
1499 return merge_state
1496 return merge_state
1500
1497
1501 def _workspace_id(self, pull_request):
1498 def _workspace_id(self, pull_request):
1502 workspace_id = 'pr-%s' % pull_request.pull_request_id
1499 workspace_id = 'pr-%s' % pull_request.pull_request_id
1503 return workspace_id
1500 return workspace_id
1504
1501
1505 def generate_repo_data(self, repo, commit_id=None, branch=None,
1502 def generate_repo_data(self, repo, commit_id=None, branch=None,
1506 bookmark=None, translator=None):
1503 bookmark=None, translator=None):
1507 from rhodecode.model.repo import RepoModel
1504 from rhodecode.model.repo import RepoModel
1508
1505
1509 all_refs, selected_ref = \
1506 all_refs, selected_ref = \
1510 self._get_repo_pullrequest_sources(
1507 self._get_repo_pullrequest_sources(
1511 repo.scm_instance(), commit_id=commit_id,
1508 repo.scm_instance(), commit_id=commit_id,
1512 branch=branch, bookmark=bookmark, translator=translator)
1509 branch=branch, bookmark=bookmark, translator=translator)
1513
1510
1514 refs_select2 = []
1511 refs_select2 = []
1515 for element in all_refs:
1512 for element in all_refs:
1516 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1513 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1517 refs_select2.append({'text': element[1], 'children': children})
1514 refs_select2.append({'text': element[1], 'children': children})
1518
1515
1519 return {
1516 return {
1520 'user': {
1517 'user': {
1521 'user_id': repo.user.user_id,
1518 'user_id': repo.user.user_id,
1522 'username': repo.user.username,
1519 'username': repo.user.username,
1523 'firstname': repo.user.first_name,
1520 'firstname': repo.user.first_name,
1524 'lastname': repo.user.last_name,
1521 'lastname': repo.user.last_name,
1525 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1522 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1526 },
1523 },
1527 'name': repo.repo_name,
1524 'name': repo.repo_name,
1528 'link': RepoModel().get_url(repo),
1525 'link': RepoModel().get_url(repo),
1529 'description': h.chop_at_smart(repo.description_safe, '\n'),
1526 'description': h.chop_at_smart(repo.description_safe, '\n'),
1530 'refs': {
1527 'refs': {
1531 'all_refs': all_refs,
1528 'all_refs': all_refs,
1532 'selected_ref': selected_ref,
1529 'selected_ref': selected_ref,
1533 'select2_refs': refs_select2
1530 'select2_refs': refs_select2
1534 }
1531 }
1535 }
1532 }
1536
1533
1537 def generate_pullrequest_title(self, source, source_ref, target):
1534 def generate_pullrequest_title(self, source, source_ref, target):
1538 return u'{source}#{at_ref} to {target}'.format(
1535 return u'{source}#{at_ref} to {target}'.format(
1539 source=source,
1536 source=source,
1540 at_ref=source_ref,
1537 at_ref=source_ref,
1541 target=target,
1538 target=target,
1542 )
1539 )
1543
1540
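For example (repository and ref names invented), the generated title is just the format string filled in:

# illustrative only -- invented names
title = u'{source}#{at_ref} to {target}'.format(
    source='my-fork', at_ref='feature-x', target='upstream-repo')
# title == u'my-fork#feature-x to upstream-repo'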
1544 def _cleanup_merge_workspace(self, pull_request):
1541 def _cleanup_merge_workspace(self, pull_request):
1545 # Merging related cleanup
1542 # Merging related cleanup
1546 repo_id = pull_request.target_repo.repo_id
1543 repo_id = pull_request.target_repo.repo_id
1547 target_scm = pull_request.target_repo.scm_instance()
1544 target_scm = pull_request.target_repo.scm_instance()
1548 workspace_id = self._workspace_id(pull_request)
1545 workspace_id = self._workspace_id(pull_request)
1549
1546
1550 try:
1547 try:
1551 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1548 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1552 except NotImplementedError:
1549 except NotImplementedError:
1553 pass
1550 pass
1554
1551
1555 def _get_repo_pullrequest_sources(
1552 def _get_repo_pullrequest_sources(
1556 self, repo, commit_id=None, branch=None, bookmark=None,
1553 self, repo, commit_id=None, branch=None, bookmark=None,
1557 translator=None):
1554 translator=None):
1558 """
1555 """
1559 Return a structure with repo's interesting commits, suitable for
1556 Return a structure with repo's interesting commits, suitable for
1560 the selectors in pullrequest controller
1557 the selectors in pullrequest controller
1561
1558
1562 :param commit_id: a commit that must be in the list somehow
1559 :param commit_id: a commit that must be in the list somehow
1563 and selected by default
1560 and selected by default
1564 :param branch: a branch that must be in the list and selected
1561 :param branch: a branch that must be in the list and selected
1565 by default - even if closed
1562 by default - even if closed
1566 :param bookmark: a bookmark that must be in the list and selected by default
1563 :param bookmark: a bookmark that must be in the list and selected by default
1567 """
1564 """
1568 _ = translator or get_current_request().translate
1565 _ = translator or get_current_request().translate
1569
1566
1570 commit_id = safe_str(commit_id) if commit_id else None
1567 commit_id = safe_str(commit_id) if commit_id else None
1571 branch = safe_unicode(branch) if branch else None
1568 branch = safe_unicode(branch) if branch else None
1572 bookmark = safe_unicode(bookmark) if bookmark else None
1569 bookmark = safe_unicode(bookmark) if bookmark else None
1573
1570
1574 selected = None
1571 selected = None
1575
1572
1576 # order matters: first source that has commit_id in it will be selected
1573 # order matters: first source that has commit_id in it will be selected
1577 sources = []
1574 sources = []
1578 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1575 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1579 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1576 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1580
1577
1581 if commit_id:
1578 if commit_id:
1582 ref_commit = (h.short_id(commit_id), commit_id)
1579 ref_commit = (h.short_id(commit_id), commit_id)
1583 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1580 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1584
1581
1585 sources.append(
1582 sources.append(
1586 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1583 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1587 )
1584 )
1588
1585
1589 groups = []
1586 groups = []
1590
1587
1591 for group_key, ref_list, group_name, match in sources:
1588 for group_key, ref_list, group_name, match in sources:
1592 group_refs = []
1589 group_refs = []
1593 for ref_name, ref_id in ref_list:
1590 for ref_name, ref_id in ref_list:
1594 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1591 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1595 group_refs.append((ref_key, ref_name))
1592 group_refs.append((ref_key, ref_name))
1596
1593
1597 if not selected:
1594 if not selected:
1598 if set([commit_id, match]) & set([ref_id, ref_name]):
1595 if set([commit_id, match]) & set([ref_id, ref_name]):
1599 selected = ref_key
1596 selected = ref_key
1600
1597
1601 if group_refs:
1598 if group_refs:
1602 groups.append((group_refs, group_name))
1599 groups.append((group_refs, group_name))
1603
1600
1604 if not selected:
1601 if not selected:
1605 ref = commit_id or branch or bookmark
1602 ref = commit_id or branch or bookmark
1606 if ref:
1603 if ref:
1607 raise CommitDoesNotExistError(
1604 raise CommitDoesNotExistError(
1608 u'No commit refs could be found matching: {}'.format(ref))
1605 u'No commit refs could be found matching: {}'.format(ref))
1609 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1606 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1610 selected = u'branch:{}:{}'.format(
1607 selected = u'branch:{}:{}'.format(
1611 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1608 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1612 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1609 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1613 )
1610 )
1614 elif repo.commit_ids:
1611 elif repo.commit_ids:
1615 # make the user select in this case
1612 # make the user select in this case
1616 selected = None
1613 selected = None
1617 else:
1614 else:
1618 raise EmptyRepositoryError()
1615 raise EmptyRepositoryError()
1619 return groups, selected
1616 return groups, selected
1620
1617
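# Hedged sketch of the (groups, selected) structure returned by
# _get_repo_pullrequest_sources() above; the ref names and hashes are hypothetical.
groups = [
    ([(u'book:stable:abc123', u'stable')], u'Bookmarks'),
    ([(u'branch:default:def456', u'default'),
      (u'branch:feature-1:0a1b2c', u'feature-1')], u'Branches'),
]
selected = u'branch:default:def456'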
1621 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1618 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1622 hide_whitespace_changes, diff_context):
1619 hide_whitespace_changes, diff_context):
1623
1620
1624 return self._get_diff_from_pr_or_version(
1621 return self._get_diff_from_pr_or_version(
1625 source_repo, source_ref_id, target_ref_id,
1622 source_repo, source_ref_id, target_ref_id,
1626 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1623 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1627
1624
1628 def _get_diff_from_pr_or_version(
1625 def _get_diff_from_pr_or_version(
1629 self, source_repo, source_ref_id, target_ref_id,
1626 self, source_repo, source_ref_id, target_ref_id,
1630 hide_whitespace_changes, diff_context):
1627 hide_whitespace_changes, diff_context):
1631
1628
1632 target_commit = source_repo.get_commit(
1629 target_commit = source_repo.get_commit(
1633 commit_id=safe_str(target_ref_id))
1630 commit_id=safe_str(target_ref_id))
1634 source_commit = source_repo.get_commit(
1631 source_commit = source_repo.get_commit(
1635 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1632 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1636 if isinstance(source_repo, Repository):
1633 if isinstance(source_repo, Repository):
1637 vcs_repo = source_repo.scm_instance()
1634 vcs_repo = source_repo.scm_instance()
1638 else:
1635 else:
1639 vcs_repo = source_repo
1636 vcs_repo = source_repo
1640
1637
1641 # TODO: johbo: In the context of an update, we cannot reach
1638 # TODO: johbo: In the context of an update, we cannot reach
1642 # the old commit anymore with our normal mechanisms. It needs
1639 # the old commit anymore with our normal mechanisms. It needs
1643 # some sort of special support in the vcs layer to avoid this
1640 # some sort of special support in the vcs layer to avoid this
1644 # workaround.
1641 # workaround.
1645 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1642 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1646 vcs_repo.alias == 'git'):
1643 vcs_repo.alias == 'git'):
1647 source_commit.raw_id = safe_str(source_ref_id)
1644 source_commit.raw_id = safe_str(source_ref_id)
1648
1645
1649 log.debug('calculating diff between '
1646 log.debug('calculating diff between '
1650 'source_ref:%s and target_ref:%s for repo `%s`',
1647 'source_ref:%s and target_ref:%s for repo `%s`',
1651 source_ref_id, target_ref_id,
1648 source_ref_id, target_ref_id,
1652 safe_unicode(vcs_repo.path))
1649 safe_unicode(vcs_repo.path))
1653
1650
1654 vcs_diff = vcs_repo.get_diff(
1651 vcs_diff = vcs_repo.get_diff(
1655 commit1=target_commit, commit2=source_commit,
1652 commit1=target_commit, commit2=source_commit,
1656 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1653 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1657 return vcs_diff
1654 return vcs_diff
1658
1655
1659 def _is_merge_enabled(self, pull_request):
1656 def _is_merge_enabled(self, pull_request):
1660 return self._get_general_setting(
1657 return self._get_general_setting(
1661 pull_request, 'rhodecode_pr_merge_enabled')
1658 pull_request, 'rhodecode_pr_merge_enabled')
1662
1659
1663 def _use_rebase_for_merging(self, pull_request):
1660 def _use_rebase_for_merging(self, pull_request):
1664 repo_type = pull_request.target_repo.repo_type
1661 repo_type = pull_request.target_repo.repo_type
1665 if repo_type == 'hg':
1662 if repo_type == 'hg':
1666 return self._get_general_setting(
1663 return self._get_general_setting(
1667 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1664 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1668 elif repo_type == 'git':
1665 elif repo_type == 'git':
1669 return self._get_general_setting(
1666 return self._get_general_setting(
1670 pull_request, 'rhodecode_git_use_rebase_for_merging')
1667 pull_request, 'rhodecode_git_use_rebase_for_merging')
1671
1668
1672 return False
1669 return False
1673
1670
1674 def _user_name_for_merging(self, pull_request, user):
1671 def _user_name_for_merging(self, pull_request, user):
1675 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1672 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1676 if env_user_name_attr and hasattr(user, env_user_name_attr):
1673 if env_user_name_attr and hasattr(user, env_user_name_attr):
1677 user_name_attr = env_user_name_attr
1674 user_name_attr = env_user_name_attr
1678 else:
1675 else:
1679 user_name_attr = 'short_contact'
1676 user_name_attr = 'short_contact'
1680
1677
1681 user_name = getattr(user, user_name_attr)
1678 user_name = getattr(user, user_name_attr)
1682 return user_name
1679 return user_name
1683
1680
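# Minimal sketch of overriding the merge user name via the environment variable
# read above; assumes the User object exposes a `username` attribute (it is used
# elsewhere in this module).
import os
# make merge commits use user.username instead of the default user.short_contact
os.environ['RC_MERGE_USER_NAME_ATTR'] = 'username'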
1684 def _close_branch_before_merging(self, pull_request):
1681 def _close_branch_before_merging(self, pull_request):
1685 repo_type = pull_request.target_repo.repo_type
1682 repo_type = pull_request.target_repo.repo_type
1686 if repo_type == 'hg':
1683 if repo_type == 'hg':
1687 return self._get_general_setting(
1684 return self._get_general_setting(
1688 pull_request, 'rhodecode_hg_close_branch_before_merging')
1685 pull_request, 'rhodecode_hg_close_branch_before_merging')
1689 elif repo_type == 'git':
1686 elif repo_type == 'git':
1690 return self._get_general_setting(
1687 return self._get_general_setting(
1691 pull_request, 'rhodecode_git_close_branch_before_merging')
1688 pull_request, 'rhodecode_git_close_branch_before_merging')
1692
1689
1693 return False
1690 return False
1694
1691
1695 def _get_general_setting(self, pull_request, settings_key, default=False):
1692 def _get_general_setting(self, pull_request, settings_key, default=False):
1696 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1693 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1697 settings = settings_model.get_general_settings()
1694 settings = settings_model.get_general_settings()
1698 return settings.get(settings_key, default)
1695 return settings.get(settings_key, default)
1699
1696
1700 def _log_audit_action(self, action, action_data, user, pull_request):
1697 def _log_audit_action(self, action, action_data, user, pull_request):
1701 audit_logger.store(
1698 audit_logger.store(
1702 action=action,
1699 action=action,
1703 action_data=action_data,
1700 action_data=action_data,
1704 user=user,
1701 user=user,
1705 repo=pull_request.target_repo)
1702 repo=pull_request.target_repo)
1706
1703
1707 def get_reviewer_functions(self):
1704 def get_reviewer_functions(self):
1708 """
1705 """
1709 Fetches functions for validation and fetching default reviewers.
1706 Fetches functions for validation and fetching default reviewers.
1710 If available we use the EE package, else we fall back to the CE
1707 If available we use the EE package, else we fall back to the CE
1711 package functions.
1708 package functions.
1712 """
1709 """
1713 try:
1710 try:
1714 from rc_reviewers.utils import get_default_reviewers_data
1711 from rc_reviewers.utils import get_default_reviewers_data
1715 from rc_reviewers.utils import validate_default_reviewers
1712 from rc_reviewers.utils import validate_default_reviewers
1716 except ImportError:
1713 except ImportError:
1717 from rhodecode.apps.repository.utils import get_default_reviewers_data
1714 from rhodecode.apps.repository.utils import get_default_reviewers_data
1718 from rhodecode.apps.repository.utils import validate_default_reviewers
1715 from rhodecode.apps.repository.utils import validate_default_reviewers
1719
1716
1720 return get_default_reviewers_data, validate_default_reviewers
1717 return get_default_reviewers_data, validate_default_reviewers
1721
1718
1722
1719
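# Hedged usage sketch of get_reviewer_functions(); the unpacking simply mirrors
# the tuple returned above, and EE installs transparently override the CE
# defaults through the ImportError fallback.
get_reviewers_data, validate_reviewers = \
    PullRequestModel().get_reviewer_functions()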
1723 class MergeCheck(object):
1720 class MergeCheck(object):
1724 """
1721 """
1725 Performs merge checks and returns a check object which stores information
1722 Performs merge checks and returns a check object which stores information
1726 about merge errors and merge conditions
1723 about merge errors and merge conditions
1727 """
1724 """
1728 TODO_CHECK = 'todo'
1725 TODO_CHECK = 'todo'
1729 PERM_CHECK = 'perm'
1726 PERM_CHECK = 'perm'
1730 REVIEW_CHECK = 'review'
1727 REVIEW_CHECK = 'review'
1731 MERGE_CHECK = 'merge'
1728 MERGE_CHECK = 'merge'
1732 WIP_CHECK = 'wip'
1729 WIP_CHECK = 'wip'
1733
1730
1734 def __init__(self):
1731 def __init__(self):
1735 self.review_status = None
1732 self.review_status = None
1736 self.merge_possible = None
1733 self.merge_possible = None
1737 self.merge_msg = ''
1734 self.merge_msg = ''
1738 self.merge_response = None
1735 self.merge_response = None
1739 self.failed = None
1736 self.failed = None
1740 self.errors = []
1737 self.errors = []
1741 self.error_details = OrderedDict()
1738 self.error_details = OrderedDict()
1742
1739
1743 def __repr__(self):
1740 def __repr__(self):
1744 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
1741 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
1745 self.merge_possible, self.failed, self.errors)
1742 self.merge_possible, self.failed, self.errors)
1746
1743
1747 def push_error(self, error_type, message, error_key, details):
1744 def push_error(self, error_type, message, error_key, details):
1748 self.failed = True
1745 self.failed = True
1749 self.errors.append([error_type, message])
1746 self.errors.append([error_type, message])
1750 self.error_details[error_key] = dict(
1747 self.error_details[error_key] = dict(
1751 details=details,
1748 details=details,
1752 error_type=error_type,
1749 error_type=error_type,
1753 message=message
1750 message=message
1754 )
1751 )
1755
1752
1756 @classmethod
1753 @classmethod
1757 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1754 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1758 force_shadow_repo_refresh=False):
1755 force_shadow_repo_refresh=False):
1759 _ = translator
1756 _ = translator
1760 merge_check = cls()
1757 merge_check = cls()
1761
1758
1762 # title has WIP:
1759 # title has WIP:
1763 if pull_request.work_in_progress:
1760 if pull_request.work_in_progress:
1764 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1761 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1765
1762
1766 msg = _('WIP marker in title prevents an accidental merge.')
1763 msg = _('WIP marker in title prevents an accidental merge.')
1767 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1764 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1768 if fail_early:
1765 if fail_early:
1769 return merge_check
1766 return merge_check
1770
1767
1771 # permissions to merge
1768 # permissions to merge
1772 user_allowed_to_merge = PullRequestModel().check_user_merge(
1769 user_allowed_to_merge = PullRequestModel().check_user_merge(
1773 pull_request, auth_user)
1770 pull_request, auth_user)
1774 if not user_allowed_to_merge:
1771 if not user_allowed_to_merge:
1775 log.debug("MergeCheck: cannot merge, user has no merge permission.")
1772 log.debug("MergeCheck: cannot merge, user has no merge permission.")
1776
1773
1777 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1774 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1778 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1775 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1779 if fail_early:
1776 if fail_early:
1780 return merge_check
1777 return merge_check
1781
1778
1782 # permission to merge into the target branch
1779 # permission to merge into the target branch
1783 target_commit_id = pull_request.target_ref_parts.commit_id
1780 target_commit_id = pull_request.target_ref_parts.commit_id
1784 if pull_request.target_ref_parts.type == 'branch':
1781 if pull_request.target_ref_parts.type == 'branch':
1785 branch_name = pull_request.target_ref_parts.name
1782 branch_name = pull_request.target_ref_parts.name
1786 else:
1783 else:
1787 # for mercurial we can always figure out the branch from the commit
1784 # for mercurial we can always figure out the branch from the commit
1788 # in case of bookmark
1785 # in case of bookmark
1789 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1786 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1790 branch_name = target_commit.branch
1787 branch_name = target_commit.branch
1791
1788
1792 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1789 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1793 pull_request.target_repo.repo_name, branch_name)
1790 pull_request.target_repo.repo_name, branch_name)
1794 if branch_perm and branch_perm == 'branch.none':
1791 if branch_perm and branch_perm == 'branch.none':
1795 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1792 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1796 branch_name, rule)
1793 branch_name, rule)
1797 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1794 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1798 if fail_early:
1795 if fail_early:
1799 return merge_check
1796 return merge_check
1800
1797
1801 # review status, must be always present
1798 # review status, must be always present
1802 review_status = pull_request.calculated_review_status()
1799 review_status = pull_request.calculated_review_status()
1803 merge_check.review_status = review_status
1800 merge_check.review_status = review_status
1804
1801
1805 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1802 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1806 if not status_approved:
1803 if not status_approved:
1807 log.debug("MergeCheck: cannot merge, approval is pending.")
1804 log.debug("MergeCheck: cannot merge, approval is pending.")
1808
1805
1809 msg = _('Pull request reviewer approval is pending.')
1806 msg = _('Pull request reviewer approval is pending.')
1810
1807
1811 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1808 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1812
1809
1813 if fail_early:
1810 if fail_early:
1814 return merge_check
1811 return merge_check
1815
1812
1816 # left over TODOs
1813 # left over TODOs
1817 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1814 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1818 if todos:
1815 if todos:
1819 log.debug("MergeCheck: cannot merge, {} "
1816 log.debug("MergeCheck: cannot merge, {} "
1820 "unresolved TODOs left.".format(len(todos)))
1817 "unresolved TODOs left.".format(len(todos)))
1821
1818
1822 if len(todos) == 1:
1819 if len(todos) == 1:
1823 msg = _('Cannot merge, {} TODO still not resolved.').format(
1820 msg = _('Cannot merge, {} TODO still not resolved.').format(
1824 len(todos))
1821 len(todos))
1825 else:
1822 else:
1826 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1823 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1827 len(todos))
1824 len(todos))
1828
1825
1829 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1826 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1830
1827
1831 if fail_early:
1828 if fail_early:
1832 return merge_check
1829 return merge_check
1833
1830
1834 # merge possible, here is the filesystem simulation + shadow repo
1831 # merge possible, here is the filesystem simulation + shadow repo
1835 merge_response, merge_status, msg = PullRequestModel().merge_status(
1832 merge_response, merge_status, msg = PullRequestModel().merge_status(
1836 pull_request, translator=translator,
1833 pull_request, translator=translator,
1837 force_shadow_repo_refresh=force_shadow_repo_refresh)
1834 force_shadow_repo_refresh=force_shadow_repo_refresh)
1838
1835
1839 merge_check.merge_possible = merge_status
1836 merge_check.merge_possible = merge_status
1840 merge_check.merge_msg = msg
1837 merge_check.merge_msg = msg
1841 merge_check.merge_response = merge_response
1838 merge_check.merge_response = merge_response
1842
1839
1843 if not merge_status:
1840 if not merge_status:
1844 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1841 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1845 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1842 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1846
1843
1847 if fail_early:
1844 if fail_early:
1848 return merge_check
1845 return merge_check
1849
1846
1850 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1847 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1851 return merge_check
1848 return merge_check
1852
1849
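# Hedged usage sketch of MergeCheck.validate(); pull_request, auth_user and
# request are assumed to come from an initialized RhodeCode request context.
check = MergeCheck.validate(
    pull_request, auth_user, translator=request.translate, fail_early=True)
if check.failed:
    for error_type, message in check.errors:
        log.debug('merge blocked (%s): %s', error_type, message)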
1853 @classmethod
1850 @classmethod
1854 def get_merge_conditions(cls, pull_request, translator):
1851 def get_merge_conditions(cls, pull_request, translator):
1855 _ = translator
1852 _ = translator
1856 merge_details = {}
1853 merge_details = {}
1857
1854
1858 model = PullRequestModel()
1855 model = PullRequestModel()
1859 use_rebase = model._use_rebase_for_merging(pull_request)
1856 use_rebase = model._use_rebase_for_merging(pull_request)
1860
1857
1861 if use_rebase:
1858 if use_rebase:
1862 merge_details['merge_strategy'] = dict(
1859 merge_details['merge_strategy'] = dict(
1863 details={},
1860 details={},
1864 message=_('Merge strategy: rebase')
1861 message=_('Merge strategy: rebase')
1865 )
1862 )
1866 else:
1863 else:
1867 merge_details['merge_strategy'] = dict(
1864 merge_details['merge_strategy'] = dict(
1868 details={},
1865 details={},
1869 message=_('Merge strategy: explicit merge commit')
1866 message=_('Merge strategy: explicit merge commit')
1870 )
1867 )
1871
1868
1872 close_branch = model._close_branch_before_merging(pull_request)
1869 close_branch = model._close_branch_before_merging(pull_request)
1873 if close_branch:
1870 if close_branch:
1874 repo_type = pull_request.target_repo.repo_type
1871 repo_type = pull_request.target_repo.repo_type
1875 close_msg = ''
1872 close_msg = ''
1876 if repo_type == 'hg':
1873 if repo_type == 'hg':
1877 close_msg = _('Source branch will be closed after merge.')
1874 close_msg = _('Source branch will be closed after merge.')
1878 elif repo_type == 'git':
1875 elif repo_type == 'git':
1879 close_msg = _('Source branch will be deleted after merge.')
1876 close_msg = _('Source branch will be deleted after merge.')
1880
1877
1881 merge_details['close_branch'] = dict(
1878 merge_details['close_branch'] = dict(
1882 details={},
1879 details={},
1883 message=close_msg
1880 message=close_msg
1884 )
1881 )
1885
1882
1886 return merge_details
1883 return merge_details
1887
1884
1888
1885
1889 ChangeTuple = collections.namedtuple(
1886 ChangeTuple = collections.namedtuple(
1890 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1887 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1891
1888
1892 FileChangeTuple = collections.namedtuple(
1889 FileChangeTuple = collections.namedtuple(
1893 'FileChangeTuple', ['added', 'modified', 'removed'])
1890 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,1020 +1,1020 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 or_, false,
50 or_, false,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 PullRequest, FileStore)
52 PullRequest, FileStore)
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 class UserTemp(object):
59 class UserTemp(object):
60 def __init__(self, user_id):
60 def __init__(self, user_id):
61 self.user_id = user_id
61 self.user_id = user_id
62
62
63 def __repr__(self):
63 def __repr__(self):
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65
65
66
66
67 class RepoTemp(object):
67 class RepoTemp(object):
68 def __init__(self, repo_id):
68 def __init__(self, repo_id):
69 self.repo_id = repo_id
69 self.repo_id = repo_id
70
70
71 def __repr__(self):
71 def __repr__(self):
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73
73
74
74
75 class SimpleCachedRepoList(object):
75 class SimpleCachedRepoList(object):
76 """
76 """
77 Lighter version of iteration of repos without the scm initialisation,
77 Lighter version of iteration of repos without the scm initialisation,
78 and with cache usage
78 and with cache usage
79 """
79 """
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 self.db_repo_list = db_repo_list
81 self.db_repo_list = db_repo_list
82 self.repos_path = repos_path
82 self.repos_path = repos_path
83 self.order_by = order_by
83 self.order_by = order_by
84 self.reversed = (order_by or '').startswith('-')
84 self.reversed = (order_by or '').startswith('-')
85 if not perm_set:
85 if not perm_set:
86 perm_set = ['repository.read', 'repository.write',
86 perm_set = ['repository.read', 'repository.write',
87 'repository.admin']
87 'repository.admin']
88 self.perm_set = perm_set
88 self.perm_set = perm_set
89
89
90 def __len__(self):
90 def __len__(self):
91 return len(self.db_repo_list)
91 return len(self.db_repo_list)
92
92
93 def __repr__(self):
93 def __repr__(self):
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95
95
96 def __iter__(self):
96 def __iter__(self):
97 for dbr in self.db_repo_list:
97 for dbr in self.db_repo_list:
98 # check permission at this level
98 # check permission at this level
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 dbr.repo_name, 'SimpleCachedRepoList check')
100 dbr.repo_name, 'SimpleCachedRepoList check')
101 if not has_perm:
101 if not has_perm:
102 continue
102 continue
103
103
104 tmp_d = {
104 tmp_d = {
105 'name': dbr.repo_name,
105 'name': dbr.repo_name,
106 'dbrepo': dbr.get_dict(),
106 'dbrepo': dbr.get_dict(),
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 }
108 }
109 yield tmp_d
109 yield tmp_d
110
110
111
111
112 class _PermCheckIterator(object):
112 class _PermCheckIterator(object):
113
113
114 def __init__(
114 def __init__(
115 self, obj_list, obj_attr, perm_set, perm_checker,
115 self, obj_list, obj_attr, perm_set, perm_checker,
116 extra_kwargs=None):
116 extra_kwargs=None):
117 """
117 """
118 Creates iterator from given list of objects, additionally
118 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
119 checking permission for them from perm_set var
120
120
121 :param obj_list: list of db objects
121 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
122 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
123 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
124 :param perm_checker: callable to check permissions against
125 """
125 """
126 self.obj_list = obj_list
126 self.obj_list = obj_list
127 self.obj_attr = obj_attr
127 self.obj_attr = obj_attr
128 self.perm_set = perm_set
128 self.perm_set = perm_set
129 self.perm_checker = perm_checker(*self.perm_set)
129 self.perm_checker = perm_checker(*self.perm_set)
130 self.extra_kwargs = extra_kwargs or {}
130 self.extra_kwargs = extra_kwargs or {}
131
131
132 def __len__(self):
132 def __len__(self):
133 return len(self.obj_list)
133 return len(self.obj_list)
134
134
135 def __repr__(self):
135 def __repr__(self):
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137
137
138 def __iter__(self):
138 def __iter__(self):
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
142 name = db_obj.__dict__.get(self.obj_attr, None)
142 name = db_obj.__dict__.get(self.obj_attr, None)
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 continue
144 continue
145
145
146 yield db_obj
146 yield db_obj
147
147
148
148
149 class RepoList(_PermCheckIterator):
149 class RepoList(_PermCheckIterator):
150
150
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 if not perm_set:
152 if not perm_set:
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
154
154
155 super(RepoList, self).__init__(
155 super(RepoList, self).__init__(
156 obj_list=db_repo_list,
156 obj_list=db_repo_list,
157 obj_attr='_repo_name', perm_set=perm_set,
157 obj_attr='_repo_name', perm_set=perm_set,
158 perm_checker=HasRepoPermissionAny,
158 perm_checker=HasRepoPermissionAny,
159 extra_kwargs=extra_kwargs)
159 extra_kwargs=extra_kwargs)
160
160
161
161
162 class RepoGroupList(_PermCheckIterator):
162 class RepoGroupList(_PermCheckIterator):
163
163
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 if not perm_set:
165 if not perm_set:
166 perm_set = ['group.read', 'group.write', 'group.admin']
166 perm_set = ['group.read', 'group.write', 'group.admin']
167
167
168 super(RepoGroupList, self).__init__(
168 super(RepoGroupList, self).__init__(
169 obj_list=db_repo_group_list,
169 obj_list=db_repo_group_list,
170 obj_attr='_group_name', perm_set=perm_set,
170 obj_attr='_group_name', perm_set=perm_set,
171 perm_checker=HasRepoGroupPermissionAny,
171 perm_checker=HasRepoGroupPermissionAny,
172 extra_kwargs=extra_kwargs)
172 extra_kwargs=extra_kwargs)
173
173
174
174
175 class UserGroupList(_PermCheckIterator):
175 class UserGroupList(_PermCheckIterator):
176
176
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 if not perm_set:
178 if not perm_set:
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
180
181 super(UserGroupList, self).__init__(
181 super(UserGroupList, self).__init__(
182 obj_list=db_user_group_list,
182 obj_list=db_user_group_list,
183 obj_attr='users_group_name', perm_set=perm_set,
183 obj_attr='users_group_name', perm_set=perm_set,
184 perm_checker=HasUserGroupPermissionAny,
184 perm_checker=HasUserGroupPermissionAny,
185 extra_kwargs=extra_kwargs)
185 extra_kwargs=extra_kwargs)
186
186
187
187
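# Hedged usage sketch of the permission-filtering iterators defined above; the
# query is an example and assumes a configured database session. Only
# repositories the calling user can at least read are yielded.
all_repos = Repository.query().all()
readable = [repo.repo_name for repo in RepoList(all_repos)]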
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
193 @LazyProperty
193 @LazyProperty
194 def repos_path(self):
194 def repos_path(self):
195 """
195 """
196 Gets the repositories root path from database
196 Gets the repositories root path from database
197 """
197 """
198
198
199 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
201
201
202 def repo_scan(self, repos_path=None):
202 def repo_scan(self, repos_path=None):
203 """
203 """
204 Listing of repositories in given path. This path should not be a
204 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
205 repository itself. Return a dictionary of repository objects
206
206
207 :param repos_path: path to directory containing repositories
207 :param repos_path: path to directory containing repositories
208 """
208 """
209
209
210 if repos_path is None:
210 if repos_path is None:
211 repos_path = self.repos_path
211 repos_path = self.repos_path
212
212
213 log.info('scanning for repositories in %s', repos_path)
213 log.info('scanning for repositories in %s', repos_path)
214
214
215 config = make_db_config()
215 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
216 config.set('extensions', 'largefiles', '')
217 repos = {}
217 repos = {}
218
218
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # name needs to be decomposed and put back together using the /
220 # name needs to be decomposed and put back together using the /
221 # since this is the internal storage separator for rhodecode
221 # since this is the internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
222 name = Repository.normalize_repo_name(name)
223
223
224 try:
224 try:
225 if name in repos:
225 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
226 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
227 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
228 elif path[0] in rhodecode.BACKENDS:
229 backend = get_backend(path[0])
229 backend = get_backend(path[0])
230 repos[name] = backend(path[1], config=config,
230 repos[name] = backend(path[1], config=config,
231 with_wire={"cache": False})
231 with_wire={"cache": False})
232 except OSError:
232 except OSError:
233 continue
233 continue
234 log.debug('found %s paths with repositories', len(repos))
234 log.debug('found %s paths with repositories', len(repos))
235 return repos
235 return repos
236
236
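# Minimal usage sketch of repo_scan(), assuming an initialized RhodeCode
# environment with a configured repository root.
scm = ScmModel()
found = scm.repo_scan()  # {normalized_name: backend_instance, ...}
log.debug('repositories found on disk: %s', sorted(found))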
237 def get_repos(self, all_repos=None, sort_key=None):
237 def get_repos(self, all_repos=None, sort_key=None):
238 """
238 """
239 Get all repositories from the db and for each repo create its
239 Get all repositories from the db and for each repo create its
240 backend instance and fill that backend with information from the database
240 backend instance and fill that backend with information from the database
241
241
242 :param all_repos: list of repository names as strings
242 :param all_repos: list of repository names as strings
243 pass a specific list to restrict the result, useful for filtering
243 pass a specific list to restrict the result, useful for filtering
244
244
245 :param sort_key: initial sorting of repositories
245 :param sort_key: initial sorting of repositories
246 """
246 """
247 if all_repos is None:
247 if all_repos is None:
248 all_repos = self.sa.query(Repository)\
248 all_repos = self.sa.query(Repository)\
249 .filter(Repository.group_id == None)\
249 .filter(Repository.group_id == None)\
250 .order_by(func.lower(Repository.repo_name)).all()
250 .order_by(func.lower(Repository.repo_name)).all()
251 repo_iter = SimpleCachedRepoList(
251 repo_iter = SimpleCachedRepoList(
252 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 all_repos, repos_path=self.repos_path, order_by=sort_key)
253 return repo_iter
253 return repo_iter
254
254
255 def get_repo_groups(self, all_groups=None):
255 def get_repo_groups(self, all_groups=None):
256 if all_groups is None:
256 if all_groups is None:
257 all_groups = RepoGroup.query()\
257 all_groups = RepoGroup.query()\
258 .filter(RepoGroup.group_parent_id == None).all()
258 .filter(RepoGroup.group_parent_id == None).all()
259 return [x for x in RepoGroupList(all_groups)]
259 return [x for x in RepoGroupList(all_groups)]
260
260
261 def mark_for_invalidation(self, repo_name, delete=False):
261 def mark_for_invalidation(self, repo_name, delete=False):
262 """
262 """
263 Mark caches of this repo invalid in the database. `delete` flag
263 Mark caches of this repo invalid in the database. `delete` flag
264 removes the cache entries
264 removes the cache entries
265
265
266 :param repo_name: the repo_name for which caches should be marked
266 :param repo_name: the repo_name for which caches should be marked
267 invalid, or deleted
267 invalid, or deleted
268 :param delete: delete the entry keys instead of setting bool
268 :param delete: delete the entry keys instead of setting bool
269 flag on them, and also purge caches used by the dogpile
269 flag on them, and also purge caches used by the dogpile
270 """
270 """
271 repo = Repository.get_by_repo_name(repo_name)
271 repo = Repository.get_by_repo_name(repo_name)
272
272
273 if repo:
273 if repo:
274 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
274 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
275 repo_id=repo.repo_id)
275 repo_id=repo.repo_id)
276 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
276 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
277
277
278 repo_id = repo.repo_id
278 repo_id = repo.repo_id
279 config = repo._config
279 config = repo._config
280 config.set('extensions', 'largefiles', '')
280 config.set('extensions', 'largefiles', '')
281 repo.update_commit_cache(config=config, cs_cache=None)
281 repo.update_commit_cache(config=config, cs_cache=None)
282 if delete:
282 if delete:
283 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
283 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
284 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
285
285
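# Hedged example of invalidating a repository's caches after an out-of-band
# change; the repository name is hypothetical.
ScmModel().mark_for_invalidation('my-group/my-repo', delete=True)
# delete=True removes the cache keys and purges the dogpile cache namespace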
286 def toggle_following_repo(self, follow_repo_id, user_id):
286 def toggle_following_repo(self, follow_repo_id, user_id):
287
287
288 f = self.sa.query(UserFollowing)\
288 f = self.sa.query(UserFollowing)\
289 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
290 .filter(UserFollowing.user_id == user_id).scalar()
290 .filter(UserFollowing.user_id == user_id).scalar()
291
291
292 if f is not None:
292 if f is not None:
293 try:
293 try:
294 self.sa.delete(f)
294 self.sa.delete(f)
295 return
295 return
296 except Exception:
296 except Exception:
297 log.error(traceback.format_exc())
297 log.error(traceback.format_exc())
298 raise
298 raise
299
299
300 try:
300 try:
301 f = UserFollowing()
301 f = UserFollowing()
302 f.user_id = user_id
302 f.user_id = user_id
303 f.follows_repo_id = follow_repo_id
303 f.follows_repo_id = follow_repo_id
304 self.sa.add(f)
304 self.sa.add(f)
305 except Exception:
305 except Exception:
306 log.error(traceback.format_exc())
306 log.error(traceback.format_exc())
307 raise
307 raise
308
308
309 def toggle_following_user(self, follow_user_id, user_id):
309 def toggle_following_user(self, follow_user_id, user_id):
310 f = self.sa.query(UserFollowing)\
310 f = self.sa.query(UserFollowing)\
311 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 .filter(UserFollowing.follows_user_id == follow_user_id)\
312 .filter(UserFollowing.user_id == user_id).scalar()
312 .filter(UserFollowing.user_id == user_id).scalar()
313
313
314 if f is not None:
314 if f is not None:
315 try:
315 try:
316 self.sa.delete(f)
316 self.sa.delete(f)
317 return
317 return
318 except Exception:
318 except Exception:
319 log.error(traceback.format_exc())
319 log.error(traceback.format_exc())
320 raise
320 raise
321
321
322 try:
322 try:
323 f = UserFollowing()
323 f = UserFollowing()
324 f.user_id = user_id
324 f.user_id = user_id
325 f.follows_user_id = follow_user_id
325 f.follows_user_id = follow_user_id
326 self.sa.add(f)
326 self.sa.add(f)
327 except Exception:
327 except Exception:
328 log.error(traceback.format_exc())
328 log.error(traceback.format_exc())
329 raise
329 raise
330
330
331 def is_following_repo(self, repo_name, user_id, cache=False):
331 def is_following_repo(self, repo_name, user_id, cache=False):
332 r = self.sa.query(Repository)\
332 r = self.sa.query(Repository)\
333 .filter(Repository.repo_name == repo_name).scalar()
333 .filter(Repository.repo_name == repo_name).scalar()
334
334
335 f = self.sa.query(UserFollowing)\
335 f = self.sa.query(UserFollowing)\
336 .filter(UserFollowing.follows_repository == r)\
336 .filter(UserFollowing.follows_repository == r)\
337 .filter(UserFollowing.user_id == user_id).scalar()
337 .filter(UserFollowing.user_id == user_id).scalar()
338
338
339 return f is not None
339 return f is not None
340
340
341 def is_following_user(self, username, user_id, cache=False):
341 def is_following_user(self, username, user_id, cache=False):
342 u = User.get_by_username(username)
342 u = User.get_by_username(username)
343
343
344 f = self.sa.query(UserFollowing)\
344 f = self.sa.query(UserFollowing)\
345 .filter(UserFollowing.follows_user == u)\
345 .filter(UserFollowing.follows_user == u)\
346 .filter(UserFollowing.user_id == user_id).scalar()
346 .filter(UserFollowing.user_id == user_id).scalar()
347
347
348 return f is not None
348 return f is not None
349
349
350 def get_followers(self, repo):
350 def get_followers(self, repo):
351 repo = self._get_repo(repo)
351 repo = self._get_repo(repo)
352
352
353 return self.sa.query(UserFollowing)\
353 return self.sa.query(UserFollowing)\
354 .filter(UserFollowing.follows_repository == repo).count()
354 .filter(UserFollowing.follows_repository == repo).count()
355
355
356 def get_forks(self, repo):
356 def get_forks(self, repo):
357 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
358 return self.sa.query(Repository)\
358 return self.sa.query(Repository)\
359 .filter(Repository.fork == repo).count()
359 .filter(Repository.fork == repo).count()
360
360
361 def get_pull_requests(self, repo):
361 def get_pull_requests(self, repo):
362 repo = self._get_repo(repo)
362 repo = self._get_repo(repo)
363 return self.sa.query(PullRequest)\
363 return self.sa.query(PullRequest)\
364 .filter(PullRequest.target_repo == repo)\
364 .filter(PullRequest.target_repo == repo)\
365 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
366
366
367 def get_artifacts(self, repo):
367 def get_artifacts(self, repo):
368 repo = self._get_repo(repo)
368 repo = self._get_repo(repo)
369 return self.sa.query(FileStore)\
369 return self.sa.query(FileStore)\
370 .filter(FileStore.repo == repo)\
370 .filter(FileStore.repo == repo)\
371 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
371 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
372
372
373 def mark_as_fork(self, repo, fork, user):
373 def mark_as_fork(self, repo, fork, user):
374 repo = self._get_repo(repo)
374 repo = self._get_repo(repo)
375 fork = self._get_repo(fork)
375 fork = self._get_repo(fork)
376 if fork and repo.repo_id == fork.repo_id:
376 if fork and repo.repo_id == fork.repo_id:
377 raise Exception("Cannot set repository as fork of itself")
377 raise Exception("Cannot set repository as fork of itself")
378
378
379 if fork and repo.repo_type != fork.repo_type:
379 if fork and repo.repo_type != fork.repo_type:
380 raise RepositoryError(
380 raise RepositoryError(
381 "Cannot set repository as fork of repository with other type")
381 "Cannot set repository as fork of repository with other type")
382
382
383 repo.fork = fork
383 repo.fork = fork
384 self.sa.add(repo)
384 self.sa.add(repo)
385 return repo
385 return repo
386
386
387 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
387 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
388 dbrepo = self._get_repo(repo)
388 dbrepo = self._get_repo(repo)
389 remote_uri = remote_uri or dbrepo.clone_uri
389 remote_uri = remote_uri or dbrepo.clone_uri
390 if not remote_uri:
390 if not remote_uri:
391 raise Exception("This repository doesn't have a clone uri")
391 raise Exception("This repository doesn't have a clone uri")
392
392
393 repo = dbrepo.scm_instance(cache=False)
393 repo = dbrepo.scm_instance(cache=False)
394 repo.config.clear_section('hooks')
394 repo.config.clear_section('hooks')
395
395
396 try:
396 try:
397 # NOTE(marcink): add extra validation so we skip invalid urls
397 # NOTE(marcink): add extra validation so we skip invalid urls
398 # this is because these tasks can be executed via the scheduler without
398 # this is because these tasks can be executed via the scheduler without
399 # proper validation of remote_uri
399 # proper validation of remote_uri
400 if validate_uri:
400 if validate_uri:
401 config = make_db_config(clear_session=False)
401 config = make_db_config(clear_session=False)
402 url_validator(remote_uri, dbrepo.repo_type, config)
402 url_validator(remote_uri, dbrepo.repo_type, config)
403 except InvalidCloneUrl:
403 except InvalidCloneUrl:
404 raise
404 raise
405
405
406 repo_name = dbrepo.repo_name
406 repo_name = dbrepo.repo_name
407 try:
407 try:
408 # TODO: we need to make sure those operations call proper hooks !
408 # TODO: we need to make sure those operations call proper hooks !
409 repo.fetch(remote_uri)
409 repo.fetch(remote_uri)
410
410
411 self.mark_for_invalidation(repo_name)
411 self.mark_for_invalidation(repo_name)
412 except Exception:
412 except Exception:
413 log.error(traceback.format_exc())
413 log.error(traceback.format_exc())
414 raise
414 raise
415
415
416 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
416 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
417 dbrepo = self._get_repo(repo)
417 dbrepo = self._get_repo(repo)
418 remote_uri = remote_uri or dbrepo.push_uri
418 remote_uri = remote_uri or dbrepo.push_uri
419 if not remote_uri:
419 if not remote_uri:
420 raise Exception("This repository doesn't have a push uri")
420 raise Exception("This repository doesn't have a push uri")
421
421
422 repo = dbrepo.scm_instance(cache=False)
422 repo = dbrepo.scm_instance(cache=False)
423 repo.config.clear_section('hooks')
423 repo.config.clear_section('hooks')
424
424
425 try:
425 try:
426 # NOTE(marcink): add extra validation so we skip invalid urls
426 # NOTE(marcink): add extra validation so we skip invalid urls
427 # this is because these tasks can be executed via the scheduler without
427 # this is because these tasks can be executed via the scheduler without
428 # proper validation of remote_uri
428 # proper validation of remote_uri
429 if validate_uri:
429 if validate_uri:
430 config = make_db_config(clear_session=False)
430 config = make_db_config(clear_session=False)
431 url_validator(remote_uri, dbrepo.repo_type, config)
431 url_validator(remote_uri, dbrepo.repo_type, config)
432 except InvalidCloneUrl:
432 except InvalidCloneUrl:
433 raise
433 raise
434
434
435 try:
435 try:
436 repo.push(remote_uri)
436 repo.push(remote_uri)
437 except Exception:
437 except Exception:
438 log.error(traceback.format_exc())
438 log.error(traceback.format_exc())
439 raise
439 raise
440
440
441 def commit_change(self, repo, repo_name, commit, user, author, message,
441 def commit_change(self, repo, repo_name, commit, user, author, message,
442 content, f_path):
442 content, f_path):
443 """
443 """
444 Commits changes
444 Commits changes
445
445
446 :param repo: SCM instance
446 :param repo: SCM instance
447
447
448 """
448 """
449 user = self._get_user(user)
449 user = self._get_user(user)
450
450
451 # decoding here ensures that we have properly encoded values;
451 # decoding here ensures that we have properly encoded values;
452 # in any other case this will throw exceptions and deny the commit
452 # in any other case this will throw exceptions and deny the commit
453 content = safe_str(content)
453 content = safe_str(content)
454 path = safe_str(f_path)
454 path = safe_str(f_path)
455 # message and author need to be unicode
455 # message and author need to be unicode
456 # the proper backend should then translate that into the required type
456 # the proper backend should then translate that into the required type
457 message = safe_unicode(message)
457 message = safe_unicode(message)
458 author = safe_unicode(author)
458 author = safe_unicode(author)
459 imc = repo.in_memory_commit
459 imc = repo.in_memory_commit
460 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
460 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
461 try:
461 try:
462 # TODO: handle pre-push action !
462 # TODO: handle pre-push action !
463 tip = imc.commit(
463 tip = imc.commit(
464 message=message, author=author, parents=[commit],
464 message=message, author=author, parents=[commit],
465 branch=commit.branch)
465 branch=commit.branch)
466 except Exception as e:
466 except Exception as e:
467 log.error(traceback.format_exc())
467 log.error(traceback.format_exc())
468 raise IMCCommitError(str(e))
468 raise IMCCommitError(str(e))
469 finally:
469 finally:
470 # always clear caches; even if the commit fails we want a fresh object
470 # always clear caches; even if the commit fails we want a fresh object
471 self.mark_for_invalidation(repo_name)
471 self.mark_for_invalidation(repo_name)
472
472
473 # We trigger the post-push action
473 # We trigger the post-push action
474 hooks_utils.trigger_post_push_hook(
474 hooks_utils.trigger_post_push_hook(
475 username=user.username, action='push_local', hook_type='post_push',
475 username=user.username, action='push_local', hook_type='post_push',
476 repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
476 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
477 return tip
477 return tip
478
478
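# Hedged usage sketch of commit_change(); repo is a vcs backend instance and
# commit its current tip, both assumed to be obtained elsewhere, and the file
# values below are examples.
tip = ScmModel().commit_change(
    repo=repo, repo_name='my-repo', commit=commit, user=user,
    author=u'Example Author <author@example.com>', message=u'Update README',
    content='new content\n', f_path='README.rst')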
479 def _sanitize_path(self, f_path):
479 def _sanitize_path(self, f_path):
480 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
480 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
481 raise NonRelativePathError('%s is not a relative path' % f_path)
481 raise NonRelativePathError('%s is not a relative path' % f_path)
482 if f_path:
482 if f_path:
483 f_path = os.path.normpath(f_path)
483 f_path = os.path.normpath(f_path)
484 return f_path
484 return f_path
485
485
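# Behaviour sketch of _sanitize_path(); the path values are examples.
sanitize = ScmModel()._sanitize_path
assert sanitize('docs//index.rst') == 'docs/index.rst'  # relative path, normalized
for bad in ('/etc/passwd', '../secret', './hidden'):
    try:
        sanitize(bad)
    except NonRelativePathError:
        pass  # absolute and traversal-style paths are rejected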
486 def get_dirnode_metadata(self, request, commit, dir_node):
486 def get_dirnode_metadata(self, request, commit, dir_node):
487 if not dir_node.is_dir():
487 if not dir_node.is_dir():
488 return []
488 return []
489
489
490 data = []
490 data = []
491 for node in dir_node:
491 for node in dir_node:
492 if not node.is_file():
492 if not node.is_file():
493 # we skip file-nodes
493 # we skip file-nodes
494 continue
494 continue
495
495
496 last_commit = node.last_commit
496 last_commit = node.last_commit
497 last_commit_date = last_commit.date
497 last_commit_date = last_commit.date
498 data.append({
498 data.append({
499 'name': node.name,
499 'name': node.name,
500 'size': h.format_byte_size_binary(node.size),
500 'size': h.format_byte_size_binary(node.size),
501 'modified_at': h.format_date(last_commit_date),
501 'modified_at': h.format_date(last_commit_date),
502 'modified_ts': last_commit_date.isoformat(),
502 'modified_ts': last_commit_date.isoformat(),
503 'revision': last_commit.revision,
503 'revision': last_commit.revision,
504 'short_id': last_commit.short_id,
504 'short_id': last_commit.short_id,
505 'message': h.escape(last_commit.message),
505 'message': h.escape(last_commit.message),
506 'author': h.escape(last_commit.author),
506 'author': h.escape(last_commit.author),
507 'user_profile': h.gravatar_with_user(
507 'user_profile': h.gravatar_with_user(
508 request, last_commit.author),
508 request, last_commit.author),
509 })
509 })
510
510
511 return data
511 return data
512
512
513 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
513 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
514 extended_info=False, content=False, max_file_bytes=None):
514 extended_info=False, content=False, max_file_bytes=None):
515 """
515 """
516 recursive walk in root dir and return a set of all paths in that dir
516 recursive walk in root dir and return a set of all paths in that dir
517 based on repository walk function
517 based on repository walk function
518
518
519 :param repo_name: name of repository
519 :param repo_name: name of repository
520 :param commit_id: commit id for which to list nodes
520 :param commit_id: commit id for which to list nodes
521 :param root_path: root path to list
521 :param root_path: root path to list
522 :param flat: return as a list, if False returns a dict with description
522 :param flat: return as a list, if False returns a dict with description
523 :param extended_info: show additional info such as md5, binary, size etc
523 :param extended_info: show additional info such as md5, binary, size etc
524 :param content: add nodes content to the return data
524 :param content: add nodes content to the return data
525 :param max_file_bytes: will not return file contents over this limit
525 :param max_file_bytes: will not return file contents over this limit
526
526
527 """
527 """
528 _files = list()
528 _files = list()
529 _dirs = list()
529 _dirs = list()
530 try:
530 try:
531 _repo = self._get_repo(repo_name)
531 _repo = self._get_repo(repo_name)
532 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
532 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
533 root_path = root_path.lstrip('/')
533 root_path = root_path.lstrip('/')
534 for __, dirs, files in commit.walk(root_path):
534 for __, dirs, files in commit.walk(root_path):
535
535
536 for f in files:
536 for f in files:
537 _content = None
537 _content = None
538 _data = f_name = f.unicode_path
538 _data = f_name = f.unicode_path
539
539
540 if not flat:
540 if not flat:
541 _data = {
541 _data = {
542 "name": h.escape(f_name),
542 "name": h.escape(f_name),
543 "type": "file",
543 "type": "file",
544 }
544 }
545 if extended_info:
545 if extended_info:
546 _data.update({
546 _data.update({
547 "md5": f.md5,
547 "md5": f.md5,
548 "binary": f.is_binary,
548 "binary": f.is_binary,
549 "size": f.size,
549 "size": f.size,
550 "extension": f.extension,
550 "extension": f.extension,
551 "mimetype": f.mimetype,
551 "mimetype": f.mimetype,
552 "lines": f.lines()[0]
552 "lines": f.lines()[0]
553 })
553 })
554
554
555 if content:
555 if content:
556 over_size_limit = (max_file_bytes is not None
556 over_size_limit = (max_file_bytes is not None
557 and f.size > max_file_bytes)
557 and f.size > max_file_bytes)
558 full_content = None
558 full_content = None
559 if not f.is_binary and not over_size_limit:
559 if not f.is_binary and not over_size_limit:
560 full_content = safe_str(f.content)
560 full_content = safe_str(f.content)
561
561
562 _data.update({
562 _data.update({
563 "content": full_content,
563 "content": full_content,
564 })
564 })
565 _files.append(_data)
565 _files.append(_data)
566
566
567 for d in dirs:
567 for d in dirs:
568 _data = d_name = d.unicode_path
568 _data = d_name = d.unicode_path
569 if not flat:
569 if not flat:
570 _data = {
570 _data = {
571 "name": h.escape(d_name),
571 "name": h.escape(d_name),
572 "type": "dir",
572 "type": "dir",
573 }
573 }
574 if extended_info:
574 if extended_info:
575 _data.update({
575 _data.update({
576 "md5": None,
576 "md5": None,
577 "binary": None,
577 "binary": None,
578 "size": None,
578 "size": None,
579 "extension": None,
579 "extension": None,
580 })
580 })
581 if content:
581 if content:
582 _data.update({
582 _data.update({
583 "content": None
583 "content": None
584 })
584 })
585 _dirs.append(_data)
585 _dirs.append(_data)
586 except RepositoryError:
586 except RepositoryError:
587 log.exception("Exception in get_nodes")
587 log.exception("Exception in get_nodes")
588 raise
588 raise
589
589
590 return _dirs, _files
590 return _dirs, _files
591
591
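A minimal usage sketch for get_nodes(), not part of this changeset: it assumes the surrounding class is ScmModel from rhodecode.model.scm, and 'my-repo' plus the commit id are placeholders for an existing repository and commit.

from rhodecode.model.scm import ScmModel  # assumed module path

# Hypothetical repository/commit; flat=False returns dicts instead of bare paths.
dirs, files = ScmModel().get_nodes(
    'my-repo', commit_id='tip', root_path='/',
    flat=False,
    extended_info=True,          # adds md5, binary flag, size, extension, mimetype
    content=False,               # leave file contents out of the response
    max_file_bytes=1024 * 1024)

for node in files:
    print('%s %s' % (node['name'], node.get('size')))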
592 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
592 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
593 """
593 """
594 Generate file and directory entries for the quick filter in the files view
594 Generate file and directory entries for the quick filter in the files view
595 """
595 """
596
596
597 _files = list()
597 _files = list()
598 _dirs = list()
598 _dirs = list()
599 try:
599 try:
600 _repo = self._get_repo(repo_name)
600 _repo = self._get_repo(repo_name)
601 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
601 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
602 root_path = root_path.lstrip('/')
602 root_path = root_path.lstrip('/')
603 for __, dirs, files in commit.walk(root_path):
603 for __, dirs, files in commit.walk(root_path):
604
604
605 for f in files:
605 for f in files:
606
606
607 _data = {
607 _data = {
608 "name": h.escape(f.unicode_path),
608 "name": h.escape(f.unicode_path),
609 "type": "file",
609 "type": "file",
610 }
610 }
611
611
612 _files.append(_data)
612 _files.append(_data)
613
613
614 for d in dirs:
614 for d in dirs:
615
615
616 _data = {
616 _data = {
617 "name": h.escape(d.unicode_path),
617 "name": h.escape(d.unicode_path),
618 "type": "dir",
618 "type": "dir",
619 }
619 }
620
620
621 _dirs.append(_data)
621 _dirs.append(_data)
622 except RepositoryError:
622 except RepositoryError:
623 log.exception("Exception in get_quick_filter_nodes")
623 log.exception("Exception in get_quick_filter_nodes")
624 raise
624 raise
625
625
626 return _dirs, _files
626 return _dirs, _files
627
627
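For comparison, a hedged sketch of the quick-filter variant above (same placeholder repo and commit id): it returns only name/type pairs, which keeps it cheap enough for the files-view filter box.

from rhodecode.model.scm import ScmModel  # assumed module path

dirs, files = ScmModel().get_quick_filter_nodes('my-repo', commit_id='tip')
# each entry is just {"name": ..., "type": "file" | "dir"}
filter_entries = [node['name'] for node in dirs + files]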
628 def get_node(self, repo_name, commit_id, file_path,
628 def get_node(self, repo_name, commit_id, file_path,
629 extended_info=False, content=False, max_file_bytes=None, cache=True):
629 extended_info=False, content=False, max_file_bytes=None, cache=True):
630 """
630 """
631 retrieve a single node from a commit
631 retrieve a single node from a commit
632 """
632 """
633 try:
633 try:
634
634
635 _repo = self._get_repo(repo_name)
635 _repo = self._get_repo(repo_name)
636 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
636 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
637
637
638 file_node = commit.get_node(file_path)
638 file_node = commit.get_node(file_path)
639 if file_node.is_dir():
639 if file_node.is_dir():
640 raise RepositoryError('The given path is a directory')
640 raise RepositoryError('The given path is a directory')
641
641
642 _content = None
642 _content = None
643 f_name = file_node.unicode_path
643 f_name = file_node.unicode_path
644
644
645 file_data = {
645 file_data = {
646 "name": h.escape(f_name),
646 "name": h.escape(f_name),
647 "type": "file",
647 "type": "file",
648 }
648 }
649
649
650 if extended_info:
650 if extended_info:
651 file_data.update({
651 file_data.update({
652 "extension": file_node.extension,
652 "extension": file_node.extension,
653 "mimetype": file_node.mimetype,
653 "mimetype": file_node.mimetype,
654 })
654 })
655
655
656 if cache:
656 if cache:
657 md5 = file_node.md5
657 md5 = file_node.md5
658 is_binary = file_node.is_binary
658 is_binary = file_node.is_binary
659 size = file_node.size
659 size = file_node.size
660 else:
660 else:
661 is_binary, md5, size, _content = file_node.metadata_uncached()
661 is_binary, md5, size, _content = file_node.metadata_uncached()
662
662
663 file_data.update({
663 file_data.update({
664 "md5": md5,
664 "md5": md5,
665 "binary": is_binary,
665 "binary": is_binary,
666 "size": size,
666 "size": size,
667 })
667 })
668
668
669 if content and cache:
669 if content and cache:
670 # get content + cache
670 # get content + cache
671 size = file_node.size
671 size = file_node.size
672 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
672 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
673 full_content = None
673 full_content = None
674 all_lines = 0
674 all_lines = 0
675 if not file_node.is_binary and not over_size_limit:
675 if not file_node.is_binary and not over_size_limit:
676 full_content = safe_unicode(file_node.content)
676 full_content = safe_unicode(file_node.content)
677 all_lines, empty_lines = file_node.count_lines(full_content)
677 all_lines, empty_lines = file_node.count_lines(full_content)
678
678
679 file_data.update({
679 file_data.update({
680 "content": full_content,
680 "content": full_content,
681 "lines": all_lines
681 "lines": all_lines
682 })
682 })
683 elif content:
683 elif content:
684 # get content *without* cache
684 # get content *without* cache
685 if _content is None:
685 if _content is None:
686 is_binary, md5, size, _content = file_node.metadata_uncached()
686 is_binary, md5, size, _content = file_node.metadata_uncached()
687
687
688 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
688 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
689 full_content = None
689 full_content = None
690 all_lines = 0
690 all_lines = 0
691 if not is_binary and not over_size_limit:
691 if not is_binary and not over_size_limit:
692 full_content = safe_unicode(_content)
692 full_content = safe_unicode(_content)
693 all_lines, empty_lines = file_node.count_lines(full_content)
693 all_lines, empty_lines = file_node.count_lines(full_content)
694
694
695 file_data.update({
695 file_data.update({
696 "content": full_content,
696 "content": full_content,
697 "lines": all_lines
697 "lines": all_lines
698 })
698 })
699
699
700 except RepositoryError:
700 except RepositoryError:
701 log.exception("Exception in get_node")
701 log.exception("Exception in get_node")
702 raise
702 raise
703
703
704 return file_data
704 return file_data
705
705
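A hedged sketch of fetching one file with get_node(), not part of the diff; repository name, commit id and path are placeholders. Passing cache=False takes the metadata_uncached() branch above, so md5, size and content come straight from the vcs backend rather than the node cache.

from rhodecode.model.scm import ScmModel  # assumed module path

file_data = ScmModel().get_node(
    'my-repo', commit_id='tip', file_path='README.rst',
    extended_info=True, content=True,
    max_file_bytes=256 * 1024, cache=False)

if file_data['binary']:
    print('binary file of %s bytes' % file_data['size'])
else:
    print('%s lines' % file_data['lines'])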
706 def get_fts_data(self, repo_name, commit_id, root_path='/'):
706 def get_fts_data(self, repo_name, commit_id, root_path='/'):
707 """
707 """
708 Fetch node tree for usage in full text search
708 Fetch node tree for usage in full text search
709 """
709 """
710
710
711 tree_info = list()
711 tree_info = list()
712
712
713 try:
713 try:
714 _repo = self._get_repo(repo_name)
714 _repo = self._get_repo(repo_name)
715 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
715 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
716 root_path = root_path.lstrip('/')
716 root_path = root_path.lstrip('/')
717 for __, dirs, files in commit.walk(root_path):
717 for __, dirs, files in commit.walk(root_path):
718
718
719 for f in files:
719 for f in files:
720 is_binary, md5, size, _content = f.metadata_uncached()
720 is_binary, md5, size, _content = f.metadata_uncached()
721 _data = {
721 _data = {
722 "name": f.unicode_path,
722 "name": f.unicode_path,
723 "md5": md5,
723 "md5": md5,
724 "extension": f.extension,
724 "extension": f.extension,
725 "binary": is_binary,
725 "binary": is_binary,
726 "size": size
726 "size": size
727 }
727 }
728
728
729 tree_info.append(_data)
729 tree_info.append(_data)
730
730
731 except RepositoryError:
731 except RepositoryError:
732 log.exception("Exception in get_fts_data")
732 log.exception("Exception in get_fts_data")
733 raise
733 raise
734
734
735 return tree_info
735 return tree_info
736
736
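A sketch of how the full-text-search helper above might be consumed (placeholder names, assumed ScmModel location): every entry carries the uncached md5/size/binary metadata, which is what an external indexer typically keys on.

from rhodecode.model.scm import ScmModel  # assumed module path

tree_info = ScmModel().get_fts_data('my-repo', commit_id='tip')
# skip binaries and very large files before handing paths to the indexer
to_index = [n for n in tree_info
            if not n['binary'] and n['size'] < 1024 * 1024]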
737 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
737 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
738 author=None, trigger_push_hook=True):
738 author=None, trigger_push_hook=True):
739 """
739 """
740 Commits the given nodes into repo
740 Commits the given nodes into repo
741
741
742 :param user: RhodeCode User object or user_id, the committer
742 :param user: RhodeCode User object or user_id, the committer
743 :param repo: RhodeCode Repository object
743 :param repo: RhodeCode Repository object
744 :param message: commit message
744 :param message: commit message
745 :param nodes: mapping {filename:{'content':content},...}
745 :param nodes: mapping {filename:{'content':content},...}
746 :param parent_commit: parent commit; can be empty, then it's the
746 :param parent_commit: parent commit; can be empty, then it's the
747 initial commit
747 initial commit
748 :param author: author of the commit; can be different than the committer,
748 :param author: author of the commit; can be different than the committer,
749 only for git
749 only for git
750 :param trigger_push_hook: trigger push hooks
750 :param trigger_push_hook: trigger push hooks
751
751
752 :returns: newly committed commit
752 :returns: newly committed commit
753 """
753 """
754
754
755 user = self._get_user(user)
755 user = self._get_user(user)
756 scm_instance = repo.scm_instance(cache=False)
756 scm_instance = repo.scm_instance(cache=False)
757
757
758 processed_nodes = []
758 processed_nodes = []
759 for f_path in nodes:
759 for f_path in nodes:
760 f_path = self._sanitize_path(f_path)
760 f_path = self._sanitize_path(f_path)
761 content = nodes[f_path]['content']
761 content = nodes[f_path]['content']
762 f_path = safe_str(f_path)
762 f_path = safe_str(f_path)
763 # decoding here ensures that we have properly encoded values;
763 # decoding here ensures that we have properly encoded values;
764 # in any other case this will raise an exception and deny the commit
764 # in any other case this will raise an exception and deny the commit
765 if isinstance(content, (basestring,)):
765 if isinstance(content, (basestring,)):
766 content = safe_str(content)
766 content = safe_str(content)
767 elif isinstance(content, (file, cStringIO.OutputType,)):
767 elif isinstance(content, (file, cStringIO.OutputType,)):
768 content = content.read()
768 content = content.read()
769 else:
769 else:
770 raise Exception('Content is of unrecognized type %s' % (
770 raise Exception('Content is of unrecognized type %s' % (
771 type(content)
771 type(content)
772 ))
772 ))
773 processed_nodes.append((f_path, content))
773 processed_nodes.append((f_path, content))
774
774
775 message = safe_unicode(message)
775 message = safe_unicode(message)
776 commiter = user.full_contact
776 commiter = user.full_contact
777 author = safe_unicode(author) if author else commiter
777 author = safe_unicode(author) if author else commiter
778
778
779 imc = scm_instance.in_memory_commit
779 imc = scm_instance.in_memory_commit
780
780
781 if not parent_commit:
781 if not parent_commit:
782 parent_commit = EmptyCommit(alias=scm_instance.alias)
782 parent_commit = EmptyCommit(alias=scm_instance.alias)
783
783
784 if isinstance(parent_commit, EmptyCommit):
784 if isinstance(parent_commit, EmptyCommit):
785 # EmptyCommit means we're editing an empty repository
785 # EmptyCommit means we're editing an empty repository
786 parents = None
786 parents = None
787 else:
787 else:
788 parents = [parent_commit]
788 parents = [parent_commit]
789 # add multiple nodes
789 # add multiple nodes
790 for path, content in processed_nodes:
790 for path, content in processed_nodes:
791 imc.add(FileNode(path, content=content))
791 imc.add(FileNode(path, content=content))
792 # TODO: handle pre push scenario
792 # TODO: handle pre push scenario
793 tip = imc.commit(message=message,
793 tip = imc.commit(message=message,
794 author=author,
794 author=author,
795 parents=parents,
795 parents=parents,
796 branch=parent_commit.branch)
796 branch=parent_commit.branch)
797
797
798 self.mark_for_invalidation(repo.repo_name)
798 self.mark_for_invalidation(repo.repo_name)
799 if trigger_push_hook:
799 if trigger_push_hook:
800 hooks_utils.trigger_post_push_hook(
800 hooks_utils.trigger_post_push_hook(
801 username=user.username, action='push_local',
801 username=user.username, action='push_local',
802 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
802 repo_name=repo.repo_name, repo_type=scm_instance.alias,
803 hook_type='post_push',
803 hook_type='post_push',
804 commit_ids=[tip.raw_id])
804 commit_ids=[tip.raw_id])
805 return tip
805 return tip
806
806
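A hedged sketch of the nodes mapping create_nodes() expects, not part of this changeset. User.get_by_username / Repository.get_by_repo_name and the ScmModel module path are assumptions; 'admin' and 'my-repo' are placeholders for existing records.

from rhodecode.model.db import Repository, User  # assumed imports
from rhodecode.model.scm import ScmModel         # assumed module path

db_user = User.get_by_username('admin')            # hypothetical committer
db_repo = Repository.get_by_repo_name('my-repo')   # hypothetical repository

tip = ScmModel().create_nodes(
    user=db_user, repo=db_repo,
    message=u'Add docs skeleton',
    nodes={
        'docs/index.rst': {'content': 'Welcome\n=======\n'},
        'docs/conf.py': {'content': '# sphinx config\n'},
    },
    trigger_push_hook=True)   # fires the post_push hook for the new commit
print(tip.raw_id)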
807 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
807 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
808 author=None, trigger_push_hook=True):
808 author=None, trigger_push_hook=True):
809 user = self._get_user(user)
809 user = self._get_user(user)
810 scm_instance = repo.scm_instance(cache=False)
810 scm_instance = repo.scm_instance(cache=False)
811
811
812 message = safe_unicode(message)
812 message = safe_unicode(message)
813 commiter = user.full_contact
813 commiter = user.full_contact
814 author = safe_unicode(author) if author else commiter
814 author = safe_unicode(author) if author else commiter
815
815
816 imc = scm_instance.in_memory_commit
816 imc = scm_instance.in_memory_commit
817
817
818 if not parent_commit:
818 if not parent_commit:
819 parent_commit = EmptyCommit(alias=scm_instance.alias)
819 parent_commit = EmptyCommit(alias=scm_instance.alias)
820
820
821 if isinstance(parent_commit, EmptyCommit):
821 if isinstance(parent_commit, EmptyCommit):
822 # EmptyCommit means we're editing an empty repository
822 # EmptyCommit means we're editing an empty repository
823 parents = None
823 parents = None
824 else:
824 else:
825 parents = [parent_commit]
825 parents = [parent_commit]
826
826
827 # add multiple nodes
827 # add multiple nodes
828 for _filename, data in nodes.items():
828 for _filename, data in nodes.items():
829 # new filename, can be renamed from the old one; also sanitize
829 # new filename, can be renamed from the old one; also sanitize
830 # the path against any tricks with relative paths like ../../ etc.
830 # the path against any tricks with relative paths like ../../ etc.
831 filename = self._sanitize_path(data['filename'])
831 filename = self._sanitize_path(data['filename'])
832 old_filename = self._sanitize_path(_filename)
832 old_filename = self._sanitize_path(_filename)
833 content = data['content']
833 content = data['content']
834 file_mode = data.get('mode')
834 file_mode = data.get('mode')
835 filenode = FileNode(old_filename, content=content, mode=file_mode)
835 filenode = FileNode(old_filename, content=content, mode=file_mode)
836 op = data['op']
836 op = data['op']
837 if op == 'add':
837 if op == 'add':
838 imc.add(filenode)
838 imc.add(filenode)
839 elif op == 'del':
839 elif op == 'del':
840 imc.remove(filenode)
840 imc.remove(filenode)
841 elif op == 'mod':
841 elif op == 'mod':
842 if filename != old_filename:
842 if filename != old_filename:
843 # TODO: handle renames more efficiently, needs vcs lib changes
843 # TODO: handle renames more efficiently, needs vcs lib changes
844 imc.remove(filenode)
844 imc.remove(filenode)
845 imc.add(FileNode(filename, content=content, mode=file_mode))
845 imc.add(FileNode(filename, content=content, mode=file_mode))
846 else:
846 else:
847 imc.change(filenode)
847 imc.change(filenode)
848
848
849 try:
849 try:
850 # TODO: handle pre push scenario commit changes
850 # TODO: handle pre push scenario commit changes
851 tip = imc.commit(message=message,
851 tip = imc.commit(message=message,
852 author=author,
852 author=author,
853 parents=parents,
853 parents=parents,
854 branch=parent_commit.branch)
854 branch=parent_commit.branch)
855 except NodeNotChangedError:
855 except NodeNotChangedError:
856 raise
856 raise
857 except Exception as e:
857 except Exception as e:
858 log.exception("Unexpected exception during call to imc.commit")
858 log.exception("Unexpected exception during call to imc.commit")
859 raise IMCCommitError(str(e))
859 raise IMCCommitError(str(e))
860 finally:
860 finally:
861 # always clear caches, if commit fails we want fresh object also
861 # always clear caches, if commit fails we want fresh object also
862 self.mark_for_invalidation(repo.repo_name)
862 self.mark_for_invalidation(repo.repo_name)
863
863
864 if trigger_push_hook:
864 if trigger_push_hook:
865 hooks_utils.trigger_post_push_hook(
865 hooks_utils.trigger_post_push_hook(
866 username=user.username, action='push_local', hook_type='post_push',
866 username=user.username, action='push_local', hook_type='post_push',
867 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
867 repo_name=repo.repo_name, repo_type=scm_instance.alias,
868 commit_ids=[tip.raw_id])
868 commit_ids=[tip.raw_id])
869
869
870 return tip
870 return tip
871
871
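A hedged sketch of the nodes mapping for update_nodes(), reusing the hypothetical db_user/db_repo from the create_nodes example. The dict keys are the old filenames and 'filename' is the new one, so a 'mod' entry with a different name takes the remove-and-add rename path above.

from rhodecode.model.scm import ScmModel  # assumed module path

nodes = {
    'docs/index.rst': {                    # old path (dict key)
        'op': 'mod',
        'filename': 'docs/README.rst',     # new path -> treated as a rename
        'content': 'Renamed and edited\n',
    },
    'old_junk.txt': {
        'op': 'del',
        'filename': 'old_junk.txt',
        'content': '',
    },
}
tip = ScmModel().update_nodes(
    user=db_user, repo=db_repo, message=u'Rename docs index', nodes=nodes)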
872 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
872 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
873 author=None, trigger_push_hook=True):
873 author=None, trigger_push_hook=True):
874 """
874 """
875 Deletes the given nodes from `repo`
875 Deletes the given nodes from `repo`
876
876
877 :param user: RhodeCode User object or user_id, the committer
877 :param user: RhodeCode User object or user_id, the committer
878 :param repo: RhodeCode Repository object
878 :param repo: RhodeCode Repository object
879 :param message: commit message
879 :param message: commit message
880 :param nodes: mapping {filename:{'content':content},...}
880 :param nodes: mapping {filename:{'content':content},...}
881 :param parent_commit: parent commit; can be empty, then it's the initial
881 :param parent_commit: parent commit; can be empty, then it's the initial
882 commit
882 commit
883 :param author: author of the commit; can be different than the committer,
883 :param author: author of the commit; can be different than the committer,
884 only for git
884 only for git
885 :param trigger_push_hook: trigger push hooks
885 :param trigger_push_hook: trigger push hooks
886
886
887 :returns: new commit after deletion
887 :returns: new commit after deletion
888 """
888 """
889
889
890 user = self._get_user(user)
890 user = self._get_user(user)
891 scm_instance = repo.scm_instance(cache=False)
891 scm_instance = repo.scm_instance(cache=False)
892
892
893 processed_nodes = []
893 processed_nodes = []
894 for f_path in nodes:
894 for f_path in nodes:
895 f_path = self._sanitize_path(f_path)
895 f_path = self._sanitize_path(f_path)
896 # content can be empty, but for compatibility it allows the same dict
896 # content can be empty, but for compatibility it allows the same dict
897 # structure as add_nodes
897 # structure as add_nodes
898 content = nodes[f_path].get('content')
898 content = nodes[f_path].get('content')
899 processed_nodes.append((f_path, content))
899 processed_nodes.append((f_path, content))
900
900
901 message = safe_unicode(message)
901 message = safe_unicode(message)
902 commiter = user.full_contact
902 commiter = user.full_contact
903 author = safe_unicode(author) if author else commiter
903 author = safe_unicode(author) if author else commiter
904
904
905 imc = scm_instance.in_memory_commit
905 imc = scm_instance.in_memory_commit
906
906
907 if not parent_commit:
907 if not parent_commit:
908 parent_commit = EmptyCommit(alias=scm_instance.alias)
908 parent_commit = EmptyCommit(alias=scm_instance.alias)
909
909
910 if isinstance(parent_commit, EmptyCommit):
910 if isinstance(parent_commit, EmptyCommit):
911 # EmptyCommit means we're editing an empty repository
911 # EmptyCommit means we're editing an empty repository
912 parents = None
912 parents = None
913 else:
913 else:
914 parents = [parent_commit]
914 parents = [parent_commit]
915 # add multiple nodes
915 # add multiple nodes
916 for path, content in processed_nodes:
916 for path, content in processed_nodes:
917 imc.remove(FileNode(path, content=content))
917 imc.remove(FileNode(path, content=content))
918
918
919 # TODO: handle pre push scenario
919 # TODO: handle pre push scenario
920 tip = imc.commit(message=message,
920 tip = imc.commit(message=message,
921 author=author,
921 author=author,
922 parents=parents,
922 parents=parents,
923 branch=parent_commit.branch)
923 branch=parent_commit.branch)
924
924
925 self.mark_for_invalidation(repo.repo_name)
925 self.mark_for_invalidation(repo.repo_name)
926 if trigger_push_hook:
926 if trigger_push_hook:
927 hooks_utils.trigger_post_push_hook(
927 hooks_utils.trigger_post_push_hook(
928 username=user.username, action='push_local', hook_type='post_push',
928 username=user.username, action='push_local', hook_type='post_push',
929 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
929 repo_name=repo.repo_name, repo_type=scm_instance.alias,
930 commit_ids=[tip.raw_id])
930 commit_ids=[tip.raw_id])
931 return tip
931 return tip
932
932
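A hedged delete_nodes() sketch (hypothetical db_user/db_repo as before): only the keys of the nodes mapping matter here; the per-path dicts may stay empty since content is optional for deletions.

from rhodecode.model.scm import ScmModel  # assumed module path

tip = ScmModel().delete_nodes(
    user=db_user, repo=db_repo,
    message=u'Remove generated files',
    nodes={'build/output.log': {}, 'build/cache.bin': {}})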
933 def strip(self, repo, commit_id, branch):
933 def strip(self, repo, commit_id, branch):
934 scm_instance = repo.scm_instance(cache=False)
934 scm_instance = repo.scm_instance(cache=False)
935 scm_instance.config.clear_section('hooks')
935 scm_instance.config.clear_section('hooks')
936 scm_instance.strip(commit_id, branch)
936 scm_instance.strip(commit_id, branch)
937 self.mark_for_invalidation(repo.repo_name)
937 self.mark_for_invalidation(repo.repo_name)
938
938
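A cautious sketch of strip() with placeholder values: it permanently removes the given commit from the hypothetical db_repo, with hooks cleared beforehand and the scm cache invalidated afterwards, as the method above shows.

from rhodecode.model.scm import ScmModel  # assumed module path

# 'deadbeefcafe' stands in for a real commit hash on the 'default' branch
ScmModel().strip(db_repo, commit_id='deadbeefcafe', branch='default')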
939 def get_unread_journal(self):
939 def get_unread_journal(self):
940 return self.sa.query(UserLog).count()
940 return self.sa.query(UserLog).count()
941
941
942 @classmethod
942 @classmethod
943 def backend_landing_ref(cls, repo_type):
943 def backend_landing_ref(cls, repo_type):
944 """
944 """
945 Return a default landing ref based on a repository type.
945 Return a default landing ref based on a repository type.
946 """
946 """
947
947
948 landing_ref = {
948 landing_ref = {
949 'hg': ('branch:default', 'default'),
949 'hg': ('branch:default', 'default'),
950 'git': ('branch:master', 'master'),
950 'git': ('branch:master', 'master'),
951 'svn': ('rev:tip', 'latest tip'),
951 'svn': ('rev:tip', 'latest tip'),
952 'default': ('rev:tip', 'latest tip'),
952 'default': ('rev:tip', 'latest tip'),
953 }
953 }
954
954
955 return landing_ref.get(repo_type) or landing_ref['default']
955 return landing_ref.get(repo_type) or landing_ref['default']
956
956
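backend_landing_ref() is a classmethod, so it can be used without a repository at hand; a small sketch (assumed ScmModel location):

from rhodecode.model.scm import ScmModel  # assumed module path

ref, label = ScmModel.backend_landing_ref('hg')    # ('branch:default', 'default')
ref, label = ScmModel.backend_landing_ref('git')   # ('branch:master', 'master')
ref, label = ScmModel.backend_landing_ref(None)    # falls back to ('rev:tip', 'latest tip')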
957 def get_repo_landing_revs(self, translator, repo=None):
957 def get_repo_landing_revs(self, translator, repo=None):
958 """
958 """
959 Generates select options with tags, branches and bookmarks (bookmarks for
959 Generates select options with tags, branches and bookmarks (bookmarks for
960 hg only), grouped by type
960 hg only), grouped by type
961
961
962 :param repo:
962 :param repo:
963 """
963 """
964 _ = translator
964 _ = translator
965 repo = self._get_repo(repo)
965 repo = self._get_repo(repo)
966
966
967 if repo:
967 if repo:
968 repo_type = repo.repo_type
968 repo_type = repo.repo_type
969 else:
969 else:
970 repo_type = 'default'
970 repo_type = 'default'
971
971
972 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
972 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
973
973
974 default_ref_options = [
974 default_ref_options = [
975 [default_landing_ref, landing_ref_lbl]
975 [default_landing_ref, landing_ref_lbl]
976 ]
976 ]
977 default_choices = [
977 default_choices = [
978 default_landing_ref
978 default_landing_ref
979 ]
979 ]
980
980
981 if not repo:
981 if not repo:
982 return default_choices, default_ref_options
982 return default_choices, default_ref_options
983
983
984 repo = repo.scm_instance()
984 repo = repo.scm_instance()
985
985
986 ref_options = [('rev:tip', 'latest tip')]
986 ref_options = [('rev:tip', 'latest tip')]
987 choices = ['rev:tip']
987 choices = ['rev:tip']
988
988
989 # branches
989 # branches
990 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
990 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
991 if not branch_group:
991 if not branch_group:
992 # new repo, or maybe one without any branch yet?
992 # new repo, or maybe one without any branch yet?
993 branch_group = default_ref_options
993 branch_group = default_ref_options
994
994
995 branches_group = (branch_group, _("Branches"))
995 branches_group = (branch_group, _("Branches"))
996 ref_options.append(branches_group)
996 ref_options.append(branches_group)
997 choices.extend([x[0] for x in branches_group[0]])
997 choices.extend([x[0] for x in branches_group[0]])
998
998
999 # bookmarks for HG
999 # bookmarks for HG
1000 if repo.alias == 'hg':
1000 if repo.alias == 'hg':
1001 bookmarks_group = (
1001 bookmarks_group = (
1002 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1002 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1003 for b in repo.bookmarks],
1003 for b in repo.bookmarks],
1004 _("Bookmarks"))
1004 _("Bookmarks"))
1005 ref_options.append(bookmarks_group)
1005 ref_options.append(bookmarks_group)
1006 choices.extend([x[0] for x in bookmarks_group[0]])
1006 choices.extend([x[0] for x in bookmarks_group[0]])
1007
1007
1008 # tags
1008 # tags
1009 tags_group = (
1009 tags_group = (
1010 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1010 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1011 for t in repo.tags],
1011 for t in repo.tags],
1012 _("Tags"))
1012 _("Tags"))
1013 ref_options.append(tags_group)
1013 ref_options.append(tags_group)
1014 choices.extend([x[0] for x in tags_group[0]])
1014 choices.extend([x[0] for x in tags_group[0]])
1015
1015
1016 return choices, ref_options
1016 return choices, ref_options
1017
1017
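A hedged sketch of consuming get_repo_landing_revs() (hypothetical db_repo, assumed ScmModel location): the translator only needs to be callable, `choices` is the flat list used for validation, and `ref_options` holds the grouped (value, label) pairs for the landing-revision select widget.

from rhodecode.model.scm import ScmModel  # assumed module path

_ = lambda s: s   # any callable works as the translator in this sketch
choices, ref_options = ScmModel().get_repo_landing_revs(_, repo=db_repo)

assert 'rev:tip' in choices                    # always present when a repo is given
branch_group, branch_label = ref_options[1]    # the "Branches" group built above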
1018 def get_server_info(self, environ=None):
1018 def get_server_info(self, environ=None):
1019 server_info = get_system_info(environ)
1019 server_info = get_system_info(environ)
1020 return server_info
1020 return server_info