##// END OF EJS Templates
pull-requests: ensure the merge response provides more details...
dan -
r3339:8c7a75f7 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,937 +1,937 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 from rhodecode.api.utils import (
26 from rhodecode.api.utils import (
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 validate_repo_permissions, resolve_ref_or_error)
29 validate_repo_permissions, resolve_ref_or_error)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 from rhodecode.lib.base import vcs_operation_context
31 from rhodecode.lib.base import vcs_operation_context
32 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool
33 from rhodecode.model.changeset_status import ChangesetStatusModel
33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.comment import CommentsModel
34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 from rhodecode.model.settings import SettingsModel
37 from rhodecode.model.settings import SettingsModel
38 from rhodecode.model.validation_schema import Invalid
38 from rhodecode.model.validation_schema import Invalid
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 ReviewerListSchema)
40 ReviewerListSchema)
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
@jsonrpc_method()
def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Get a pull request based on the given ID.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID from where
        the pull request was opened.
    :type repoid: str or int
    :param pullrequestid: ID of the requested pull request.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
        {
            "pull_request_id": "<pull_request_id>",
            "url": "<url>",
            "title": "<title>",
            "description": "<description>",
            "status" : "<status>",
            "created_on": "<date_time_created>",
            "updated_on": "<date_time_updated>",
            "commit_ids": [
                ...
                "<commit_id>",
                "<commit_id>",
                ...
            ],
            "review_status": "<review_status>",
            "mergeable": {
                "status": "<bool>",
                "message": "<message>",
            },
            "source": {
                "clone_url": "<clone_url>",
                "repository": "<repository_name>",
                "reference":
                {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "target": {
                "clone_url": "<clone_url>",
                "repository": "<repository_name>",
                "reference":
                {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "merge": {
                "clone_url": "<clone_url>",
                "reference":
                {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "author": <user_obj>,
            "reviewers": [
                ...
                {
                    "user": "<user_obj>",
                    "review_status": "<review_status>",
                }
                ...
            ]
        },
        "error": null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        # An explicit repoid is validated for existence; raises if unknown.
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        # Deliberately vague error: do not reveal whether the repo or the
        # PR exists to callers lacking read permission.
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))
    data = pull_request.get_api_data()
    return data
137
137
138
138
@jsonrpc_method()
def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
    """
    Get all pull requests from the repository specified in `repoid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param status: Only return pull requests with the specified status.
        Valid options are.
        * ``new`` (default)
        * ``open``
        * ``closed``
    :type status: str

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
        [
            ...
            {
                "pull_request_id": "<pull_request_id>",
                "url": "<url>",
                "title" : "<title>",
                "description": "<description>",
                "status": "<status>",
                "created_on": "<date_time_created>",
                "updated_on": "<date_time_updated>",
                "commit_ids": [
                    ...
                    "<commit_id>",
                    "<commit_id>",
                    ...
                ],
                "review_status": "<review_status>",
                "mergeable": {
                    "status": "<bool>",
                    "message: "<message>",
                },
                "source": {
                    "clone_url": "<clone_url>",
                    "reference":
                    {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "target": {
                    "clone_url": "<clone_url>",
                    "reference":
                    {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "merge": {
                    "clone_url": "<clone_url>",
                    "reference":
                    {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "author": <user_obj>,
                "reviewers": [
                    ...
                    {
                        "user": "<user_obj>",
                        "review_status": "<review_status>",
                    }
                    ...
                ]
            }
            ...
        ],
        "error": null

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # Super-admins bypass the per-repository permission check.
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    status = Optional.extract(status)
    pull_requests = PullRequestModel().get_all(repo, statuses=[status])
    data = [pr.get_api_data() for pr in pull_requests]
    return data
234
234
235
235
@jsonrpc_method()
def merge_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Merge the pull request specified by `pullrequestid` into its target
    repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID of the
        target repository to which the |pr| is to be merged.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request which shall be merged.
    :type pullrequestid: int
    :param userid: Merge the pull request as this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "executed": "<bool>",
            "failure_reason": "<int>",
            "merge_commit_id": "<merge_commit_id>",
            "possible": "<bool>",
            "merge_ref": {
                "commit_id": "<commit_id>",
                "type": "<type>",
                "name": "<name>"
            }
        },
        "error": null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not isinstance(userid, Optional):
        # Acting on behalf of another user requires super-admin or
        # repository admin rights.
        if (has_superadmin_permission(apiuser) or
                HasRepoPermissionAnyApi('repository.admin')(
                    user=apiuser, repo_name=repo.repo_name)):
            apiuser = get_user_or_error(userid)
        else:
            raise JSONRPCError('userid is not the same as your user')

    check = MergeCheck.validate(
        pull_request, auth_user=apiuser, translator=request.translate)
    merge_possible = not check.failed

    if not merge_possible:
        # Surface every failed merge check to the API caller.
        error_messages = []
        for err_type, error_msg in check.errors:
            error_msg = request.translate(error_msg)
            error_messages.append(error_msg)

        reasons = ','.join(error_messages)
        raise JSONRPCError(
            'merge not possible for following reasons: {}'.format(reasons))

    target_repo = pull_request.target_repo
    extras = vcs_operation_context(
        request.environ, repo_name=target_repo.repo_name,
        username=apiuser.username, action='push',
        scm=target_repo.repo_type)
    merge_response = PullRequestModel().merge_repo(
        pull_request, apiuser, extras=extras)
    if merge_response.executed:
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, apiuser)

        Session().commit()

    # In previous versions the merge response directly contained the merge
    # commit id. It is now contained in the merge reference object. To be
    # backwards compatible we have to extract it again.
    merge_response = merge_response.asdict()
    merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id

    return merge_response
320
320
321
321
@jsonrpc_method()
def get_pull_request_comments(
        request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Get all comments of pull request specified with the `pullrequestid`

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : [
            {
                "comment_author": {
                    "active": true,
                    "full_name_or_username": "Tom Gore",
                    "username": "admin"
                },
                "comment_created_on": "2017-01-02T18:43:45.533",
                "comment_f_path": null,
                "comment_id": 25,
                "comment_lineno": null,
                "comment_status": {
                    "status": "under_review",
                    "status_lbl": "Under Review"
                },
                "comment_text": "Example text",
                "comment_type": null,
                "pull_request_version": null
            }
        ],
        error : null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        # Deliberately vague: do not leak existence of repo/PR to callers
        # lacking read permission.
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    (pull_request_latest,
     pull_request_at_ver,
     pull_request_display_obj,
     at_version) = PullRequestModel().get_pr_version(
        pull_request.pull_request_id, version=None)

    # Map internal version ids to 1-based human-readable version numbers.
    versions = pull_request_display_obj.versions()
    ver_map = {
        ver.pull_request_version_id: cnt
        for cnt, ver in enumerate(versions, 1)
    }

    # GENERAL COMMENTS with versions #
    q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
    q = q.order_by(ChangesetComment.comment_id.asc())
    general_comments = q.all()

    # INLINE COMMENTS with versions #
    q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
    q = q.order_by(ChangesetComment.comment_id.asc())
    inline_comments = q.all()

    data = []
    for comment in inline_comments + general_comments:
        full_data = comment.get_api_data()
        pr_version_id = None
        if comment.pull_request_version_id:
            pr_version_id = 'v{}'.format(
                ver_map[comment.pull_request_version_id])

        # sanitize some entries

        full_data['pull_request_version'] = pr_version_id
        # Replace the user object with a small, JSON-safe dict.
        full_data['comment_author'] = {
            'username': full_data['comment_author'].username,
            'full_name_or_username': full_data['comment_author'].full_name_or_username,
            'active': full_data['comment_author'].active,
        }

        if full_data['comment_status']:
            full_data['comment_status'] = {
                'status': full_data['comment_status'][0].status,
                'status_lbl': full_data['comment_status'][0].status_lbl,
            }
        else:
            full_data['comment_status'] = {}

        data.append(full_data)
    return data
423
423
424
424
425 @jsonrpc_method()
425 @jsonrpc_method()
426 def comment_pull_request(
426 def comment_pull_request(
427 request, apiuser, pullrequestid, repoid=Optional(None),
427 request, apiuser, pullrequestid, repoid=Optional(None),
428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
430 resolves_comment_id=Optional(None),
430 resolves_comment_id=Optional(None),
431 userid=Optional(OAttr('apiuser'))):
431 userid=Optional(OAttr('apiuser'))):
432 """
432 """
433 Comment on the pull request specified with the `pullrequestid`,
433 Comment on the pull request specified with the `pullrequestid`,
434 in the |repo| specified by the `repoid`, and optionally change the
434 in the |repo| specified by the `repoid`, and optionally change the
435 review status.
435 review status.
436
436
437 :param apiuser: This is filled automatically from the |authtoken|.
437 :param apiuser: This is filled automatically from the |authtoken|.
438 :type apiuser: AuthUser
438 :type apiuser: AuthUser
439 :param repoid: Optional repository name or repository ID.
439 :param repoid: Optional repository name or repository ID.
440 :type repoid: str or int
440 :type repoid: str or int
441 :param pullrequestid: The pull request ID.
441 :param pullrequestid: The pull request ID.
442 :type pullrequestid: int
442 :type pullrequestid: int
443 :param commit_id: Specify the commit_id for which to set a comment. If
443 :param commit_id: Specify the commit_id for which to set a comment. If
444 given commit_id is different than latest in the PR status
444 given commit_id is different than latest in the PR status
445 change won't be performed.
445 change won't be performed.
446 :type commit_id: str
446 :type commit_id: str
447 :param message: The text content of the comment.
447 :param message: The text content of the comment.
448 :type message: str
448 :type message: str
449 :param status: (**Optional**) Set the approval status of the pull
449 :param status: (**Optional**) Set the approval status of the pull
450 request. One of: 'not_reviewed', 'approved', 'rejected',
450 request. One of: 'not_reviewed', 'approved', 'rejected',
451 'under_review'
451 'under_review'
452 :type status: str
452 :type status: str
453 :param comment_type: Comment type, one of: 'note', 'todo'
453 :param comment_type: Comment type, one of: 'note', 'todo'
454 :type comment_type: Optional(str), default: 'note'
454 :type comment_type: Optional(str), default: 'note'
455 :param userid: Comment on the pull request as this user
455 :param userid: Comment on the pull request as this user
456 :type userid: Optional(str or int)
456 :type userid: Optional(str or int)
457
457
458 Example output:
458 Example output:
459
459
460 .. code-block:: bash
460 .. code-block:: bash
461
461
462 id : <id_given_in_input>
462 id : <id_given_in_input>
463 result : {
463 result : {
464 "pull_request_id": "<Integer>",
464 "pull_request_id": "<Integer>",
465 "comment_id": "<Integer>",
465 "comment_id": "<Integer>",
466 "status": {"given": <given_status>,
466 "status": {"given": <given_status>,
467 "was_changed": <bool status_was_actually_changed> },
467 "was_changed": <bool status_was_actually_changed> },
468 },
468 },
469 error : null
469 error : null
470 """
470 """
471 pull_request = get_pull_request_or_error(pullrequestid)
471 pull_request = get_pull_request_or_error(pullrequestid)
472 if Optional.extract(repoid):
472 if Optional.extract(repoid):
473 repo = get_repo_or_error(repoid)
473 repo = get_repo_or_error(repoid)
474 else:
474 else:
475 repo = pull_request.target_repo
475 repo = pull_request.target_repo
476
476
477 if not isinstance(userid, Optional):
477 if not isinstance(userid, Optional):
478 if (has_superadmin_permission(apiuser) or
478 if (has_superadmin_permission(apiuser) or
479 HasRepoPermissionAnyApi('repository.admin')(
479 HasRepoPermissionAnyApi('repository.admin')(
480 user=apiuser, repo_name=repo.repo_name)):
480 user=apiuser, repo_name=repo.repo_name)):
481 apiuser = get_user_or_error(userid)
481 apiuser = get_user_or_error(userid)
482 else:
482 else:
483 raise JSONRPCError('userid is not the same as your user')
483 raise JSONRPCError('userid is not the same as your user')
484
484
485 if not PullRequestModel().check_user_read(
485 if not PullRequestModel().check_user_read(
486 pull_request, apiuser, api=True):
486 pull_request, apiuser, api=True):
487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
488 message = Optional.extract(message)
488 message = Optional.extract(message)
489 status = Optional.extract(status)
489 status = Optional.extract(status)
490 commit_id = Optional.extract(commit_id)
490 commit_id = Optional.extract(commit_id)
491 comment_type = Optional.extract(comment_type)
491 comment_type = Optional.extract(comment_type)
492 resolves_comment_id = Optional.extract(resolves_comment_id)
492 resolves_comment_id = Optional.extract(resolves_comment_id)
493
493
494 if not message and not status:
494 if not message and not status:
495 raise JSONRPCError(
495 raise JSONRPCError(
496 'Both message and status parameters are missing. '
496 'Both message and status parameters are missing. '
497 'At least one is required.')
497 'At least one is required.')
498
498
499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
500 status is not None):
500 status is not None):
501 raise JSONRPCError('Unknown comment status: `%s`' % status)
501 raise JSONRPCError('Unknown comment status: `%s`' % status)
502
502
503 if commit_id and commit_id not in pull_request.revisions:
503 if commit_id and commit_id not in pull_request.revisions:
504 raise JSONRPCError(
504 raise JSONRPCError(
505 'Invalid commit_id `%s` for this pull request.' % commit_id)
505 'Invalid commit_id `%s` for this pull request.' % commit_id)
506
506
507 allowed_to_change_status = PullRequestModel().check_user_change_status(
507 allowed_to_change_status = PullRequestModel().check_user_change_status(
508 pull_request, apiuser)
508 pull_request, apiuser)
509
509
510 # if commit_id is passed re-validated if user is allowed to change status
510 # if commit_id is passed re-validated if user is allowed to change status
511 # based on latest commit_id from the PR
511 # based on latest commit_id from the PR
512 if commit_id:
512 if commit_id:
513 commit_idx = pull_request.revisions.index(commit_id)
513 commit_idx = pull_request.revisions.index(commit_id)
514 if commit_idx != 0:
514 if commit_idx != 0:
515 allowed_to_change_status = False
515 allowed_to_change_status = False
516
516
517 if resolves_comment_id:
517 if resolves_comment_id:
518 comment = ChangesetComment.get(resolves_comment_id)
518 comment = ChangesetComment.get(resolves_comment_id)
519 if not comment:
519 if not comment:
520 raise JSONRPCError(
520 raise JSONRPCError(
521 'Invalid resolves_comment_id `%s` for this pull request.'
521 'Invalid resolves_comment_id `%s` for this pull request.'
522 % resolves_comment_id)
522 % resolves_comment_id)
523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
524 raise JSONRPCError(
524 raise JSONRPCError(
525 'Comment `%s` is wrong type for setting status to resolved.'
525 'Comment `%s` is wrong type for setting status to resolved.'
526 % resolves_comment_id)
526 % resolves_comment_id)
527
527
528 text = message
528 text = message
529 status_label = ChangesetStatus.get_status_lbl(status)
529 status_label = ChangesetStatus.get_status_lbl(status)
530 if status and allowed_to_change_status:
530 if status and allowed_to_change_status:
531 st_message = ('Status change %(transition_icon)s %(status)s'
531 st_message = ('Status change %(transition_icon)s %(status)s'
532 % {'transition_icon': '>', 'status': status_label})
532 % {'transition_icon': '>', 'status': status_label})
533 text = message or st_message
533 text = message or st_message
534
534
535 rc_config = SettingsModel().get_all_settings()
535 rc_config = SettingsModel().get_all_settings()
536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
537
537
538 status_change = status and allowed_to_change_status
538 status_change = status and allowed_to_change_status
539 comment = CommentsModel().create(
539 comment = CommentsModel().create(
540 text=text,
540 text=text,
541 repo=pull_request.target_repo.repo_id,
541 repo=pull_request.target_repo.repo_id,
542 user=apiuser.user_id,
542 user=apiuser.user_id,
543 pull_request=pull_request.pull_request_id,
543 pull_request=pull_request.pull_request_id,
544 f_path=None,
544 f_path=None,
545 line_no=None,
545 line_no=None,
546 status_change=(status_label if status_change else None),
546 status_change=(status_label if status_change else None),
547 status_change_type=(status if status_change else None),
547 status_change_type=(status if status_change else None),
548 closing_pr=False,
548 closing_pr=False,
549 renderer=renderer,
549 renderer=renderer,
550 comment_type=comment_type,
550 comment_type=comment_type,
551 resolves_comment_id=resolves_comment_id,
551 resolves_comment_id=resolves_comment_id,
552 auth_user=apiuser
552 auth_user=apiuser
553 )
553 )
554
554
555 if allowed_to_change_status and status:
555 if allowed_to_change_status and status:
556 ChangesetStatusModel().set_status(
556 ChangesetStatusModel().set_status(
557 pull_request.target_repo.repo_id,
557 pull_request.target_repo.repo_id,
558 status,
558 status,
559 apiuser.user_id,
559 apiuser.user_id,
560 comment,
560 comment,
561 pull_request=pull_request.pull_request_id
561 pull_request=pull_request.pull_request_id
562 )
562 )
563 Session().flush()
563 Session().flush()
564
564
565 Session().commit()
565 Session().commit()
566 data = {
566 data = {
567 'pull_request_id': pull_request.pull_request_id,
567 'pull_request_id': pull_request.pull_request_id,
568 'comment_id': comment.comment_id if comment else None,
568 'comment_id': comment.comment_id if comment else None,
569 'status': {'given': status, 'was_changed': status_change},
569 'status': {'given': status, 'was_changed': status_change},
570 }
570 }
571 return data
571 return data
572
572
573
573
574 @jsonrpc_method()
574 @jsonrpc_method()
575 def create_pull_request(
575 def create_pull_request(
576 request, apiuser, source_repo, target_repo, source_ref, target_ref,
576 request, apiuser, source_repo, target_repo, source_ref, target_ref,
577 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
577 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
578 reviewers=Optional(None)):
578 reviewers=Optional(None)):
579 """
579 """
580 Creates a new pull request.
580 Creates a new pull request.
581
581
582 Accepts refs in the following formats:
582 Accepts refs in the following formats:
583
583
584 * branch:<branch_name>:<sha>
584 * branch:<branch_name>:<sha>
585 * branch:<branch_name>
585 * branch:<branch_name>
586 * bookmark:<bookmark_name>:<sha> (Mercurial only)
586 * bookmark:<bookmark_name>:<sha> (Mercurial only)
587 * bookmark:<bookmark_name> (Mercurial only)
587 * bookmark:<bookmark_name> (Mercurial only)
588
588
589 :param apiuser: This is filled automatically from the |authtoken|.
589 :param apiuser: This is filled automatically from the |authtoken|.
590 :type apiuser: AuthUser
590 :type apiuser: AuthUser
591 :param source_repo: Set the source repository name.
591 :param source_repo: Set the source repository name.
592 :type source_repo: str
592 :type source_repo: str
593 :param target_repo: Set the target repository name.
593 :param target_repo: Set the target repository name.
594 :type target_repo: str
594 :type target_repo: str
595 :param source_ref: Set the source ref name.
595 :param source_ref: Set the source ref name.
596 :type source_ref: str
596 :type source_ref: str
597 :param target_ref: Set the target ref name.
597 :param target_ref: Set the target ref name.
598 :type target_ref: str
598 :type target_ref: str
599 :param title: Optionally Set the pull request title, it's generated otherwise
599 :param title: Optionally Set the pull request title, it's generated otherwise
600 :type title: str
600 :type title: str
601 :param description: Set the pull request description.
601 :param description: Set the pull request description.
602 :type description: Optional(str)
602 :type description: Optional(str)
603 :type description_renderer: Optional(str)
603 :type description_renderer: Optional(str)
604 :param description_renderer: Set pull request renderer for the description.
604 :param description_renderer: Set pull request renderer for the description.
605 It should be 'rst', 'markdown' or 'plain'. If not give default
605 It should be 'rst', 'markdown' or 'plain'. If not give default
606 system renderer will be used
606 system renderer will be used
607 :param reviewers: Set the new pull request reviewers list.
607 :param reviewers: Set the new pull request reviewers list.
608 Reviewer defined by review rules will be added automatically to the
608 Reviewer defined by review rules will be added automatically to the
609 defined list.
609 defined list.
610 :type reviewers: Optional(list)
610 :type reviewers: Optional(list)
611 Accepts username strings or objects of the format:
611 Accepts username strings or objects of the format:
612
612
613 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
613 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
614 """
614 """
615
615
616 source_db_repo = get_repo_or_error(source_repo)
616 source_db_repo = get_repo_or_error(source_repo)
617 target_db_repo = get_repo_or_error(target_repo)
617 target_db_repo = get_repo_or_error(target_repo)
618 if not has_superadmin_permission(apiuser):
618 if not has_superadmin_permission(apiuser):
619 _perms = ('repository.admin', 'repository.write', 'repository.read',)
619 _perms = ('repository.admin', 'repository.write', 'repository.read',)
620 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
620 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
621
621
622 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
622 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
623 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
623 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
624
624
625 source_scm = source_db_repo.scm_instance()
625 source_scm = source_db_repo.scm_instance()
626 target_scm = target_db_repo.scm_instance()
626 target_scm = target_db_repo.scm_instance()
627
627
628 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
628 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
629 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
629 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
630
630
631 ancestor = source_scm.get_common_ancestor(
631 ancestor = source_scm.get_common_ancestor(
632 source_commit.raw_id, target_commit.raw_id, target_scm)
632 source_commit.raw_id, target_commit.raw_id, target_scm)
633 if not ancestor:
633 if not ancestor:
634 raise JSONRPCError('no common ancestor found')
634 raise JSONRPCError('no common ancestor found')
635
635
636 # recalculate target ref based on ancestor
636 # recalculate target ref based on ancestor
637 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
637 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
638 full_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
638 full_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
639
639
640 commit_ranges = target_scm.compare(
640 commit_ranges = target_scm.compare(
641 target_commit.raw_id, source_commit.raw_id, source_scm,
641 target_commit.raw_id, source_commit.raw_id, source_scm,
642 merge=True, pre_load=[])
642 merge=True, pre_load=[])
643
643
644 if not commit_ranges:
644 if not commit_ranges:
645 raise JSONRPCError('no commits found')
645 raise JSONRPCError('no commits found')
646
646
647 reviewer_objects = Optional.extract(reviewers) or []
647 reviewer_objects = Optional.extract(reviewers) or []
648
648
649 # serialize and validate passed in given reviewers
649 # serialize and validate passed in given reviewers
650 if reviewer_objects:
650 if reviewer_objects:
651 schema = ReviewerListSchema()
651 schema = ReviewerListSchema()
652 try:
652 try:
653 reviewer_objects = schema.deserialize(reviewer_objects)
653 reviewer_objects = schema.deserialize(reviewer_objects)
654 except Invalid as err:
654 except Invalid as err:
655 raise JSONRPCValidationError(colander_exc=err)
655 raise JSONRPCValidationError(colander_exc=err)
656
656
657 # validate users
657 # validate users
658 for reviewer_object in reviewer_objects:
658 for reviewer_object in reviewer_objects:
659 user = get_user_or_error(reviewer_object['username'])
659 user = get_user_or_error(reviewer_object['username'])
660 reviewer_object['user_id'] = user.user_id
660 reviewer_object['user_id'] = user.user_id
661
661
662 get_default_reviewers_data, validate_default_reviewers = \
662 get_default_reviewers_data, validate_default_reviewers = \
663 PullRequestModel().get_reviewer_functions()
663 PullRequestModel().get_reviewer_functions()
664
664
665 # recalculate reviewers logic, to make sure we can validate this
665 # recalculate reviewers logic, to make sure we can validate this
666 reviewer_rules = get_default_reviewers_data(
666 reviewer_rules = get_default_reviewers_data(
667 apiuser.get_instance(), source_db_repo,
667 apiuser.get_instance(), source_db_repo,
668 source_commit, target_db_repo, target_commit)
668 source_commit, target_db_repo, target_commit)
669
669
670 # now MERGE our given with the calculated
670 # now MERGE our given with the calculated
671 reviewer_objects = reviewer_rules['reviewers'] + reviewer_objects
671 reviewer_objects = reviewer_rules['reviewers'] + reviewer_objects
672
672
673 try:
673 try:
674 reviewers = validate_default_reviewers(
674 reviewers = validate_default_reviewers(
675 reviewer_objects, reviewer_rules)
675 reviewer_objects, reviewer_rules)
676 except ValueError as e:
676 except ValueError as e:
677 raise JSONRPCError('Reviewers Validation: {}'.format(e))
677 raise JSONRPCError('Reviewers Validation: {}'.format(e))
678
678
679 title = Optional.extract(title)
679 title = Optional.extract(title)
680 if not title:
680 if not title:
681 title_source_ref = source_ref.split(':', 2)[1]
681 title_source_ref = source_ref.split(':', 2)[1]
682 title = PullRequestModel().generate_pullrequest_title(
682 title = PullRequestModel().generate_pullrequest_title(
683 source=source_repo,
683 source=source_repo,
684 source_ref=title_source_ref,
684 source_ref=title_source_ref,
685 target=target_repo
685 target=target_repo
686 )
686 )
687 # fetch renderer, if set fallback to plain in case of PR
687 # fetch renderer, if set fallback to plain in case of PR
688 rc_config = SettingsModel().get_all_settings()
688 rc_config = SettingsModel().get_all_settings()
689 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
689 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
690 description = Optional.extract(description)
690 description = Optional.extract(description)
691 description_renderer = Optional.extract(description_renderer) or default_system_renderer
691 description_renderer = Optional.extract(description_renderer) or default_system_renderer
692
692
693 pull_request = PullRequestModel().create(
693 pull_request = PullRequestModel().create(
694 created_by=apiuser.user_id,
694 created_by=apiuser.user_id,
695 source_repo=source_repo,
695 source_repo=source_repo,
696 source_ref=full_source_ref,
696 source_ref=full_source_ref,
697 target_repo=target_repo,
697 target_repo=target_repo,
698 target_ref=full_target_ref,
698 target_ref=full_target_ref,
699 revisions=[commit.raw_id for commit in reversed(commit_ranges)],
699 revisions=[commit.raw_id for commit in reversed(commit_ranges)],
700 reviewers=reviewers,
700 reviewers=reviewers,
701 title=title,
701 title=title,
702 description=description,
702 description=description,
703 description_renderer=description_renderer,
703 description_renderer=description_renderer,
704 reviewer_data=reviewer_rules,
704 reviewer_data=reviewer_rules,
705 auth_user=apiuser
705 auth_user=apiuser
706 )
706 )
707
707
708 Session().commit()
708 Session().commit()
709 data = {
709 data = {
710 'msg': 'Created new pull request `{}`'.format(title),
710 'msg': 'Created new pull request `{}`'.format(title),
711 'pull_request_id': pull_request.pull_request_id,
711 'pull_request_id': pull_request.pull_request_id,
712 }
712 }
713 return data
713 return data
714
714
715
715
716 @jsonrpc_method()
716 @jsonrpc_method()
717 def update_pull_request(
717 def update_pull_request(
718 request, apiuser, pullrequestid, repoid=Optional(None),
718 request, apiuser, pullrequestid, repoid=Optional(None),
719 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
719 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
720 reviewers=Optional(None), update_commits=Optional(None)):
720 reviewers=Optional(None), update_commits=Optional(None)):
721 """
721 """
722 Updates a pull request.
722 Updates a pull request.
723
723
724 :param apiuser: This is filled automatically from the |authtoken|.
724 :param apiuser: This is filled automatically from the |authtoken|.
725 :type apiuser: AuthUser
725 :type apiuser: AuthUser
726 :param repoid: Optional repository name or repository ID.
726 :param repoid: Optional repository name or repository ID.
727 :type repoid: str or int
727 :type repoid: str or int
728 :param pullrequestid: The pull request ID.
728 :param pullrequestid: The pull request ID.
729 :type pullrequestid: int
729 :type pullrequestid: int
730 :param title: Set the pull request title.
730 :param title: Set the pull request title.
731 :type title: str
731 :type title: str
732 :param description: Update pull request description.
732 :param description: Update pull request description.
733 :type description: Optional(str)
733 :type description: Optional(str)
734 :type description_renderer: Optional(str)
734 :type description_renderer: Optional(str)
735 :param description_renderer: Update pull request renderer for the description.
735 :param description_renderer: Update pull request renderer for the description.
736 It should be 'rst', 'markdown' or 'plain'
736 It should be 'rst', 'markdown' or 'plain'
737 :param reviewers: Update pull request reviewers list with new value.
737 :param reviewers: Update pull request reviewers list with new value.
738 :type reviewers: Optional(list)
738 :type reviewers: Optional(list)
739 Accepts username strings or objects of the format:
739 Accepts username strings or objects of the format:
740
740
741 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
741 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
742
742
743 :param update_commits: Trigger update of commits for this pull request
743 :param update_commits: Trigger update of commits for this pull request
744 :type: update_commits: Optional(bool)
744 :type: update_commits: Optional(bool)
745
745
746 Example output:
746 Example output:
747
747
748 .. code-block:: bash
748 .. code-block:: bash
749
749
750 id : <id_given_in_input>
750 id : <id_given_in_input>
751 result : {
751 result : {
752 "msg": "Updated pull request `63`",
752 "msg": "Updated pull request `63`",
753 "pull_request": <pull_request_object>,
753 "pull_request": <pull_request_object>,
754 "updated_reviewers": {
754 "updated_reviewers": {
755 "added": [
755 "added": [
756 "username"
756 "username"
757 ],
757 ],
758 "removed": []
758 "removed": []
759 },
759 },
760 "updated_commits": {
760 "updated_commits": {
761 "added": [
761 "added": [
762 "<sha1_hash>"
762 "<sha1_hash>"
763 ],
763 ],
764 "common": [
764 "common": [
765 "<sha1_hash>",
765 "<sha1_hash>",
766 "<sha1_hash>",
766 "<sha1_hash>",
767 ],
767 ],
768 "removed": []
768 "removed": []
769 }
769 }
770 }
770 }
771 error : null
771 error : null
772 """
772 """
773
773
774 pull_request = get_pull_request_or_error(pullrequestid)
774 pull_request = get_pull_request_or_error(pullrequestid)
775 if Optional.extract(repoid):
775 if Optional.extract(repoid):
776 repo = get_repo_or_error(repoid)
776 repo = get_repo_or_error(repoid)
777 else:
777 else:
778 repo = pull_request.target_repo
778 repo = pull_request.target_repo
779
779
780 if not PullRequestModel().check_user_update(
780 if not PullRequestModel().check_user_update(
781 pull_request, apiuser, api=True):
781 pull_request, apiuser, api=True):
782 raise JSONRPCError(
782 raise JSONRPCError(
783 'pull request `%s` update failed, no permission to update.' % (
783 'pull request `%s` update failed, no permission to update.' % (
784 pullrequestid,))
784 pullrequestid,))
785 if pull_request.is_closed():
785 if pull_request.is_closed():
786 raise JSONRPCError(
786 raise JSONRPCError(
787 'pull request `%s` update failed, pull request is closed' % (
787 'pull request `%s` update failed, pull request is closed' % (
788 pullrequestid,))
788 pullrequestid,))
789
789
790 reviewer_objects = Optional.extract(reviewers) or []
790 reviewer_objects = Optional.extract(reviewers) or []
791
791
792 if reviewer_objects:
792 if reviewer_objects:
793 schema = ReviewerListSchema()
793 schema = ReviewerListSchema()
794 try:
794 try:
795 reviewer_objects = schema.deserialize(reviewer_objects)
795 reviewer_objects = schema.deserialize(reviewer_objects)
796 except Invalid as err:
796 except Invalid as err:
797 raise JSONRPCValidationError(colander_exc=err)
797 raise JSONRPCValidationError(colander_exc=err)
798
798
799 # validate users
799 # validate users
800 for reviewer_object in reviewer_objects:
800 for reviewer_object in reviewer_objects:
801 user = get_user_or_error(reviewer_object['username'])
801 user = get_user_or_error(reviewer_object['username'])
802 reviewer_object['user_id'] = user.user_id
802 reviewer_object['user_id'] = user.user_id
803
803
804 get_default_reviewers_data, get_validated_reviewers = \
804 get_default_reviewers_data, get_validated_reviewers = \
805 PullRequestModel().get_reviewer_functions()
805 PullRequestModel().get_reviewer_functions()
806
806
807 # re-use stored rules
807 # re-use stored rules
808 reviewer_rules = pull_request.reviewer_data
808 reviewer_rules = pull_request.reviewer_data
809 try:
809 try:
810 reviewers = get_validated_reviewers(
810 reviewers = get_validated_reviewers(
811 reviewer_objects, reviewer_rules)
811 reviewer_objects, reviewer_rules)
812 except ValueError as e:
812 except ValueError as e:
813 raise JSONRPCError('Reviewers Validation: {}'.format(e))
813 raise JSONRPCError('Reviewers Validation: {}'.format(e))
814 else:
814 else:
815 reviewers = []
815 reviewers = []
816
816
817 title = Optional.extract(title)
817 title = Optional.extract(title)
818 description = Optional.extract(description)
818 description = Optional.extract(description)
819 description_renderer = Optional.extract(description_renderer)
819 description_renderer = Optional.extract(description_renderer)
820
820
821 if title or description:
821 if title or description:
822 PullRequestModel().edit(
822 PullRequestModel().edit(
823 pull_request,
823 pull_request,
824 title or pull_request.title,
824 title or pull_request.title,
825 description or pull_request.description,
825 description or pull_request.description,
826 description_renderer or pull_request.description_renderer,
826 description_renderer or pull_request.description_renderer,
827 apiuser)
827 apiuser)
828 Session().commit()
828 Session().commit()
829
829
830 commit_changes = {"added": [], "common": [], "removed": []}
830 commit_changes = {"added": [], "common": [], "removed": []}
831 if str2bool(Optional.extract(update_commits)):
831 if str2bool(Optional.extract(update_commits)):
832 if PullRequestModel().has_valid_update_type(pull_request):
832 if PullRequestModel().has_valid_update_type(pull_request):
833 update_response = PullRequestModel().update_commits(
833 update_response = PullRequestModel().update_commits(
834 pull_request)
834 pull_request)
835 commit_changes = update_response.changes or commit_changes
835 commit_changes = update_response.changes or commit_changes
836 Session().commit()
836 Session().commit()
837
837
838 reviewers_changes = {"added": [], "removed": []}
838 reviewers_changes = {"added": [], "removed": []}
839 if reviewers:
839 if reviewers:
840 added_reviewers, removed_reviewers = \
840 added_reviewers, removed_reviewers = \
841 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
841 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
842
842
843 reviewers_changes['added'] = sorted(
843 reviewers_changes['added'] = sorted(
844 [get_user_or_error(n).username for n in added_reviewers])
844 [get_user_or_error(n).username for n in added_reviewers])
845 reviewers_changes['removed'] = sorted(
845 reviewers_changes['removed'] = sorted(
846 [get_user_or_error(n).username for n in removed_reviewers])
846 [get_user_or_error(n).username for n in removed_reviewers])
847 Session().commit()
847 Session().commit()
848
848
849 data = {
849 data = {
850 'msg': 'Updated pull request `{}`'.format(
850 'msg': 'Updated pull request `{}`'.format(
851 pull_request.pull_request_id),
851 pull_request.pull_request_id),
852 'pull_request': pull_request.get_api_data(),
852 'pull_request': pull_request.get_api_data(),
853 'updated_commits': commit_changes,
853 'updated_commits': commit_changes,
854 'updated_reviewers': reviewers_changes
854 'updated_reviewers': reviewers_changes
855 }
855 }
856
856
857 return data
857 return data
858
858
859
859
860 @jsonrpc_method()
860 @jsonrpc_method()
861 def close_pull_request(
861 def close_pull_request(
862 request, apiuser, pullrequestid, repoid=Optional(None),
862 request, apiuser, pullrequestid, repoid=Optional(None),
863 userid=Optional(OAttr('apiuser')), message=Optional('')):
863 userid=Optional(OAttr('apiuser')), message=Optional('')):
864 """
864 """
865 Close the pull request specified by `pullrequestid`.
865 Close the pull request specified by `pullrequestid`.
866
866
867 :param apiuser: This is filled automatically from the |authtoken|.
867 :param apiuser: This is filled automatically from the |authtoken|.
868 :type apiuser: AuthUser
868 :type apiuser: AuthUser
869 :param repoid: Repository name or repository ID to which the pull
869 :param repoid: Repository name or repository ID to which the pull
870 request belongs.
870 request belongs.
871 :type repoid: str or int
871 :type repoid: str or int
872 :param pullrequestid: ID of the pull request to be closed.
872 :param pullrequestid: ID of the pull request to be closed.
873 :type pullrequestid: int
873 :type pullrequestid: int
874 :param userid: Close the pull request as this user.
874 :param userid: Close the pull request as this user.
875 :type userid: Optional(str or int)
875 :type userid: Optional(str or int)
876 :param message: Optional message to close the Pull Request with. If not
876 :param message: Optional message to close the Pull Request with. If not
877 specified it will be generated automatically.
877 specified it will be generated automatically.
878 :type message: Optional(str)
878 :type message: Optional(str)
879
879
880 Example output:
880 Example output:
881
881
882 .. code-block:: bash
882 .. code-block:: bash
883
883
884 "id": <id_given_in_input>,
884 "id": <id_given_in_input>,
885 "result": {
885 "result": {
886 "pull_request_id": "<int>",
886 "pull_request_id": "<int>",
887 "close_status": "<str:status_lbl>,
887 "close_status": "<str:status_lbl>,
888 "closed": "<bool>"
888 "closed": "<bool>"
889 },
889 },
890 "error": null
890 "error": null
891
891
892 """
892 """
893 _ = request.translate
893 _ = request.translate
894
894
895 pull_request = get_pull_request_or_error(pullrequestid)
895 pull_request = get_pull_request_or_error(pullrequestid)
896 if Optional.extract(repoid):
896 if Optional.extract(repoid):
897 repo = get_repo_or_error(repoid)
897 repo = get_repo_or_error(repoid)
898 else:
898 else:
899 repo = pull_request.target_repo
899 repo = pull_request.target_repo
900
900
901 if not isinstance(userid, Optional):
901 if not isinstance(userid, Optional):
902 if (has_superadmin_permission(apiuser) or
902 if (has_superadmin_permission(apiuser) or
903 HasRepoPermissionAnyApi('repository.admin')(
903 HasRepoPermissionAnyApi('repository.admin')(
904 user=apiuser, repo_name=repo.repo_name)):
904 user=apiuser, repo_name=repo.repo_name)):
905 apiuser = get_user_or_error(userid)
905 apiuser = get_user_or_error(userid)
906 else:
906 else:
907 raise JSONRPCError('userid is not the same as your user')
907 raise JSONRPCError('userid is not the same as your user')
908
908
909 if pull_request.is_closed():
909 if pull_request.is_closed():
910 raise JSONRPCError(
910 raise JSONRPCError(
911 'pull request `%s` is already closed' % (pullrequestid,))
911 'pull request `%s` is already closed' % (pullrequestid,))
912
912
913 # only owner or admin or person with write permissions
913 # only owner or admin or person with write permissions
914 allowed_to_close = PullRequestModel().check_user_update(
914 allowed_to_close = PullRequestModel().check_user_update(
915 pull_request, apiuser, api=True)
915 pull_request, apiuser, api=True)
916
916
917 if not allowed_to_close:
917 if not allowed_to_close:
918 raise JSONRPCError(
918 raise JSONRPCError(
919 'pull request `%s` close failed, no permission to close.' % (
919 'pull request `%s` close failed, no permission to close.' % (
920 pullrequestid,))
920 pullrequestid,))
921
921
922 # message we're using to close the PR, else it's automatically generated
922 # message we're using to close the PR, else it's automatically generated
923 message = Optional.extract(message)
923 message = Optional.extract(message)
924
924
925 # finally close the PR, with proper message comment
925 # finally close the PR, with proper message comment
926 comment, status = PullRequestModel().close_pull_request_with_comment(
926 comment, status = PullRequestModel().close_pull_request_with_comment(
927 pull_request, apiuser, repo, message=message, auth_user=apiuser)
927 pull_request, apiuser, repo, message=message, auth_user=apiuser)
928 status_lbl = ChangesetStatus.get_status_lbl(status)
928 status_lbl = ChangesetStatus.get_status_lbl(status)
929
929
930 Session().commit()
930 Session().commit()
931
931
932 data = {
932 data = {
933 'pull_request_id': pull_request.pull_request_id,
933 'pull_request_id': pull_request.pull_request_id,
934 'close_status': status_lbl,
934 'close_status': status_lbl,
935 'closed': True,
935 'closed': True,
936 }
936 }
937 return data
937 return data
@@ -1,1233 +1,1233 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 from rhodecode.tests.utils import AssertResponse
36
35
37
36
38 def route_path(name, params=None, **kwargs):
37 def route_path(name, params=None, **kwargs):
39 import urllib
38 import urllib
40
39
41 base_url = {
40 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
41 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
43 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
44 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
45 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
46 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
47 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
48 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
49 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
50 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
51 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
52 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
53 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
54 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 }[name].format(**kwargs)
55 }[name].format(**kwargs)
57
56
58 if params:
57 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
58 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
59 return base_url
61
60
62
61
63 @pytest.mark.usefixtures('app', 'autologin_user')
62 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.backends("git", "hg")
63 @pytest.mark.backends("git", "hg")
65 class TestPullrequestsView(object):
64 class TestPullrequestsView(object):
66
65
67 def test_index(self, backend):
66 def test_index(self, backend):
68 self.app.get(route_path(
67 self.app.get(route_path(
69 'pullrequest_new',
68 'pullrequest_new',
70 repo_name=backend.repo_name))
69 repo_name=backend.repo_name))
71
70
72 def test_option_menu_create_pull_request_exists(self, backend):
71 def test_option_menu_create_pull_request_exists(self, backend):
73 repo_name = backend.repo_name
72 repo_name = backend.repo_name
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
73 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75
74
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
75 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 'pullrequest_new', repo_name=repo_name)
76 'pullrequest_new', repo_name=repo_name)
78 response.mustcontain(create_pr_link)
77 response.mustcontain(create_pr_link)
79
78
80 def test_create_pr_form_with_raw_commit_id(self, backend):
79 def test_create_pr_form_with_raw_commit_id(self, backend):
81 repo = backend.repo
80 repo = backend.repo
82
81
83 self.app.get(
82 self.app.get(
84 route_path('pullrequest_new', repo_name=repo.repo_name,
83 route_path('pullrequest_new', repo_name=repo.repo_name,
85 commit=repo.get_commit().raw_id),
84 commit=repo.get_commit().raw_id),
86 status=200)
85 status=200)
87
86
88 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
87 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('range_diff', ["0", "1"])
88 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 def test_show(self, pr_util, pr_merge_enabled, range_diff):
89 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 pull_request = pr_util.create_pull_request(
90 pull_request = pr_util.create_pull_request(
92 mergeable=pr_merge_enabled, enable_notifications=False)
91 mergeable=pr_merge_enabled, enable_notifications=False)
93
92
94 response = self.app.get(route_path(
93 response = self.app.get(route_path(
95 'pullrequest_show',
94 'pullrequest_show',
96 repo_name=pull_request.target_repo.scm_instance().name,
95 repo_name=pull_request.target_repo.scm_instance().name,
97 pull_request_id=pull_request.pull_request_id,
96 pull_request_id=pull_request.pull_request_id,
98 params={'range-diff': range_diff}))
97 params={'range-diff': range_diff}))
99
98
100 for commit_id in pull_request.revisions:
99 for commit_id in pull_request.revisions:
101 response.mustcontain(commit_id)
100 response.mustcontain(commit_id)
102
101
103 assert pull_request.target_ref_parts.type in response
102 assert pull_request.target_ref_parts.type in response
104 assert pull_request.target_ref_parts.name in response
103 assert pull_request.target_ref_parts.name in response
105 target_clone_url = pull_request.target_repo.clone_url()
104 target_clone_url = pull_request.target_repo.clone_url()
106 assert target_clone_url in response
105 assert target_clone_url in response
107
106
108 assert 'class="pull-request-merge"' in response
107 assert 'class="pull-request-merge"' in response
109 if pr_merge_enabled:
108 if pr_merge_enabled:
110 response.mustcontain('Pull request reviewer approval is pending')
109 response.mustcontain('Pull request reviewer approval is pending')
111 else:
110 else:
112 response.mustcontain('Server-side pull request merging is disabled.')
111 response.mustcontain('Server-side pull request merging is disabled.')
113
112
114 if range_diff == "1":
113 if range_diff == "1":
115 response.mustcontain('Turn off: Show the diff as commit range')
114 response.mustcontain('Turn off: Show the diff as commit range')
116
115
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
116 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
118 # Logout
117 # Logout
119 response = self.app.post(
118 response = self.app.post(
120 h.route_path('logout'),
119 h.route_path('logout'),
121 params={'csrf_token': csrf_token})
120 params={'csrf_token': csrf_token})
122 # Login as regular user
121 # Login as regular user
123 response = self.app.post(h.route_path('login'),
122 response = self.app.post(h.route_path('login'),
124 {'username': TEST_USER_REGULAR_LOGIN,
123 {'username': TEST_USER_REGULAR_LOGIN,
125 'password': 'test12'})
124 'password': 'test12'})
126
125
127 pull_request = pr_util.create_pull_request(
126 pull_request = pr_util.create_pull_request(
128 author=TEST_USER_REGULAR_LOGIN)
127 author=TEST_USER_REGULAR_LOGIN)
129
128
130 response = self.app.get(route_path(
129 response = self.app.get(route_path(
131 'pullrequest_show',
130 'pullrequest_show',
132 repo_name=pull_request.target_repo.scm_instance().name,
131 repo_name=pull_request.target_repo.scm_instance().name,
133 pull_request_id=pull_request.pull_request_id))
132 pull_request_id=pull_request.pull_request_id))
134
133
135 response.mustcontain('Server-side pull request merging is disabled.')
134 response.mustcontain('Server-side pull request merging is disabled.')
136
135
137 assert_response = response.assert_response()
136 assert_response = response.assert_response()
138 # for regular user without a merge permissions, we don't see it
137 # for regular user without a merge permissions, we don't see it
139 assert_response.no_element_exists('#close-pull-request-action')
138 assert_response.no_element_exists('#close-pull-request-action')
140
139
141 user_util.grant_user_permission_to_repo(
140 user_util.grant_user_permission_to_repo(
142 pull_request.target_repo,
141 pull_request.target_repo,
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
142 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
144 'repository.write')
143 'repository.write')
145 response = self.app.get(route_path(
144 response = self.app.get(route_path(
146 'pullrequest_show',
145 'pullrequest_show',
147 repo_name=pull_request.target_repo.scm_instance().name,
146 repo_name=pull_request.target_repo.scm_instance().name,
148 pull_request_id=pull_request.pull_request_id))
147 pull_request_id=pull_request.pull_request_id))
149
148
150 response.mustcontain('Server-side pull request merging is disabled.')
149 response.mustcontain('Server-side pull request merging is disabled.')
151
150
152 assert_response = response.assert_response()
151 assert_response = response.assert_response()
153 # now regular user has a merge permissions, we have CLOSE button
152 # now regular user has a merge permissions, we have CLOSE button
154 assert_response.one_element_exists('#close-pull-request-action')
153 assert_response.one_element_exists('#close-pull-request-action')
155
154
156 def test_show_invalid_commit_id(self, pr_util):
155 def test_show_invalid_commit_id(self, pr_util):
157 # Simulating invalid revisions which will cause a lookup error
156 # Simulating invalid revisions which will cause a lookup error
158 pull_request = pr_util.create_pull_request()
157 pull_request = pr_util.create_pull_request()
159 pull_request.revisions = ['invalid']
158 pull_request.revisions = ['invalid']
160 Session().add(pull_request)
159 Session().add(pull_request)
161 Session().commit()
160 Session().commit()
162
161
163 response = self.app.get(route_path(
162 response = self.app.get(route_path(
164 'pullrequest_show',
163 'pullrequest_show',
165 repo_name=pull_request.target_repo.scm_instance().name,
164 repo_name=pull_request.target_repo.scm_instance().name,
166 pull_request_id=pull_request.pull_request_id))
165 pull_request_id=pull_request.pull_request_id))
167
166
168 for commit_id in pull_request.revisions:
167 for commit_id in pull_request.revisions:
169 response.mustcontain(commit_id)
168 response.mustcontain(commit_id)
170
169
171 def test_show_invalid_source_reference(self, pr_util):
170 def test_show_invalid_source_reference(self, pr_util):
172 pull_request = pr_util.create_pull_request()
171 pull_request = pr_util.create_pull_request()
173 pull_request.source_ref = 'branch:b:invalid'
172 pull_request.source_ref = 'branch:b:invalid'
174 Session().add(pull_request)
173 Session().add(pull_request)
175 Session().commit()
174 Session().commit()
176
175
177 self.app.get(route_path(
176 self.app.get(route_path(
178 'pullrequest_show',
177 'pullrequest_show',
179 repo_name=pull_request.target_repo.scm_instance().name,
178 repo_name=pull_request.target_repo.scm_instance().name,
180 pull_request_id=pull_request.pull_request_id))
179 pull_request_id=pull_request.pull_request_id))
181
180
182 def test_edit_title_description(self, pr_util, csrf_token):
181 def test_edit_title_description(self, pr_util, csrf_token):
183 pull_request = pr_util.create_pull_request()
182 pull_request = pr_util.create_pull_request()
184 pull_request_id = pull_request.pull_request_id
183 pull_request_id = pull_request.pull_request_id
185
184
186 response = self.app.post(
185 response = self.app.post(
187 route_path('pullrequest_update',
186 route_path('pullrequest_update',
188 repo_name=pull_request.target_repo.repo_name,
187 repo_name=pull_request.target_repo.repo_name,
189 pull_request_id=pull_request_id),
188 pull_request_id=pull_request_id),
190 params={
189 params={
191 'edit_pull_request': 'true',
190 'edit_pull_request': 'true',
192 'title': 'New title',
191 'title': 'New title',
193 'description': 'New description',
192 'description': 'New description',
194 'csrf_token': csrf_token})
193 'csrf_token': csrf_token})
195
194
196 assert_session_flash(
195 assert_session_flash(
197 response, u'Pull request title & description updated.',
196 response, u'Pull request title & description updated.',
198 category='success')
197 category='success')
199
198
200 pull_request = PullRequest.get(pull_request_id)
199 pull_request = PullRequest.get(pull_request_id)
201 assert pull_request.title == 'New title'
200 assert pull_request.title == 'New title'
202 assert pull_request.description == 'New description'
201 assert pull_request.description == 'New description'
203
202
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
203 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 pull_request = pr_util.create_pull_request()
204 pull_request = pr_util.create_pull_request()
206 pull_request_id = pull_request.pull_request_id
205 pull_request_id = pull_request.pull_request_id
207 repo_name = pull_request.target_repo.repo_name
206 repo_name = pull_request.target_repo.repo_name
208 pr_util.close()
207 pr_util.close()
209
208
210 response = self.app.post(
209 response = self.app.post(
211 route_path('pullrequest_update',
210 route_path('pullrequest_update',
212 repo_name=repo_name, pull_request_id=pull_request_id),
211 repo_name=repo_name, pull_request_id=pull_request_id),
213 params={
212 params={
214 'edit_pull_request': 'true',
213 'edit_pull_request': 'true',
215 'title': 'New title',
214 'title': 'New title',
216 'description': 'New description',
215 'description': 'New description',
217 'csrf_token': csrf_token}, status=200)
216 'csrf_token': csrf_token}, status=200)
218 assert_session_flash(
217 assert_session_flash(
219 response, u'Cannot update closed pull requests.',
218 response, u'Cannot update closed pull requests.',
220 category='error')
219 category='error')
221
220
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
221 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
222 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224
223
225 pull_request = pr_util.create_pull_request()
224 pull_request = pr_util.create_pull_request()
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
225 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 Session().add(pull_request)
226 Session().add(pull_request)
228 Session().commit()
227 Session().commit()
229
228
230 pull_request_id = pull_request.pull_request_id
229 pull_request_id = pull_request.pull_request_id
231
230
232 response = self.app.post(
231 response = self.app.post(
233 route_path('pullrequest_update',
232 route_path('pullrequest_update',
234 repo_name=pull_request.target_repo.repo_name,
233 repo_name=pull_request.target_repo.repo_name,
235 pull_request_id=pull_request_id),
234 pull_request_id=pull_request_id),
236 params={'update_commits': 'true',
235 params={'update_commits': 'true', 'csrf_token': csrf_token})
237 'csrf_token': csrf_token})
238
236
239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
237 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 UpdateFailureReason.MISSING_SOURCE_REF])
238 UpdateFailureReason.MISSING_SOURCE_REF])
241 assert_session_flash(response, expected_msg, category='error')
239 assert_session_flash(response, expected_msg, category='error')
242
240
243 def test_missing_target_reference(self, pr_util, csrf_token):
241 def test_missing_target_reference(self, pr_util, csrf_token):
244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
242 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 pull_request = pr_util.create_pull_request(
243 pull_request = pr_util.create_pull_request(
246 approved=True, mergeable=True)
244 approved=True, mergeable=True)
247 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
245 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
246 pull_request.target_ref = unicode_reference
248 Session().add(pull_request)
247 Session().add(pull_request)
249 Session().commit()
248 Session().commit()
250
249
251 pull_request_id = pull_request.pull_request_id
250 pull_request_id = pull_request.pull_request_id
252 pull_request_url = route_path(
251 pull_request_url = route_path(
253 'pullrequest_show',
252 'pullrequest_show',
254 repo_name=pull_request.target_repo.repo_name,
253 repo_name=pull_request.target_repo.repo_name,
255 pull_request_id=pull_request_id)
254 pull_request_id=pull_request_id)
256
255
257 response = self.app.get(pull_request_url)
256 response = self.app.get(pull_request_url)
258
257 target_ref_id = 'invalid-branch'
259 assertr = AssertResponse(response)
258 merge_resp = MergeResponse(
260 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
259 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
261 MergeFailureReason.MISSING_TARGET_REF]
260 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
262 assertr.element_contains(
261 response.assert_response().element_contains(
263 'span[data-role="merge-message"]', str(expected_msg))
262 'span[data-role="merge-message"]', merge_resp.merge_status_message)
264
263
265 def test_comment_and_close_pull_request_custom_message_approved(
264 def test_comment_and_close_pull_request_custom_message_approved(
266 self, pr_util, csrf_token, xhr_header):
265 self, pr_util, csrf_token, xhr_header):
267
266
268 pull_request = pr_util.create_pull_request(approved=True)
267 pull_request = pr_util.create_pull_request(approved=True)
269 pull_request_id = pull_request.pull_request_id
268 pull_request_id = pull_request.pull_request_id
270 author = pull_request.user_id
269 author = pull_request.user_id
271 repo = pull_request.target_repo.repo_id
270 repo = pull_request.target_repo.repo_id
272
271
273 self.app.post(
272 self.app.post(
274 route_path('pullrequest_comment_create',
273 route_path('pullrequest_comment_create',
275 repo_name=pull_request.target_repo.scm_instance().name,
274 repo_name=pull_request.target_repo.scm_instance().name,
276 pull_request_id=pull_request_id),
275 pull_request_id=pull_request_id),
277 params={
276 params={
278 'close_pull_request': '1',
277 'close_pull_request': '1',
279 'text': 'Closing a PR',
278 'text': 'Closing a PR',
280 'csrf_token': csrf_token},
279 'csrf_token': csrf_token},
281 extra_environ=xhr_header,)
280 extra_environ=xhr_header,)
282
281
283 journal = UserLog.query()\
282 journal = UserLog.query()\
284 .filter(UserLog.user_id == author)\
283 .filter(UserLog.user_id == author)\
285 .filter(UserLog.repository_id == repo) \
284 .filter(UserLog.repository_id == repo) \
286 .order_by('user_log_id') \
285 .order_by('user_log_id') \
287 .all()
286 .all()
288 assert journal[-1].action == 'repo.pull_request.close'
287 assert journal[-1].action == 'repo.pull_request.close'
289
288
290 pull_request = PullRequest.get(pull_request_id)
289 pull_request = PullRequest.get(pull_request_id)
291 assert pull_request.is_closed()
290 assert pull_request.is_closed()
292
291
293 status = ChangesetStatusModel().get_status(
292 status = ChangesetStatusModel().get_status(
294 pull_request.source_repo, pull_request=pull_request)
293 pull_request.source_repo, pull_request=pull_request)
295 assert status == ChangesetStatus.STATUS_APPROVED
294 assert status == ChangesetStatus.STATUS_APPROVED
296 comments = ChangesetComment().query() \
295 comments = ChangesetComment().query() \
297 .filter(ChangesetComment.pull_request == pull_request) \
296 .filter(ChangesetComment.pull_request == pull_request) \
298 .order_by(ChangesetComment.comment_id.asc())\
297 .order_by(ChangesetComment.comment_id.asc())\
299 .all()
298 .all()
300 assert comments[-1].text == 'Closing a PR'
299 assert comments[-1].text == 'Closing a PR'
301
300
302 def test_comment_force_close_pull_request_rejected(
301 def test_comment_force_close_pull_request_rejected(
303 self, pr_util, csrf_token, xhr_header):
302 self, pr_util, csrf_token, xhr_header):
304 pull_request = pr_util.create_pull_request()
303 pull_request = pr_util.create_pull_request()
305 pull_request_id = pull_request.pull_request_id
304 pull_request_id = pull_request.pull_request_id
306 PullRequestModel().update_reviewers(
305 PullRequestModel().update_reviewers(
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
306 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
308 pull_request.author)
307 pull_request.author)
309 author = pull_request.user_id
308 author = pull_request.user_id
310 repo = pull_request.target_repo.repo_id
309 repo = pull_request.target_repo.repo_id
311
310
312 self.app.post(
311 self.app.post(
313 route_path('pullrequest_comment_create',
312 route_path('pullrequest_comment_create',
314 repo_name=pull_request.target_repo.scm_instance().name,
313 repo_name=pull_request.target_repo.scm_instance().name,
315 pull_request_id=pull_request_id),
314 pull_request_id=pull_request_id),
316 params={
315 params={
317 'close_pull_request': '1',
316 'close_pull_request': '1',
318 'csrf_token': csrf_token},
317 'csrf_token': csrf_token},
319 extra_environ=xhr_header)
318 extra_environ=xhr_header)
320
319
321 pull_request = PullRequest.get(pull_request_id)
320 pull_request = PullRequest.get(pull_request_id)
322
321
323 journal = UserLog.query()\
322 journal = UserLog.query()\
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
323 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
325 .order_by('user_log_id') \
324 .order_by('user_log_id') \
326 .all()
325 .all()
327 assert journal[-1].action == 'repo.pull_request.close'
326 assert journal[-1].action == 'repo.pull_request.close'
328
327
329 # check only the latest status, not the review status
328 # check only the latest status, not the review status
330 status = ChangesetStatusModel().get_status(
329 status = ChangesetStatusModel().get_status(
331 pull_request.source_repo, pull_request=pull_request)
330 pull_request.source_repo, pull_request=pull_request)
332 assert status == ChangesetStatus.STATUS_REJECTED
331 assert status == ChangesetStatus.STATUS_REJECTED
333
332
334 def test_comment_and_close_pull_request(
333 def test_comment_and_close_pull_request(
335 self, pr_util, csrf_token, xhr_header):
334 self, pr_util, csrf_token, xhr_header):
336 pull_request = pr_util.create_pull_request()
335 pull_request = pr_util.create_pull_request()
337 pull_request_id = pull_request.pull_request_id
336 pull_request_id = pull_request.pull_request_id
338
337
339 response = self.app.post(
338 response = self.app.post(
340 route_path('pullrequest_comment_create',
339 route_path('pullrequest_comment_create',
341 repo_name=pull_request.target_repo.scm_instance().name,
340 repo_name=pull_request.target_repo.scm_instance().name,
342 pull_request_id=pull_request.pull_request_id),
341 pull_request_id=pull_request.pull_request_id),
343 params={
342 params={
344 'close_pull_request': 'true',
343 'close_pull_request': 'true',
345 'csrf_token': csrf_token},
344 'csrf_token': csrf_token},
346 extra_environ=xhr_header)
345 extra_environ=xhr_header)
347
346
348 assert response.json
347 assert response.json
349
348
350 pull_request = PullRequest.get(pull_request_id)
349 pull_request = PullRequest.get(pull_request_id)
351 assert pull_request.is_closed()
350 assert pull_request.is_closed()
352
351
353 # check only the latest status, not the review status
352 # check only the latest status, not the review status
354 status = ChangesetStatusModel().get_status(
353 status = ChangesetStatusModel().get_status(
355 pull_request.source_repo, pull_request=pull_request)
354 pull_request.source_repo, pull_request=pull_request)
356 assert status == ChangesetStatus.STATUS_REJECTED
355 assert status == ChangesetStatus.STATUS_REJECTED
357
356
358 def test_create_pull_request(self, backend, csrf_token):
357 def test_create_pull_request(self, backend, csrf_token):
359 commits = [
358 commits = [
360 {'message': 'ancestor'},
359 {'message': 'ancestor'},
361 {'message': 'change'},
360 {'message': 'change'},
362 {'message': 'change2'},
361 {'message': 'change2'},
363 ]
362 ]
364 commit_ids = backend.create_master_repo(commits)
363 commit_ids = backend.create_master_repo(commits)
365 target = backend.create_repo(heads=['ancestor'])
364 target = backend.create_repo(heads=['ancestor'])
366 source = backend.create_repo(heads=['change2'])
365 source = backend.create_repo(heads=['change2'])
367
366
368 response = self.app.post(
367 response = self.app.post(
369 route_path('pullrequest_create', repo_name=source.repo_name),
368 route_path('pullrequest_create', repo_name=source.repo_name),
370 [
369 [
371 ('source_repo', source.repo_name),
370 ('source_repo', source.repo_name),
372 ('source_ref', 'branch:default:' + commit_ids['change2']),
371 ('source_ref', 'branch:default:' + commit_ids['change2']),
373 ('target_repo', target.repo_name),
372 ('target_repo', target.repo_name),
374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
373 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
375 ('common_ancestor', commit_ids['ancestor']),
374 ('common_ancestor', commit_ids['ancestor']),
376 ('pullrequest_title', 'Title'),
375 ('pullrequest_title', 'Title'),
377 ('pullrequest_desc', 'Description'),
376 ('pullrequest_desc', 'Description'),
378 ('description_renderer', 'markdown'),
377 ('description_renderer', 'markdown'),
379 ('__start__', 'review_members:sequence'),
378 ('__start__', 'review_members:sequence'),
380 ('__start__', 'reviewer:mapping'),
379 ('__start__', 'reviewer:mapping'),
381 ('user_id', '1'),
380 ('user_id', '1'),
382 ('__start__', 'reasons:sequence'),
381 ('__start__', 'reasons:sequence'),
383 ('reason', 'Some reason'),
382 ('reason', 'Some reason'),
384 ('__end__', 'reasons:sequence'),
383 ('__end__', 'reasons:sequence'),
385 ('__start__', 'rules:sequence'),
384 ('__start__', 'rules:sequence'),
386 ('__end__', 'rules:sequence'),
385 ('__end__', 'rules:sequence'),
387 ('mandatory', 'False'),
386 ('mandatory', 'False'),
388 ('__end__', 'reviewer:mapping'),
387 ('__end__', 'reviewer:mapping'),
389 ('__end__', 'review_members:sequence'),
388 ('__end__', 'review_members:sequence'),
390 ('__start__', 'revisions:sequence'),
389 ('__start__', 'revisions:sequence'),
391 ('revisions', commit_ids['change']),
390 ('revisions', commit_ids['change']),
392 ('revisions', commit_ids['change2']),
391 ('revisions', commit_ids['change2']),
393 ('__end__', 'revisions:sequence'),
392 ('__end__', 'revisions:sequence'),
394 ('user', ''),
393 ('user', ''),
395 ('csrf_token', csrf_token),
394 ('csrf_token', csrf_token),
396 ],
395 ],
397 status=302)
396 status=302)
398
397
399 location = response.headers['Location']
398 location = response.headers['Location']
400 pull_request_id = location.rsplit('/', 1)[1]
399 pull_request_id = location.rsplit('/', 1)[1]
401 assert pull_request_id != 'new'
400 assert pull_request_id != 'new'
402 pull_request = PullRequest.get(int(pull_request_id))
401 pull_request = PullRequest.get(int(pull_request_id))
403
402
404 # check that we have now both revisions
403 # check that we have now both revisions
405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
404 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
405 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
406 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
408 assert pull_request.target_ref == expected_target_ref
407 assert pull_request.target_ref == expected_target_ref
409
408
410 def test_reviewer_notifications(self, backend, csrf_token):
409 def test_reviewer_notifications(self, backend, csrf_token):
411 # We have to use the app.post for this test so it will create the
410 # We have to use the app.post for this test so it will create the
412 # notifications properly with the new PR
411 # notifications properly with the new PR
413 commits = [
412 commits = [
414 {'message': 'ancestor',
413 {'message': 'ancestor',
415 'added': [FileNode('file_A', content='content_of_ancestor')]},
414 'added': [FileNode('file_A', content='content_of_ancestor')]},
416 {'message': 'change',
415 {'message': 'change',
417 'added': [FileNode('file_a', content='content_of_change')]},
416 'added': [FileNode('file_a', content='content_of_change')]},
418 {'message': 'change-child'},
417 {'message': 'change-child'},
419 {'message': 'ancestor-child', 'parents': ['ancestor'],
418 {'message': 'ancestor-child', 'parents': ['ancestor'],
420 'added': [
419 'added': [
421 FileNode('file_B', content='content_of_ancestor_child')]},
420 FileNode('file_B', content='content_of_ancestor_child')]},
422 {'message': 'ancestor-child-2'},
421 {'message': 'ancestor-child-2'},
423 ]
422 ]
424 commit_ids = backend.create_master_repo(commits)
423 commit_ids = backend.create_master_repo(commits)
425 target = backend.create_repo(heads=['ancestor-child'])
424 target = backend.create_repo(heads=['ancestor-child'])
426 source = backend.create_repo(heads=['change'])
425 source = backend.create_repo(heads=['change'])
427
426
428 response = self.app.post(
427 response = self.app.post(
429 route_path('pullrequest_create', repo_name=source.repo_name),
428 route_path('pullrequest_create', repo_name=source.repo_name),
430 [
429 [
431 ('source_repo', source.repo_name),
430 ('source_repo', source.repo_name),
432 ('source_ref', 'branch:default:' + commit_ids['change']),
431 ('source_ref', 'branch:default:' + commit_ids['change']),
433 ('target_repo', target.repo_name),
432 ('target_repo', target.repo_name),
434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
433 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
435 ('common_ancestor', commit_ids['ancestor']),
434 ('common_ancestor', commit_ids['ancestor']),
436 ('pullrequest_title', 'Title'),
435 ('pullrequest_title', 'Title'),
437 ('pullrequest_desc', 'Description'),
436 ('pullrequest_desc', 'Description'),
438 ('description_renderer', 'markdown'),
437 ('description_renderer', 'markdown'),
439 ('__start__', 'review_members:sequence'),
438 ('__start__', 'review_members:sequence'),
440 ('__start__', 'reviewer:mapping'),
439 ('__start__', 'reviewer:mapping'),
441 ('user_id', '2'),
440 ('user_id', '2'),
442 ('__start__', 'reasons:sequence'),
441 ('__start__', 'reasons:sequence'),
443 ('reason', 'Some reason'),
442 ('reason', 'Some reason'),
444 ('__end__', 'reasons:sequence'),
443 ('__end__', 'reasons:sequence'),
445 ('__start__', 'rules:sequence'),
444 ('__start__', 'rules:sequence'),
446 ('__end__', 'rules:sequence'),
445 ('__end__', 'rules:sequence'),
447 ('mandatory', 'False'),
446 ('mandatory', 'False'),
448 ('__end__', 'reviewer:mapping'),
447 ('__end__', 'reviewer:mapping'),
449 ('__end__', 'review_members:sequence'),
448 ('__end__', 'review_members:sequence'),
450 ('__start__', 'revisions:sequence'),
449 ('__start__', 'revisions:sequence'),
451 ('revisions', commit_ids['change']),
450 ('revisions', commit_ids['change']),
452 ('__end__', 'revisions:sequence'),
451 ('__end__', 'revisions:sequence'),
453 ('user', ''),
452 ('user', ''),
454 ('csrf_token', csrf_token),
453 ('csrf_token', csrf_token),
455 ],
454 ],
456 status=302)
455 status=302)
457
456
458 location = response.headers['Location']
457 location = response.headers['Location']
459
458
460 pull_request_id = location.rsplit('/', 1)[1]
459 pull_request_id = location.rsplit('/', 1)[1]
461 assert pull_request_id != 'new'
460 assert pull_request_id != 'new'
462 pull_request = PullRequest.get(int(pull_request_id))
461 pull_request = PullRequest.get(int(pull_request_id))
463
462
464 # Check that a notification was made
463 # Check that a notification was made
465 notifications = Notification.query()\
464 notifications = Notification.query()\
466 .filter(Notification.created_by == pull_request.author.user_id,
465 .filter(Notification.created_by == pull_request.author.user_id,
467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
466 Notification.type_ == Notification.TYPE_PULL_REQUEST,
468 Notification.subject.contains(
467 Notification.subject.contains(
469 "wants you to review pull request #%s" % pull_request_id))
468 "wants you to review pull request #%s" % pull_request_id))
470 assert len(notifications.all()) == 1
469 assert len(notifications.all()) == 1
471
470
472 # Change reviewers and check that a notification was made
471 # Change reviewers and check that a notification was made
473 PullRequestModel().update_reviewers(
472 PullRequestModel().update_reviewers(
474 pull_request.pull_request_id, [(1, [], False, [])],
473 pull_request.pull_request_id, [(1, [], False, [])],
475 pull_request.author)
474 pull_request.author)
476 assert len(notifications.all()) == 2
475 assert len(notifications.all()) == 2
477
476
478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
477 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
479 csrf_token):
478 csrf_token):
480 commits = [
479 commits = [
481 {'message': 'ancestor',
480 {'message': 'ancestor',
482 'added': [FileNode('file_A', content='content_of_ancestor')]},
481 'added': [FileNode('file_A', content='content_of_ancestor')]},
483 {'message': 'change',
482 {'message': 'change',
484 'added': [FileNode('file_a', content='content_of_change')]},
483 'added': [FileNode('file_a', content='content_of_change')]},
485 {'message': 'change-child'},
484 {'message': 'change-child'},
486 {'message': 'ancestor-child', 'parents': ['ancestor'],
485 {'message': 'ancestor-child', 'parents': ['ancestor'],
487 'added': [
486 'added': [
488 FileNode('file_B', content='content_of_ancestor_child')]},
487 FileNode('file_B', content='content_of_ancestor_child')]},
489 {'message': 'ancestor-child-2'},
488 {'message': 'ancestor-child-2'},
490 ]
489 ]
491 commit_ids = backend.create_master_repo(commits)
490 commit_ids = backend.create_master_repo(commits)
492 target = backend.create_repo(heads=['ancestor-child'])
491 target = backend.create_repo(heads=['ancestor-child'])
493 source = backend.create_repo(heads=['change'])
492 source = backend.create_repo(heads=['change'])
494
493
495 response = self.app.post(
494 response = self.app.post(
496 route_path('pullrequest_create', repo_name=source.repo_name),
495 route_path('pullrequest_create', repo_name=source.repo_name),
497 [
496 [
498 ('source_repo', source.repo_name),
497 ('source_repo', source.repo_name),
499 ('source_ref', 'branch:default:' + commit_ids['change']),
498 ('source_ref', 'branch:default:' + commit_ids['change']),
500 ('target_repo', target.repo_name),
499 ('target_repo', target.repo_name),
501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
500 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
502 ('common_ancestor', commit_ids['ancestor']),
501 ('common_ancestor', commit_ids['ancestor']),
503 ('pullrequest_title', 'Title'),
502 ('pullrequest_title', 'Title'),
504 ('pullrequest_desc', 'Description'),
503 ('pullrequest_desc', 'Description'),
505 ('description_renderer', 'markdown'),
504 ('description_renderer', 'markdown'),
506 ('__start__', 'review_members:sequence'),
505 ('__start__', 'review_members:sequence'),
507 ('__start__', 'reviewer:mapping'),
506 ('__start__', 'reviewer:mapping'),
508 ('user_id', '1'),
507 ('user_id', '1'),
509 ('__start__', 'reasons:sequence'),
508 ('__start__', 'reasons:sequence'),
510 ('reason', 'Some reason'),
509 ('reason', 'Some reason'),
511 ('__end__', 'reasons:sequence'),
510 ('__end__', 'reasons:sequence'),
512 ('__start__', 'rules:sequence'),
511 ('__start__', 'rules:sequence'),
513 ('__end__', 'rules:sequence'),
512 ('__end__', 'rules:sequence'),
514 ('mandatory', 'False'),
513 ('mandatory', 'False'),
515 ('__end__', 'reviewer:mapping'),
514 ('__end__', 'reviewer:mapping'),
516 ('__end__', 'review_members:sequence'),
515 ('__end__', 'review_members:sequence'),
517 ('__start__', 'revisions:sequence'),
516 ('__start__', 'revisions:sequence'),
518 ('revisions', commit_ids['change']),
517 ('revisions', commit_ids['change']),
519 ('__end__', 'revisions:sequence'),
518 ('__end__', 'revisions:sequence'),
520 ('user', ''),
519 ('user', ''),
521 ('csrf_token', csrf_token),
520 ('csrf_token', csrf_token),
522 ],
521 ],
523 status=302)
522 status=302)
524
523
525 location = response.headers['Location']
524 location = response.headers['Location']
526
525
527 pull_request_id = location.rsplit('/', 1)[1]
526 pull_request_id = location.rsplit('/', 1)[1]
528 assert pull_request_id != 'new'
527 assert pull_request_id != 'new'
529 pull_request = PullRequest.get(int(pull_request_id))
528 pull_request = PullRequest.get(int(pull_request_id))
530
529
531 # target_ref has to point to the ancestor's commit_id in order to
530 # target_ref has to point to the ancestor's commit_id in order to
532 # show the correct diff
531 # show the correct diff
533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
532 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
534 assert pull_request.target_ref == expected_target_ref
533 assert pull_request.target_ref == expected_target_ref
535
534
536 # Check generated diff contents
535 # Check generated diff contents
537 response = response.follow()
536 response = response.follow()
538 assert 'content_of_ancestor' not in response.body
537 assert 'content_of_ancestor' not in response.body
539 assert 'content_of_ancestor-child' not in response.body
538 assert 'content_of_ancestor-child' not in response.body
540 assert 'content_of_change' in response.body
539 assert 'content_of_change' in response.body
541
540
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
541 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 # Clear any previous calls to rcextensions
542 # Clear any previous calls to rcextensions
544 rhodecode.EXTENSIONS.calls.clear()
543 rhodecode.EXTENSIONS.calls.clear()
545
544
546 pull_request = pr_util.create_pull_request(
545 pull_request = pr_util.create_pull_request(
547 approved=True, mergeable=True)
546 approved=True, mergeable=True)
548 pull_request_id = pull_request.pull_request_id
547 pull_request_id = pull_request.pull_request_id
549 repo_name = pull_request.target_repo.scm_instance().name,
548 repo_name = pull_request.target_repo.scm_instance().name,
550
549
551 response = self.app.post(
550 response = self.app.post(
552 route_path('pullrequest_merge',
551 route_path('pullrequest_merge',
553 repo_name=str(repo_name[0]),
552 repo_name=str(repo_name[0]),
554 pull_request_id=pull_request_id),
553 pull_request_id=pull_request_id),
555 params={'csrf_token': csrf_token}).follow()
554 params={'csrf_token': csrf_token}).follow()
556
555
557 pull_request = PullRequest.get(pull_request_id)
556 pull_request = PullRequest.get(pull_request_id)
558
557
559 assert response.status_int == 200
558 assert response.status_int == 200
560 assert pull_request.is_closed()
559 assert pull_request.is_closed()
561 assert_pull_request_status(
560 assert_pull_request_status(
562 pull_request, ChangesetStatus.STATUS_APPROVED)
561 pull_request, ChangesetStatus.STATUS_APPROVED)
563
562
564 # Check the relevant log entries were added
563 # Check the relevant log entries were added
565 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
564 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
566 actions = [log.action for log in user_logs]
565 actions = [log.action for log in user_logs]
567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
568 expected_actions = [
567 expected_actions = [
569 u'repo.pull_request.close',
568 u'repo.pull_request.close',
570 u'repo.pull_request.merge',
569 u'repo.pull_request.merge',
571 u'repo.pull_request.comment.create'
570 u'repo.pull_request.comment.create'
572 ]
571 ]
573 assert actions == expected_actions
572 assert actions == expected_actions
574
573
575 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
574 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
576 actions = [log for log in user_logs]
575 actions = [log for log in user_logs]
577 assert actions[-1].action == 'user.push'
576 assert actions[-1].action == 'user.push'
578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
579
578
580 # Check post_push rcextension was really executed
579 # Check post_push rcextension was really executed
581 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
582 assert len(push_calls) == 1
581 assert len(push_calls) == 1
583 unused_last_call_args, last_call_kwargs = push_calls[0]
582 unused_last_call_args, last_call_kwargs = push_calls[0]
584 assert last_call_kwargs['action'] == 'push'
583 assert last_call_kwargs['action'] == 'push'
585 assert last_call_kwargs['commit_ids'] == pr_commit_ids
584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
586
585
587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
588 pull_request = pr_util.create_pull_request(mergeable=False)
587 pull_request = pr_util.create_pull_request(mergeable=False)
589 pull_request_id = pull_request.pull_request_id
588 pull_request_id = pull_request.pull_request_id
590 pull_request = PullRequest.get(pull_request_id)
589 pull_request = PullRequest.get(pull_request_id)
591
590
592 response = self.app.post(
591 response = self.app.post(
593 route_path('pullrequest_merge',
592 route_path('pullrequest_merge',
594 repo_name=pull_request.target_repo.scm_instance().name,
593 repo_name=pull_request.target_repo.scm_instance().name,
595 pull_request_id=pull_request.pull_request_id),
594 pull_request_id=pull_request.pull_request_id),
596 params={'csrf_token': csrf_token}).follow()
595 params={'csrf_token': csrf_token}).follow()
597
596
598 assert response.status_int == 200
597 assert response.status_int == 200
599 response.mustcontain(
598 response.mustcontain(
600 'Merge is not currently possible because of below failed checks.')
599 'Merge is not currently possible because of below failed checks.')
601 response.mustcontain('Server-side pull request merging is disabled.')
600 response.mustcontain('Server-side pull request merging is disabled.')
602
601
603 @pytest.mark.skip_backends('svn')
602 @pytest.mark.skip_backends('svn')
604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
605 pull_request = pr_util.create_pull_request(mergeable=True)
604 pull_request = pr_util.create_pull_request(mergeable=True)
606 pull_request_id = pull_request.pull_request_id
605 pull_request_id = pull_request.pull_request_id
607 repo_name = pull_request.target_repo.scm_instance().name
606 repo_name = pull_request.target_repo.scm_instance().name
608
607
609 response = self.app.post(
608 response = self.app.post(
610 route_path('pullrequest_merge',
609 route_path('pullrequest_merge',
611 repo_name=repo_name,
610 repo_name=repo_name, pull_request_id=pull_request_id),
612 pull_request_id=pull_request_id),
613 params={'csrf_token': csrf_token}).follow()
611 params={'csrf_token': csrf_token}).follow()
614
612
615 assert response.status_int == 200
613 assert response.status_int == 200
616
614
617 response.mustcontain(
615 response.mustcontain(
618 'Merge is not currently possible because of below failed checks.')
616 'Merge is not currently possible because of below failed checks.')
619 response.mustcontain('Pull request reviewer approval is pending.')
617 response.mustcontain('Pull request reviewer approval is pending.')
620
618
621 def test_merge_pull_request_renders_failure_reason(
619 def test_merge_pull_request_renders_failure_reason(
622 self, user_regular, csrf_token, pr_util):
620 self, user_regular, csrf_token, pr_util):
623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
621 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
624 pull_request_id = pull_request.pull_request_id
622 pull_request_id = pull_request.pull_request_id
625 repo_name = pull_request.target_repo.scm_instance().name
623 repo_name = pull_request.target_repo.scm_instance().name
626
624
625 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
626 MergeFailureReason.PUSH_FAILED,
627 metadata={'target': 'shadow repo',
628 'merge_commit': 'xxx'})
627 model_patcher = mock.patch.multiple(
629 model_patcher = mock.patch.multiple(
628 PullRequestModel,
630 PullRequestModel,
629 merge_repo=mock.Mock(return_value=MergeResponse(
631 merge_repo=mock.Mock(return_value=merge_resp),
630 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
631 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
632 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
632
633
633 with model_patcher:
634 with model_patcher:
634 response = self.app.post(
635 response = self.app.post(
635 route_path('pullrequest_merge',
636 route_path('pullrequest_merge',
636 repo_name=repo_name,
637 repo_name=repo_name,
637 pull_request_id=pull_request_id),
638 pull_request_id=pull_request_id),
638 params={'csrf_token': csrf_token}, status=302)
639 params={'csrf_token': csrf_token}, status=302)
639
640
640 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
641 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
641 MergeFailureReason.PUSH_FAILED])
642 metadata={'target': 'shadow repo',
643 'merge_commit': 'xxx'})
644 assert_session_flash(response, merge_resp.merge_status_message)
642
645
643 def test_update_source_revision(self, backend, csrf_token):
646 def test_update_source_revision(self, backend, csrf_token):
644 commits = [
647 commits = [
645 {'message': 'ancestor'},
648 {'message': 'ancestor'},
646 {'message': 'change'},
649 {'message': 'change'},
647 {'message': 'change-2'},
650 {'message': 'change-2'},
648 ]
651 ]
649 commit_ids = backend.create_master_repo(commits)
652 commit_ids = backend.create_master_repo(commits)
650 target = backend.create_repo(heads=['ancestor'])
653 target = backend.create_repo(heads=['ancestor'])
651 source = backend.create_repo(heads=['change'])
654 source = backend.create_repo(heads=['change'])
652
655
653 # create pr from a in source to A in target
656 # create pr from a in source to A in target
654 pull_request = PullRequest()
657 pull_request = PullRequest()
655 pull_request.source_repo = source
658 pull_request.source_repo = source
656 # TODO: johbo: Make sure that we write the source ref this way!
659 # TODO: johbo: Make sure that we write the source ref this way!
657 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
660 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
658 branch=backend.default_branch_name, commit_id=commit_ids['change'])
661 branch=backend.default_branch_name, commit_id=commit_ids['change'])
659 pull_request.target_repo = target
662 pull_request.target_repo = target
660
663
661 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
664 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
662 branch=backend.default_branch_name,
665 branch=backend.default_branch_name,
663 commit_id=commit_ids['ancestor'])
666 commit_id=commit_ids['ancestor'])
664 pull_request.revisions = [commit_ids['change']]
667 pull_request.revisions = [commit_ids['change']]
665 pull_request.title = u"Test"
668 pull_request.title = u"Test"
666 pull_request.description = u"Description"
669 pull_request.description = u"Description"
667 pull_request.author = UserModel().get_by_username(
670 pull_request.author = UserModel().get_by_username(
668 TEST_USER_ADMIN_LOGIN)
671 TEST_USER_ADMIN_LOGIN)
669 Session().add(pull_request)
672 Session().add(pull_request)
670 Session().commit()
673 Session().commit()
671 pull_request_id = pull_request.pull_request_id
674 pull_request_id = pull_request.pull_request_id
672
675
673 # source has ancestor - change - change-2
676 # source has ancestor - change - change-2
674 backend.pull_heads(source, heads=['change-2'])
677 backend.pull_heads(source, heads=['change-2'])
675
678
676 # update PR
679 # update PR
677 self.app.post(
680 self.app.post(
678 route_path('pullrequest_update',
681 route_path('pullrequest_update',
679 repo_name=target.repo_name,
682 repo_name=target.repo_name,
680 pull_request_id=pull_request_id),
683 pull_request_id=pull_request_id),
681 params={'update_commits': 'true',
684 params={'update_commits': 'true',
682 'csrf_token': csrf_token})
685 'csrf_token': csrf_token})
683
686
684 # check that we have now both revisions
687 # check that we have now both revisions
685 pull_request = PullRequest.get(pull_request_id)
688 pull_request = PullRequest.get(pull_request_id)
686 assert pull_request.revisions == [
689 assert pull_request.revisions == [
687 commit_ids['change-2'], commit_ids['change']]
690 commit_ids['change-2'], commit_ids['change']]
688
691
689 # TODO: johbo: this should be a test on its own
692 # TODO: johbo: this should be a test on its own
690 response = self.app.get(route_path(
693 response = self.app.get(route_path(
691 'pullrequest_new',
694 'pullrequest_new',
692 repo_name=target.repo_name))
695 repo_name=target.repo_name))
693 assert response.status_int == 200
696 assert response.status_int == 200
694 assert 'Pull request updated to' in response.body
697 assert 'Pull request updated to' in response.body
695 assert 'with 1 added, 0 removed commits.' in response.body
698 assert 'with 1 added, 0 removed commits.' in response.body
696
699
697 def test_update_target_revision(self, backend, csrf_token):
700 def test_update_target_revision(self, backend, csrf_token):
698 commits = [
701 commits = [
699 {'message': 'ancestor'},
702 {'message': 'ancestor'},
700 {'message': 'change'},
703 {'message': 'change'},
701 {'message': 'ancestor-new', 'parents': ['ancestor']},
704 {'message': 'ancestor-new', 'parents': ['ancestor']},
702 {'message': 'change-rebased'},
705 {'message': 'change-rebased'},
703 ]
706 ]
704 commit_ids = backend.create_master_repo(commits)
707 commit_ids = backend.create_master_repo(commits)
705 target = backend.create_repo(heads=['ancestor'])
708 target = backend.create_repo(heads=['ancestor'])
706 source = backend.create_repo(heads=['change'])
709 source = backend.create_repo(heads=['change'])
707
710
708 # create pr from a in source to A in target
711 # create pr from a in source to A in target
709 pull_request = PullRequest()
712 pull_request = PullRequest()
710 pull_request.source_repo = source
713 pull_request.source_repo = source
711 # TODO: johbo: Make sure that we write the source ref this way!
714 # TODO: johbo: Make sure that we write the source ref this way!
712 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
713 branch=backend.default_branch_name, commit_id=commit_ids['change'])
716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
714 pull_request.target_repo = target
717 pull_request.target_repo = target
715 # TODO: johbo: Target ref should be branch based, since tip can jump
718 # TODO: johbo: Target ref should be branch based, since tip can jump
716 # from branch to branch
719 # from branch to branch
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
720 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 branch=backend.default_branch_name,
721 branch=backend.default_branch_name,
719 commit_id=commit_ids['ancestor'])
722 commit_id=commit_ids['ancestor'])
720 pull_request.revisions = [commit_ids['change']]
723 pull_request.revisions = [commit_ids['change']]
721 pull_request.title = u"Test"
724 pull_request.title = u"Test"
722 pull_request.description = u"Description"
725 pull_request.description = u"Description"
723 pull_request.author = UserModel().get_by_username(
726 pull_request.author = UserModel().get_by_username(
724 TEST_USER_ADMIN_LOGIN)
727 TEST_USER_ADMIN_LOGIN)
725 Session().add(pull_request)
728 Session().add(pull_request)
726 Session().commit()
729 Session().commit()
727 pull_request_id = pull_request.pull_request_id
730 pull_request_id = pull_request.pull_request_id
728
731
729 # target has ancestor - ancestor-new
732 # target has ancestor - ancestor-new
730 # source has ancestor - ancestor-new - change-rebased
733 # source has ancestor - ancestor-new - change-rebased
731 backend.pull_heads(target, heads=['ancestor-new'])
734 backend.pull_heads(target, heads=['ancestor-new'])
732 backend.pull_heads(source, heads=['change-rebased'])
735 backend.pull_heads(source, heads=['change-rebased'])
733
736
734 # update PR
737 # update PR
735 self.app.post(
738 self.app.post(
736 route_path('pullrequest_update',
739 route_path('pullrequest_update',
737 repo_name=target.repo_name,
740 repo_name=target.repo_name,
738 pull_request_id=pull_request_id),
741 pull_request_id=pull_request_id),
739 params={'update_commits': 'true',
742 params={'update_commits': 'true',
740 'csrf_token': csrf_token},
743 'csrf_token': csrf_token},
741 status=200)
744 status=200)
742
745
743 # check that we have now both revisions
746 # check that we have now both revisions
744 pull_request = PullRequest.get(pull_request_id)
747 pull_request = PullRequest.get(pull_request_id)
745 assert pull_request.revisions == [commit_ids['change-rebased']]
748 assert pull_request.revisions == [commit_ids['change-rebased']]
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
749 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 branch=backend.default_branch_name,
750 branch=backend.default_branch_name,
748 commit_id=commit_ids['ancestor-new'])
751 commit_id=commit_ids['ancestor-new'])
749
752
750 # TODO: johbo: This should be a test on its own
753 # TODO: johbo: This should be a test on its own
751 response = self.app.get(route_path(
754 response = self.app.get(route_path(
752 'pullrequest_new',
755 'pullrequest_new',
753 repo_name=target.repo_name))
756 repo_name=target.repo_name))
754 assert response.status_int == 200
757 assert response.status_int == 200
755 assert 'Pull request updated to' in response.body
758 assert 'Pull request updated to' in response.body
756 assert 'with 1 added, 1 removed commits.' in response.body
759 assert 'with 1 added, 1 removed commits.' in response.body
757
760
758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
761 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 backend = backend_git
762 backend = backend_git
760 commits = [
763 commits = [
761 {'message': 'master-commit-1'},
764 {'message': 'master-commit-1'},
762 {'message': 'master-commit-2-change-1'},
765 {'message': 'master-commit-2-change-1'},
763 {'message': 'master-commit-3-change-2'},
766 {'message': 'master-commit-3-change-2'},
764
767
765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
768 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 {'message': 'feat-commit-2'},
769 {'message': 'feat-commit-2'},
767 ]
770 ]
768 commit_ids = backend.create_master_repo(commits)
771 commit_ids = backend.create_master_repo(commits)
769 target = backend.create_repo(heads=['master-commit-3-change-2'])
772 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 source = backend.create_repo(heads=['feat-commit-2'])
773 source = backend.create_repo(heads=['feat-commit-2'])
771
774
772 # create pr from a in source to A in target
775 # create pr from a in source to A in target
773 pull_request = PullRequest()
776 pull_request = PullRequest()
774 pull_request.source_repo = source
777 pull_request.source_repo = source
775 # TODO: johbo: Make sure that we write the source ref this way!
778 # TODO: johbo: Make sure that we write the source ref this way!
776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
779 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 branch=backend.default_branch_name,
780 branch=backend.default_branch_name,
778 commit_id=commit_ids['master-commit-3-change-2'])
781 commit_id=commit_ids['master-commit-3-change-2'])
779
782
780 pull_request.target_repo = target
783 pull_request.target_repo = target
781 # TODO: johbo: Target ref should be branch based, since tip can jump
784 # TODO: johbo: Target ref should be branch based, since tip can jump
782 # from branch to branch
785 # from branch to branch
783 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
786 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
784 branch=backend.default_branch_name,
787 branch=backend.default_branch_name,
785 commit_id=commit_ids['feat-commit-2'])
788 commit_id=commit_ids['feat-commit-2'])
786
789
787 pull_request.revisions = [
790 pull_request.revisions = [
788 commit_ids['feat-commit-1'],
791 commit_ids['feat-commit-1'],
789 commit_ids['feat-commit-2']
792 commit_ids['feat-commit-2']
790 ]
793 ]
791 pull_request.title = u"Test"
794 pull_request.title = u"Test"
792 pull_request.description = u"Description"
795 pull_request.description = u"Description"
793 pull_request.author = UserModel().get_by_username(
796 pull_request.author = UserModel().get_by_username(
794 TEST_USER_ADMIN_LOGIN)
797 TEST_USER_ADMIN_LOGIN)
795 Session().add(pull_request)
798 Session().add(pull_request)
796 Session().commit()
799 Session().commit()
797 pull_request_id = pull_request.pull_request_id
800 pull_request_id = pull_request.pull_request_id
798
801
799 # PR is created, now we simulate a force-push into target,
802 # PR is created, now we simulate a force-push into target,
800 # that drops a 2 last commits
803 # that drops a 2 last commits
801 vcsrepo = target.scm_instance()
804 vcsrepo = target.scm_instance()
802 vcsrepo.config.clear_section('hooks')
805 vcsrepo.config.clear_section('hooks')
803 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
806 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
804
807
805 # update PR
808 # update PR
806 self.app.post(
809 self.app.post(
807 route_path('pullrequest_update',
810 route_path('pullrequest_update',
808 repo_name=target.repo_name,
811 repo_name=target.repo_name,
809 pull_request_id=pull_request_id),
812 pull_request_id=pull_request_id),
810 params={'update_commits': 'true',
813 params={'update_commits': 'true',
811 'csrf_token': csrf_token},
814 'csrf_token': csrf_token},
812 status=200)
815 status=200)
813
816
814 response = self.app.get(route_path(
817 response = self.app.get(route_path(
815 'pullrequest_new',
818 'pullrequest_new',
816 repo_name=target.repo_name))
819 repo_name=target.repo_name))
817 assert response.status_int == 200
820 assert response.status_int == 200
818 response.mustcontain('Pull request updated to')
821 response.mustcontain('Pull request updated to')
819 response.mustcontain('with 0 added, 0 removed commits.')
822 response.mustcontain('with 0 added, 0 removed commits.')
820
823
821 def test_update_of_ancestor_reference(self, backend, csrf_token):
824 def test_update_of_ancestor_reference(self, backend, csrf_token):
822 commits = [
825 commits = [
823 {'message': 'ancestor'},
826 {'message': 'ancestor'},
824 {'message': 'change'},
827 {'message': 'change'},
825 {'message': 'change-2'},
828 {'message': 'change-2'},
826 {'message': 'ancestor-new', 'parents': ['ancestor']},
829 {'message': 'ancestor-new', 'parents': ['ancestor']},
827 {'message': 'change-rebased'},
830 {'message': 'change-rebased'},
828 ]
831 ]
829 commit_ids = backend.create_master_repo(commits)
832 commit_ids = backend.create_master_repo(commits)
830 target = backend.create_repo(heads=['ancestor'])
833 target = backend.create_repo(heads=['ancestor'])
831 source = backend.create_repo(heads=['change'])
834 source = backend.create_repo(heads=['change'])
832
835
833 # create pr from a in source to A in target
836 # create pr from a in source to A in target
834 pull_request = PullRequest()
837 pull_request = PullRequest()
835 pull_request.source_repo = source
838 pull_request.source_repo = source
836 # TODO: johbo: Make sure that we write the source ref this way!
839 # TODO: johbo: Make sure that we write the source ref this way!
837 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
840 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
838 branch=backend.default_branch_name,
841 branch=backend.default_branch_name,
839 commit_id=commit_ids['change'])
842 commit_id=commit_ids['change'])
840 pull_request.target_repo = target
843 pull_request.target_repo = target
841 # TODO: johbo: Target ref should be branch based, since tip can jump
844 # TODO: johbo: Target ref should be branch based, since tip can jump
842 # from branch to branch
845 # from branch to branch
843 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
846 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
844 branch=backend.default_branch_name,
847 branch=backend.default_branch_name,
845 commit_id=commit_ids['ancestor'])
848 commit_id=commit_ids['ancestor'])
846 pull_request.revisions = [commit_ids['change']]
849 pull_request.revisions = [commit_ids['change']]
847 pull_request.title = u"Test"
850 pull_request.title = u"Test"
848 pull_request.description = u"Description"
851 pull_request.description = u"Description"
849 pull_request.author = UserModel().get_by_username(
852 pull_request.author = UserModel().get_by_username(
850 TEST_USER_ADMIN_LOGIN)
853 TEST_USER_ADMIN_LOGIN)
851 Session().add(pull_request)
854 Session().add(pull_request)
852 Session().commit()
855 Session().commit()
853 pull_request_id = pull_request.pull_request_id
856 pull_request_id = pull_request.pull_request_id
854
857
855 # target has ancestor - ancestor-new
858 # target has ancestor - ancestor-new
856 # source has ancestor - ancestor-new - change-rebased
859 # source has ancestor - ancestor-new - change-rebased
857 backend.pull_heads(target, heads=['ancestor-new'])
860 backend.pull_heads(target, heads=['ancestor-new'])
858 backend.pull_heads(source, heads=['change-rebased'])
861 backend.pull_heads(source, heads=['change-rebased'])
859
862
860 # update PR
863 # update PR
861 self.app.post(
864 self.app.post(
862 route_path('pullrequest_update',
865 route_path('pullrequest_update',
863 repo_name=target.repo_name,
866 repo_name=target.repo_name,
864 pull_request_id=pull_request_id),
867 pull_request_id=pull_request_id),
865 params={'update_commits': 'true',
868 params={'update_commits': 'true',
866 'csrf_token': csrf_token},
869 'csrf_token': csrf_token},
867 status=200)
870 status=200)
868
871
869 # Expect the target reference to be updated correctly
872 # Expect the target reference to be updated correctly
870 pull_request = PullRequest.get(pull_request_id)
873 pull_request = PullRequest.get(pull_request_id)
871 assert pull_request.revisions == [commit_ids['change-rebased']]
874 assert pull_request.revisions == [commit_ids['change-rebased']]
872 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
875 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
873 branch=backend.default_branch_name,
876 branch=backend.default_branch_name,
874 commit_id=commit_ids['ancestor-new'])
877 commit_id=commit_ids['ancestor-new'])
875 assert pull_request.target_ref == expected_target_ref
878 assert pull_request.target_ref == expected_target_ref
876
879
877 def test_remove_pull_request_branch(self, backend_git, csrf_token):
880 def test_remove_pull_request_branch(self, backend_git, csrf_token):
878 branch_name = 'development'
881 branch_name = 'development'
879 commits = [
882 commits = [
880 {'message': 'initial-commit'},
883 {'message': 'initial-commit'},
881 {'message': 'old-feature'},
884 {'message': 'old-feature'},
882 {'message': 'new-feature', 'branch': branch_name},
885 {'message': 'new-feature', 'branch': branch_name},
883 ]
886 ]
884 repo = backend_git.create_repo(commits)
887 repo = backend_git.create_repo(commits)
885 commit_ids = backend_git.commit_ids
888 commit_ids = backend_git.commit_ids
886
889
887 pull_request = PullRequest()
890 pull_request = PullRequest()
888 pull_request.source_repo = repo
891 pull_request.source_repo = repo
889 pull_request.target_repo = repo
892 pull_request.target_repo = repo
890 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
893 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
891 branch=branch_name, commit_id=commit_ids['new-feature'])
894 branch=branch_name, commit_id=commit_ids['new-feature'])
892 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
895 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
893 branch=backend_git.default_branch_name,
896 branch=backend_git.default_branch_name,
894 commit_id=commit_ids['old-feature'])
897 commit_id=commit_ids['old-feature'])
895 pull_request.revisions = [commit_ids['new-feature']]
898 pull_request.revisions = [commit_ids['new-feature']]
896 pull_request.title = u"Test"
899 pull_request.title = u"Test"
897 pull_request.description = u"Description"
900 pull_request.description = u"Description"
898 pull_request.author = UserModel().get_by_username(
901 pull_request.author = UserModel().get_by_username(
899 TEST_USER_ADMIN_LOGIN)
902 TEST_USER_ADMIN_LOGIN)
900 Session().add(pull_request)
903 Session().add(pull_request)
901 Session().commit()
904 Session().commit()
902
905
903 vcs = repo.scm_instance()
906 vcs = repo.scm_instance()
904 vcs.remove_ref('refs/heads/{}'.format(branch_name))
907 vcs.remove_ref('refs/heads/{}'.format(branch_name))
905
908
906 response = self.app.get(route_path(
909 response = self.app.get(route_path(
907 'pullrequest_show',
910 'pullrequest_show',
908 repo_name=repo.repo_name,
911 repo_name=repo.repo_name,
909 pull_request_id=pull_request.pull_request_id))
912 pull_request_id=pull_request.pull_request_id))
910
913
911 assert response.status_int == 200
914 assert response.status_int == 200
912 assert_response = AssertResponse(response)
915
913 assert_response.element_contains(
916 response.assert_response().element_contains(
914 '#changeset_compare_view_content .alert strong',
917 '#changeset_compare_view_content .alert strong',
915 'Missing commits')
918 'Missing commits')
916 assert_response.element_contains(
919 response.assert_response().element_contains(
917 '#changeset_compare_view_content .alert',
920 '#changeset_compare_view_content .alert',
918 'This pull request cannot be displayed, because one or more'
921 'This pull request cannot be displayed, because one or more'
919 ' commits no longer exist in the source repository.')
922 ' commits no longer exist in the source repository.')
920
923
921 def test_strip_commits_from_pull_request(
924 def test_strip_commits_from_pull_request(
922 self, backend, pr_util, csrf_token):
925 self, backend, pr_util, csrf_token):
923 commits = [
926 commits = [
924 {'message': 'initial-commit'},
927 {'message': 'initial-commit'},
925 {'message': 'old-feature'},
928 {'message': 'old-feature'},
926 {'message': 'new-feature', 'parents': ['initial-commit']},
929 {'message': 'new-feature', 'parents': ['initial-commit']},
927 ]
930 ]
928 pull_request = pr_util.create_pull_request(
931 pull_request = pr_util.create_pull_request(
929 commits, target_head='initial-commit', source_head='new-feature',
932 commits, target_head='initial-commit', source_head='new-feature',
930 revisions=['new-feature'])
933 revisions=['new-feature'])
931
934
932 vcs = pr_util.source_repository.scm_instance()
935 vcs = pr_util.source_repository.scm_instance()
933 if backend.alias == 'git':
936 if backend.alias == 'git':
934 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
937 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
935 else:
938 else:
936 vcs.strip(pr_util.commit_ids['new-feature'])
939 vcs.strip(pr_util.commit_ids['new-feature'])
937
940
938 response = self.app.get(route_path(
941 response = self.app.get(route_path(
939 'pullrequest_show',
942 'pullrequest_show',
940 repo_name=pr_util.target_repository.repo_name,
943 repo_name=pr_util.target_repository.repo_name,
941 pull_request_id=pull_request.pull_request_id))
944 pull_request_id=pull_request.pull_request_id))
942
945
943 assert response.status_int == 200
946 assert response.status_int == 200
944 assert_response = AssertResponse(response)
947
945 assert_response.element_contains(
948 response.assert_response().element_contains(
946 '#changeset_compare_view_content .alert strong',
949 '#changeset_compare_view_content .alert strong',
947 'Missing commits')
950 'Missing commits')
948 assert_response.element_contains(
951 response.assert_response().element_contains(
949 '#changeset_compare_view_content .alert',
952 '#changeset_compare_view_content .alert',
950 'This pull request cannot be displayed, because one or more'
953 'This pull request cannot be displayed, because one or more'
951 ' commits no longer exist in the source repository.')
954 ' commits no longer exist in the source repository.')
952 assert_response.element_contains(
955 response.assert_response().element_contains(
953 '#update_commits',
956 '#update_commits',
954 'Update commits')
957 'Update commits')
955
958
956 def test_strip_commits_and_update(
959 def test_strip_commits_and_update(
957 self, backend, pr_util, csrf_token):
960 self, backend, pr_util, csrf_token):
958 commits = [
961 commits = [
959 {'message': 'initial-commit'},
962 {'message': 'initial-commit'},
960 {'message': 'old-feature'},
963 {'message': 'old-feature'},
961 {'message': 'new-feature', 'parents': ['old-feature']},
964 {'message': 'new-feature', 'parents': ['old-feature']},
962 ]
965 ]
963 pull_request = pr_util.create_pull_request(
966 pull_request = pr_util.create_pull_request(
964 commits, target_head='old-feature', source_head='new-feature',
967 commits, target_head='old-feature', source_head='new-feature',
965 revisions=['new-feature'], mergeable=True)
968 revisions=['new-feature'], mergeable=True)
966
969
967 vcs = pr_util.source_repository.scm_instance()
970 vcs = pr_util.source_repository.scm_instance()
968 if backend.alias == 'git':
971 if backend.alias == 'git':
969 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
972 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
970 else:
973 else:
971 vcs.strip(pr_util.commit_ids['new-feature'])
974 vcs.strip(pr_util.commit_ids['new-feature'])
972
975
973 response = self.app.post(
976 response = self.app.post(
974 route_path('pullrequest_update',
977 route_path('pullrequest_update',
975 repo_name=pull_request.target_repo.repo_name,
978 repo_name=pull_request.target_repo.repo_name,
976 pull_request_id=pull_request.pull_request_id),
979 pull_request_id=pull_request.pull_request_id),
977 params={'update_commits': 'true',
980 params={'update_commits': 'true',
978 'csrf_token': csrf_token})
981 'csrf_token': csrf_token})
979
982
980 assert response.status_int == 200
983 assert response.status_int == 200
981 assert response.body == 'true'
984 assert response.body == 'true'
982
985
983 # Make sure that after update, it won't raise 500 errors
986 # Make sure that after update, it won't raise 500 errors
984 response = self.app.get(route_path(
987 response = self.app.get(route_path(
985 'pullrequest_show',
988 'pullrequest_show',
986 repo_name=pr_util.target_repository.repo_name,
989 repo_name=pr_util.target_repository.repo_name,
987 pull_request_id=pull_request.pull_request_id))
990 pull_request_id=pull_request.pull_request_id))
988
991
989 assert response.status_int == 200
992 assert response.status_int == 200
990 assert_response = AssertResponse(response)
993 response.assert_response().element_contains(
991 assert_response.element_contains(
992 '#changeset_compare_view_content .alert strong',
994 '#changeset_compare_view_content .alert strong',
993 'Missing commits')
995 'Missing commits')
994
996
995 def test_branch_is_a_link(self, pr_util):
997 def test_branch_is_a_link(self, pr_util):
996 pull_request = pr_util.create_pull_request()
998 pull_request = pr_util.create_pull_request()
997 pull_request.source_ref = 'branch:origin:1234567890abcdef'
999 pull_request.source_ref = 'branch:origin:1234567890abcdef'
998 pull_request.target_ref = 'branch:target:abcdef1234567890'
1000 pull_request.target_ref = 'branch:target:abcdef1234567890'
999 Session().add(pull_request)
1001 Session().add(pull_request)
1000 Session().commit()
1002 Session().commit()
1001
1003
1002 response = self.app.get(route_path(
1004 response = self.app.get(route_path(
1003 'pullrequest_show',
1005 'pullrequest_show',
1004 repo_name=pull_request.target_repo.scm_instance().name,
1006 repo_name=pull_request.target_repo.scm_instance().name,
1005 pull_request_id=pull_request.pull_request_id))
1007 pull_request_id=pull_request.pull_request_id))
1006 assert response.status_int == 200
1008 assert response.status_int == 200
1007 assert_response = AssertResponse(response)
1008
1009
1009 origin = assert_response.get_element('.pr-origininfo .tag')
1010 origin = response.assert_response().get_element('.pr-origininfo .tag')
1010 origin_children = origin.getchildren()
1011 origin_children = origin.getchildren()
1011 assert len(origin_children) == 1
1012 assert len(origin_children) == 1
1012 target = assert_response.get_element('.pr-targetinfo .tag')
1013 target = response.assert_response().get_element('.pr-targetinfo .tag')
1013 target_children = target.getchildren()
1014 target_children = target.getchildren()
1014 assert len(target_children) == 1
1015 assert len(target_children) == 1
1015
1016
1016 expected_origin_link = route_path(
1017 expected_origin_link = route_path(
1017 'repo_changelog',
1018 'repo_changelog',
1018 repo_name=pull_request.source_repo.scm_instance().name,
1019 repo_name=pull_request.source_repo.scm_instance().name,
1019 params=dict(branch='origin'))
1020 params=dict(branch='origin'))
1020 expected_target_link = route_path(
1021 expected_target_link = route_path(
1021 'repo_changelog',
1022 'repo_changelog',
1022 repo_name=pull_request.target_repo.scm_instance().name,
1023 repo_name=pull_request.target_repo.scm_instance().name,
1023 params=dict(branch='target'))
1024 params=dict(branch='target'))
1024 assert origin_children[0].attrib['href'] == expected_origin_link
1025 assert origin_children[0].attrib['href'] == expected_origin_link
1025 assert origin_children[0].text == 'branch: origin'
1026 assert origin_children[0].text == 'branch: origin'
1026 assert target_children[0].attrib['href'] == expected_target_link
1027 assert target_children[0].attrib['href'] == expected_target_link
1027 assert target_children[0].text == 'branch: target'
1028 assert target_children[0].text == 'branch: target'
1028
1029
1029 def test_bookmark_is_not_a_link(self, pr_util):
1030 def test_bookmark_is_not_a_link(self, pr_util):
1030 pull_request = pr_util.create_pull_request()
1031 pull_request = pr_util.create_pull_request()
1031 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1032 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1032 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1033 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1033 Session().add(pull_request)
1034 Session().add(pull_request)
1034 Session().commit()
1035 Session().commit()
1035
1036
1036 response = self.app.get(route_path(
1037 response = self.app.get(route_path(
1037 'pullrequest_show',
1038 'pullrequest_show',
1038 repo_name=pull_request.target_repo.scm_instance().name,
1039 repo_name=pull_request.target_repo.scm_instance().name,
1039 pull_request_id=pull_request.pull_request_id))
1040 pull_request_id=pull_request.pull_request_id))
1040 assert response.status_int == 200
1041 assert response.status_int == 200
1041 assert_response = AssertResponse(response)
1042
1042
1043 origin = assert_response.get_element('.pr-origininfo .tag')
1043 origin = response.assert_response().get_element('.pr-origininfo .tag')
1044 assert origin.text.strip() == 'bookmark: origin'
1044 assert origin.text.strip() == 'bookmark: origin'
1045 assert origin.getchildren() == []
1045 assert origin.getchildren() == []
1046
1046
1047 target = assert_response.get_element('.pr-targetinfo .tag')
1047 target = response.assert_response().get_element('.pr-targetinfo .tag')
1048 assert target.text.strip() == 'bookmark: target'
1048 assert target.text.strip() == 'bookmark: target'
1049 assert target.getchildren() == []
1049 assert target.getchildren() == []
1050
1050
1051 def test_tag_is_not_a_link(self, pr_util):
1051 def test_tag_is_not_a_link(self, pr_util):
1052 pull_request = pr_util.create_pull_request()
1052 pull_request = pr_util.create_pull_request()
1053 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1053 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1054 pull_request.target_ref = 'tag:target:abcdef1234567890'
1054 pull_request.target_ref = 'tag:target:abcdef1234567890'
1055 Session().add(pull_request)
1055 Session().add(pull_request)
1056 Session().commit()
1056 Session().commit()
1057
1057
1058 response = self.app.get(route_path(
1058 response = self.app.get(route_path(
1059 'pullrequest_show',
1059 'pullrequest_show',
1060 repo_name=pull_request.target_repo.scm_instance().name,
1060 repo_name=pull_request.target_repo.scm_instance().name,
1061 pull_request_id=pull_request.pull_request_id))
1061 pull_request_id=pull_request.pull_request_id))
1062 assert response.status_int == 200
1062 assert response.status_int == 200
1063 assert_response = AssertResponse(response)
1064
1063
1065 origin = assert_response.get_element('.pr-origininfo .tag')
1064 origin = response.assert_response().get_element('.pr-origininfo .tag')
1066 assert origin.text.strip() == 'tag: origin'
1065 assert origin.text.strip() == 'tag: origin'
1067 assert origin.getchildren() == []
1066 assert origin.getchildren() == []
1068
1067
1069 target = assert_response.get_element('.pr-targetinfo .tag')
1068 target = response.assert_response().get_element('.pr-targetinfo .tag')
1070 assert target.text.strip() == 'tag: target'
1069 assert target.text.strip() == 'tag: target'
1071 assert target.getchildren() == []
1070 assert target.getchildren() == []
1072
1071
1073 @pytest.mark.parametrize('mergeable', [True, False])
1072 @pytest.mark.parametrize('mergeable', [True, False])
1074 def test_shadow_repository_link(
1073 def test_shadow_repository_link(
1075 self, mergeable, pr_util, http_host_only_stub):
1074 self, mergeable, pr_util, http_host_only_stub):
1076 """
1075 """
1077 Check that the pull request summary page displays a link to the shadow
1076 Check that the pull request summary page displays a link to the shadow
1078 repository if the pull request is mergeable. If it is not mergeable
1077 repository if the pull request is mergeable. If it is not mergeable
1079 the link should not be displayed.
1078 the link should not be displayed.
1080 """
1079 """
1081 pull_request = pr_util.create_pull_request(
1080 pull_request = pr_util.create_pull_request(
1082 mergeable=mergeable, enable_notifications=False)
1081 mergeable=mergeable, enable_notifications=False)
1083 target_repo = pull_request.target_repo.scm_instance()
1082 target_repo = pull_request.target_repo.scm_instance()
1084 pr_id = pull_request.pull_request_id
1083 pr_id = pull_request.pull_request_id
1085 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1084 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1086 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1085 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1087
1086
1088 response = self.app.get(route_path(
1087 response = self.app.get(route_path(
1089 'pullrequest_show',
1088 'pullrequest_show',
1090 repo_name=target_repo.name,
1089 repo_name=target_repo.name,
1091 pull_request_id=pr_id))
1090 pull_request_id=pr_id))
1092
1091
1093 assertr = AssertResponse(response)
1094 if mergeable:
1092 if mergeable:
1095 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1093 response.assert_response().element_value_contains(
1096 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1094 'input.pr-mergeinfo', shadow_url)
1095 response.assert_response().element_value_contains(
1096 'input.pr-mergeinfo ', 'pr-merge')
1097 else:
1097 else:
1098 assertr.no_element_exists('.pr-mergeinfo')
1098 response.assert_response().no_element_exists('.pr-mergeinfo')
1099
1099
1100
1100
1101 @pytest.mark.usefixtures('app')
1101 @pytest.mark.usefixtures('app')
1102 @pytest.mark.backends("git", "hg")
1102 @pytest.mark.backends("git", "hg")
1103 class TestPullrequestsControllerDelete(object):
1103 class TestPullrequestsControllerDelete(object):
1104 def test_pull_request_delete_button_permissions_admin(
1104 def test_pull_request_delete_button_permissions_admin(
1105 self, autologin_user, user_admin, pr_util):
1105 self, autologin_user, user_admin, pr_util):
1106 pull_request = pr_util.create_pull_request(
1106 pull_request = pr_util.create_pull_request(
1107 author=user_admin.username, enable_notifications=False)
1107 author=user_admin.username, enable_notifications=False)
1108
1108
1109 response = self.app.get(route_path(
1109 response = self.app.get(route_path(
1110 'pullrequest_show',
1110 'pullrequest_show',
1111 repo_name=pull_request.target_repo.scm_instance().name,
1111 repo_name=pull_request.target_repo.scm_instance().name,
1112 pull_request_id=pull_request.pull_request_id))
1112 pull_request_id=pull_request.pull_request_id))
1113
1113
1114 response.mustcontain('id="delete_pullrequest"')
1114 response.mustcontain('id="delete_pullrequest"')
1115 response.mustcontain('Confirm to delete this pull request')
1115 response.mustcontain('Confirm to delete this pull request')
1116
1116
1117 def test_pull_request_delete_button_permissions_owner(
1117 def test_pull_request_delete_button_permissions_owner(
1118 self, autologin_regular_user, user_regular, pr_util):
1118 self, autologin_regular_user, user_regular, pr_util):
1119 pull_request = pr_util.create_pull_request(
1119 pull_request = pr_util.create_pull_request(
1120 author=user_regular.username, enable_notifications=False)
1120 author=user_regular.username, enable_notifications=False)
1121
1121
1122 response = self.app.get(route_path(
1122 response = self.app.get(route_path(
1123 'pullrequest_show',
1123 'pullrequest_show',
1124 repo_name=pull_request.target_repo.scm_instance().name,
1124 repo_name=pull_request.target_repo.scm_instance().name,
1125 pull_request_id=pull_request.pull_request_id))
1125 pull_request_id=pull_request.pull_request_id))
1126
1126
1127 response.mustcontain('id="delete_pullrequest"')
1127 response.mustcontain('id="delete_pullrequest"')
1128 response.mustcontain('Confirm to delete this pull request')
1128 response.mustcontain('Confirm to delete this pull request')
1129
1129
1130 def test_pull_request_delete_button_permissions_forbidden(
1130 def test_pull_request_delete_button_permissions_forbidden(
1131 self, autologin_regular_user, user_regular, user_admin, pr_util):
1131 self, autologin_regular_user, user_regular, user_admin, pr_util):
1132 pull_request = pr_util.create_pull_request(
1132 pull_request = pr_util.create_pull_request(
1133 author=user_admin.username, enable_notifications=False)
1133 author=user_admin.username, enable_notifications=False)
1134
1134
1135 response = self.app.get(route_path(
1135 response = self.app.get(route_path(
1136 'pullrequest_show',
1136 'pullrequest_show',
1137 repo_name=pull_request.target_repo.scm_instance().name,
1137 repo_name=pull_request.target_repo.scm_instance().name,
1138 pull_request_id=pull_request.pull_request_id))
1138 pull_request_id=pull_request.pull_request_id))
1139 response.mustcontain(no=['id="delete_pullrequest"'])
1139 response.mustcontain(no=['id="delete_pullrequest"'])
1140 response.mustcontain(no=['Confirm to delete this pull request'])
1140 response.mustcontain(no=['Confirm to delete this pull request'])
1141
1141
1142 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1142 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1143 self, autologin_regular_user, user_regular, user_admin, pr_util,
1143 self, autologin_regular_user, user_regular, user_admin, pr_util,
1144 user_util):
1144 user_util):
1145
1145
1146 pull_request = pr_util.create_pull_request(
1146 pull_request = pr_util.create_pull_request(
1147 author=user_admin.username, enable_notifications=False)
1147 author=user_admin.username, enable_notifications=False)
1148
1148
1149 user_util.grant_user_permission_to_repo(
1149 user_util.grant_user_permission_to_repo(
1150 pull_request.target_repo, user_regular,
1150 pull_request.target_repo, user_regular,
1151 'repository.write')
1151 'repository.write')
1152
1152
1153 response = self.app.get(route_path(
1153 response = self.app.get(route_path(
1154 'pullrequest_show',
1154 'pullrequest_show',
1155 repo_name=pull_request.target_repo.scm_instance().name,
1155 repo_name=pull_request.target_repo.scm_instance().name,
1156 pull_request_id=pull_request.pull_request_id))
1156 pull_request_id=pull_request.pull_request_id))
1157
1157
1158 response.mustcontain('id="open_edit_pullrequest"')
1158 response.mustcontain('id="open_edit_pullrequest"')
1159 response.mustcontain('id="delete_pullrequest"')
1159 response.mustcontain('id="delete_pullrequest"')
1160 response.mustcontain(no=['Confirm to delete this pull request'])
1160 response.mustcontain(no=['Confirm to delete this pull request'])
1161
1161
1162 def test_delete_comment_returns_404_if_comment_does_not_exist(
1162 def test_delete_comment_returns_404_if_comment_does_not_exist(
1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1164
1164
1165 pull_request = pr_util.create_pull_request(
1165 pull_request = pr_util.create_pull_request(
1166 author=user_admin.username, enable_notifications=False)
1166 author=user_admin.username, enable_notifications=False)
1167
1167
1168 self.app.post(
1168 self.app.post(
1169 route_path(
1169 route_path(
1170 'pullrequest_comment_delete',
1170 'pullrequest_comment_delete',
1171 repo_name=pull_request.target_repo.scm_instance().name,
1171 repo_name=pull_request.target_repo.scm_instance().name,
1172 pull_request_id=pull_request.pull_request_id,
1172 pull_request_id=pull_request.pull_request_id,
1173 comment_id=1024404),
1173 comment_id=1024404),
1174 extra_environ=xhr_header,
1174 extra_environ=xhr_header,
1175 params={'csrf_token': csrf_token},
1175 params={'csrf_token': csrf_token},
1176 status=404
1176 status=404
1177 )
1177 )
1178
1178
1179 def test_delete_comment(
1179 def test_delete_comment(
1180 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1180 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1181
1181
1182 pull_request = pr_util.create_pull_request(
1182 pull_request = pr_util.create_pull_request(
1183 author=user_admin.username, enable_notifications=False)
1183 author=user_admin.username, enable_notifications=False)
1184 comment = pr_util.create_comment()
1184 comment = pr_util.create_comment()
1185 comment_id = comment.comment_id
1185 comment_id = comment.comment_id
1186
1186
1187 response = self.app.post(
1187 response = self.app.post(
1188 route_path(
1188 route_path(
1189 'pullrequest_comment_delete',
1189 'pullrequest_comment_delete',
1190 repo_name=pull_request.target_repo.scm_instance().name,
1190 repo_name=pull_request.target_repo.scm_instance().name,
1191 pull_request_id=pull_request.pull_request_id,
1191 pull_request_id=pull_request.pull_request_id,
1192 comment_id=comment_id),
1192 comment_id=comment_id),
1193 extra_environ=xhr_header,
1193 extra_environ=xhr_header,
1194 params={'csrf_token': csrf_token},
1194 params={'csrf_token': csrf_token},
1195 status=200
1195 status=200
1196 )
1196 )
1197 assert response.body == 'true'
1197 assert response.body == 'true'
1198
1198
1199 @pytest.mark.parametrize('url_type', [
1199 @pytest.mark.parametrize('url_type', [
1200 'pullrequest_new',
1200 'pullrequest_new',
1201 'pullrequest_create',
1201 'pullrequest_create',
1202 'pullrequest_update',
1202 'pullrequest_update',
1203 'pullrequest_merge',
1203 'pullrequest_merge',
1204 ])
1204 ])
1205 def test_pull_request_is_forbidden_on_archived_repo(
1205 def test_pull_request_is_forbidden_on_archived_repo(
1206 self, autologin_user, backend, xhr_header, user_util, url_type):
1206 self, autologin_user, backend, xhr_header, user_util, url_type):
1207
1207
1208 # create a temporary repo
1208 # create a temporary repo
1209 source = user_util.create_repo(repo_type=backend.alias)
1209 source = user_util.create_repo(repo_type=backend.alias)
1210 repo_name = source.repo_name
1210 repo_name = source.repo_name
1211 repo = Repository.get_by_repo_name(repo_name)
1211 repo = Repository.get_by_repo_name(repo_name)
1212 repo.archived = True
1212 repo.archived = True
1213 Session().commit()
1213 Session().commit()
1214
1214
1215 response = self.app.get(
1215 response = self.app.get(
1216 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1216 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1217
1217
1218 msg = 'Action not supported for archived repository.'
1218 msg = 'Action not supported for archived repository.'
1219 assert_session_flash(response, msg)
1219 assert_session_flash(response, msg)
1220
1220
1221
1221
1222 def assert_pull_request_status(pull_request, expected_status):
1222 def assert_pull_request_status(pull_request, expected_status):
1223 status = ChangesetStatusModel().calculated_review_status(
1223 status = ChangesetStatusModel().calculated_review_status(
1224 pull_request=pull_request)
1224 pull_request=pull_request)
1225 assert status == expected_status
1225 assert status == expected_status
1226
1226
1227
1227
1228 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1228 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1229 @pytest.mark.usefixtures("autologin_user")
1229 @pytest.mark.usefixtures("autologin_user")
1230 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1230 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1231 response = app.get(
1231 response = app.get(
1232 route_path(route, repo_name=backend_svn.repo_name), status=404)
1232 route_path(route, repo_name=backend_svn.repo_name), status=404)
1233
1233
@@ -1,1414 +1,1412 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34
34
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 RepositoryRequirementError, EmptyRepositoryError)
45 RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 ChangesetComment, ChangesetStatus, Repository)
49 ChangesetComment, ChangesetStatus, Repository)
50 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.forms import PullRequestForm
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59
59
60 def load_default_context(self):
60 def load_default_context(self):
61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64 # backward compat., we use for OLD PRs a plain renderer
64 # backward compat., we use for OLD PRs a plain renderer
65 c.renderer = 'plain'
65 c.renderer = 'plain'
66 return c
66 return c
67
67
68 def _get_pull_requests_list(
68 def _get_pull_requests_list(
69 self, repo_name, source, filter_type, opened_by, statuses):
69 self, repo_name, source, filter_type, opened_by, statuses):
70
70
71 draw, start, limit = self._extract_chunk(self.request)
71 draw, start, limit = self._extract_chunk(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 search_q, order_by, order_dir = self._extract_ordering(self.request)
73 _render = self.request.get_partial_renderer(
73 _render = self.request.get_partial_renderer(
74 'rhodecode:templates/data_table/_dt_elements.mako')
74 'rhodecode:templates/data_table/_dt_elements.mako')
75
75
76 # pagination
76 # pagination
77
77
78 if filter_type == 'awaiting_review':
78 if filter_type == 'awaiting_review':
79 pull_requests = PullRequestModel().get_awaiting_review(
79 pull_requests = PullRequestModel().get_awaiting_review(
80 repo_name, source=source, opened_by=opened_by,
80 repo_name, source=source, opened_by=opened_by,
81 statuses=statuses, offset=start, length=limit,
81 statuses=statuses, offset=start, length=limit,
82 order_by=order_by, order_dir=order_dir)
82 order_by=order_by, order_dir=order_dir)
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
84 repo_name, source=source, statuses=statuses,
84 repo_name, source=source, statuses=statuses,
85 opened_by=opened_by)
85 opened_by=opened_by)
86 elif filter_type == 'awaiting_my_review':
86 elif filter_type == 'awaiting_my_review':
87 pull_requests = PullRequestModel().get_awaiting_my_review(
87 pull_requests = PullRequestModel().get_awaiting_my_review(
88 repo_name, source=source, opened_by=opened_by,
88 repo_name, source=source, opened_by=opened_by,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 user_id=self._rhodecode_user.user_id, statuses=statuses,
90 offset=start, length=limit, order_by=order_by,
90 offset=start, length=limit, order_by=order_by,
91 order_dir=order_dir)
91 order_dir=order_dir)
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
93 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 repo_name, source=source, user_id=self._rhodecode_user.user_id,
94 statuses=statuses, opened_by=opened_by)
94 statuses=statuses, opened_by=opened_by)
95 else:
95 else:
96 pull_requests = PullRequestModel().get_all(
96 pull_requests = PullRequestModel().get_all(
97 repo_name, source=source, opened_by=opened_by,
97 repo_name, source=source, opened_by=opened_by,
98 statuses=statuses, offset=start, length=limit,
98 statuses=statuses, offset=start, length=limit,
99 order_by=order_by, order_dir=order_dir)
99 order_by=order_by, order_dir=order_dir)
100 pull_requests_total_count = PullRequestModel().count_all(
100 pull_requests_total_count = PullRequestModel().count_all(
101 repo_name, source=source, statuses=statuses,
101 repo_name, source=source, statuses=statuses,
102 opened_by=opened_by)
102 opened_by=opened_by)
103
103
104 data = []
104 data = []
105 comments_model = CommentsModel()
105 comments_model = CommentsModel()
106 for pr in pull_requests:
106 for pr in pull_requests:
107 comments = comments_model.get_all_comments(
107 comments = comments_model.get_all_comments(
108 self.db_repo.repo_id, pull_request=pr)
108 self.db_repo.repo_id, pull_request=pr)
109
109
110 data.append({
110 data.append({
111 'name': _render('pullrequest_name',
111 'name': _render('pullrequest_name',
112 pr.pull_request_id, pr.target_repo.repo_name),
112 pr.pull_request_id, pr.target_repo.repo_name),
113 'name_raw': pr.pull_request_id,
113 'name_raw': pr.pull_request_id,
114 'status': _render('pullrequest_status',
114 'status': _render('pullrequest_status',
115 pr.calculated_review_status()),
115 pr.calculated_review_status()),
116 'title': _render(
116 'title': _render(
117 'pullrequest_title', pr.title, pr.description),
117 'pullrequest_title', pr.title, pr.description),
118 'description': h.escape(pr.description),
118 'description': h.escape(pr.description),
119 'updated_on': _render('pullrequest_updated_on',
119 'updated_on': _render('pullrequest_updated_on',
120 h.datetime_to_time(pr.updated_on)),
120 h.datetime_to_time(pr.updated_on)),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
122 'created_on': _render('pullrequest_updated_on',
122 'created_on': _render('pullrequest_updated_on',
123 h.datetime_to_time(pr.created_on)),
123 h.datetime_to_time(pr.created_on)),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'created_on_raw': h.datetime_to_time(pr.created_on),
125 'author': _render('pullrequest_author',
125 'author': _render('pullrequest_author',
126 pr.author.full_contact, ),
126 pr.author.full_contact, ),
127 'author_raw': pr.author.full_name,
127 'author_raw': pr.author.full_name,
128 'comments': _render('pullrequest_comments', len(comments)),
128 'comments': _render('pullrequest_comments', len(comments)),
129 'comments_raw': len(comments),
129 'comments_raw': len(comments),
130 'closed': pr.is_closed(),
130 'closed': pr.is_closed(),
131 })
131 })
132
132
133 data = ({
133 data = ({
134 'draw': draw,
134 'draw': draw,
135 'data': data,
135 'data': data,
136 'recordsTotal': pull_requests_total_count,
136 'recordsTotal': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
138 })
138 })
139 return data
139 return data
140
140
141 def get_recache_flag(self):
141 def get_recache_flag(self):
142 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
142 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
143 flag_val = self.request.GET.get(flag_name)
143 flag_val = self.request.GET.get(flag_name)
144 if str2bool(flag_val):
144 if str2bool(flag_val):
145 return True
145 return True
146 return False
146 return False
147
147
148 @LoginRequired()
148 @LoginRequired()
149 @HasRepoPermissionAnyDecorator(
149 @HasRepoPermissionAnyDecorator(
150 'repository.read', 'repository.write', 'repository.admin')
150 'repository.read', 'repository.write', 'repository.admin')
151 @view_config(
151 @view_config(
152 route_name='pullrequest_show_all', request_method='GET',
152 route_name='pullrequest_show_all', request_method='GET',
153 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
153 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
154 def pull_request_list(self):
154 def pull_request_list(self):
155 c = self.load_default_context()
155 c = self.load_default_context()
156
156
157 req_get = self.request.GET
157 req_get = self.request.GET
158 c.source = str2bool(req_get.get('source'))
158 c.source = str2bool(req_get.get('source'))
159 c.closed = str2bool(req_get.get('closed'))
159 c.closed = str2bool(req_get.get('closed'))
160 c.my = str2bool(req_get.get('my'))
160 c.my = str2bool(req_get.get('my'))
161 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
161 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
162 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
162 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
163
163
164 c.active = 'open'
164 c.active = 'open'
165 if c.my:
165 if c.my:
166 c.active = 'my'
166 c.active = 'my'
167 if c.closed:
167 if c.closed:
168 c.active = 'closed'
168 c.active = 'closed'
169 if c.awaiting_review and not c.source:
169 if c.awaiting_review and not c.source:
170 c.active = 'awaiting'
170 c.active = 'awaiting'
171 if c.source and not c.awaiting_review:
171 if c.source and not c.awaiting_review:
172 c.active = 'source'
172 c.active = 'source'
173 if c.awaiting_my_review:
173 if c.awaiting_my_review:
174 c.active = 'awaiting_my'
174 c.active = 'awaiting_my'
175
175
176 return self._get_template_context(c)
176 return self._get_template_context(c)
177
177
178 @LoginRequired()
178 @LoginRequired()
179 @HasRepoPermissionAnyDecorator(
179 @HasRepoPermissionAnyDecorator(
180 'repository.read', 'repository.write', 'repository.admin')
180 'repository.read', 'repository.write', 'repository.admin')
181 @view_config(
181 @view_config(
182 route_name='pullrequest_show_all_data', request_method='GET',
182 route_name='pullrequest_show_all_data', request_method='GET',
183 renderer='json_ext', xhr=True)
183 renderer='json_ext', xhr=True)
184 def pull_request_list_data(self):
184 def pull_request_list_data(self):
185 self.load_default_context()
185 self.load_default_context()
186
186
187 # additional filters
187 # additional filters
188 req_get = self.request.GET
188 req_get = self.request.GET
189 source = str2bool(req_get.get('source'))
189 source = str2bool(req_get.get('source'))
190 closed = str2bool(req_get.get('closed'))
190 closed = str2bool(req_get.get('closed'))
191 my = str2bool(req_get.get('my'))
191 my = str2bool(req_get.get('my'))
192 awaiting_review = str2bool(req_get.get('awaiting_review'))
192 awaiting_review = str2bool(req_get.get('awaiting_review'))
193 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
193 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
194
194
195 filter_type = 'awaiting_review' if awaiting_review \
195 filter_type = 'awaiting_review' if awaiting_review \
196 else 'awaiting_my_review' if awaiting_my_review \
196 else 'awaiting_my_review' if awaiting_my_review \
197 else None
197 else None
198
198
199 opened_by = None
199 opened_by = None
200 if my:
200 if my:
201 opened_by = [self._rhodecode_user.user_id]
201 opened_by = [self._rhodecode_user.user_id]
202
202
203 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
203 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
204 if closed:
204 if closed:
205 statuses = [PullRequest.STATUS_CLOSED]
205 statuses = [PullRequest.STATUS_CLOSED]
206
206
207 data = self._get_pull_requests_list(
207 data = self._get_pull_requests_list(
208 repo_name=self.db_repo_name, source=source,
208 repo_name=self.db_repo_name, source=source,
209 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
209 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
210
210
211 return data
211 return data
212
212
213 def _is_diff_cache_enabled(self, target_repo):
213 def _is_diff_cache_enabled(self, target_repo):
214 caching_enabled = self._get_general_setting(
214 caching_enabled = self._get_general_setting(
215 target_repo, 'rhodecode_diff_cache')
215 target_repo, 'rhodecode_diff_cache')
216 log.debug('Diff caching enabled: %s', caching_enabled)
216 log.debug('Diff caching enabled: %s', caching_enabled)
217 return caching_enabled
217 return caching_enabled
218
218
219 def _get_diffset(self, source_repo_name, source_repo,
219 def _get_diffset(self, source_repo_name, source_repo,
220 source_ref_id, target_ref_id,
220 source_ref_id, target_ref_id,
221 target_commit, source_commit, diff_limit, file_limit,
221 target_commit, source_commit, diff_limit, file_limit,
222 fulldiff, hide_whitespace_changes, diff_context):
222 fulldiff, hide_whitespace_changes, diff_context):
223
223
224 vcs_diff = PullRequestModel().get_diff(
224 vcs_diff = PullRequestModel().get_diff(
225 source_repo, source_ref_id, target_ref_id,
225 source_repo, source_ref_id, target_ref_id,
226 hide_whitespace_changes, diff_context)
226 hide_whitespace_changes, diff_context)
227
227
228 diff_processor = diffs.DiffProcessor(
228 diff_processor = diffs.DiffProcessor(
229 vcs_diff, format='newdiff', diff_limit=diff_limit,
229 vcs_diff, format='newdiff', diff_limit=diff_limit,
230 file_limit=file_limit, show_full_diff=fulldiff)
230 file_limit=file_limit, show_full_diff=fulldiff)
231
231
232 _parsed = diff_processor.prepare()
232 _parsed = diff_processor.prepare()
233
233
234 diffset = codeblocks.DiffSet(
234 diffset = codeblocks.DiffSet(
235 repo_name=self.db_repo_name,
235 repo_name=self.db_repo_name,
236 source_repo_name=source_repo_name,
236 source_repo_name=source_repo_name,
237 source_node_getter=codeblocks.diffset_node_getter(target_commit),
237 source_node_getter=codeblocks.diffset_node_getter(target_commit),
238 target_node_getter=codeblocks.diffset_node_getter(source_commit),
238 target_node_getter=codeblocks.diffset_node_getter(source_commit),
239 )
239 )
240 diffset = self.path_filter.render_patchset_filtered(
240 diffset = self.path_filter.render_patchset_filtered(
241 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
241 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
242
242
243 return diffset
243 return diffset
244
244
245 def _get_range_diffset(self, source_scm, source_repo,
245 def _get_range_diffset(self, source_scm, source_repo,
246 commit1, commit2, diff_limit, file_limit,
246 commit1, commit2, diff_limit, file_limit,
247 fulldiff, hide_whitespace_changes, diff_context):
247 fulldiff, hide_whitespace_changes, diff_context):
248 vcs_diff = source_scm.get_diff(
248 vcs_diff = source_scm.get_diff(
249 commit1, commit2,
249 commit1, commit2,
250 ignore_whitespace=hide_whitespace_changes,
250 ignore_whitespace=hide_whitespace_changes,
251 context=diff_context)
251 context=diff_context)
252
252
253 diff_processor = diffs.DiffProcessor(
253 diff_processor = diffs.DiffProcessor(
254 vcs_diff, format='newdiff', diff_limit=diff_limit,
254 vcs_diff, format='newdiff', diff_limit=diff_limit,
255 file_limit=file_limit, show_full_diff=fulldiff)
255 file_limit=file_limit, show_full_diff=fulldiff)
256
256
257 _parsed = diff_processor.prepare()
257 _parsed = diff_processor.prepare()
258
258
259 diffset = codeblocks.DiffSet(
259 diffset = codeblocks.DiffSet(
260 repo_name=source_repo.repo_name,
260 repo_name=source_repo.repo_name,
261 source_node_getter=codeblocks.diffset_node_getter(commit1),
261 source_node_getter=codeblocks.diffset_node_getter(commit1),
262 target_node_getter=codeblocks.diffset_node_getter(commit2))
262 target_node_getter=codeblocks.diffset_node_getter(commit2))
263
263
264 diffset = self.path_filter.render_patchset_filtered(
264 diffset = self.path_filter.render_patchset_filtered(
265 diffset, _parsed, commit1.raw_id, commit2.raw_id)
265 diffset, _parsed, commit1.raw_id, commit2.raw_id)
266
266
267 return diffset
267 return diffset
268
268
269 @LoginRequired()
269 @LoginRequired()
270 @HasRepoPermissionAnyDecorator(
270 @HasRepoPermissionAnyDecorator(
271 'repository.read', 'repository.write', 'repository.admin')
271 'repository.read', 'repository.write', 'repository.admin')
272 @view_config(
272 @view_config(
273 route_name='pullrequest_show', request_method='GET',
273 route_name='pullrequest_show', request_method='GET',
274 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
274 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
275 def pull_request_show(self):
275 def pull_request_show(self):
276 pull_request_id = self.request.matchdict['pull_request_id']
276 pull_request_id = self.request.matchdict['pull_request_id']
277
277
278 c = self.load_default_context()
278 c = self.load_default_context()
279
279
280 version = self.request.GET.get('version')
280 version = self.request.GET.get('version')
281 from_version = self.request.GET.get('from_version') or version
281 from_version = self.request.GET.get('from_version') or version
282 merge_checks = self.request.GET.get('merge_checks')
282 merge_checks = self.request.GET.get('merge_checks')
283 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
283 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
284
284
285 # fetch global flags of ignore ws or context lines
285 # fetch global flags of ignore ws or context lines
286 diff_context = diffs.get_diff_context(self.request)
286 diff_context = diffs.get_diff_context(self.request)
287 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
287 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
288
288
289 force_refresh = str2bool(self.request.GET.get('force_refresh'))
289 force_refresh = str2bool(self.request.GET.get('force_refresh'))
290
290
291 (pull_request_latest,
291 (pull_request_latest,
292 pull_request_at_ver,
292 pull_request_at_ver,
293 pull_request_display_obj,
293 pull_request_display_obj,
294 at_version) = PullRequestModel().get_pr_version(
294 at_version) = PullRequestModel().get_pr_version(
295 pull_request_id, version=version)
295 pull_request_id, version=version)
296 pr_closed = pull_request_latest.is_closed()
296 pr_closed = pull_request_latest.is_closed()
297
297
298 if pr_closed and (version or from_version):
298 if pr_closed and (version or from_version):
299 # not allow to browse versions
299 # not allow to browse versions
300 raise HTTPFound(h.route_path(
300 raise HTTPFound(h.route_path(
301 'pullrequest_show', repo_name=self.db_repo_name,
301 'pullrequest_show', repo_name=self.db_repo_name,
302 pull_request_id=pull_request_id))
302 pull_request_id=pull_request_id))
303
303
304 versions = pull_request_display_obj.versions()
304 versions = pull_request_display_obj.versions()
305 # used to store per-commit range diffs
305 # used to store per-commit range diffs
306 c.changes = collections.OrderedDict()
306 c.changes = collections.OrderedDict()
307 c.range_diff_on = self.request.GET.get('range-diff') == "1"
307 c.range_diff_on = self.request.GET.get('range-diff') == "1"
308
308
309 c.at_version = at_version
309 c.at_version = at_version
310 c.at_version_num = (at_version
310 c.at_version_num = (at_version
311 if at_version and at_version != 'latest'
311 if at_version and at_version != 'latest'
312 else None)
312 else None)
313 c.at_version_pos = ChangesetComment.get_index_from_version(
313 c.at_version_pos = ChangesetComment.get_index_from_version(
314 c.at_version_num, versions)
314 c.at_version_num, versions)
315
315
316 (prev_pull_request_latest,
316 (prev_pull_request_latest,
317 prev_pull_request_at_ver,
317 prev_pull_request_at_ver,
318 prev_pull_request_display_obj,
318 prev_pull_request_display_obj,
319 prev_at_version) = PullRequestModel().get_pr_version(
319 prev_at_version) = PullRequestModel().get_pr_version(
320 pull_request_id, version=from_version)
320 pull_request_id, version=from_version)
321
321
322 c.from_version = prev_at_version
322 c.from_version = prev_at_version
323 c.from_version_num = (prev_at_version
323 c.from_version_num = (prev_at_version
324 if prev_at_version and prev_at_version != 'latest'
324 if prev_at_version and prev_at_version != 'latest'
325 else None)
325 else None)
326 c.from_version_pos = ChangesetComment.get_index_from_version(
326 c.from_version_pos = ChangesetComment.get_index_from_version(
327 c.from_version_num, versions)
327 c.from_version_num, versions)
328
328
329 # define if we're in COMPARE mode or VIEW at version mode
329 # define if we're in COMPARE mode or VIEW at version mode
330 compare = at_version != prev_at_version
330 compare = at_version != prev_at_version
331
331
332 # pull_requests repo_name we opened it against
332 # pull_requests repo_name we opened it against
333 # ie. target_repo must match
333 # ie. target_repo must match
334 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
334 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
335 raise HTTPNotFound()
335 raise HTTPNotFound()
336
336
337 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
337 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
338 pull_request_at_ver)
338 pull_request_at_ver)
339
339
340 c.pull_request = pull_request_display_obj
340 c.pull_request = pull_request_display_obj
341 c.renderer = pull_request_at_ver.description_renderer or c.renderer
341 c.renderer = pull_request_at_ver.description_renderer or c.renderer
342 c.pull_request_latest = pull_request_latest
342 c.pull_request_latest = pull_request_latest
343
343
344 if compare or (at_version and not at_version == 'latest'):
344 if compare or (at_version and not at_version == 'latest'):
345 c.allowed_to_change_status = False
345 c.allowed_to_change_status = False
346 c.allowed_to_update = False
346 c.allowed_to_update = False
347 c.allowed_to_merge = False
347 c.allowed_to_merge = False
348 c.allowed_to_delete = False
348 c.allowed_to_delete = False
349 c.allowed_to_comment = False
349 c.allowed_to_comment = False
350 c.allowed_to_close = False
350 c.allowed_to_close = False
351 else:
351 else:
352 can_change_status = PullRequestModel().check_user_change_status(
352 can_change_status = PullRequestModel().check_user_change_status(
353 pull_request_at_ver, self._rhodecode_user)
353 pull_request_at_ver, self._rhodecode_user)
354 c.allowed_to_change_status = can_change_status and not pr_closed
354 c.allowed_to_change_status = can_change_status and not pr_closed
355
355
356 c.allowed_to_update = PullRequestModel().check_user_update(
356 c.allowed_to_update = PullRequestModel().check_user_update(
357 pull_request_latest, self._rhodecode_user) and not pr_closed
357 pull_request_latest, self._rhodecode_user) and not pr_closed
358 c.allowed_to_merge = PullRequestModel().check_user_merge(
358 c.allowed_to_merge = PullRequestModel().check_user_merge(
359 pull_request_latest, self._rhodecode_user) and not pr_closed
359 pull_request_latest, self._rhodecode_user) and not pr_closed
360 c.allowed_to_delete = PullRequestModel().check_user_delete(
360 c.allowed_to_delete = PullRequestModel().check_user_delete(
361 pull_request_latest, self._rhodecode_user) and not pr_closed
361 pull_request_latest, self._rhodecode_user) and not pr_closed
362 c.allowed_to_comment = not pr_closed
362 c.allowed_to_comment = not pr_closed
363 c.allowed_to_close = c.allowed_to_merge and not pr_closed
363 c.allowed_to_close = c.allowed_to_merge and not pr_closed
364
364
365 c.forbid_adding_reviewers = False
365 c.forbid_adding_reviewers = False
366 c.forbid_author_to_review = False
366 c.forbid_author_to_review = False
367 c.forbid_commit_author_to_review = False
367 c.forbid_commit_author_to_review = False
368
368
369 if pull_request_latest.reviewer_data and \
369 if pull_request_latest.reviewer_data and \
370 'rules' in pull_request_latest.reviewer_data:
370 'rules' in pull_request_latest.reviewer_data:
371 rules = pull_request_latest.reviewer_data['rules'] or {}
371 rules = pull_request_latest.reviewer_data['rules'] or {}
372 try:
372 try:
373 c.forbid_adding_reviewers = rules.get(
373 c.forbid_adding_reviewers = rules.get(
374 'forbid_adding_reviewers')
374 'forbid_adding_reviewers')
375 c.forbid_author_to_review = rules.get(
375 c.forbid_author_to_review = rules.get(
376 'forbid_author_to_review')
376 'forbid_author_to_review')
377 c.forbid_commit_author_to_review = rules.get(
377 c.forbid_commit_author_to_review = rules.get(
378 'forbid_commit_author_to_review')
378 'forbid_commit_author_to_review')
379 except Exception:
379 except Exception:
380 pass
380 pass
381
381
382 # check merge capabilities
382 # check merge capabilities
383 _merge_check = MergeCheck.validate(
383 _merge_check = MergeCheck.validate(
384 pull_request_latest, auth_user=self._rhodecode_user,
384 pull_request_latest, auth_user=self._rhodecode_user,
385 translator=self.request.translate,
385 translator=self.request.translate,
386 force_shadow_repo_refresh=force_refresh)
386 force_shadow_repo_refresh=force_refresh)
387 c.pr_merge_errors = _merge_check.error_details
387 c.pr_merge_errors = _merge_check.error_details
388 c.pr_merge_possible = not _merge_check.failed
388 c.pr_merge_possible = not _merge_check.failed
389 c.pr_merge_message = _merge_check.merge_msg
389 c.pr_merge_message = _merge_check.merge_msg
390
390
391 c.pr_merge_info = MergeCheck.get_merge_conditions(
391 c.pr_merge_info = MergeCheck.get_merge_conditions(
392 pull_request_latest, translator=self.request.translate)
392 pull_request_latest, translator=self.request.translate)
393
393
394 c.pull_request_review_status = _merge_check.review_status
394 c.pull_request_review_status = _merge_check.review_status
395 if merge_checks:
395 if merge_checks:
396 self.request.override_renderer = \
396 self.request.override_renderer = \
397 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
397 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
398 return self._get_template_context(c)
398 return self._get_template_context(c)
399
399
400 comments_model = CommentsModel()
400 comments_model = CommentsModel()
401
401
402 # reviewers and statuses
402 # reviewers and statuses
403 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
403 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
404 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
404 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
405
405
406 # GENERAL COMMENTS with versions #
406 # GENERAL COMMENTS with versions #
407 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
407 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
408 q = q.order_by(ChangesetComment.comment_id.asc())
408 q = q.order_by(ChangesetComment.comment_id.asc())
409 general_comments = q
409 general_comments = q
410
410
411 # pick comments we want to render at current version
411 # pick comments we want to render at current version
412 c.comment_versions = comments_model.aggregate_comments(
412 c.comment_versions = comments_model.aggregate_comments(
413 general_comments, versions, c.at_version_num)
413 general_comments, versions, c.at_version_num)
414 c.comments = c.comment_versions[c.at_version_num]['until']
414 c.comments = c.comment_versions[c.at_version_num]['until']
415
415
416 # INLINE COMMENTS with versions #
416 # INLINE COMMENTS with versions #
417 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
417 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
418 q = q.order_by(ChangesetComment.comment_id.asc())
418 q = q.order_by(ChangesetComment.comment_id.asc())
419 inline_comments = q
419 inline_comments = q
420
420
421 c.inline_versions = comments_model.aggregate_comments(
421 c.inline_versions = comments_model.aggregate_comments(
422 inline_comments, versions, c.at_version_num, inline=True)
422 inline_comments, versions, c.at_version_num, inline=True)
423
423
424 # inject latest version
424 # inject latest version
425 latest_ver = PullRequest.get_pr_display_object(
425 latest_ver = PullRequest.get_pr_display_object(
426 pull_request_latest, pull_request_latest)
426 pull_request_latest, pull_request_latest)
427
427
428 c.versions = versions + [latest_ver]
428 c.versions = versions + [latest_ver]
429
429
430 # if we use version, then do not show later comments
430 # if we use version, then do not show later comments
431 # than current version
431 # than current version
432 display_inline_comments = collections.defaultdict(
432 display_inline_comments = collections.defaultdict(
433 lambda: collections.defaultdict(list))
433 lambda: collections.defaultdict(list))
434 for co in inline_comments:
434 for co in inline_comments:
435 if c.at_version_num:
435 if c.at_version_num:
436 # pick comments that are at least UPTO given version, so we
436 # pick comments that are at least UPTO given version, so we
437 # don't render comments for higher version
437 # don't render comments for higher version
438 should_render = co.pull_request_version_id and \
438 should_render = co.pull_request_version_id and \
439 co.pull_request_version_id <= c.at_version_num
439 co.pull_request_version_id <= c.at_version_num
440 else:
440 else:
441 # showing all, for 'latest'
441 # showing all, for 'latest'
442 should_render = True
442 should_render = True
443
443
444 if should_render:
444 if should_render:
445 display_inline_comments[co.f_path][co.line_no].append(co)
445 display_inline_comments[co.f_path][co.line_no].append(co)
446
446
447 # load diff data into template context, if we use compare mode then
447 # load diff data into template context, if we use compare mode then
448 # diff is calculated based on changes between versions of PR
448 # diff is calculated based on changes between versions of PR
449
449
450 source_repo = pull_request_at_ver.source_repo
450 source_repo = pull_request_at_ver.source_repo
451 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
451 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
452
452
453 target_repo = pull_request_at_ver.target_repo
453 target_repo = pull_request_at_ver.target_repo
454 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
454 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
455
455
456 if compare:
456 if compare:
457 # in compare switch the diff base to latest commit from prev version
457 # in compare switch the diff base to latest commit from prev version
458 target_ref_id = prev_pull_request_display_obj.revisions[0]
458 target_ref_id = prev_pull_request_display_obj.revisions[0]
459
459
460 # despite opening commits for bookmarks/branches/tags, we always
460 # despite opening commits for bookmarks/branches/tags, we always
461 # convert this to rev to prevent changes after bookmark or branch change
461 # convert this to rev to prevent changes after bookmark or branch change
462 c.source_ref_type = 'rev'
462 c.source_ref_type = 'rev'
463 c.source_ref = source_ref_id
463 c.source_ref = source_ref_id
464
464
465 c.target_ref_type = 'rev'
465 c.target_ref_type = 'rev'
466 c.target_ref = target_ref_id
466 c.target_ref = target_ref_id
467
467
468 c.source_repo = source_repo
468 c.source_repo = source_repo
469 c.target_repo = target_repo
469 c.target_repo = target_repo
470
470
471 c.commit_ranges = []
471 c.commit_ranges = []
472 source_commit = EmptyCommit()
472 source_commit = EmptyCommit()
473 target_commit = EmptyCommit()
473 target_commit = EmptyCommit()
474 c.missing_requirements = False
474 c.missing_requirements = False
475
475
476 source_scm = source_repo.scm_instance()
476 source_scm = source_repo.scm_instance()
477 target_scm = target_repo.scm_instance()
477 target_scm = target_repo.scm_instance()
478
478
479 shadow_scm = None
479 shadow_scm = None
480 try:
480 try:
481 shadow_scm = pull_request_latest.get_shadow_repo()
481 shadow_scm = pull_request_latest.get_shadow_repo()
482 except Exception:
482 except Exception:
483 log.debug('Failed to get shadow repo', exc_info=True)
483 log.debug('Failed to get shadow repo', exc_info=True)
484 # try first the existing source_repo, and then shadow
484 # try first the existing source_repo, and then shadow
485 # repo if we can obtain one
485 # repo if we can obtain one
486 commits_source_repo = source_scm or shadow_scm
486 commits_source_repo = source_scm or shadow_scm
487
487
488 c.commits_source_repo = commits_source_repo
488 c.commits_source_repo = commits_source_repo
489 c.ancestor = None # set it to None, to hide it from PR view
489 c.ancestor = None # set it to None, to hide it from PR view
490
490
491 # empty version means latest, so we keep this to prevent
491 # empty version means latest, so we keep this to prevent
492 # double caching
492 # double caching
493 version_normalized = version or 'latest'
493 version_normalized = version or 'latest'
494 from_version_normalized = from_version or 'latest'
494 from_version_normalized = from_version or 'latest'
495
495
496 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
496 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
497 cache_file_path = diff_cache_exist(
497 cache_file_path = diff_cache_exist(
498 cache_path, 'pull_request', pull_request_id, version_normalized,
498 cache_path, 'pull_request', pull_request_id, version_normalized,
499 from_version_normalized, source_ref_id, target_ref_id,
499 from_version_normalized, source_ref_id, target_ref_id,
500 hide_whitespace_changes, diff_context, c.fulldiff)
500 hide_whitespace_changes, diff_context, c.fulldiff)
501
501
502 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
502 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
503 force_recache = self.get_recache_flag()
503 force_recache = self.get_recache_flag()
504
504
505 cached_diff = None
505 cached_diff = None
506 if caching_enabled:
506 if caching_enabled:
507 cached_diff = load_cached_diff(cache_file_path)
507 cached_diff = load_cached_diff(cache_file_path)
508
508
509 has_proper_commit_cache = (
509 has_proper_commit_cache = (
510 cached_diff and cached_diff.get('commits')
510 cached_diff and cached_diff.get('commits')
511 and len(cached_diff.get('commits', [])) == 5
511 and len(cached_diff.get('commits', [])) == 5
512 and cached_diff.get('commits')[0]
512 and cached_diff.get('commits')[0]
513 and cached_diff.get('commits')[3])
513 and cached_diff.get('commits')[3])
514
514
515 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
515 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
516 diff_commit_cache = \
516 diff_commit_cache = \
517 (ancestor_commit, commit_cache, missing_requirements,
517 (ancestor_commit, commit_cache, missing_requirements,
518 source_commit, target_commit) = cached_diff['commits']
518 source_commit, target_commit) = cached_diff['commits']
519 else:
519 else:
520 diff_commit_cache = \
520 diff_commit_cache = \
521 (ancestor_commit, commit_cache, missing_requirements,
521 (ancestor_commit, commit_cache, missing_requirements,
522 source_commit, target_commit) = self.get_commits(
522 source_commit, target_commit) = self.get_commits(
523 commits_source_repo,
523 commits_source_repo,
524 pull_request_at_ver,
524 pull_request_at_ver,
525 source_commit,
525 source_commit,
526 source_ref_id,
526 source_ref_id,
527 source_scm,
527 source_scm,
528 target_commit,
528 target_commit,
529 target_ref_id,
529 target_ref_id,
530 target_scm)
530 target_scm)
531
531
532 # register our commit range
532 # register our commit range
533 for comm in commit_cache.values():
533 for comm in commit_cache.values():
534 c.commit_ranges.append(comm)
534 c.commit_ranges.append(comm)
535
535
536 c.missing_requirements = missing_requirements
536 c.missing_requirements = missing_requirements
537 c.ancestor_commit = ancestor_commit
537 c.ancestor_commit = ancestor_commit
538 c.statuses = source_repo.statuses(
538 c.statuses = source_repo.statuses(
539 [x.raw_id for x in c.commit_ranges])
539 [x.raw_id for x in c.commit_ranges])
540
540
541 # auto collapse if we have more than limit
541 # auto collapse if we have more than limit
542 collapse_limit = diffs.DiffProcessor._collapse_commits_over
542 collapse_limit = diffs.DiffProcessor._collapse_commits_over
543 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
543 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
544 c.compare_mode = compare
544 c.compare_mode = compare
545
545
546 # diff_limit is the old behavior, will cut off the whole diff
546 # diff_limit is the old behavior, will cut off the whole diff
547 # if the limit is applied otherwise will just hide the
547 # if the limit is applied otherwise will just hide the
548 # big files from the front-end
548 # big files from the front-end
549 diff_limit = c.visual.cut_off_limit_diff
549 diff_limit = c.visual.cut_off_limit_diff
550 file_limit = c.visual.cut_off_limit_file
550 file_limit = c.visual.cut_off_limit_file
551
551
552 c.missing_commits = False
552 c.missing_commits = False
553 if (c.missing_requirements
553 if (c.missing_requirements
554 or isinstance(source_commit, EmptyCommit)
554 or isinstance(source_commit, EmptyCommit)
555 or source_commit == target_commit):
555 or source_commit == target_commit):
556
556
557 c.missing_commits = True
557 c.missing_commits = True
558 else:
558 else:
559 c.inline_comments = display_inline_comments
559 c.inline_comments = display_inline_comments
560
560
561 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
561 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
562 if not force_recache and has_proper_diff_cache:
562 if not force_recache and has_proper_diff_cache:
563 c.diffset = cached_diff['diff']
563 c.diffset = cached_diff['diff']
564 (ancestor_commit, commit_cache, missing_requirements,
564 (ancestor_commit, commit_cache, missing_requirements,
565 source_commit, target_commit) = cached_diff['commits']
565 source_commit, target_commit) = cached_diff['commits']
566 else:
566 else:
567 c.diffset = self._get_diffset(
567 c.diffset = self._get_diffset(
568 c.source_repo.repo_name, commits_source_repo,
568 c.source_repo.repo_name, commits_source_repo,
569 source_ref_id, target_ref_id,
569 source_ref_id, target_ref_id,
570 target_commit, source_commit,
570 target_commit, source_commit,
571 diff_limit, file_limit, c.fulldiff,
571 diff_limit, file_limit, c.fulldiff,
572 hide_whitespace_changes, diff_context)
572 hide_whitespace_changes, diff_context)
573
573
574 # save cached diff
574 # save cached diff
575 if caching_enabled:
575 if caching_enabled:
576 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
576 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
577
577
578 c.limited_diff = c.diffset.limited_diff
578 c.limited_diff = c.diffset.limited_diff
579
579
580 # calculate removed files that are bound to comments
580 # calculate removed files that are bound to comments
581 comment_deleted_files = [
581 comment_deleted_files = [
582 fname for fname in display_inline_comments
582 fname for fname in display_inline_comments
583 if fname not in c.diffset.file_stats]
583 if fname not in c.diffset.file_stats]
584
584
585 c.deleted_files_comments = collections.defaultdict(dict)
585 c.deleted_files_comments = collections.defaultdict(dict)
586 for fname, per_line_comments in display_inline_comments.items():
586 for fname, per_line_comments in display_inline_comments.items():
587 if fname in comment_deleted_files:
587 if fname in comment_deleted_files:
588 c.deleted_files_comments[fname]['stats'] = 0
588 c.deleted_files_comments[fname]['stats'] = 0
589 c.deleted_files_comments[fname]['comments'] = list()
589 c.deleted_files_comments[fname]['comments'] = list()
590 for lno, comments in per_line_comments.items():
590 for lno, comments in per_line_comments.items():
591 c.deleted_files_comments[fname]['comments'].extend(comments)
591 c.deleted_files_comments[fname]['comments'].extend(comments)
592
592
593 # maybe calculate the range diff
593 # maybe calculate the range diff
594 if c.range_diff_on:
594 if c.range_diff_on:
595 # TODO(marcink): set whitespace/context
595 # TODO(marcink): set whitespace/context
596 context_lcl = 3
596 context_lcl = 3
597 ign_whitespace_lcl = False
597 ign_whitespace_lcl = False
598
598
599 for commit in c.commit_ranges:
599 for commit in c.commit_ranges:
600 commit2 = commit
600 commit2 = commit
601 commit1 = commit.first_parent
601 commit1 = commit.first_parent
602
602
603 range_diff_cache_file_path = diff_cache_exist(
603 range_diff_cache_file_path = diff_cache_exist(
604 cache_path, 'diff', commit.raw_id,
604 cache_path, 'diff', commit.raw_id,
605 ign_whitespace_lcl, context_lcl, c.fulldiff)
605 ign_whitespace_lcl, context_lcl, c.fulldiff)
606
606
607 cached_diff = None
607 cached_diff = None
608 if caching_enabled:
608 if caching_enabled:
609 cached_diff = load_cached_diff(range_diff_cache_file_path)
609 cached_diff = load_cached_diff(range_diff_cache_file_path)
610
610
611 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
611 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
612 if not force_recache and has_proper_diff_cache:
612 if not force_recache and has_proper_diff_cache:
613 diffset = cached_diff['diff']
613 diffset = cached_diff['diff']
614 else:
614 else:
615 diffset = self._get_range_diffset(
615 diffset = self._get_range_diffset(
616 source_scm, source_repo,
616 source_scm, source_repo,
617 commit1, commit2, diff_limit, file_limit,
617 commit1, commit2, diff_limit, file_limit,
618 c.fulldiff, ign_whitespace_lcl, context_lcl
618 c.fulldiff, ign_whitespace_lcl, context_lcl
619 )
619 )
620
620
621 # save cached diff
621 # save cached diff
622 if caching_enabled:
622 if caching_enabled:
623 cache_diff(range_diff_cache_file_path, diffset, None)
623 cache_diff(range_diff_cache_file_path, diffset, None)
624
624
625 c.changes[commit.raw_id] = diffset
625 c.changes[commit.raw_id] = diffset
626
626
627 # this is a hack to properly display links, when creating PR, the
627 # this is a hack to properly display links, when creating PR, the
628 # compare view and others uses different notation, and
628 # compare view and others uses different notation, and
629 # compare_commits.mako renders links based on the target_repo.
629 # compare_commits.mako renders links based on the target_repo.
630 # We need to swap that here to generate it properly on the html side
630 # We need to swap that here to generate it properly on the html side
631 c.target_repo = c.source_repo
631 c.target_repo = c.source_repo
632
632
633 c.commit_statuses = ChangesetStatus.STATUSES
633 c.commit_statuses = ChangesetStatus.STATUSES
634
634
635 c.show_version_changes = not pr_closed
635 c.show_version_changes = not pr_closed
636 if c.show_version_changes:
636 if c.show_version_changes:
637 cur_obj = pull_request_at_ver
637 cur_obj = pull_request_at_ver
638 prev_obj = prev_pull_request_at_ver
638 prev_obj = prev_pull_request_at_ver
639
639
640 old_commit_ids = prev_obj.revisions
640 old_commit_ids = prev_obj.revisions
641 new_commit_ids = cur_obj.revisions
641 new_commit_ids = cur_obj.revisions
642 commit_changes = PullRequestModel()._calculate_commit_id_changes(
642 commit_changes = PullRequestModel()._calculate_commit_id_changes(
643 old_commit_ids, new_commit_ids)
643 old_commit_ids, new_commit_ids)
644 c.commit_changes_summary = commit_changes
644 c.commit_changes_summary = commit_changes
645
645
646 # calculate the diff for commits between versions
646 # calculate the diff for commits between versions
647 c.commit_changes = []
647 c.commit_changes = []
648 mark = lambda cs, fw: list(
648 mark = lambda cs, fw: list(
649 h.itertools.izip_longest([], cs, fillvalue=fw))
649 h.itertools.izip_longest([], cs, fillvalue=fw))
650 for c_type, raw_id in mark(commit_changes.added, 'a') \
650 for c_type, raw_id in mark(commit_changes.added, 'a') \
651 + mark(commit_changes.removed, 'r') \
651 + mark(commit_changes.removed, 'r') \
652 + mark(commit_changes.common, 'c'):
652 + mark(commit_changes.common, 'c'):
653
653
654 if raw_id in commit_cache:
654 if raw_id in commit_cache:
655 commit = commit_cache[raw_id]
655 commit = commit_cache[raw_id]
656 else:
656 else:
657 try:
657 try:
658 commit = commits_source_repo.get_commit(raw_id)
658 commit = commits_source_repo.get_commit(raw_id)
659 except CommitDoesNotExistError:
659 except CommitDoesNotExistError:
660 # in case we fail extracting still use "dummy" commit
660 # in case we fail extracting still use "dummy" commit
661 # for display in commit diff
661 # for display in commit diff
662 commit = h.AttributeDict(
662 commit = h.AttributeDict(
663 {'raw_id': raw_id,
663 {'raw_id': raw_id,
664 'message': 'EMPTY or MISSING COMMIT'})
664 'message': 'EMPTY or MISSING COMMIT'})
665 c.commit_changes.append([c_type, commit])
665 c.commit_changes.append([c_type, commit])
666
666
667 # current user review statuses for each version
667 # current user review statuses for each version
668 c.review_versions = {}
668 c.review_versions = {}
669 if self._rhodecode_user.user_id in allowed_reviewers:
669 if self._rhodecode_user.user_id in allowed_reviewers:
670 for co in general_comments:
670 for co in general_comments:
671 if co.author.user_id == self._rhodecode_user.user_id:
671 if co.author.user_id == self._rhodecode_user.user_id:
672 status = co.status_change
672 status = co.status_change
673 if status:
673 if status:
674 _ver_pr = status[0].comment.pull_request_version_id
674 _ver_pr = status[0].comment.pull_request_version_id
675 c.review_versions[_ver_pr] = status[0]
675 c.review_versions[_ver_pr] = status[0]
676
676
677 return self._get_template_context(c)
677 return self._get_template_context(c)
678
678
679 def get_commits(
679 def get_commits(
680 self, commits_source_repo, pull_request_at_ver, source_commit,
680 self, commits_source_repo, pull_request_at_ver, source_commit,
681 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
681 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
682 commit_cache = collections.OrderedDict()
682 commit_cache = collections.OrderedDict()
683 missing_requirements = False
683 missing_requirements = False
684 try:
684 try:
685 pre_load = ["author", "branch", "date", "message", "parents"]
685 pre_load = ["author", "branch", "date", "message", "parents"]
686 show_revs = pull_request_at_ver.revisions
686 show_revs = pull_request_at_ver.revisions
687 for rev in show_revs:
687 for rev in show_revs:
688 comm = commits_source_repo.get_commit(
688 comm = commits_source_repo.get_commit(
689 commit_id=rev, pre_load=pre_load)
689 commit_id=rev, pre_load=pre_load)
690 commit_cache[comm.raw_id] = comm
690 commit_cache[comm.raw_id] = comm
691
691
692 # Order here matters, we first need to get target, and then
692 # Order here matters, we first need to get target, and then
693 # the source
693 # the source
694 target_commit = commits_source_repo.get_commit(
694 target_commit = commits_source_repo.get_commit(
695 commit_id=safe_str(target_ref_id))
695 commit_id=safe_str(target_ref_id))
696
696
697 source_commit = commits_source_repo.get_commit(
697 source_commit = commits_source_repo.get_commit(
698 commit_id=safe_str(source_ref_id))
698 commit_id=safe_str(source_ref_id))
699 except CommitDoesNotExistError:
699 except CommitDoesNotExistError:
700 log.warning(
700 log.warning(
701 'Failed to get commit from `{}` repo'.format(
701 'Failed to get commit from `{}` repo'.format(
702 commits_source_repo), exc_info=True)
702 commits_source_repo), exc_info=True)
703 except RepositoryRequirementError:
703 except RepositoryRequirementError:
704 log.warning(
704 log.warning(
705 'Failed to get all required data from repo', exc_info=True)
705 'Failed to get all required data from repo', exc_info=True)
706 missing_requirements = True
706 missing_requirements = True
707 ancestor_commit = None
707 ancestor_commit = None
708 try:
708 try:
709 ancestor_id = source_scm.get_common_ancestor(
709 ancestor_id = source_scm.get_common_ancestor(
710 source_commit.raw_id, target_commit.raw_id, target_scm)
710 source_commit.raw_id, target_commit.raw_id, target_scm)
711 ancestor_commit = source_scm.get_commit(ancestor_id)
711 ancestor_commit = source_scm.get_commit(ancestor_id)
712 except Exception:
712 except Exception:
713 ancestor_commit = None
713 ancestor_commit = None
714 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
714 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
715
715
716 def assure_not_empty_repo(self):
716 def assure_not_empty_repo(self):
717 _ = self.request.translate
717 _ = self.request.translate
718
718
719 try:
719 try:
720 self.db_repo.scm_instance().get_commit()
720 self.db_repo.scm_instance().get_commit()
721 except EmptyRepositoryError:
721 except EmptyRepositoryError:
722 h.flash(h.literal(_('There are no commits yet')),
722 h.flash(h.literal(_('There are no commits yet')),
723 category='warning')
723 category='warning')
724 raise HTTPFound(
724 raise HTTPFound(
725 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
725 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
726
726
727 @LoginRequired()
727 @LoginRequired()
728 @NotAnonymous()
728 @NotAnonymous()
729 @HasRepoPermissionAnyDecorator(
729 @HasRepoPermissionAnyDecorator(
730 'repository.read', 'repository.write', 'repository.admin')
730 'repository.read', 'repository.write', 'repository.admin')
731 @view_config(
731 @view_config(
732 route_name='pullrequest_new', request_method='GET',
732 route_name='pullrequest_new', request_method='GET',
733 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
733 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
734 def pull_request_new(self):
734 def pull_request_new(self):
735 _ = self.request.translate
735 _ = self.request.translate
736 c = self.load_default_context()
736 c = self.load_default_context()
737
737
738 self.assure_not_empty_repo()
738 self.assure_not_empty_repo()
739 source_repo = self.db_repo
739 source_repo = self.db_repo
740
740
741 commit_id = self.request.GET.get('commit')
741 commit_id = self.request.GET.get('commit')
742 branch_ref = self.request.GET.get('branch')
742 branch_ref = self.request.GET.get('branch')
743 bookmark_ref = self.request.GET.get('bookmark')
743 bookmark_ref = self.request.GET.get('bookmark')
744
744
745 try:
745 try:
746 source_repo_data = PullRequestModel().generate_repo_data(
746 source_repo_data = PullRequestModel().generate_repo_data(
747 source_repo, commit_id=commit_id,
747 source_repo, commit_id=commit_id,
748 branch=branch_ref, bookmark=bookmark_ref,
748 branch=branch_ref, bookmark=bookmark_ref,
749 translator=self.request.translate)
749 translator=self.request.translate)
750 except CommitDoesNotExistError as e:
750 except CommitDoesNotExistError as e:
751 log.exception(e)
751 log.exception(e)
752 h.flash(_('Commit does not exist'), 'error')
752 h.flash(_('Commit does not exist'), 'error')
753 raise HTTPFound(
753 raise HTTPFound(
754 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
754 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
755
755
756 default_target_repo = source_repo
756 default_target_repo = source_repo
757
757
758 if source_repo.parent:
758 if source_repo.parent:
759 parent_vcs_obj = source_repo.parent.scm_instance()
759 parent_vcs_obj = source_repo.parent.scm_instance()
760 if parent_vcs_obj and not parent_vcs_obj.is_empty():
760 if parent_vcs_obj and not parent_vcs_obj.is_empty():
761 # change default if we have a parent repo
761 # change default if we have a parent repo
762 default_target_repo = source_repo.parent
762 default_target_repo = source_repo.parent
763
763
764 target_repo_data = PullRequestModel().generate_repo_data(
764 target_repo_data = PullRequestModel().generate_repo_data(
765 default_target_repo, translator=self.request.translate)
765 default_target_repo, translator=self.request.translate)
766
766
767 selected_source_ref = source_repo_data['refs']['selected_ref']
767 selected_source_ref = source_repo_data['refs']['selected_ref']
768 title_source_ref = ''
768 title_source_ref = ''
769 if selected_source_ref:
769 if selected_source_ref:
770 title_source_ref = selected_source_ref.split(':', 2)[1]
770 title_source_ref = selected_source_ref.split(':', 2)[1]
771 c.default_title = PullRequestModel().generate_pullrequest_title(
771 c.default_title = PullRequestModel().generate_pullrequest_title(
772 source=source_repo.repo_name,
772 source=source_repo.repo_name,
773 source_ref=title_source_ref,
773 source_ref=title_source_ref,
774 target=default_target_repo.repo_name
774 target=default_target_repo.repo_name
775 )
775 )
776
776
777 c.default_repo_data = {
777 c.default_repo_data = {
778 'source_repo_name': source_repo.repo_name,
778 'source_repo_name': source_repo.repo_name,
779 'source_refs_json': json.dumps(source_repo_data),
779 'source_refs_json': json.dumps(source_repo_data),
780 'target_repo_name': default_target_repo.repo_name,
780 'target_repo_name': default_target_repo.repo_name,
781 'target_refs_json': json.dumps(target_repo_data),
781 'target_refs_json': json.dumps(target_repo_data),
782 }
782 }
783 c.default_source_ref = selected_source_ref
783 c.default_source_ref = selected_source_ref
784
784
785 return self._get_template_context(c)
785 return self._get_template_context(c)
786
786
787 @LoginRequired()
787 @LoginRequired()
788 @NotAnonymous()
788 @NotAnonymous()
789 @HasRepoPermissionAnyDecorator(
789 @HasRepoPermissionAnyDecorator(
790 'repository.read', 'repository.write', 'repository.admin')
790 'repository.read', 'repository.write', 'repository.admin')
791 @view_config(
791 @view_config(
792 route_name='pullrequest_repo_refs', request_method='GET',
792 route_name='pullrequest_repo_refs', request_method='GET',
793 renderer='json_ext', xhr=True)
793 renderer='json_ext', xhr=True)
794 def pull_request_repo_refs(self):
794 def pull_request_repo_refs(self):
795 self.load_default_context()
795 self.load_default_context()
796 target_repo_name = self.request.matchdict['target_repo_name']
796 target_repo_name = self.request.matchdict['target_repo_name']
797 repo = Repository.get_by_repo_name(target_repo_name)
797 repo = Repository.get_by_repo_name(target_repo_name)
798 if not repo:
798 if not repo:
799 raise HTTPNotFound()
799 raise HTTPNotFound()
800
800
801 target_perm = HasRepoPermissionAny(
801 target_perm = HasRepoPermissionAny(
802 'repository.read', 'repository.write', 'repository.admin')(
802 'repository.read', 'repository.write', 'repository.admin')(
803 target_repo_name)
803 target_repo_name)
804 if not target_perm:
804 if not target_perm:
805 raise HTTPNotFound()
805 raise HTTPNotFound()
806
806
807 return PullRequestModel().generate_repo_data(
807 return PullRequestModel().generate_repo_data(
808 repo, translator=self.request.translate)
808 repo, translator=self.request.translate)
809
809
810 @LoginRequired()
810 @LoginRequired()
811 @NotAnonymous()
811 @NotAnonymous()
812 @HasRepoPermissionAnyDecorator(
812 @HasRepoPermissionAnyDecorator(
813 'repository.read', 'repository.write', 'repository.admin')
813 'repository.read', 'repository.write', 'repository.admin')
814 @view_config(
814 @view_config(
815 route_name='pullrequest_repo_targets', request_method='GET',
815 route_name='pullrequest_repo_targets', request_method='GET',
816 renderer='json_ext', xhr=True)
816 renderer='json_ext', xhr=True)
817 def pullrequest_repo_targets(self):
817 def pullrequest_repo_targets(self):
818 _ = self.request.translate
818 _ = self.request.translate
819 filter_query = self.request.GET.get('query')
819 filter_query = self.request.GET.get('query')
820
820
821 # get the parents
821 # get the parents
822 parent_target_repos = []
822 parent_target_repos = []
823 if self.db_repo.parent:
823 if self.db_repo.parent:
824 parents_query = Repository.query() \
824 parents_query = Repository.query() \
825 .order_by(func.length(Repository.repo_name)) \
825 .order_by(func.length(Repository.repo_name)) \
826 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
826 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
827
827
828 if filter_query:
828 if filter_query:
829 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
829 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
830 parents_query = parents_query.filter(
830 parents_query = parents_query.filter(
831 Repository.repo_name.ilike(ilike_expression))
831 Repository.repo_name.ilike(ilike_expression))
832 parents = parents_query.limit(20).all()
832 parents = parents_query.limit(20).all()
833
833
834 for parent in parents:
834 for parent in parents:
835 parent_vcs_obj = parent.scm_instance()
835 parent_vcs_obj = parent.scm_instance()
836 if parent_vcs_obj and not parent_vcs_obj.is_empty():
836 if parent_vcs_obj and not parent_vcs_obj.is_empty():
837 parent_target_repos.append(parent)
837 parent_target_repos.append(parent)
838
838
839 # get other forks, and repo itself
839 # get other forks, and repo itself
840 query = Repository.query() \
840 query = Repository.query() \
841 .order_by(func.length(Repository.repo_name)) \
841 .order_by(func.length(Repository.repo_name)) \
842 .filter(
842 .filter(
843 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
843 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
844 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
844 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
845 ) \
845 ) \
846 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
846 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
847
847
848 if filter_query:
848 if filter_query:
849 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
849 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
850 query = query.filter(Repository.repo_name.ilike(ilike_expression))
850 query = query.filter(Repository.repo_name.ilike(ilike_expression))
851
851
852 limit = max(20 - len(parent_target_repos), 5) # not less then 5
852 limit = max(20 - len(parent_target_repos), 5) # not less then 5
853 target_repos = query.limit(limit).all()
853 target_repos = query.limit(limit).all()
854
854
855 all_target_repos = target_repos + parent_target_repos
855 all_target_repos = target_repos + parent_target_repos
856
856
857 repos = []
857 repos = []
858 for obj in ScmModel().get_repos(all_target_repos):
858 for obj in ScmModel().get_repos(all_target_repos):
859 repos.append({
859 repos.append({
860 'id': obj['name'],
860 'id': obj['name'],
861 'text': obj['name'],
861 'text': obj['name'],
862 'type': 'repo',
862 'type': 'repo',
863 'repo_id': obj['dbrepo']['repo_id'],
863 'repo_id': obj['dbrepo']['repo_id'],
864 'repo_type': obj['dbrepo']['repo_type'],
864 'repo_type': obj['dbrepo']['repo_type'],
865 'private': obj['dbrepo']['private'],
865 'private': obj['dbrepo']['private'],
866
866
867 })
867 })
868
868
869 data = {
869 data = {
870 'more': False,
870 'more': False,
871 'results': [{
871 'results': [{
872 'text': _('Repositories'),
872 'text': _('Repositories'),
873 'children': repos
873 'children': repos
874 }] if repos else []
874 }] if repos else []
875 }
875 }
876 return data
876 return data
877
877
@LoginRequired()
@NotAnonymous()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@CSRFRequired()
@view_config(
    route_name='pullrequest_create', request_method='POST',
    renderer=None)
def pull_request_create(self):
    """
    Handle POST of the new-pull-request form.

    Validates the submitted form, re-checks read permissions on both the
    source and the target repository, recalculates the common ancestor and
    the default reviewers, then creates the pull request and redirects to
    its summary page. On any validation or permission failure the user is
    flashed an error and redirected back to the new-pull-request form.
    """
    _ = self.request.translate
    self.assure_not_empty_repo()
    self.load_default_context()

    controls = peppercorn.parse(self.request.POST.items())

    try:
        form = PullRequestForm(
            self.request.translate, self.db_repo.repo_id)()
        _form = form.to_python(controls)
    except formencode.Invalid as errors:
        if errors.error_dict.get('revisions'):
            msg = 'Revisions: %s' % errors.error_dict['revisions']
        elif errors.error_dict.get('pullrequest_title'):
            msg = errors.error_dict.get('pullrequest_title')
        else:
            msg = _('Error creating pull request: {}').format(errors)
        log.exception(msg)
        h.flash(msg, 'error')

        # would rather just go back to form ...
        raise HTTPFound(
            h.route_path('pullrequest_new', repo_name=self.db_repo_name))

    source_repo = _form['source_repo']
    source_ref = _form['source_ref']
    target_repo = _form['target_repo']
    target_ref = _form['target_ref']
    # revisions arrive newest-first from the form; store oldest-first
    commit_ids = _form['revisions'][::-1]

    # find the ancestor for this pr
    source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
    target_db_repo = Repository.get_by_repo_name(_form['target_repo'])

    # re-check permissions again here
    # source_repo we must have read permissions

    source_perm = HasRepoPermissionAny(
        'repository.read',
        'repository.write', 'repository.admin')(source_db_repo.repo_name)
    if not source_perm:
        # FIX: translate the message template first, then format it.
        # Calling .format() inside _() formats before the gettext catalog
        # lookup, so the message could never actually be translated.
        msg = _('Not Enough permissions to source repo `{}`.').format(
            source_db_repo.repo_name)
        h.flash(msg, category='error')
        # copy the args back to redirect
        org_query = self.request.GET.mixed()
        raise HTTPFound(
            h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                         _query=org_query))

    # target repo we must have read permissions, and also later on
    # we want to check branch permissions here
    target_perm = HasRepoPermissionAny(
        'repository.read',
        'repository.write', 'repository.admin')(target_db_repo.repo_name)
    if not target_perm:
        # FIX: same translate-then-format ordering as the source repo case
        msg = _('Not Enough permissions to target repo `{}`.').format(
            target_db_repo.repo_name)
        h.flash(msg, category='error')
        # copy the args back to redirect
        org_query = self.request.GET.mixed()
        raise HTTPFound(
            h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                         _query=org_query))

    source_scm = source_db_repo.scm_instance()
    target_scm = target_db_repo.scm_instance()

    source_commit = source_scm.get_commit(source_ref.split(':')[-1])
    target_commit = target_scm.get_commit(target_ref.split(':')[-1])

    ancestor = source_scm.get_common_ancestor(
        source_commit.raw_id, target_commit.raw_id, target_scm)

    # recalculate target ref based on ancestor
    target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
    target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))

    get_default_reviewers_data, validate_default_reviewers = \
        PullRequestModel().get_reviewer_functions()

    # recalculate reviewers logic, to make sure we can validate this
    reviewer_rules = get_default_reviewers_data(
        self._rhodecode_db_user, source_db_repo,
        source_commit, target_db_repo, target_commit)

    given_reviewers = _form['review_members']
    reviewers = validate_default_reviewers(
        given_reviewers, reviewer_rules)

    pullrequest_title = _form['pullrequest_title']
    title_source_ref = source_ref.split(':', 2)[1]
    if not pullrequest_title:
        pullrequest_title = PullRequestModel().generate_pullrequest_title(
            source=source_repo,
            source_ref=title_source_ref,
            target=target_repo
        )

    description = _form['pullrequest_desc']
    description_renderer = _form['description_renderer']

    try:
        pull_request = PullRequestModel().create(
            created_by=self._rhodecode_user.user_id,
            source_repo=source_repo,
            source_ref=source_ref,
            target_repo=target_repo,
            target_ref=target_ref,
            revisions=commit_ids,
            reviewers=reviewers,
            title=pullrequest_title,
            description=description,
            description_renderer=description_renderer,
            reviewer_data=reviewer_rules,
            auth_user=self._rhodecode_user
        )
        Session().commit()

        h.flash(_('Successfully opened new pull request'),
                category='success')
    except Exception:
        msg = _('Error occurred during creation of this pull request.')
        log.exception(msg)
        h.flash(msg, category='error')

        # copy the args back to redirect
        org_query = self.request.GET.mixed()
        raise HTTPFound(
            h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                         _query=org_query))

    raise HTTPFound(
        h.route_path('pullrequest_show', repo_name=target_repo,
                     pull_request_id=pull_request.pull_request_id))
1022
1022
@LoginRequired()
@NotAnonymous()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@CSRFRequired()
@view_config(
    route_name='pullrequest_update', request_method='POST',
    renderer='json_ext')
def pull_request_update(self):
    """
    Dispatch a POSTed pull-request update: reviewer changes, a commit
    refresh, or a title/description edit. Returns ``True`` (JSON) when a
    branch was taken; closed pull requests only get an error flash.
    """
    _ = self.request.translate
    pull_request = PullRequest.get_or_404(
        self.request.matchdict['pull_request_id'])

    self.load_default_context()

    # closed pull requests are immutable; flash and bail out early
    if pull_request.is_closed():
        log.debug('update: forbidden because pull request is closed')
        h.flash(_(u'Cannot update closed pull requests.'), category='error')
        return True

    # only owner or admin can update it
    if not PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user):
        raise HTTPForbidden()

    controls = peppercorn.parse(self.request.POST.items())
    post_params = self.request.POST

    if 'review_members' in controls:
        self._update_reviewers(
            pull_request, controls['review_members'],
            pull_request.reviewer_data)
    elif str2bool(post_params.get('update_commits', 'false')):
        self._update_commits(pull_request)
    elif str2bool(post_params.get('edit_pull_request', 'false')):
        self._edit_pull_request(pull_request)
    else:
        # no recognized action flag in the POST body
        raise HTTPBadRequest()
    return True
1062
1062
def _edit_pull_request(self, pull_request):
    """Apply a title/description edit posted in the request body."""
    _ = self.request.translate
    post_params = self.request.POST

    try:
        PullRequestModel().edit(
            pull_request,
            post_params.get('title'),
            post_params.get('description'),
            post_params.get('description_renderer'),
            self._rhodecode_user)
    except ValueError:
        # model refuses edits on closed pull requests
        h.flash(_(u'Cannot update closed pull requests.'), category='error')
        return

    Session().commit()
    h.flash(_(u'Pull request title & description updated.'),
            category='success')
1083
1083
def _update_commits(self, pull_request):
    """
    Refresh the pull request's commit range from its source/target repos.

    Delegates to ``PullRequestModel().update_commits`` and then reports the
    outcome to the user: a success flash (plus a live channelstream
    notification prompting a page reload) when the update ran, or a
    warning/error flash describing why it did not.
    """
    _ = self.request.translate
    resp = PullRequestModel().update_commits(pull_request)

    if resp.executed:

        # describe which side of the pull request actually moved,
        # for the user-facing message below
        if resp.target_changed and resp.source_changed:
            changed = 'target and source repositories'
        elif resp.target_changed and not resp.source_changed:
            changed = 'target repository'
        elif not resp.target_changed and resp.source_changed:
            changed = 'source repository'
        else:
            changed = 'nothing'

        msg = _(
            u'Pull request updated to "{source_commit_id}" with '
            u'{count_added} added, {count_removed} removed commits. '
            u'Source of changes: {change_source}')
        msg = msg.format(
            source_commit_id=pull_request.source_ref_parts.commit_id,
            count_added=len(resp.changes.added),
            count_removed=len(resp.changes.removed),
            change_source=changed)
        h.flash(msg, category='success')

        # push a live notification to everyone watching this PR page
        channel = '/repo${}$/pr/{}'.format(
            pull_request.target_repo.repo_name,
            pull_request.pull_request_id)
        message = msg + (
            ' - <a onclick="window.location.reload()">'
            '<strong>{}</strong></a>'.format(_('Reload page')))
        channelstream.post_message(
            channel, message, self._rhodecode_user.username,
            registry=self.request.registry)
    else:
        # update did not run; map the failure reason to a message and
        # downgrade benign reasons (no change / wrong ref type) to warnings
        msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
        warning_reasons = [
            UpdateFailureReason.NO_CHANGE,
            UpdateFailureReason.WRONG_REF_TYPE,
        ]
        category = 'warning' if resp.reason in warning_reasons else 'error'
        h.flash(msg, category=category)
1127
1127
@LoginRequired()
@NotAnonymous()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@CSRFRequired()
@view_config(
    route_name='pullrequest_merge', request_method='POST',
    renderer='json_ext')
def pull_request_merge(self):
    """
    Merge will perform a server-side merge of the specified
    pull request, if the pull request is approved and mergeable.
    After successful merging, the pull request is automatically
    closed, with a relevant comment.
    """
    pull_request = PullRequest.get_or_404(
        self.request.matchdict['pull_request_id'])

    self.load_default_context()
    merge_check = MergeCheck.validate(
        pull_request, auth_user=self._rhodecode_user,
        translator=self.request.translate)

    # surface every pre-condition problem to the user
    for err_type, error_msg in merge_check.errors:
        h.flash(error_msg, category=err_type)

    if merge_check.failed:
        log.debug("Pre-conditions failed, NOT merging.")
    else:
        log.debug("Pre-conditions checked, trying to merge.")
        extras = vcs_operation_context(
            self.request.environ,
            repo_name=pull_request.target_repo.repo_name,
            username=self._rhodecode_db_user.username, action='push',
            scm=pull_request.target_repo.repo_type)
        self._merge_pull_request(
            pull_request, self._rhodecode_db_user, extras)

    # either way, send the user back to the pull request page
    raise HTTPFound(
        h.route_path('pullrequest_show',
                     repo_name=pull_request.target_repo.repo_name,
                     pull_request_id=pull_request.pull_request_id))
1170
1170
def _merge_pull_request(self, pull_request, user, extras):
    """
    Run the actual server-side merge and report the result.

    On success the pull request is closed and committed; on failure the
    detailed status message carried on the merge response itself is
    flashed to the user.
    """
    _ = self.request.translate
    merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)

    if merge_resp.executed:
        log.debug("The merge was successful, closing the pull request.")
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)
        Session().commit()
        msg = _('Pull request was successfully merged and closed.')
        h.flash(msg, category='success')
    else:
        log.debug(
            "The merge was not successful. Merge response: %s", merge_resp)
        # the merge response carries its own detailed, user-facing
        # status message describing why the merge failed
        msg = merge_resp.merge_status_message
        h.flash(msg, category='error')
1189
1187
def _update_reviewers(self, pull_request, review_members, reviewer_rules):
    """Validate the posted reviewer list and store it on the pull request."""
    _ = self.request.translate
    _get_reviewers_data, validate_default_reviewers = \
        PullRequestModel().get_reviewer_functions()

    try:
        validated_reviewers = validate_default_reviewers(
            review_members, reviewer_rules)
    except ValueError as err:
        # reviewer list violated the configured reviewer rules
        log.error('Reviewers Validation: {}'.format(err))
        h.flash(err, category='error')
        return

    PullRequestModel().update_reviewers(
        pull_request, validated_reviewers, self._rhodecode_user)
    h.flash(_('Pull request reviewers updated.'), category='success')
    Session().commit()
1206
1204
@LoginRequired()
@NotAnonymous()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@CSRFRequired()
@view_config(
    route_name='pullrequest_delete', request_method='POST',
    renderer='json_ext')
def pull_request_delete(self):
    """
    Delete a pull request. Allowed only for its owner (per the model's
    delete check) and only while the pull request is still open.
    """
    _ = self.request.translate

    pull_request = PullRequest.get_or_404(
        self.request.matchdict['pull_request_id'])
    self.load_default_context()

    # only owner can delete it, and never a closed pull request
    still_open = not pull_request.is_closed()
    if not (PullRequestModel().check_user_delete(
            pull_request, self._rhodecode_user) and still_open):
        log.warning('user %s tried to delete pull request without access',
                    self._rhodecode_user)
        raise HTTPNotFound()

    PullRequestModel().delete(pull_request, self._rhodecode_user)
    Session().commit()
    h.flash(_('Successfully deleted pull request'),
            category='success')
    raise HTTPFound(h.route_path('pullrequest_show_all',
                                 repo_name=self.db_repo_name))
1238
1236
@LoginRequired()
@NotAnonymous()
@HasRepoPermissionAnyDecorator(
    'repository.read', 'repository.write', 'repository.admin')
@CSRFRequired()
@view_config(
    route_name='pullrequest_comment_create', request_method='POST',
    renderer='json_ext')
def pull_request_comment_create(self):
    """
    Create a comment on a pull request (JSON endpoint).

    Handles three overlapping cases driven by the POST body:
    a close-with-comment request (``close_pull_request``), an optional
    review status change (``changeset_status``), and a plain/inline
    comment. Returns a dict with the target element id plus the rendered
    comment HTML, for in-page insertion by the client.

    Raises HTTPForbidden when the pull request is closed or the user lacks
    comment/close permission.
    """
    _ = self.request.translate

    pull_request = PullRequest.get_or_404(
        self.request.matchdict['pull_request_id'])
    pull_request_id = pull_request.pull_request_id

    # closed pull requests accept no further comments
    if pull_request.is_closed():
        log.debug('comment: forbidden because pull request is closed')
        raise HTTPForbidden()

    allowed_to_comment = PullRequestModel().check_user_comment(
        pull_request, self._rhodecode_user)
    if not allowed_to_comment:
        log.debug(
            'comment: forbidden because pull request is from forbidden repo')
        raise HTTPForbidden()

    c = self.load_default_context()

    # optional review status ('approved', 'rejected', ...) to set with it
    status = self.request.POST.get('changeset_status', None)
    text = self.request.POST.get('text')
    comment_type = self.request.POST.get('comment_type')
    resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
    close_pull_request = self.request.POST.get('close_pull_request')

    # the logic here should work like following, if we submit close
    # pr comment, use `close_pull_request_with_comment` function
    # else handle regular comment logic

    if close_pull_request:
        # only owner or admin or person with write permissions
        allowed_to_close = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)
        if not allowed_to_close:
            log.debug('comment: forbidden because not allowed to close '
                      'pull request %s', pull_request_id)
            raise HTTPForbidden()
        comment, status = PullRequestModel().close_pull_request_with_comment(
            pull_request, self._rhodecode_user, self.db_repo, message=text,
            auth_user=self._rhodecode_user)
        # flush so the comment has its id before the event fires
        Session().flush()
        events.trigger(
            events.PullRequestCommentEvent(pull_request, comment))

    else:
        # regular comment case, could be inline, or one with status.
        # for that one we check also permissions

        allowed_to_change_status = PullRequestModel().check_user_change_status(
            pull_request, self._rhodecode_user)

        if status and allowed_to_change_status:
            # default comment text for a bare status change
            message = (_('Status change %(transition_icon)s %(status)s')
                       % {'transition_icon': '>',
                          'status': ChangesetStatus.get_status_lbl(status)})
            text = text or message

        comment = CommentsModel().create(
            text=text,
            repo=self.db_repo.repo_id,
            user=self._rhodecode_user.user_id,
            pull_request=pull_request,
            f_path=self.request.POST.get('f_path'),
            line_no=self.request.POST.get('line'),
            status_change=(ChangesetStatus.get_status_lbl(status)
                           if status and allowed_to_change_status else None),
            status_change_type=(status
                                if status and allowed_to_change_status else None),
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=self._rhodecode_user
        )

        if allowed_to_change_status:
            # calculate old status before we change it
            old_calculated_status = pull_request.calculated_review_status()

            # get status if set !
            if status:
                ChangesetStatusModel().set_status(
                    self.db_repo.repo_id,
                    status,
                    self._rhodecode_user.user_id,
                    comment,
                    pull_request=pull_request
                )

            Session().flush()
            # this is somehow required to get access to some relationship
            # loaded on comment
            Session().refresh(comment)

            events.trigger(
                events.PullRequestCommentEvent(pull_request, comment))

            # we now calculate the status of pull request, and based on that
            # calculation we set the commits status
            calculated_status = pull_request.calculated_review_status()
            if old_calculated_status != calculated_status:
                PullRequestModel()._trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'review_status_change')

    Session().commit()

    # response payload: target DOM id plus the freshly rendered comment
    data = {
        'target_id': h.safeid(h.safe_unicode(
            self.request.POST.get('f_path'))),
    }
    if comment:
        c.co = comment
        rendered_comment = render(
            'rhodecode:templates/changeset/changeset_comment_block.mako',
            self._get_template_context(c), self.request)

        data.update(comment.get_dict())
        data.update({'rendered_text': rendered_comment})

    return data
1366
1364
1367 @LoginRequired()
1365 @LoginRequired()
1368 @NotAnonymous()
1366 @NotAnonymous()
1369 @HasRepoPermissionAnyDecorator(
1367 @HasRepoPermissionAnyDecorator(
1370 'repository.read', 'repository.write', 'repository.admin')
1368 'repository.read', 'repository.write', 'repository.admin')
1371 @CSRFRequired()
1369 @CSRFRequired()
1372 @view_config(
1370 @view_config(
1373 route_name='pullrequest_comment_delete', request_method='POST',
1371 route_name='pullrequest_comment_delete', request_method='POST',
1374 renderer='json_ext')
1372 renderer='json_ext')
1375 def pull_request_comment_delete(self):
1373 def pull_request_comment_delete(self):
1376 pull_request = PullRequest.get_or_404(
1374 pull_request = PullRequest.get_or_404(
1377 self.request.matchdict['pull_request_id'])
1375 self.request.matchdict['pull_request_id'])
1378
1376
1379 comment = ChangesetComment.get_or_404(
1377 comment = ChangesetComment.get_or_404(
1380 self.request.matchdict['comment_id'])
1378 self.request.matchdict['comment_id'])
1381 comment_id = comment.comment_id
1379 comment_id = comment.comment_id
1382
1380
1383 if pull_request.is_closed():
1381 if pull_request.is_closed():
1384 log.debug('comment: forbidden because pull request is closed')
1382 log.debug('comment: forbidden because pull request is closed')
1385 raise HTTPForbidden()
1383 raise HTTPForbidden()
1386
1384
1387 if not comment:
1385 if not comment:
1388 log.debug('Comment with id:%s not found, skipping', comment_id)
1386 log.debug('Comment with id:%s not found, skipping', comment_id)
1389 # comment already deleted in another call probably
1387 # comment already deleted in another call probably
1390 return True
1388 return True
1391
1389
1392 if comment.pull_request.is_closed():
1390 if comment.pull_request.is_closed():
1393 # don't allow deleting comments on closed pull request
1391 # don't allow deleting comments on closed pull request
1394 raise HTTPForbidden()
1392 raise HTTPForbidden()
1395
1393
1396 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1394 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1397 super_admin = h.HasPermissionAny('hg.admin')()
1395 super_admin = h.HasPermissionAny('hg.admin')()
1398 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1396 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1399 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1397 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1400 comment_repo_admin = is_repo_admin and is_repo_comment
1398 comment_repo_admin = is_repo_admin and is_repo_comment
1401
1399
1402 if super_admin or comment_owner or comment_repo_admin:
1400 if super_admin or comment_owner or comment_repo_admin:
1403 old_calculated_status = comment.pull_request.calculated_review_status()
1401 old_calculated_status = comment.pull_request.calculated_review_status()
1404 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1402 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1405 Session().commit()
1403 Session().commit()
1406 calculated_status = comment.pull_request.calculated_review_status()
1404 calculated_status = comment.pull_request.calculated_review_status()
1407 if old_calculated_status != calculated_status:
1405 if old_calculated_status != calculated_status:
1408 PullRequestModel()._trigger_pull_request_hook(
1406 PullRequestModel()._trigger_pull_request_hook(
1409 comment.pull_request, self._rhodecode_user, 'review_status_change')
1407 comment.pull_request, self._rhodecode_user, 'review_status_change')
1410 return True
1408 return True
1411 else:
1409 else:
1412 log.warning('No permissions for user %s to delete comment_id: %s',
1410 log.warning('No permissions for user %s to delete comment_id: %s',
1413 self._rhodecode_db_user, comment_id)
1411 self._rhodecode_db_user, comment_id)
1414 raise HTTPNotFound()
1412 raise HTTPNotFound()
@@ -1,1755 +1,1837 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24 import os
25 import collections
25 import re
26 import time
27 import shutil
26 import datetime
28 import datetime
27 import fnmatch
29 import fnmatch
28 import itertools
30 import itertools
29 import logging
31 import logging
30 import os
32 import collections
31 import re
32 import time
33 import warnings
33 import warnings
34 import shutil
35
34
36 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
37
36
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 FILEMODE_DEFAULT = 0o100644
53 FILEMODE_DEFAULT = 0o100644
54 FILEMODE_EXECUTABLE = 0o100755
54 FILEMODE_EXECUTABLE = 0o100755
55
55
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 MergeResponse = collections.namedtuple(
58 'MergeResponse',
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
57
61
58
62 class MergeFailureReason(object):
59 class MergeFailureReason(object):
63 """
60 """
64 Enumeration with all the reasons why the server side merge could fail.
61 Enumeration with all the reasons why the server side merge could fail.
65
62
66 DO NOT change the number of the reasons, as they may be stored in the
63 DO NOT change the number of the reasons, as they may be stored in the
67 database.
64 database.
68
65
69 Changing the name of a reason is acceptable and encouraged to deprecate old
66 Changing the name of a reason is acceptable and encouraged to deprecate old
70 reasons.
67 reasons.
71 """
68 """
72
69
73 # Everything went well.
70 # Everything went well.
74 NONE = 0
71 NONE = 0
75
72
76 # An unexpected exception was raised. Check the logs for more details.
73 # An unexpected exception was raised. Check the logs for more details.
77 UNKNOWN = 1
74 UNKNOWN = 1
78
75
79 # The merge was not successful, there are conflicts.
76 # The merge was not successful, there are conflicts.
80 MERGE_FAILED = 2
77 MERGE_FAILED = 2
81
78
82 # The merge succeeded but we could not push it to the target repository.
79 # The merge succeeded but we could not push it to the target repository.
83 PUSH_FAILED = 3
80 PUSH_FAILED = 3
84
81
85 # The specified target is not a head in the target repository.
82 # The specified target is not a head in the target repository.
86 TARGET_IS_NOT_HEAD = 4
83 TARGET_IS_NOT_HEAD = 4
87
84
88 # The source repository contains more branches than the target. Pushing
85 # The source repository contains more branches than the target. Pushing
89 # the merge will create additional branches in the target.
86 # the merge will create additional branches in the target.
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
91
88
92 # The target reference has multiple heads. That does not allow to correctly
89 # The target reference has multiple heads. That does not allow to correctly
93 # identify the target location. This could only happen for mercurial
90 # identify the target location. This could only happen for mercurial
94 # branches.
91 # branches.
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96
93
97 # The target repository is locked
94 # The target repository is locked
98 TARGET_IS_LOCKED = 7
95 TARGET_IS_LOCKED = 7
99
96
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # A involved commit could not be found.
98 # A involved commit could not be found.
102 _DEPRECATED_MISSING_COMMIT = 8
99 _DEPRECATED_MISSING_COMMIT = 8
103
100
104 # The target repo reference is missing.
101 # The target repo reference is missing.
105 MISSING_TARGET_REF = 9
102 MISSING_TARGET_REF = 9
106
103
107 # The source repo reference is missing.
104 # The source repo reference is missing.
108 MISSING_SOURCE_REF = 10
105 MISSING_SOURCE_REF = 10
109
106
110 # The merge was not successful, there are conflicts related to sub
107 # The merge was not successful, there are conflicts related to sub
111 # repositories.
108 # repositories.
112 SUBREPO_MERGE_FAILED = 11
109 SUBREPO_MERGE_FAILED = 11
113
110
114
111
115 class UpdateFailureReason(object):
112 class UpdateFailureReason(object):
116 """
113 """
117 Enumeration with all the reasons why the pull request update could fail.
114 Enumeration with all the reasons why the pull request update could fail.
118
115
119 DO NOT change the number of the reasons, as they may be stored in the
116 DO NOT change the number of the reasons, as they may be stored in the
120 database.
117 database.
121
118
122 Changing the name of a reason is acceptable and encouraged to deprecate old
119 Changing the name of a reason is acceptable and encouraged to deprecate old
123 reasons.
120 reasons.
124 """
121 """
125
122
126 # Everything went well.
123 # Everything went well.
127 NONE = 0
124 NONE = 0
128
125
129 # An unexpected exception was raised. Check the logs for more details.
126 # An unexpected exception was raised. Check the logs for more details.
130 UNKNOWN = 1
127 UNKNOWN = 1
131
128
132 # The pull request is up to date.
129 # The pull request is up to date.
133 NO_CHANGE = 2
130 NO_CHANGE = 2
134
131
135 # The pull request has a reference type that is not supported for update.
132 # The pull request has a reference type that is not supported for update.
136 WRONG_REF_TYPE = 3
133 WRONG_REF_TYPE = 3
137
134
138 # Update failed because the target reference is missing.
135 # Update failed because the target reference is missing.
139 MISSING_TARGET_REF = 4
136 MISSING_TARGET_REF = 4
140
137
141 # Update failed because the source reference is missing.
138 # Update failed because the source reference is missing.
142 MISSING_SOURCE_REF = 5
139 MISSING_SOURCE_REF = 5
143
140
144
141
142 class MergeResponse(object):
143
144 # uses .format(**metadata) for variables
145 MERGE_STATUS_MESSAGES = {
146 MergeFailureReason.NONE: lazy_ugettext(
147 u'This pull request can be automatically merged.'),
148 MergeFailureReason.UNKNOWN: lazy_ugettext(
149 u'This pull request cannot be merged because of an unhandled exception. '
150 u'{exception}'),
151 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
152 u'This pull request cannot be merged because of merge conflicts.'),
153 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
154 u'This pull request could not be merged because push to '
155 u'target:`{target}@{merge_commit}` failed.'),
156 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
157 u'This pull request cannot be merged because the target '
158 u'`{target_ref.name}` is not a head.'),
159 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
160 u'This pull request cannot be merged because the source contains '
161 u'more branches than the target.'),
162 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
163 u'This pull request cannot be merged because the target '
164 u'has multiple heads: `{heads}`.'),
165 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
166 u'This pull request cannot be merged because the target repository is '
167 u'locked by {locked_by}.'),
168
169 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
170 u'This pull request cannot be merged because the target '
171 u'reference `{target_ref.name}` is missing.'),
172 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
173 u'This pull request cannot be merged because the source '
174 u'reference `{source_ref.name}` is missing.'),
175 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
176 u'This pull request cannot be merged because of conflicts related '
177 u'to sub repositories.'),
178
179 # Deprecations
180 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
181 u'This pull request cannot be merged because the target or the '
182 u'source reference is missing.'),
183
184 }
185
186 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
187 self.possible = possible
188 self.executed = executed
189 self.merge_ref = merge_ref
190 self.failure_reason = failure_reason
191 self.metadata = metadata or {}
192
193 def __repr__(self):
194 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
195
196 def __eq__(self, other):
197 same_instance = isinstance(other, self.__class__)
198 return same_instance \
199 and self.possible == other.possible \
200 and self.executed == other.executed \
201 and self.failure_reason == other.failure_reason
202
203 @property
204 def label(self):
205 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
206 not k.startswith('_'))
207 return label_dict.get(self.failure_reason)
208
209 @property
210 def merge_status_message(self):
211 """
212 Return a human friendly error message for the given merge status code.
213 """
214 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
215 try:
216 return msg.format(**self.metadata)
217 except Exception:
218 log.exception('Failed to format %s message', self)
219 return msg
220
221 def asdict(self):
222 data = {}
223 for k in ['possible', 'executed', 'merge_ref', 'failure_reason']:
224 data[k] = getattr(self, k)
225 return data
226
227
145 class BaseRepository(object):
228 class BaseRepository(object):
146 """
229 """
147 Base Repository for final backends
230 Base Repository for final backends
148
231
149 .. attribute:: DEFAULT_BRANCH_NAME
232 .. attribute:: DEFAULT_BRANCH_NAME
150
233
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
234 name of default branch (i.e. "trunk" for svn, "master" for git etc.
152
235
153 .. attribute:: commit_ids
236 .. attribute:: commit_ids
154
237
155 list of all available commit ids, in ascending order
238 list of all available commit ids, in ascending order
156
239
157 .. attribute:: path
240 .. attribute:: path
158
241
159 absolute path to the repository
242 absolute path to the repository
160
243
161 .. attribute:: bookmarks
244 .. attribute:: bookmarks
162
245
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
246 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
247 there are no bookmarks or the backend implementation does not support
165 bookmarks.
248 bookmarks.
166
249
167 .. attribute:: tags
250 .. attribute:: tags
168
251
169 Mapping from name to :term:`Commit ID` of the tag.
252 Mapping from name to :term:`Commit ID` of the tag.
170
253
171 """
254 """
172
255
173 DEFAULT_BRANCH_NAME = None
256 DEFAULT_BRANCH_NAME = None
174 DEFAULT_CONTACT = u"Unknown"
257 DEFAULT_CONTACT = u"Unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
258 DEFAULT_DESCRIPTION = u"unknown"
176 EMPTY_COMMIT_ID = '0' * 40
259 EMPTY_COMMIT_ID = '0' * 40
177
260
178 path = None
261 path = None
179
262
180 def __init__(self, repo_path, config=None, create=False, **kwargs):
263 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 """
264 """
182 Initializes repository. Raises RepositoryError if repository could
265 Initializes repository. Raises RepositoryError if repository could
183 not be find at the given ``repo_path`` or directory at ``repo_path``
266 not be find at the given ``repo_path`` or directory at ``repo_path``
184 exists and ``create`` is set to True.
267 exists and ``create`` is set to True.
185
268
186 :param repo_path: local path of the repository
269 :param repo_path: local path of the repository
187 :param config: repository configuration
270 :param config: repository configuration
188 :param create=False: if set to True, would try to create repository.
271 :param create=False: if set to True, would try to create repository.
189 :param src_url=None: if set, should be proper url from which repository
272 :param src_url=None: if set, should be proper url from which repository
190 would be cloned; requires ``create`` parameter to be set to True -
273 would be cloned; requires ``create`` parameter to be set to True -
191 raises RepositoryError if src_url is set and create evaluates to
274 raises RepositoryError if src_url is set and create evaluates to
192 False
275 False
193 """
276 """
194 raise NotImplementedError
277 raise NotImplementedError
195
278
196 def __repr__(self):
279 def __repr__(self):
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
280 return '<%s at %s>' % (self.__class__.__name__, self.path)
198
281
199 def __len__(self):
282 def __len__(self):
200 return self.count()
283 return self.count()
201
284
202 def __eq__(self, other):
285 def __eq__(self, other):
203 same_instance = isinstance(other, self.__class__)
286 same_instance = isinstance(other, self.__class__)
204 return same_instance and other.path == self.path
287 return same_instance and other.path == self.path
205
288
206 def __ne__(self, other):
289 def __ne__(self, other):
207 return not self.__eq__(other)
290 return not self.__eq__(other)
208
291
209 def get_create_shadow_cache_pr_path(self, db_repo):
292 def get_create_shadow_cache_pr_path(self, db_repo):
210 path = db_repo.cached_diffs_dir
293 path = db_repo.cached_diffs_dir
211 if not os.path.exists(path):
294 if not os.path.exists(path):
212 os.makedirs(path, 0o755)
295 os.makedirs(path, 0o755)
213 return path
296 return path
214
297
215 @classmethod
298 @classmethod
216 def get_default_config(cls, default=None):
299 def get_default_config(cls, default=None):
217 config = Config()
300 config = Config()
218 if default and isinstance(default, list):
301 if default and isinstance(default, list):
219 for section, key, val in default:
302 for section, key, val in default:
220 config.set(section, key, val)
303 config.set(section, key, val)
221 return config
304 return config
222
305
223 @LazyProperty
306 @LazyProperty
224 def _remote(self):
307 def _remote(self):
225 raise NotImplementedError
308 raise NotImplementedError
226
309
227 @LazyProperty
310 @LazyProperty
228 def EMPTY_COMMIT(self):
311 def EMPTY_COMMIT(self):
229 return EmptyCommit(self.EMPTY_COMMIT_ID)
312 return EmptyCommit(self.EMPTY_COMMIT_ID)
230
313
231 @LazyProperty
314 @LazyProperty
232 def alias(self):
315 def alias(self):
233 for k, v in settings.BACKENDS.items():
316 for k, v in settings.BACKENDS.items():
234 if v.split('.')[-1] == str(self.__class__.__name__):
317 if v.split('.')[-1] == str(self.__class__.__name__):
235 return k
318 return k
236
319
237 @LazyProperty
320 @LazyProperty
238 def name(self):
321 def name(self):
239 return safe_unicode(os.path.basename(self.path))
322 return safe_unicode(os.path.basename(self.path))
240
323
241 @LazyProperty
324 @LazyProperty
242 def description(self):
325 def description(self):
243 raise NotImplementedError
326 raise NotImplementedError
244
327
245 def refs(self):
328 def refs(self):
246 """
329 """
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
330 returns a `dict` with branches, bookmarks, tags, and closed_branches
248 for this repository
331 for this repository
249 """
332 """
250 return dict(
333 return dict(
251 branches=self.branches,
334 branches=self.branches,
252 branches_closed=self.branches_closed,
335 branches_closed=self.branches_closed,
253 tags=self.tags,
336 tags=self.tags,
254 bookmarks=self.bookmarks
337 bookmarks=self.bookmarks
255 )
338 )
256
339
257 @LazyProperty
340 @LazyProperty
258 def branches(self):
341 def branches(self):
259 """
342 """
260 A `dict` which maps branch names to commit ids.
343 A `dict` which maps branch names to commit ids.
261 """
344 """
262 raise NotImplementedError
345 raise NotImplementedError
263
346
264 @LazyProperty
347 @LazyProperty
265 def branches_closed(self):
348 def branches_closed(self):
266 """
349 """
267 A `dict` which maps tags names to commit ids.
350 A `dict` which maps tags names to commit ids.
268 """
351 """
269 raise NotImplementedError
352 raise NotImplementedError
270
353
271 @LazyProperty
354 @LazyProperty
272 def bookmarks(self):
355 def bookmarks(self):
273 """
356 """
274 A `dict` which maps tags names to commit ids.
357 A `dict` which maps tags names to commit ids.
275 """
358 """
276 raise NotImplementedError
359 raise NotImplementedError
277
360
278 @LazyProperty
361 @LazyProperty
279 def tags(self):
362 def tags(self):
280 """
363 """
281 A `dict` which maps tags names to commit ids.
364 A `dict` which maps tags names to commit ids.
282 """
365 """
283 raise NotImplementedError
366 raise NotImplementedError
284
367
285 @LazyProperty
368 @LazyProperty
286 def size(self):
369 def size(self):
287 """
370 """
288 Returns combined size in bytes for all repository files
371 Returns combined size in bytes for all repository files
289 """
372 """
290 tip = self.get_commit()
373 tip = self.get_commit()
291 return tip.size
374 return tip.size
292
375
293 def size_at_commit(self, commit_id):
376 def size_at_commit(self, commit_id):
294 commit = self.get_commit(commit_id)
377 commit = self.get_commit(commit_id)
295 return commit.size
378 return commit.size
296
379
297 def is_empty(self):
380 def is_empty(self):
298 return not bool(self.commit_ids)
381 return not bool(self.commit_ids)
299
382
300 @staticmethod
383 @staticmethod
301 def check_url(url, config):
384 def check_url(url, config):
302 """
385 """
303 Function will check given url and try to verify if it's a valid
386 Function will check given url and try to verify if it's a valid
304 link.
387 link.
305 """
388 """
306 raise NotImplementedError
389 raise NotImplementedError
307
390
308 @staticmethod
391 @staticmethod
309 def is_valid_repository(path):
392 def is_valid_repository(path):
310 """
393 """
311 Check if given `path` contains a valid repository of this backend
394 Check if given `path` contains a valid repository of this backend
312 """
395 """
313 raise NotImplementedError
396 raise NotImplementedError
314
397
315 # ==========================================================================
398 # ==========================================================================
316 # COMMITS
399 # COMMITS
317 # ==========================================================================
400 # ==========================================================================
318
401
319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
402 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
320 """
403 """
321 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
404 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
322 are both None, most recent commit is returned.
405 are both None, most recent commit is returned.
323
406
324 :param pre_load: Optional. List of commit attributes to load.
407 :param pre_load: Optional. List of commit attributes to load.
325
408
326 :raises ``EmptyRepositoryError``: if there are no commits
409 :raises ``EmptyRepositoryError``: if there are no commits
327 """
410 """
328 raise NotImplementedError
411 raise NotImplementedError
329
412
330 def __iter__(self):
413 def __iter__(self):
331 for commit_id in self.commit_ids:
414 for commit_id in self.commit_ids:
332 yield self.get_commit(commit_id=commit_id)
415 yield self.get_commit(commit_id=commit_id)
333
416
334 def get_commits(
417 def get_commits(
335 self, start_id=None, end_id=None, start_date=None, end_date=None,
418 self, start_id=None, end_id=None, start_date=None, end_date=None,
336 branch_name=None, show_hidden=False, pre_load=None):
419 branch_name=None, show_hidden=False, pre_load=None):
337 """
420 """
338 Returns iterator of `BaseCommit` objects from start to end
421 Returns iterator of `BaseCommit` objects from start to end
339 not inclusive. This should behave just like a list, ie. end is not
422 not inclusive. This should behave just like a list, ie. end is not
340 inclusive.
423 inclusive.
341
424
342 :param start_id: None or str, must be a valid commit id
425 :param start_id: None or str, must be a valid commit id
343 :param end_id: None or str, must be a valid commit id
426 :param end_id: None or str, must be a valid commit id
344 :param start_date:
427 :param start_date:
345 :param end_date:
428 :param end_date:
346 :param branch_name:
429 :param branch_name:
347 :param show_hidden:
430 :param show_hidden:
348 :param pre_load:
431 :param pre_load:
349 """
432 """
350 raise NotImplementedError
433 raise NotImplementedError
351
434
352 def __getitem__(self, key):
435 def __getitem__(self, key):
353 """
436 """
354 Allows index based access to the commit objects of this repository.
437 Allows index based access to the commit objects of this repository.
355 """
438 """
356 pre_load = ["author", "branch", "date", "message", "parents"]
439 pre_load = ["author", "branch", "date", "message", "parents"]
357 if isinstance(key, slice):
440 if isinstance(key, slice):
358 return self._get_range(key, pre_load)
441 return self._get_range(key, pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
442 return self.get_commit(commit_idx=key, pre_load=pre_load)
360
443
361 def _get_range(self, slice_obj, pre_load):
444 def _get_range(self, slice_obj, pre_load):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
445 for commit_id in self.commit_ids.__getitem__(slice_obj):
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
446 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
364
447
365 def count(self):
448 def count(self):
366 return len(self.commit_ids)
449 return len(self.commit_ids)
367
450
368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
451 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
369 """
452 """
370 Creates and returns a tag for the given ``commit_id``.
453 Creates and returns a tag for the given ``commit_id``.
371
454
372 :param name: name for new tag
455 :param name: name for new tag
373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
456 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
374 :param commit_id: commit id for which new tag would be created
457 :param commit_id: commit id for which new tag would be created
375 :param message: message of the tag's commit
458 :param message: message of the tag's commit
376 :param date: date of tag's commit
459 :param date: date of tag's commit
377
460
378 :raises TagAlreadyExistError: if tag with same name already exists
461 :raises TagAlreadyExistError: if tag with same name already exists
379 """
462 """
380 raise NotImplementedError
463 raise NotImplementedError
381
464
382 def remove_tag(self, name, user, message=None, date=None):
465 def remove_tag(self, name, user, message=None, date=None):
383 """
466 """
384 Removes tag with the given ``name``.
467 Removes tag with the given ``name``.
385
468
386 :param name: name of the tag to be removed
469 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
470 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
471 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
472 :param date: date of tag's removal commit
390
473
391 :raises TagDoesNotExistError: if tag with given name does not exists
474 :raises TagDoesNotExistError: if tag with given name does not exists
392 """
475 """
393 raise NotImplementedError
476 raise NotImplementedError
394
477
395 def get_diff(
478 def get_diff(
396 self, commit1, commit2, path=None, ignore_whitespace=False,
479 self, commit1, commit2, path=None, ignore_whitespace=False,
397 context=3, path1=None):
480 context=3, path1=None):
398 """
481 """
399 Returns (git like) *diff*, as plain text. Shows changes introduced by
482 Returns (git like) *diff*, as plain text. Shows changes introduced by
400 `commit2` since `commit1`.
483 `commit2` since `commit1`.
401
484
402 :param commit1: Entry point from which diff is shown. Can be
485 :param commit1: Entry point from which diff is shown. Can be
403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
486 ``self.EMPTY_COMMIT`` - in this case, patch showing all
404 the changes since empty state of the repository until `commit2`
487 the changes since empty state of the repository until `commit2`
405 :param commit2: Until which commit changes should be shown.
488 :param commit2: Until which commit changes should be shown.
406 :param path: Can be set to a path of a file to create a diff of that
489 :param path: Can be set to a path of a file to create a diff of that
407 file. If `path1` is also set, this value is only associated to
490 file. If `path1` is also set, this value is only associated to
408 `commit2`.
491 `commit2`.
409 :param ignore_whitespace: If set to ``True``, would not show whitespace
492 :param ignore_whitespace: If set to ``True``, would not show whitespace
410 changes. Defaults to ``False``.
493 changes. Defaults to ``False``.
411 :param context: How many lines before/after changed lines should be
494 :param context: How many lines before/after changed lines should be
412 shown. Defaults to ``3``.
495 shown. Defaults to ``3``.
413 :param path1: Can be set to a path to associate with `commit1`. This
496 :param path1: Can be set to a path to associate with `commit1`. This
414 parameter works only for backends which support diff generation for
497 parameter works only for backends which support diff generation for
415 different paths. Other backends will raise a `ValueError` if `path1`
498 different paths. Other backends will raise a `ValueError` if `path1`
416 is set and has a different value than `path`.
499 is set and has a different value than `path`.
417 :param file_path: filter this diff by given path pattern
500 :param file_path: filter this diff by given path pattern
418 """
501 """
419 raise NotImplementedError
502 raise NotImplementedError
420
503
421 def strip(self, commit_id, branch=None):
504 def strip(self, commit_id, branch=None):
422 """
505 """
423 Strip given commit_id from the repository
506 Strip given commit_id from the repository
424 """
507 """
425 raise NotImplementedError
508 raise NotImplementedError
426
509
427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
510 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
428 """
511 """
429 Return a latest common ancestor commit if one exists for this repo
512 Return a latest common ancestor commit if one exists for this repo
430 `commit_id1` vs `commit_id2` from `repo2`.
513 `commit_id1` vs `commit_id2` from `repo2`.
431
514
432 :param commit_id1: Commit it from this repository to use as a
515 :param commit_id1: Commit it from this repository to use as a
433 target for the comparison.
516 target for the comparison.
434 :param commit_id2: Source commit id to use for comparison.
517 :param commit_id2: Source commit id to use for comparison.
435 :param repo2: Source repository to use for comparison.
518 :param repo2: Source repository to use for comparison.
436 """
519 """
437 raise NotImplementedError
520 raise NotImplementedError
438
521
439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
522 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
440 """
523 """
441 Compare this repository's revision `commit_id1` with `commit_id2`.
524 Compare this repository's revision `commit_id1` with `commit_id2`.
442
525
443 Returns a tuple(commits, ancestor) that would be merged from
526 Returns a tuple(commits, ancestor) that would be merged from
444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
527 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
445 will be returned as ancestor.
528 will be returned as ancestor.
446
529
447 :param commit_id1: Commit it from this repository to use as a
530 :param commit_id1: Commit it from this repository to use as a
448 target for the comparison.
531 target for the comparison.
449 :param commit_id2: Source commit id to use for comparison.
532 :param commit_id2: Source commit id to use for comparison.
450 :param repo2: Source repository to use for comparison.
533 :param repo2: Source repository to use for comparison.
451 :param merge: If set to ``True`` will do a merge compare which also
534 :param merge: If set to ``True`` will do a merge compare which also
452 returns the common ancestor.
535 returns the common ancestor.
453 :param pre_load: Optional. List of commit attributes to load.
536 :param pre_load: Optional. List of commit attributes to load.
454 """
537 """
455 raise NotImplementedError
538 raise NotImplementedError
456
539
457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
540 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
458 user_name='', user_email='', message='', dry_run=False,
541 user_name='', user_email='', message='', dry_run=False,
459 use_rebase=False, close_branch=False):
542 use_rebase=False, close_branch=False):
460 """
543 """
461 Merge the revisions specified in `source_ref` from `source_repo`
544 Merge the revisions specified in `source_ref` from `source_repo`
462 onto the `target_ref` of this repository.
545 onto the `target_ref` of this repository.
463
546
464 `source_ref` and `target_ref` are named tupls with the following
547 `source_ref` and `target_ref` are named tupls with the following
465 fields `type`, `name` and `commit_id`.
548 fields `type`, `name` and `commit_id`.
466
549
467 Returns a MergeResponse named tuple with the following fields
550 Returns a MergeResponse named tuple with the following fields
468 'possible', 'executed', 'source_commit', 'target_commit',
551 'possible', 'executed', 'source_commit', 'target_commit',
469 'merge_commit'.
552 'merge_commit'.
470
553
471 :param repo_id: `repo_id` target repo id.
554 :param repo_id: `repo_id` target repo id.
472 :param workspace_id: `workspace_id` unique identifier.
555 :param workspace_id: `workspace_id` unique identifier.
473 :param target_ref: `target_ref` points to the commit on top of which
556 :param target_ref: `target_ref` points to the commit on top of which
474 the `source_ref` should be merged.
557 the `source_ref` should be merged.
475 :param source_repo: The repository that contains the commits to be
558 :param source_repo: The repository that contains the commits to be
476 merged.
559 merged.
477 :param source_ref: `source_ref` points to the topmost commit from
560 :param source_ref: `source_ref` points to the topmost commit from
478 the `source_repo` which should be merged.
561 the `source_repo` which should be merged.
479 :param user_name: Merge commit `user_name`.
562 :param user_name: Merge commit `user_name`.
480 :param user_email: Merge commit `user_email`.
563 :param user_email: Merge commit `user_email`.
481 :param message: Merge commit `message`.
564 :param message: Merge commit `message`.
482 :param dry_run: If `True` the merge will not take place.
565 :param dry_run: If `True` the merge will not take place.
483 :param use_rebase: If `True` commits from the source will be rebased
566 :param use_rebase: If `True` commits from the source will be rebased
484 on top of the target instead of being merged.
567 on top of the target instead of being merged.
485 :param close_branch: If `True` branch will be close before merging it
568 :param close_branch: If `True` branch will be close before merging it
486 """
569 """
487 if dry_run:
570 if dry_run:
488 message = message or settings.MERGE_DRY_RUN_MESSAGE
571 message = message or settings.MERGE_DRY_RUN_MESSAGE
489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
572 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
490 user_name = user_name or settings.MERGE_DRY_RUN_USER
573 user_name = user_name or settings.MERGE_DRY_RUN_USER
491 else:
574 else:
492 if not user_name:
575 if not user_name:
493 raise ValueError('user_name cannot be empty')
576 raise ValueError('user_name cannot be empty')
494 if not user_email:
577 if not user_email:
495 raise ValueError('user_email cannot be empty')
578 raise ValueError('user_email cannot be empty')
496 if not message:
579 if not message:
497 raise ValueError('message cannot be empty')
580 raise ValueError('message cannot be empty')
498
581
499 try:
582 try:
500 return self._merge_repo(
583 return self._merge_repo(
501 repo_id, workspace_id, target_ref, source_repo,
584 repo_id, workspace_id, target_ref, source_repo,
502 source_ref, message, user_name, user_email, dry_run=dry_run,
585 source_ref, message, user_name, user_email, dry_run=dry_run,
503 use_rebase=use_rebase, close_branch=close_branch)
586 use_rebase=use_rebase, close_branch=close_branch)
504 except RepositoryError:
587 except RepositoryError as exc:
505 log.exception(
588 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
506 'Unexpected failure when running merge, dry-run=%s',
507 dry_run)
508 return MergeResponse(
589 return MergeResponse(
509 False, False, None, MergeFailureReason.UNKNOWN)
590 False, False, None, MergeFailureReason.UNKNOWN,
591 metadata={'exception': str(exc)})
510
592
511 def _merge_repo(self, repo_id, workspace_id, target_ref,
593 def _merge_repo(self, repo_id, workspace_id, target_ref,
512 source_repo, source_ref, merge_message,
594 source_repo, source_ref, merge_message,
513 merger_name, merger_email, dry_run=False,
595 merger_name, merger_email, dry_run=False,
514 use_rebase=False, close_branch=False):
596 use_rebase=False, close_branch=False):
515 """Internal implementation of merge."""
597 """Internal implementation of merge."""
516 raise NotImplementedError
598 raise NotImplementedError
517
599
518 def _maybe_prepare_merge_workspace(
600 def _maybe_prepare_merge_workspace(
519 self, repo_id, workspace_id, target_ref, source_ref):
601 self, repo_id, workspace_id, target_ref, source_ref):
520 """
602 """
521 Create the merge workspace.
603 Create the merge workspace.
522
604
523 :param workspace_id: `workspace_id` unique identifier.
605 :param workspace_id: `workspace_id` unique identifier.
524 """
606 """
525 raise NotImplementedError
607 raise NotImplementedError
526
608
527 def _get_legacy_shadow_repository_path(self, workspace_id):
609 def _get_legacy_shadow_repository_path(self, workspace_id):
528 """
610 """
529 Legacy version that was used before. We still need it for
611 Legacy version that was used before. We still need it for
530 backward compat
612 backward compat
531 """
613 """
532 return os.path.join(
614 return os.path.join(
533 os.path.dirname(self.path),
615 os.path.dirname(self.path),
534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
616 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
535
617
536 def _get_shadow_repository_path(self, repo_id, workspace_id):
618 def _get_shadow_repository_path(self, repo_id, workspace_id):
537 # The name of the shadow repository must start with '.', so it is
619 # The name of the shadow repository must start with '.', so it is
538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
620 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
621 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
540 if os.path.exists(legacy_repository_path):
622 if os.path.exists(legacy_repository_path):
541 return legacy_repository_path
623 return legacy_repository_path
542 else:
624 else:
543 return os.path.join(
625 return os.path.join(
544 os.path.dirname(self.path),
626 os.path.dirname(self.path),
545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
627 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
546
628
547 def cleanup_merge_workspace(self, repo_id, workspace_id):
629 def cleanup_merge_workspace(self, repo_id, workspace_id):
548 """
630 """
549 Remove merge workspace.
631 Remove merge workspace.
550
632
551 This function MUST not fail in case there is no workspace associated to
633 This function MUST not fail in case there is no workspace associated to
552 the given `workspace_id`.
634 the given `workspace_id`.
553
635
554 :param workspace_id: `workspace_id` unique identifier.
636 :param workspace_id: `workspace_id` unique identifier.
555 """
637 """
556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
638 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
557 shadow_repository_path_del = '{}.{}.delete'.format(
639 shadow_repository_path_del = '{}.{}.delete'.format(
558 shadow_repository_path, time.time())
640 shadow_repository_path, time.time())
559
641
560 # move the shadow repo, so it never conflicts with the one used.
642 # move the shadow repo, so it never conflicts with the one used.
561 # we use this method because shutil.rmtree had some edge case problems
643 # we use this method because shutil.rmtree had some edge case problems
562 # removing symlinked repositories
644 # removing symlinked repositories
563 if not os.path.isdir(shadow_repository_path):
645 if not os.path.isdir(shadow_repository_path):
564 return
646 return
565
647
566 shutil.move(shadow_repository_path, shadow_repository_path_del)
648 shutil.move(shadow_repository_path, shadow_repository_path_del)
567 try:
649 try:
568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
650 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
569 except Exception:
651 except Exception:
570 log.exception('Failed to gracefully remove shadow repo under %s',
652 log.exception('Failed to gracefully remove shadow repo under %s',
571 shadow_repository_path_del)
653 shadow_repository_path_del)
572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
654 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
573
655
574 # ========== #
656 # ========== #
575 # COMMIT API #
657 # COMMIT API #
576 # ========== #
658 # ========== #
577
659
578 @LazyProperty
660 @LazyProperty
579 def in_memory_commit(self):
661 def in_memory_commit(self):
580 """
662 """
581 Returns :class:`InMemoryCommit` object for this repository.
663 Returns :class:`InMemoryCommit` object for this repository.
582 """
664 """
583 raise NotImplementedError
665 raise NotImplementedError
584
666
585 # ======================== #
667 # ======================== #
586 # UTILITIES FOR SUBCLASSES #
668 # UTILITIES FOR SUBCLASSES #
587 # ======================== #
669 # ======================== #
588
670
589 def _validate_diff_commits(self, commit1, commit2):
671 def _validate_diff_commits(self, commit1, commit2):
590 """
672 """
591 Validates that the given commits are related to this repository.
673 Validates that the given commits are related to this repository.
592
674
593 Intended as a utility for sub classes to have a consistent validation
675 Intended as a utility for sub classes to have a consistent validation
594 of input parameters in methods like :meth:`get_diff`.
676 of input parameters in methods like :meth:`get_diff`.
595 """
677 """
596 self._validate_commit(commit1)
678 self._validate_commit(commit1)
597 self._validate_commit(commit2)
679 self._validate_commit(commit2)
598 if (isinstance(commit1, EmptyCommit) and
680 if (isinstance(commit1, EmptyCommit) and
599 isinstance(commit2, EmptyCommit)):
681 isinstance(commit2, EmptyCommit)):
600 raise ValueError("Cannot compare two empty commits")
682 raise ValueError("Cannot compare two empty commits")
601
683
602 def _validate_commit(self, commit):
684 def _validate_commit(self, commit):
603 if not isinstance(commit, BaseCommit):
685 if not isinstance(commit, BaseCommit):
604 raise TypeError(
686 raise TypeError(
605 "%s is not of type BaseCommit" % repr(commit))
687 "%s is not of type BaseCommit" % repr(commit))
606 if commit.repository != self and not isinstance(commit, EmptyCommit):
688 if commit.repository != self and not isinstance(commit, EmptyCommit):
607 raise ValueError(
689 raise ValueError(
608 "Commit %s must be a valid commit from this repository %s, "
690 "Commit %s must be a valid commit from this repository %s, "
609 "related to this repository instead %s." %
691 "related to this repository instead %s." %
610 (commit, self, commit.repository))
692 (commit, self, commit.repository))
611
693
612 def _validate_commit_id(self, commit_id):
694 def _validate_commit_id(self, commit_id):
613 if not isinstance(commit_id, basestring):
695 if not isinstance(commit_id, basestring):
614 raise TypeError("commit_id must be a string value")
696 raise TypeError("commit_id must be a string value")
615
697
616 def _validate_commit_idx(self, commit_idx):
698 def _validate_commit_idx(self, commit_idx):
617 if not isinstance(commit_idx, (int, long)):
699 if not isinstance(commit_idx, (int, long)):
618 raise TypeError("commit_idx must be a numeric value")
700 raise TypeError("commit_idx must be a numeric value")
619
701
620 def _validate_branch_name(self, branch_name):
702 def _validate_branch_name(self, branch_name):
621 if branch_name and branch_name not in self.branches_all:
703 if branch_name and branch_name not in self.branches_all:
622 msg = ("Branch %s not found in %s" % (branch_name, self))
704 msg = ("Branch %s not found in %s" % (branch_name, self))
623 raise BranchDoesNotExistError(msg)
705 raise BranchDoesNotExistError(msg)
624
706
625 #
707 #
626 # Supporting deprecated API parts
708 # Supporting deprecated API parts
627 # TODO: johbo: consider to move this into a mixin
709 # TODO: johbo: consider to move this into a mixin
628 #
710 #
629
711
630 @property
712 @property
631 def EMPTY_CHANGESET(self):
713 def EMPTY_CHANGESET(self):
632 warnings.warn(
714 warnings.warn(
633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
715 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
634 return self.EMPTY_COMMIT_ID
716 return self.EMPTY_COMMIT_ID
635
717
636 @property
718 @property
637 def revisions(self):
719 def revisions(self):
638 warnings.warn("Use commits attribute instead", DeprecationWarning)
720 warnings.warn("Use commits attribute instead", DeprecationWarning)
639 return self.commit_ids
721 return self.commit_ids
640
722
641 @revisions.setter
723 @revisions.setter
642 def revisions(self, value):
724 def revisions(self, value):
643 warnings.warn("Use commits attribute instead", DeprecationWarning)
725 warnings.warn("Use commits attribute instead", DeprecationWarning)
644 self.commit_ids = value
726 self.commit_ids = value
645
727
646 def get_changeset(self, revision=None, pre_load=None):
728 def get_changeset(self, revision=None, pre_load=None):
647 warnings.warn("Use get_commit instead", DeprecationWarning)
729 warnings.warn("Use get_commit instead", DeprecationWarning)
648 commit_id = None
730 commit_id = None
649 commit_idx = None
731 commit_idx = None
650 if isinstance(revision, basestring):
732 if isinstance(revision, basestring):
651 commit_id = revision
733 commit_id = revision
652 else:
734 else:
653 commit_idx = revision
735 commit_idx = revision
654 return self.get_commit(
736 return self.get_commit(
655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
737 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
656
738
657 def get_changesets(
739 def get_changesets(
658 self, start=None, end=None, start_date=None, end_date=None,
740 self, start=None, end=None, start_date=None, end_date=None,
659 branch_name=None, pre_load=None):
741 branch_name=None, pre_load=None):
660 warnings.warn("Use get_commits instead", DeprecationWarning)
742 warnings.warn("Use get_commits instead", DeprecationWarning)
661 start_id = self._revision_to_commit(start)
743 start_id = self._revision_to_commit(start)
662 end_id = self._revision_to_commit(end)
744 end_id = self._revision_to_commit(end)
663 return self.get_commits(
745 return self.get_commits(
664 start_id=start_id, end_id=end_id, start_date=start_date,
746 start_id=start_id, end_id=end_id, start_date=start_date,
665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
747 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
666
748
667 def _revision_to_commit(self, revision):
749 def _revision_to_commit(self, revision):
668 """
750 """
669 Translates a revision to a commit_id
751 Translates a revision to a commit_id
670
752
671 Helps to support the old changeset based API which allows to use
753 Helps to support the old changeset based API which allows to use
672 commit ids and commit indices interchangeable.
754 commit ids and commit indices interchangeable.
673 """
755 """
674 if revision is None:
756 if revision is None:
675 return revision
757 return revision
676
758
677 if isinstance(revision, basestring):
759 if isinstance(revision, basestring):
678 commit_id = revision
760 commit_id = revision
679 else:
761 else:
680 commit_id = self.commit_ids[revision]
762 commit_id = self.commit_ids[revision]
681 return commit_id
763 return commit_id
682
764
683 @property
765 @property
684 def in_memory_changeset(self):
766 def in_memory_changeset(self):
685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
767 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
686 return self.in_memory_commit
768 return self.in_memory_commit
687
769
688 def get_path_permissions(self, username):
770 def get_path_permissions(self, username):
689 """
771 """
690 Returns a path permission checker or None if not supported
772 Returns a path permission checker or None if not supported
691
773
692 :param username: session user name
774 :param username: session user name
693 :return: an instance of BasePathPermissionChecker or None
775 :return: an instance of BasePathPermissionChecker or None
694 """
776 """
695 return None
777 return None
696
778
697 def install_hooks(self, force=False):
779 def install_hooks(self, force=False):
698 return self._remote.install_hooks(force)
780 return self._remote.install_hooks(force)
699
781
700
782
701 class BaseCommit(object):
783 class BaseCommit(object):
702 """
784 """
703 Each backend should implement it's commit representation.
785 Each backend should implement it's commit representation.
704
786
705 **Attributes**
787 **Attributes**
706
788
707 ``repository``
789 ``repository``
708 repository object within which commit exists
790 repository object within which commit exists
709
791
710 ``id``
792 ``id``
711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
793 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
712 just ``tip``.
794 just ``tip``.
713
795
714 ``raw_id``
796 ``raw_id``
715 raw commit representation (i.e. full 40 length sha for git
797 raw commit representation (i.e. full 40 length sha for git
716 backend)
798 backend)
717
799
718 ``short_id``
800 ``short_id``
719 shortened (if apply) version of ``raw_id``; it would be simple
801 shortened (if apply) version of ``raw_id``; it would be simple
720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
802 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
721 as ``raw_id`` for subversion
803 as ``raw_id`` for subversion
722
804
723 ``idx``
805 ``idx``
724 commit index
806 commit index
725
807
726 ``files``
808 ``files``
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
809 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
728
810
729 ``dirs``
811 ``dirs``
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
812 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
731
813
732 ``nodes``
814 ``nodes``
733 combined list of ``Node`` objects
815 combined list of ``Node`` objects
734
816
735 ``author``
817 ``author``
736 author of the commit, as unicode
818 author of the commit, as unicode
737
819
738 ``message``
820 ``message``
739 message of the commit, as unicode
821 message of the commit, as unicode
740
822
741 ``parents``
823 ``parents``
742 list of parent commits
824 list of parent commits
743
825
744 """
826 """
745
827
746 branch = None
828 branch = None
747 """
829 """
748 Depending on the backend this should be set to the branch name of the
830 Depending on the backend this should be set to the branch name of the
749 commit. Backends not supporting branches on commits should leave this
831 commit. Backends not supporting branches on commits should leave this
750 value as ``None``.
832 value as ``None``.
751 """
833 """
752
834
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
835 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
754 """
836 """
755 This template is used to generate a default prefix for repository archives
837 This template is used to generate a default prefix for repository archives
756 if no prefix has been specified.
838 if no prefix has been specified.
757 """
839 """
758
840
759 def __str__(self):
841 def __str__(self):
760 return '<%s at %s:%s>' % (
842 return '<%s at %s:%s>' % (
761 self.__class__.__name__, self.idx, self.short_id)
843 self.__class__.__name__, self.idx, self.short_id)
762
844
763 def __repr__(self):
845 def __repr__(self):
764 return self.__str__()
846 return self.__str__()
765
847
766 def __unicode__(self):
848 def __unicode__(self):
767 return u'%s:%s' % (self.idx, self.short_id)
849 return u'%s:%s' % (self.idx, self.short_id)
768
850
769 def __eq__(self, other):
851 def __eq__(self, other):
770 same_instance = isinstance(other, self.__class__)
852 same_instance = isinstance(other, self.__class__)
771 return same_instance and self.raw_id == other.raw_id
853 return same_instance and self.raw_id == other.raw_id
772
854
773 def __json__(self):
855 def __json__(self):
774 parents = []
856 parents = []
775 try:
857 try:
776 for parent in self.parents:
858 for parent in self.parents:
777 parents.append({'raw_id': parent.raw_id})
859 parents.append({'raw_id': parent.raw_id})
778 except NotImplementedError:
860 except NotImplementedError:
779 # empty commit doesn't have parents implemented
861 # empty commit doesn't have parents implemented
780 pass
862 pass
781
863
782 return {
864 return {
783 'short_id': self.short_id,
865 'short_id': self.short_id,
784 'raw_id': self.raw_id,
866 'raw_id': self.raw_id,
785 'revision': self.idx,
867 'revision': self.idx,
786 'message': self.message,
868 'message': self.message,
787 'date': self.date,
869 'date': self.date,
788 'author': self.author,
870 'author': self.author,
789 'parents': parents,
871 'parents': parents,
790 'branch': self.branch
872 'branch': self.branch
791 }
873 }
792
874
793 def __getstate__(self):
875 def __getstate__(self):
794 d = self.__dict__.copy()
876 d = self.__dict__.copy()
795 d.pop('_remote', None)
877 d.pop('_remote', None)
796 d.pop('repository', None)
878 d.pop('repository', None)
797 return d
879 return d
798
880
799 def _get_refs(self):
881 def _get_refs(self):
800 return {
882 return {
801 'branches': [self.branch] if self.branch else [],
883 'branches': [self.branch] if self.branch else [],
802 'bookmarks': getattr(self, 'bookmarks', []),
884 'bookmarks': getattr(self, 'bookmarks', []),
803 'tags': self.tags
885 'tags': self.tags
804 }
886 }
805
887
806 @LazyProperty
888 @LazyProperty
807 def last(self):
889 def last(self):
808 """
890 """
809 ``True`` if this is last commit in repository, ``False``
891 ``True`` if this is last commit in repository, ``False``
810 otherwise; trying to access this attribute while there is no
892 otherwise; trying to access this attribute while there is no
811 commits would raise `EmptyRepositoryError`
893 commits would raise `EmptyRepositoryError`
812 """
894 """
813 if self.repository is None:
895 if self.repository is None:
814 raise CommitError("Cannot check if it's most recent commit")
896 raise CommitError("Cannot check if it's most recent commit")
815 return self.raw_id == self.repository.commit_ids[-1]
897 return self.raw_id == self.repository.commit_ids[-1]
816
898
817 @LazyProperty
899 @LazyProperty
818 def parents(self):
900 def parents(self):
819 """
901 """
820 Returns list of parent commits.
902 Returns list of parent commits.
821 """
903 """
822 raise NotImplementedError
904 raise NotImplementedError
823
905
824 @LazyProperty
906 @LazyProperty
825 def first_parent(self):
907 def first_parent(self):
826 """
908 """
827 Returns list of parent commits.
909 Returns list of parent commits.
828 """
910 """
829 return self.parents[0] if self.parents else EmptyCommit()
911 return self.parents[0] if self.parents else EmptyCommit()
830
912
831 @property
913 @property
832 def merge(self):
914 def merge(self):
833 """
915 """
834 Returns boolean if commit is a merge.
916 Returns boolean if commit is a merge.
835 """
917 """
836 return len(self.parents) > 1
918 return len(self.parents) > 1
837
919
838 @LazyProperty
920 @LazyProperty
839 def children(self):
921 def children(self):
840 """
922 """
841 Returns list of child commits.
923 Returns list of child commits.
842 """
924 """
843 raise NotImplementedError
925 raise NotImplementedError
844
926
845 @LazyProperty
927 @LazyProperty
846 def id(self):
928 def id(self):
847 """
929 """
848 Returns string identifying this commit.
930 Returns string identifying this commit.
849 """
931 """
850 raise NotImplementedError
932 raise NotImplementedError
851
933
852 @LazyProperty
934 @LazyProperty
853 def raw_id(self):
935 def raw_id(self):
854 """
936 """
855 Returns raw string identifying this commit.
937 Returns raw string identifying this commit.
856 """
938 """
857 raise NotImplementedError
939 raise NotImplementedError
858
940
859 @LazyProperty
941 @LazyProperty
860 def short_id(self):
942 def short_id(self):
861 """
943 """
862 Returns shortened version of ``raw_id`` attribute, as string,
944 Returns shortened version of ``raw_id`` attribute, as string,
863 identifying this commit, useful for presentation to users.
945 identifying this commit, useful for presentation to users.
864 """
946 """
865 raise NotImplementedError
947 raise NotImplementedError
866
948
867 @LazyProperty
949 @LazyProperty
868 def idx(self):
950 def idx(self):
869 """
951 """
870 Returns integer identifying this commit.
952 Returns integer identifying this commit.
871 """
953 """
872 raise NotImplementedError
954 raise NotImplementedError
873
955
874 @LazyProperty
956 @LazyProperty
875 def committer(self):
957 def committer(self):
876 """
958 """
877 Returns committer for this commit
959 Returns committer for this commit
878 """
960 """
879 raise NotImplementedError
961 raise NotImplementedError
880
962
881 @LazyProperty
963 @LazyProperty
882 def committer_name(self):
964 def committer_name(self):
883 """
965 """
884 Returns committer name for this commit
966 Returns committer name for this commit
885 """
967 """
886
968
887 return author_name(self.committer)
969 return author_name(self.committer)
888
970
889 @LazyProperty
971 @LazyProperty
890 def committer_email(self):
972 def committer_email(self):
891 """
973 """
892 Returns committer email address for this commit
974 Returns committer email address for this commit
893 """
975 """
894
976
895 return author_email(self.committer)
977 return author_email(self.committer)
896
978
897 @LazyProperty
979 @LazyProperty
898 def author(self):
980 def author(self):
899 """
981 """
900 Returns author for this commit
982 Returns author for this commit
901 """
983 """
902
984
903 raise NotImplementedError
985 raise NotImplementedError
904
986
905 @LazyProperty
987 @LazyProperty
906 def author_name(self):
988 def author_name(self):
907 """
989 """
908 Returns author name for this commit
990 Returns author name for this commit
909 """
991 """
910
992
911 return author_name(self.author)
993 return author_name(self.author)
912
994
913 @LazyProperty
995 @LazyProperty
914 def author_email(self):
996 def author_email(self):
915 """
997 """
916 Returns author email address for this commit
998 Returns author email address for this commit
917 """
999 """
918
1000
919 return author_email(self.author)
1001 return author_email(self.author)
920
1002
921 def get_file_mode(self, path):
1003 def get_file_mode(self, path):
922 """
1004 """
923 Returns stat mode of the file at `path`.
1005 Returns stat mode of the file at `path`.
924 """
1006 """
925 raise NotImplementedError
1007 raise NotImplementedError
926
1008
927 def is_link(self, path):
1009 def is_link(self, path):
928 """
1010 """
929 Returns ``True`` if given `path` is a symlink
1011 Returns ``True`` if given `path` is a symlink
930 """
1012 """
931 raise NotImplementedError
1013 raise NotImplementedError
932
1014
933 def get_file_content(self, path):
1015 def get_file_content(self, path):
934 """
1016 """
935 Returns content of the file at the given `path`.
1017 Returns content of the file at the given `path`.
936 """
1018 """
937 raise NotImplementedError
1019 raise NotImplementedError
938
1020
939 def get_file_size(self, path):
1021 def get_file_size(self, path):
940 """
1022 """
941 Returns size of the file at the given `path`.
1023 Returns size of the file at the given `path`.
942 """
1024 """
943 raise NotImplementedError
1025 raise NotImplementedError
944
1026
945 def get_path_commit(self, path, pre_load=None):
1027 def get_path_commit(self, path, pre_load=None):
946 """
1028 """
947 Returns last commit of the file at the given `path`.
1029 Returns last commit of the file at the given `path`.
948
1030
949 :param pre_load: Optional. List of commit attributes to load.
1031 :param pre_load: Optional. List of commit attributes to load.
950 """
1032 """
951 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1033 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
952 if not commits:
1034 if not commits:
953 raise RepositoryError(
1035 raise RepositoryError(
954 'Failed to fetch history for path {}. '
1036 'Failed to fetch history for path {}. '
955 'Please check if such path exists in your repository'.format(
1037 'Please check if such path exists in your repository'.format(
956 path))
1038 path))
957 return commits[0]
1039 return commits[0]
958
1040
959 def get_path_history(self, path, limit=None, pre_load=None):
1041 def get_path_history(self, path, limit=None, pre_load=None):
960 """
1042 """
961 Returns history of file as reversed list of :class:`BaseCommit`
1043 Returns history of file as reversed list of :class:`BaseCommit`
962 objects for which file at given `path` has been modified.
1044 objects for which file at given `path` has been modified.
963
1045
964 :param limit: Optional. Allows to limit the size of the returned
1046 :param limit: Optional. Allows to limit the size of the returned
965 history. This is intended as a hint to the underlying backend, so
1047 history. This is intended as a hint to the underlying backend, so
966 that it can apply optimizations depending on the limit.
1048 that it can apply optimizations depending on the limit.
967 :param pre_load: Optional. List of commit attributes to load.
1049 :param pre_load: Optional. List of commit attributes to load.
968 """
1050 """
969 raise NotImplementedError
1051 raise NotImplementedError
970
1052
971 def get_file_annotate(self, path, pre_load=None):
1053 def get_file_annotate(self, path, pre_load=None):
972 """
1054 """
973 Returns a generator of four element tuples with
1055 Returns a generator of four element tuples with
974 lineno, sha, commit lazy loader and line
1056 lineno, sha, commit lazy loader and line
975
1057
976 :param pre_load: Optional. List of commit attributes to load.
1058 :param pre_load: Optional. List of commit attributes to load.
977 """
1059 """
978 raise NotImplementedError
1060 raise NotImplementedError
979
1061
980 def get_nodes(self, path):
1062 def get_nodes(self, path):
981 """
1063 """
982 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1064 Returns combined ``DirNode`` and ``FileNode`` objects list representing
983 state of commit at the given ``path``.
1065 state of commit at the given ``path``.
984
1066
985 :raises ``CommitError``: if node at the given ``path`` is not
1067 :raises ``CommitError``: if node at the given ``path`` is not
986 instance of ``DirNode``
1068 instance of ``DirNode``
987 """
1069 """
988 raise NotImplementedError
1070 raise NotImplementedError
989
1071
990 def get_node(self, path):
1072 def get_node(self, path):
991 """
1073 """
992 Returns ``Node`` object from the given ``path``.
1074 Returns ``Node`` object from the given ``path``.
993
1075
994 :raises ``NodeDoesNotExistError``: if there is no node at the given
1076 :raises ``NodeDoesNotExistError``: if there is no node at the given
995 ``path``
1077 ``path``
996 """
1078 """
997 raise NotImplementedError
1079 raise NotImplementedError
998
1080
999 def get_largefile_node(self, path):
1081 def get_largefile_node(self, path):
1000 """
1082 """
1001 Returns the path to largefile from Mercurial/Git-lfs storage.
1083 Returns the path to largefile from Mercurial/Git-lfs storage.
1002 or None if it's not a largefile node
1084 or None if it's not a largefile node
1003 """
1085 """
1004 return None
1086 return None
1005
1087
1006 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1088 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1007 prefix=None, write_metadata=False, mtime=None):
1089 prefix=None, write_metadata=False, mtime=None):
1008 """
1090 """
1009 Creates an archive containing the contents of the repository.
1091 Creates an archive containing the contents of the repository.
1010
1092
1011 :param file_path: path to the file which to create the archive.
1093 :param file_path: path to the file which to create the archive.
1012 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1094 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1013 :param prefix: name of root directory in archive.
1095 :param prefix: name of root directory in archive.
1014 Default is repository name and commit's short_id joined with dash:
1096 Default is repository name and commit's short_id joined with dash:
1015 ``"{repo_name}-{short_id}"``.
1097 ``"{repo_name}-{short_id}"``.
1016 :param write_metadata: write a metadata file into archive.
1098 :param write_metadata: write a metadata file into archive.
1017 :param mtime: custom modification time for archive creation, defaults
1099 :param mtime: custom modification time for archive creation, defaults
1018 to time.time() if not given.
1100 to time.time() if not given.
1019
1101
1020 :raise VCSError: If prefix has a problem.
1102 :raise VCSError: If prefix has a problem.
1021 """
1103 """
1022 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1104 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1023 if kind not in allowed_kinds:
1105 if kind not in allowed_kinds:
1024 raise ImproperArchiveTypeError(
1106 raise ImproperArchiveTypeError(
1025 'Archive kind (%s) not supported use one of %s' %
1107 'Archive kind (%s) not supported use one of %s' %
1026 (kind, allowed_kinds))
1108 (kind, allowed_kinds))
1027
1109
1028 prefix = self._validate_archive_prefix(prefix)
1110 prefix = self._validate_archive_prefix(prefix)
1029
1111
1030 mtime = mtime or time.mktime(self.date.timetuple())
1112 mtime = mtime or time.mktime(self.date.timetuple())
1031
1113
1032 file_info = []
1114 file_info = []
1033 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1115 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1034 for _r, _d, files in cur_rev.walk('/'):
1116 for _r, _d, files in cur_rev.walk('/'):
1035 for f in files:
1117 for f in files:
1036 f_path = os.path.join(prefix, f.path)
1118 f_path = os.path.join(prefix, f.path)
1037 file_info.append(
1119 file_info.append(
1038 (f_path, f.mode, f.is_link(), f.raw_bytes))
1120 (f_path, f.mode, f.is_link(), f.raw_bytes))
1039
1121
1040 if write_metadata:
1122 if write_metadata:
1041 metadata = [
1123 metadata = [
1042 ('repo_name', self.repository.name),
1124 ('repo_name', self.repository.name),
1043 ('rev', self.raw_id),
1125 ('rev', self.raw_id),
1044 ('create_time', mtime),
1126 ('create_time', mtime),
1045 ('branch', self.branch),
1127 ('branch', self.branch),
1046 ('tags', ','.join(self.tags)),
1128 ('tags', ','.join(self.tags)),
1047 ]
1129 ]
1048 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1130 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1049 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1131 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1050
1132
1051 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1133 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1052
1134
1053 def _validate_archive_prefix(self, prefix):
1135 def _validate_archive_prefix(self, prefix):
1054 if prefix is None:
1136 if prefix is None:
1055 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1137 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1056 repo_name=safe_str(self.repository.name),
1138 repo_name=safe_str(self.repository.name),
1057 short_id=self.short_id)
1139 short_id=self.short_id)
1058 elif not isinstance(prefix, str):
1140 elif not isinstance(prefix, str):
1059 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1141 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1060 elif prefix.startswith('/'):
1142 elif prefix.startswith('/'):
1061 raise VCSError("Prefix cannot start with leading slash")
1143 raise VCSError("Prefix cannot start with leading slash")
1062 elif prefix.strip() == '':
1144 elif prefix.strip() == '':
1063 raise VCSError("Prefix cannot be empty")
1145 raise VCSError("Prefix cannot be empty")
1064 return prefix
1146 return prefix
1065
1147
1066 @LazyProperty
1148 @LazyProperty
1067 def root(self):
1149 def root(self):
1068 """
1150 """
1069 Returns ``RootNode`` object for this commit.
1151 Returns ``RootNode`` object for this commit.
1070 """
1152 """
1071 return self.get_node('')
1153 return self.get_node('')
1072
1154
1073 def next(self, branch=None):
1155 def next(self, branch=None):
1074 """
1156 """
1075 Returns next commit from current, if branch is gives it will return
1157 Returns next commit from current, if branch is gives it will return
1076 next commit belonging to this branch
1158 next commit belonging to this branch
1077
1159
1078 :param branch: show commits within the given named branch
1160 :param branch: show commits within the given named branch
1079 """
1161 """
1080 indexes = xrange(self.idx + 1, self.repository.count())
1162 indexes = xrange(self.idx + 1, self.repository.count())
1081 return self._find_next(indexes, branch)
1163 return self._find_next(indexes, branch)
1082
1164
1083 def prev(self, branch=None):
1165 def prev(self, branch=None):
1084 """
1166 """
1085 Returns previous commit from current, if branch is gives it will
1167 Returns previous commit from current, if branch is gives it will
1086 return previous commit belonging to this branch
1168 return previous commit belonging to this branch
1087
1169
1088 :param branch: show commit within the given named branch
1170 :param branch: show commit within the given named branch
1089 """
1171 """
1090 indexes = xrange(self.idx - 1, -1, -1)
1172 indexes = xrange(self.idx - 1, -1, -1)
1091 return self._find_next(indexes, branch)
1173 return self._find_next(indexes, branch)
1092
1174
1093 def _find_next(self, indexes, branch=None):
1175 def _find_next(self, indexes, branch=None):
1094 if branch and self.branch != branch:
1176 if branch and self.branch != branch:
1095 raise VCSError('Branch option used on commit not belonging '
1177 raise VCSError('Branch option used on commit not belonging '
1096 'to that branch')
1178 'to that branch')
1097
1179
1098 for next_idx in indexes:
1180 for next_idx in indexes:
1099 commit = self.repository.get_commit(commit_idx=next_idx)
1181 commit = self.repository.get_commit(commit_idx=next_idx)
1100 if branch and branch != commit.branch:
1182 if branch and branch != commit.branch:
1101 continue
1183 continue
1102 return commit
1184 return commit
1103 raise CommitDoesNotExistError
1185 raise CommitDoesNotExistError
1104
1186
1105 def diff(self, ignore_whitespace=True, context=3):
1187 def diff(self, ignore_whitespace=True, context=3):
1106 """
1188 """
1107 Returns a `Diff` object representing the change made by this commit.
1189 Returns a `Diff` object representing the change made by this commit.
1108 """
1190 """
1109 parent = self.first_parent
1191 parent = self.first_parent
1110 diff = self.repository.get_diff(
1192 diff = self.repository.get_diff(
1111 parent, self,
1193 parent, self,
1112 ignore_whitespace=ignore_whitespace,
1194 ignore_whitespace=ignore_whitespace,
1113 context=context)
1195 context=context)
1114 return diff
1196 return diff
1115
1197
1116 @LazyProperty
1198 @LazyProperty
1117 def added(self):
1199 def added(self):
1118 """
1200 """
1119 Returns list of added ``FileNode`` objects.
1201 Returns list of added ``FileNode`` objects.
1120 """
1202 """
1121 raise NotImplementedError
1203 raise NotImplementedError
1122
1204
1123 @LazyProperty
1205 @LazyProperty
1124 def changed(self):
1206 def changed(self):
1125 """
1207 """
1126 Returns list of modified ``FileNode`` objects.
1208 Returns list of modified ``FileNode`` objects.
1127 """
1209 """
1128 raise NotImplementedError
1210 raise NotImplementedError
1129
1211
1130 @LazyProperty
1212 @LazyProperty
1131 def removed(self):
1213 def removed(self):
1132 """
1214 """
1133 Returns list of removed ``FileNode`` objects.
1215 Returns list of removed ``FileNode`` objects.
1134 """
1216 """
1135 raise NotImplementedError
1217 raise NotImplementedError
1136
1218
1137 @LazyProperty
1219 @LazyProperty
1138 def size(self):
1220 def size(self):
1139 """
1221 """
1140 Returns total number of bytes from contents of all filenodes.
1222 Returns total number of bytes from contents of all filenodes.
1141 """
1223 """
1142 return sum((node.size for node in self.get_filenodes_generator()))
1224 return sum((node.size for node in self.get_filenodes_generator()))
1143
1225
1144 def walk(self, topurl=''):
1226 def walk(self, topurl=''):
1145 """
1227 """
1146 Similar to os.walk method. Insted of filesystem it walks through
1228 Similar to os.walk method. Insted of filesystem it walks through
1147 commit starting at given ``topurl``. Returns generator of tuples
1229 commit starting at given ``topurl``. Returns generator of tuples
1148 (topnode, dirnodes, filenodes).
1230 (topnode, dirnodes, filenodes).
1149 """
1231 """
1150 topnode = self.get_node(topurl)
1232 topnode = self.get_node(topurl)
1151 if not topnode.is_dir():
1233 if not topnode.is_dir():
1152 return
1234 return
1153 yield (topnode, topnode.dirs, topnode.files)
1235 yield (topnode, topnode.dirs, topnode.files)
1154 for dirnode in topnode.dirs:
1236 for dirnode in topnode.dirs:
1155 for tup in self.walk(dirnode.path):
1237 for tup in self.walk(dirnode.path):
1156 yield tup
1238 yield tup
1157
1239
1158 def get_filenodes_generator(self):
1240 def get_filenodes_generator(self):
1159 """
1241 """
1160 Returns generator that yields *all* file nodes.
1242 Returns generator that yields *all* file nodes.
1161 """
1243 """
1162 for topnode, dirs, files in self.walk():
1244 for topnode, dirs, files in self.walk():
1163 for node in files:
1245 for node in files:
1164 yield node
1246 yield node
1165
1247
1166 #
1248 #
1167 # Utilities for sub classes to support consistent behavior
1249 # Utilities for sub classes to support consistent behavior
1168 #
1250 #
1169
1251
1170 def no_node_at_path(self, path):
1252 def no_node_at_path(self, path):
1171 return NodeDoesNotExistError(
1253 return NodeDoesNotExistError(
1172 u"There is no file nor directory at the given path: "
1254 u"There is no file nor directory at the given path: "
1173 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1255 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1174
1256
1175 def _fix_path(self, path):
1257 def _fix_path(self, path):
1176 """
1258 """
1177 Paths are stored without trailing slash so we need to get rid off it if
1259 Paths are stored without trailing slash so we need to get rid off it if
1178 needed.
1260 needed.
1179 """
1261 """
1180 return path.rstrip('/')
1262 return path.rstrip('/')
1181
1263
1182 #
1264 #
1183 # Deprecated API based on changesets
1265 # Deprecated API based on changesets
1184 #
1266 #
1185
1267
1186 @property
1268 @property
1187 def revision(self):
1269 def revision(self):
1188 warnings.warn("Use idx instead", DeprecationWarning)
1270 warnings.warn("Use idx instead", DeprecationWarning)
1189 return self.idx
1271 return self.idx
1190
1272
1191 @revision.setter
1273 @revision.setter
1192 def revision(self, value):
1274 def revision(self, value):
1193 warnings.warn("Use idx instead", DeprecationWarning)
1275 warnings.warn("Use idx instead", DeprecationWarning)
1194 self.idx = value
1276 self.idx = value
1195
1277
1196 def get_file_changeset(self, path):
1278 def get_file_changeset(self, path):
1197 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1279 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1198 return self.get_path_commit(path)
1280 return self.get_path_commit(path)
1199
1281
1200
1282
1201 class BaseChangesetClass(type):
1283 class BaseChangesetClass(type):
1202
1284
1203 def __instancecheck__(self, instance):
1285 def __instancecheck__(self, instance):
1204 return isinstance(instance, BaseCommit)
1286 return isinstance(instance, BaseCommit)
1205
1287
1206
1288
1207 class BaseChangeset(BaseCommit):
1289 class BaseChangeset(BaseCommit):
1208
1290
1209 __metaclass__ = BaseChangesetClass
1291 __metaclass__ = BaseChangesetClass
1210
1292
1211 def __new__(cls, *args, **kwargs):
1293 def __new__(cls, *args, **kwargs):
1212 warnings.warn(
1294 warnings.warn(
1213 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1295 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1214 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1296 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1215
1297
1216
1298
1217 class BaseInMemoryCommit(object):
1299 class BaseInMemoryCommit(object):
1218 """
1300 """
1219 Represents differences between repository's state (most recent head) and
1301 Represents differences between repository's state (most recent head) and
1220 changes made *in place*.
1302 changes made *in place*.
1221
1303
1222 **Attributes**
1304 **Attributes**
1223
1305
1224 ``repository``
1306 ``repository``
1225 repository object for this in-memory-commit
1307 repository object for this in-memory-commit
1226
1308
1227 ``added``
1309 ``added``
1228 list of ``FileNode`` objects marked as *added*
1310 list of ``FileNode`` objects marked as *added*
1229
1311
1230 ``changed``
1312 ``changed``
1231 list of ``FileNode`` objects marked as *changed*
1313 list of ``FileNode`` objects marked as *changed*
1232
1314
1233 ``removed``
1315 ``removed``
1234 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1316 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1235 *removed*
1317 *removed*
1236
1318
1237 ``parents``
1319 ``parents``
1238 list of :class:`BaseCommit` instances representing parents of
1320 list of :class:`BaseCommit` instances representing parents of
1239 in-memory commit. Should always be 2-element sequence.
1321 in-memory commit. Should always be 2-element sequence.
1240
1322
1241 """
1323 """
1242
1324
1243 def __init__(self, repository):
1325 def __init__(self, repository):
1244 self.repository = repository
1326 self.repository = repository
1245 self.added = []
1327 self.added = []
1246 self.changed = []
1328 self.changed = []
1247 self.removed = []
1329 self.removed = []
1248 self.parents = []
1330 self.parents = []
1249
1331
1250 def add(self, *filenodes):
1332 def add(self, *filenodes):
1251 """
1333 """
1252 Marks given ``FileNode`` objects as *to be committed*.
1334 Marks given ``FileNode`` objects as *to be committed*.
1253
1335
1254 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1336 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1255 latest commit
1337 latest commit
1256 :raises ``NodeAlreadyAddedError``: if node with same path is already
1338 :raises ``NodeAlreadyAddedError``: if node with same path is already
1257 marked as *added*
1339 marked as *added*
1258 """
1340 """
1259 # Check if not already marked as *added* first
1341 # Check if not already marked as *added* first
1260 for node in filenodes:
1342 for node in filenodes:
1261 if node.path in (n.path for n in self.added):
1343 if node.path in (n.path for n in self.added):
1262 raise NodeAlreadyAddedError(
1344 raise NodeAlreadyAddedError(
1263 "Such FileNode %s is already marked for addition"
1345 "Such FileNode %s is already marked for addition"
1264 % node.path)
1346 % node.path)
1265 for node in filenodes:
1347 for node in filenodes:
1266 self.added.append(node)
1348 self.added.append(node)
1267
1349
1268 def change(self, *filenodes):
1350 def change(self, *filenodes):
1269 """
1351 """
1270 Marks given ``FileNode`` objects to be *changed* in next commit.
1352 Marks given ``FileNode`` objects to be *changed* in next commit.
1271
1353
1272 :raises ``EmptyRepositoryError``: if there are no commits yet
1354 :raises ``EmptyRepositoryError``: if there are no commits yet
1273 :raises ``NodeAlreadyExistsError``: if node with same path is already
1355 :raises ``NodeAlreadyExistsError``: if node with same path is already
1274 marked to be *changed*
1356 marked to be *changed*
1275 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1357 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1276 marked to be *removed*
1358 marked to be *removed*
1277 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1359 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1278 commit
1360 commit
1279 :raises ``NodeNotChangedError``: if node hasn't really be changed
1361 :raises ``NodeNotChangedError``: if node hasn't really be changed
1280 """
1362 """
1281 for node in filenodes:
1363 for node in filenodes:
1282 if node.path in (n.path for n in self.removed):
1364 if node.path in (n.path for n in self.removed):
1283 raise NodeAlreadyRemovedError(
1365 raise NodeAlreadyRemovedError(
1284 "Node at %s is already marked as removed" % node.path)
1366 "Node at %s is already marked as removed" % node.path)
1285 try:
1367 try:
1286 self.repository.get_commit()
1368 self.repository.get_commit()
1287 except EmptyRepositoryError:
1369 except EmptyRepositoryError:
1288 raise EmptyRepositoryError(
1370 raise EmptyRepositoryError(
1289 "Nothing to change - try to *add* new nodes rather than "
1371 "Nothing to change - try to *add* new nodes rather than "
1290 "changing them")
1372 "changing them")
1291 for node in filenodes:
1373 for node in filenodes:
1292 if node.path in (n.path for n in self.changed):
1374 if node.path in (n.path for n in self.changed):
1293 raise NodeAlreadyChangedError(
1375 raise NodeAlreadyChangedError(
1294 "Node at '%s' is already marked as changed" % node.path)
1376 "Node at '%s' is already marked as changed" % node.path)
1295 self.changed.append(node)
1377 self.changed.append(node)
1296
1378
1297 def remove(self, *filenodes):
1379 def remove(self, *filenodes):
1298 """
1380 """
1299 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1381 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1300 *removed* in next commit.
1382 *removed* in next commit.
1301
1383
1302 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1384 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1303 be *removed*
1385 be *removed*
1304 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1386 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1305 be *changed*
1387 be *changed*
1306 """
1388 """
1307 for node in filenodes:
1389 for node in filenodes:
1308 if node.path in (n.path for n in self.removed):
1390 if node.path in (n.path for n in self.removed):
1309 raise NodeAlreadyRemovedError(
1391 raise NodeAlreadyRemovedError(
1310 "Node is already marked to for removal at %s" % node.path)
1392 "Node is already marked to for removal at %s" % node.path)
1311 if node.path in (n.path for n in self.changed):
1393 if node.path in (n.path for n in self.changed):
1312 raise NodeAlreadyChangedError(
1394 raise NodeAlreadyChangedError(
1313 "Node is already marked to be changed at %s" % node.path)
1395 "Node is already marked to be changed at %s" % node.path)
1314 # We only mark node as *removed* - real removal is done by
1396 # We only mark node as *removed* - real removal is done by
1315 # commit method
1397 # commit method
1316 self.removed.append(node)
1398 self.removed.append(node)
1317
1399
1318 def reset(self):
1400 def reset(self):
1319 """
1401 """
1320 Resets this instance to initial state (cleans ``added``, ``changed``
1402 Resets this instance to initial state (cleans ``added``, ``changed``
1321 and ``removed`` lists).
1403 and ``removed`` lists).
1322 """
1404 """
1323 self.added = []
1405 self.added = []
1324 self.changed = []
1406 self.changed = []
1325 self.removed = []
1407 self.removed = []
1326 self.parents = []
1408 self.parents = []
1327
1409
1328 def get_ipaths(self):
1410 def get_ipaths(self):
1329 """
1411 """
1330 Returns generator of paths from nodes marked as added, changed or
1412 Returns generator of paths from nodes marked as added, changed or
1331 removed.
1413 removed.
1332 """
1414 """
1333 for node in itertools.chain(self.added, self.changed, self.removed):
1415 for node in itertools.chain(self.added, self.changed, self.removed):
1334 yield node.path
1416 yield node.path
1335
1417
1336 def get_paths(self):
1418 def get_paths(self):
1337 """
1419 """
1338 Returns list of paths from nodes marked as added, changed or removed.
1420 Returns list of paths from nodes marked as added, changed or removed.
1339 """
1421 """
1340 return list(self.get_ipaths())
1422 return list(self.get_ipaths())
1341
1423
1342 def check_integrity(self, parents=None):
1424 def check_integrity(self, parents=None):
1343 """
1425 """
1344 Checks in-memory commit's integrity. Also, sets parents if not
1426 Checks in-memory commit's integrity. Also, sets parents if not
1345 already set.
1427 already set.
1346
1428
1347 :raises CommitError: if any error occurs (i.e.
1429 :raises CommitError: if any error occurs (i.e.
1348 ``NodeDoesNotExistError``).
1430 ``NodeDoesNotExistError``).
1349 """
1431 """
1350 if not self.parents:
1432 if not self.parents:
1351 parents = parents or []
1433 parents = parents or []
1352 if len(parents) == 0:
1434 if len(parents) == 0:
1353 try:
1435 try:
1354 parents = [self.repository.get_commit(), None]
1436 parents = [self.repository.get_commit(), None]
1355 except EmptyRepositoryError:
1437 except EmptyRepositoryError:
1356 parents = [None, None]
1438 parents = [None, None]
1357 elif len(parents) == 1:
1439 elif len(parents) == 1:
1358 parents += [None]
1440 parents += [None]
1359 self.parents = parents
1441 self.parents = parents
1360
1442
1361 # Local parents, only if not None
1443 # Local parents, only if not None
1362 parents = [p for p in self.parents if p]
1444 parents = [p for p in self.parents if p]
1363
1445
1364 # Check nodes marked as added
1446 # Check nodes marked as added
1365 for p in parents:
1447 for p in parents:
1366 for node in self.added:
1448 for node in self.added:
1367 try:
1449 try:
1368 p.get_node(node.path)
1450 p.get_node(node.path)
1369 except NodeDoesNotExistError:
1451 except NodeDoesNotExistError:
1370 pass
1452 pass
1371 else:
1453 else:
1372 raise NodeAlreadyExistsError(
1454 raise NodeAlreadyExistsError(
1373 "Node `%s` already exists at %s" % (node.path, p))
1455 "Node `%s` already exists at %s" % (node.path, p))
1374
1456
1375 # Check nodes marked as changed
1457 # Check nodes marked as changed
1376 missing = set(self.changed)
1458 missing = set(self.changed)
1377 not_changed = set(self.changed)
1459 not_changed = set(self.changed)
1378 if self.changed and not parents:
1460 if self.changed and not parents:
1379 raise NodeDoesNotExistError(str(self.changed[0].path))
1461 raise NodeDoesNotExistError(str(self.changed[0].path))
1380 for p in parents:
1462 for p in parents:
1381 for node in self.changed:
1463 for node in self.changed:
1382 try:
1464 try:
1383 old = p.get_node(node.path)
1465 old = p.get_node(node.path)
1384 missing.remove(node)
1466 missing.remove(node)
1385 # if content actually changed, remove node from not_changed
1467 # if content actually changed, remove node from not_changed
1386 if old.content != node.content:
1468 if old.content != node.content:
1387 not_changed.remove(node)
1469 not_changed.remove(node)
1388 except NodeDoesNotExistError:
1470 except NodeDoesNotExistError:
1389 pass
1471 pass
1390 if self.changed and missing:
1472 if self.changed and missing:
1391 raise NodeDoesNotExistError(
1473 raise NodeDoesNotExistError(
1392 "Node `%s` marked as modified but missing in parents: %s"
1474 "Node `%s` marked as modified but missing in parents: %s"
1393 % (node.path, parents))
1475 % (node.path, parents))
1394
1476
1395 if self.changed and not_changed:
1477 if self.changed and not_changed:
1396 raise NodeNotChangedError(
1478 raise NodeNotChangedError(
1397 "Node `%s` wasn't actually changed (parents: %s)"
1479 "Node `%s` wasn't actually changed (parents: %s)"
1398 % (not_changed.pop().path, parents))
1480 % (not_changed.pop().path, parents))
1399
1481
1400 # Check nodes marked as removed
1482 # Check nodes marked as removed
1401 if self.removed and not parents:
1483 if self.removed and not parents:
1402 raise NodeDoesNotExistError(
1484 raise NodeDoesNotExistError(
1403 "Cannot remove node at %s as there "
1485 "Cannot remove node at %s as there "
1404 "were no parents specified" % self.removed[0].path)
1486 "were no parents specified" % self.removed[0].path)
1405 really_removed = set()
1487 really_removed = set()
1406 for p in parents:
1488 for p in parents:
1407 for node in self.removed:
1489 for node in self.removed:
1408 try:
1490 try:
1409 p.get_node(node.path)
1491 p.get_node(node.path)
1410 really_removed.add(node)
1492 really_removed.add(node)
1411 except CommitError:
1493 except CommitError:
1412 pass
1494 pass
1413 not_removed = set(self.removed) - really_removed
1495 not_removed = set(self.removed) - really_removed
1414 if not_removed:
1496 if not_removed:
1415 # TODO: johbo: This code branch does not seem to be covered
1497 # TODO: johbo: This code branch does not seem to be covered
1416 raise NodeDoesNotExistError(
1498 raise NodeDoesNotExistError(
1417 "Cannot remove node at %s from "
1499 "Cannot remove node at %s from "
1418 "following parents: %s" % (not_removed, parents))
1500 "following parents: %s" % (not_removed, parents))
1419
1501
1420 def commit(
1502 def commit(
1421 self, message, author, parents=None, branch=None, date=None,
1503 self, message, author, parents=None, branch=None, date=None,
1422 **kwargs):
1504 **kwargs):
1423 """
1505 """
1424 Performs in-memory commit (doesn't check workdir in any way) and
1506 Performs in-memory commit (doesn't check workdir in any way) and
1425 returns newly created :class:`BaseCommit`. Updates repository's
1507 returns newly created :class:`BaseCommit`. Updates repository's
1426 attribute `commits`.
1508 attribute `commits`.
1427
1509
1428 .. note::
1510 .. note::
1429
1511
1430 While overriding this method each backend's should call
1512 While overriding this method each backend's should call
1431 ``self.check_integrity(parents)`` in the first place.
1513 ``self.check_integrity(parents)`` in the first place.
1432
1514
1433 :param message: message of the commit
1515 :param message: message of the commit
1434 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1516 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1435 :param parents: single parent or sequence of parents from which commit
1517 :param parents: single parent or sequence of parents from which commit
1436 would be derived
1518 would be derived
1437 :param date: ``datetime.datetime`` instance. Defaults to
1519 :param date: ``datetime.datetime`` instance. Defaults to
1438 ``datetime.datetime.now()``.
1520 ``datetime.datetime.now()``.
1439 :param branch: branch name, as string. If none given, default backend's
1521 :param branch: branch name, as string. If none given, default backend's
1440 branch would be used.
1522 branch would be used.
1441
1523
1442 :raises ``CommitError``: if any error occurs while committing
1524 :raises ``CommitError``: if any error occurs while committing
1443 """
1525 """
1444 raise NotImplementedError
1526 raise NotImplementedError
1445
1527
1446
1528
1447 class BaseInMemoryChangesetClass(type):
1529 class BaseInMemoryChangesetClass(type):
1448
1530
1449 def __instancecheck__(self, instance):
1531 def __instancecheck__(self, instance):
1450 return isinstance(instance, BaseInMemoryCommit)
1532 return isinstance(instance, BaseInMemoryCommit)
1451
1533
1452
1534
1453 class BaseInMemoryChangeset(BaseInMemoryCommit):
1535 class BaseInMemoryChangeset(BaseInMemoryCommit):
1454
1536
1455 __metaclass__ = BaseInMemoryChangesetClass
1537 __metaclass__ = BaseInMemoryChangesetClass
1456
1538
1457 def __new__(cls, *args, **kwargs):
1539 def __new__(cls, *args, **kwargs):
1458 warnings.warn(
1540 warnings.warn(
1459 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1541 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1460 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1542 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1461
1543
1462
1544
1463 class EmptyCommit(BaseCommit):
1545 class EmptyCommit(BaseCommit):
1464 """
1546 """
1465 An dummy empty commit. It's possible to pass hash when creating
1547 An dummy empty commit. It's possible to pass hash when creating
1466 an EmptyCommit
1548 an EmptyCommit
1467 """
1549 """
1468
1550
1469 def __init__(
1551 def __init__(
1470 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1552 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1471 message='', author='', date=None):
1553 message='', author='', date=None):
1472 self._empty_commit_id = commit_id
1554 self._empty_commit_id = commit_id
1473 # TODO: johbo: Solve idx parameter, default value does not make
1555 # TODO: johbo: Solve idx parameter, default value does not make
1474 # too much sense
1556 # too much sense
1475 self.idx = idx
1557 self.idx = idx
1476 self.message = message
1558 self.message = message
1477 self.author = author
1559 self.author = author
1478 self.date = date or datetime.datetime.fromtimestamp(0)
1560 self.date = date or datetime.datetime.fromtimestamp(0)
1479 self.repository = repo
1561 self.repository = repo
1480 self.alias = alias
1562 self.alias = alias
1481
1563
1482 @LazyProperty
1564 @LazyProperty
1483 def raw_id(self):
1565 def raw_id(self):
1484 """
1566 """
1485 Returns raw string identifying this commit, useful for web
1567 Returns raw string identifying this commit, useful for web
1486 representation.
1568 representation.
1487 """
1569 """
1488
1570
1489 return self._empty_commit_id
1571 return self._empty_commit_id
1490
1572
1491 @LazyProperty
1573 @LazyProperty
1492 def branch(self):
1574 def branch(self):
1493 if self.alias:
1575 if self.alias:
1494 from rhodecode.lib.vcs.backends import get_backend
1576 from rhodecode.lib.vcs.backends import get_backend
1495 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1577 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1496
1578
1497 @LazyProperty
1579 @LazyProperty
1498 def short_id(self):
1580 def short_id(self):
1499 return self.raw_id[:12]
1581 return self.raw_id[:12]
1500
1582
1501 @LazyProperty
1583 @LazyProperty
1502 def id(self):
1584 def id(self):
1503 return self.raw_id
1585 return self.raw_id
1504
1586
1505 def get_path_commit(self, path):
1587 def get_path_commit(self, path):
1506 return self
1588 return self
1507
1589
1508 def get_file_content(self, path):
1590 def get_file_content(self, path):
1509 return u''
1591 return u''
1510
1592
1511 def get_file_size(self, path):
1593 def get_file_size(self, path):
1512 return 0
1594 return 0
1513
1595
1514
1596
1515 class EmptyChangesetClass(type):
1597 class EmptyChangesetClass(type):
1516
1598
1517 def __instancecheck__(self, instance):
1599 def __instancecheck__(self, instance):
1518 return isinstance(instance, EmptyCommit)
1600 return isinstance(instance, EmptyCommit)
1519
1601
1520
1602
1521 class EmptyChangeset(EmptyCommit):
1603 class EmptyChangeset(EmptyCommit):
1522
1604
1523 __metaclass__ = EmptyChangesetClass
1605 __metaclass__ = EmptyChangesetClass
1524
1606
1525 def __new__(cls, *args, **kwargs):
1607 def __new__(cls, *args, **kwargs):
1526 warnings.warn(
1608 warnings.warn(
1527 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1609 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1528 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1610 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1529
1611
1530 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1612 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1531 alias=None, revision=-1, message='', author='', date=None):
1613 alias=None, revision=-1, message='', author='', date=None):
1532 if requested_revision is not None:
1614 if requested_revision is not None:
1533 warnings.warn(
1615 warnings.warn(
1534 "Parameter requested_revision not supported anymore",
1616 "Parameter requested_revision not supported anymore",
1535 DeprecationWarning)
1617 DeprecationWarning)
1536 super(EmptyChangeset, self).__init__(
1618 super(EmptyChangeset, self).__init__(
1537 commit_id=cs, repo=repo, alias=alias, idx=revision,
1619 commit_id=cs, repo=repo, alias=alias, idx=revision,
1538 message=message, author=author, date=date)
1620 message=message, author=author, date=date)
1539
1621
1540 @property
1622 @property
1541 def revision(self):
1623 def revision(self):
1542 warnings.warn("Use idx instead", DeprecationWarning)
1624 warnings.warn("Use idx instead", DeprecationWarning)
1543 return self.idx
1625 return self.idx
1544
1626
1545 @revision.setter
1627 @revision.setter
1546 def revision(self, value):
1628 def revision(self, value):
1547 warnings.warn("Use idx instead", DeprecationWarning)
1629 warnings.warn("Use idx instead", DeprecationWarning)
1548 self.idx = value
1630 self.idx = value
1549
1631
1550
1632
1551 class EmptyRepository(BaseRepository):
1633 class EmptyRepository(BaseRepository):
1552 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1634 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1553 pass
1635 pass
1554
1636
1555 def get_diff(self, *args, **kwargs):
1637 def get_diff(self, *args, **kwargs):
1556 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1638 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1557 return GitDiff('')
1639 return GitDiff('')
1558
1640
1559
1641
1560 class CollectionGenerator(object):
1642 class CollectionGenerator(object):
1561
1643
1562 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1644 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1563 self.repo = repo
1645 self.repo = repo
1564 self.commit_ids = commit_ids
1646 self.commit_ids = commit_ids
1565 # TODO: (oliver) this isn't currently hooked up
1647 # TODO: (oliver) this isn't currently hooked up
1566 self.collection_size = None
1648 self.collection_size = None
1567 self.pre_load = pre_load
1649 self.pre_load = pre_load
1568
1650
1569 def __len__(self):
1651 def __len__(self):
1570 if self.collection_size is not None:
1652 if self.collection_size is not None:
1571 return self.collection_size
1653 return self.collection_size
1572 return self.commit_ids.__len__()
1654 return self.commit_ids.__len__()
1573
1655
1574 def __iter__(self):
1656 def __iter__(self):
1575 for commit_id in self.commit_ids:
1657 for commit_id in self.commit_ids:
1576 # TODO: johbo: Mercurial passes in commit indices or commit ids
1658 # TODO: johbo: Mercurial passes in commit indices or commit ids
1577 yield self._commit_factory(commit_id)
1659 yield self._commit_factory(commit_id)
1578
1660
1579 def _commit_factory(self, commit_id):
1661 def _commit_factory(self, commit_id):
1580 """
1662 """
1581 Allows backends to override the way commits are generated.
1663 Allows backends to override the way commits are generated.
1582 """
1664 """
1583 return self.repo.get_commit(commit_id=commit_id,
1665 return self.repo.get_commit(commit_id=commit_id,
1584 pre_load=self.pre_load)
1666 pre_load=self.pre_load)
1585
1667
1586 def __getslice__(self, i, j):
1668 def __getslice__(self, i, j):
1587 """
1669 """
1588 Returns an iterator of sliced repository
1670 Returns an iterator of sliced repository
1589 """
1671 """
1590 commit_ids = self.commit_ids[i:j]
1672 commit_ids = self.commit_ids[i:j]
1591 return self.__class__(
1673 return self.__class__(
1592 self.repo, commit_ids, pre_load=self.pre_load)
1674 self.repo, commit_ids, pre_load=self.pre_load)
1593
1675
1594 def __repr__(self):
1676 def __repr__(self):
1595 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1677 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1596
1678
1597
1679
1598 class Config(object):
1680 class Config(object):
1599 """
1681 """
1600 Represents the configuration for a repository.
1682 Represents the configuration for a repository.
1601
1683
1602 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1684 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1603 standard library. It implements only the needed subset.
1685 standard library. It implements only the needed subset.
1604 """
1686 """
1605
1687
1606 def __init__(self):
1688 def __init__(self):
1607 self._values = {}
1689 self._values = {}
1608
1690
1609 def copy(self):
1691 def copy(self):
1610 clone = Config()
1692 clone = Config()
1611 for section, values in self._values.items():
1693 for section, values in self._values.items():
1612 clone._values[section] = values.copy()
1694 clone._values[section] = values.copy()
1613 return clone
1695 return clone
1614
1696
1615 def __repr__(self):
1697 def __repr__(self):
1616 return '<Config(%s sections) at %s>' % (
1698 return '<Config(%s sections) at %s>' % (
1617 len(self._values), hex(id(self)))
1699 len(self._values), hex(id(self)))
1618
1700
1619 def items(self, section):
1701 def items(self, section):
1620 return self._values.get(section, {}).iteritems()
1702 return self._values.get(section, {}).iteritems()
1621
1703
1622 def get(self, section, option):
1704 def get(self, section, option):
1623 return self._values.get(section, {}).get(option)
1705 return self._values.get(section, {}).get(option)
1624
1706
1625 def set(self, section, option, value):
1707 def set(self, section, option, value):
1626 section_values = self._values.setdefault(section, {})
1708 section_values = self._values.setdefault(section, {})
1627 section_values[option] = value
1709 section_values[option] = value
1628
1710
1629 def clear_section(self, section):
1711 def clear_section(self, section):
1630 self._values[section] = {}
1712 self._values[section] = {}
1631
1713
1632 def serialize(self):
1714 def serialize(self):
1633 """
1715 """
1634 Creates a list of three tuples (section, key, value) representing
1716 Creates a list of three tuples (section, key, value) representing
1635 this config object.
1717 this config object.
1636 """
1718 """
1637 items = []
1719 items = []
1638 for section in self._values:
1720 for section in self._values:
1639 for option, value in self._values[section].items():
1721 for option, value in self._values[section].items():
1640 items.append(
1722 items.append(
1641 (safe_str(section), safe_str(option), safe_str(value)))
1723 (safe_str(section), safe_str(option), safe_str(value)))
1642 return items
1724 return items
1643
1725
1644
1726
1645 class Diff(object):
1727 class Diff(object):
1646 """
1728 """
1647 Represents a diff result from a repository backend.
1729 Represents a diff result from a repository backend.
1648
1730
1649 Subclasses have to provide a backend specific value for
1731 Subclasses have to provide a backend specific value for
1650 :attr:`_header_re` and :attr:`_meta_re`.
1732 :attr:`_header_re` and :attr:`_meta_re`.
1651 """
1733 """
1652 _meta_re = None
1734 _meta_re = None
1653 _header_re = None
1735 _header_re = None
1654
1736
1655 def __init__(self, raw_diff):
1737 def __init__(self, raw_diff):
1656 self.raw = raw_diff
1738 self.raw = raw_diff
1657
1739
1658 def chunks(self):
1740 def chunks(self):
1659 """
1741 """
1660 split the diff in chunks of separate --git a/file b/file chunks
1742 split the diff in chunks of separate --git a/file b/file chunks
1661 to make diffs consistent we must prepend with \n, and make sure
1743 to make diffs consistent we must prepend with \n, and make sure
1662 we can detect last chunk as this was also has special rule
1744 we can detect last chunk as this was also has special rule
1663 """
1745 """
1664
1746
1665 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1747 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1666 header = diff_parts[0]
1748 header = diff_parts[0]
1667
1749
1668 if self._meta_re:
1750 if self._meta_re:
1669 match = self._meta_re.match(header)
1751 match = self._meta_re.match(header)
1670
1752
1671 chunks = diff_parts[1:]
1753 chunks = diff_parts[1:]
1672 total_chunks = len(chunks)
1754 total_chunks = len(chunks)
1673
1755
1674 return (
1756 return (
1675 DiffChunk(chunk, self, cur_chunk == total_chunks)
1757 DiffChunk(chunk, self, cur_chunk == total_chunks)
1676 for cur_chunk, chunk in enumerate(chunks, start=1))
1758 for cur_chunk, chunk in enumerate(chunks, start=1))
1677
1759
1678
1760
1679 class DiffChunk(object):
1761 class DiffChunk(object):
1680
1762
1681 def __init__(self, chunk, diff, last_chunk):
1763 def __init__(self, chunk, diff, last_chunk):
1682 self._diff = diff
1764 self._diff = diff
1683
1765
1684 # since we split by \ndiff --git that part is lost from original diff
1766 # since we split by \ndiff --git that part is lost from original diff
1685 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1767 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1686 if not last_chunk:
1768 if not last_chunk:
1687 chunk += '\n'
1769 chunk += '\n'
1688
1770
1689 match = self._diff._header_re.match(chunk)
1771 match = self._diff._header_re.match(chunk)
1690 self.header = match.groupdict()
1772 self.header = match.groupdict()
1691 self.diff = chunk[match.end():]
1773 self.diff = chunk[match.end():]
1692 self.raw = chunk
1774 self.raw = chunk
1693
1775
1694
1776
1695 class BasePathPermissionChecker(object):
1777 class BasePathPermissionChecker(object):
1696
1778
1697 @staticmethod
1779 @staticmethod
1698 def create_from_patterns(includes, excludes):
1780 def create_from_patterns(includes, excludes):
1699 if includes and '*' in includes and not excludes:
1781 if includes and '*' in includes and not excludes:
1700 return AllPathPermissionChecker()
1782 return AllPathPermissionChecker()
1701 elif excludes and '*' in excludes:
1783 elif excludes and '*' in excludes:
1702 return NonePathPermissionChecker()
1784 return NonePathPermissionChecker()
1703 else:
1785 else:
1704 return PatternPathPermissionChecker(includes, excludes)
1786 return PatternPathPermissionChecker(includes, excludes)
1705
1787
1706 @property
1788 @property
1707 def has_full_access(self):
1789 def has_full_access(self):
1708 raise NotImplemented()
1790 raise NotImplemented()
1709
1791
1710 def has_access(self, path):
1792 def has_access(self, path):
1711 raise NotImplemented()
1793 raise NotImplemented()
1712
1794
1713
1795
1714 class AllPathPermissionChecker(BasePathPermissionChecker):
1796 class AllPathPermissionChecker(BasePathPermissionChecker):
1715
1797
1716 @property
1798 @property
1717 def has_full_access(self):
1799 def has_full_access(self):
1718 return True
1800 return True
1719
1801
1720 def has_access(self, path):
1802 def has_access(self, path):
1721 return True
1803 return True
1722
1804
1723
1805
1724 class NonePathPermissionChecker(BasePathPermissionChecker):
1806 class NonePathPermissionChecker(BasePathPermissionChecker):
1725
1807
1726 @property
1808 @property
1727 def has_full_access(self):
1809 def has_full_access(self):
1728 return False
1810 return False
1729
1811
1730 def has_access(self, path):
1812 def has_access(self, path):
1731 return False
1813 return False
1732
1814
1733
1815
1734 class PatternPathPermissionChecker(BasePathPermissionChecker):
1816 class PatternPathPermissionChecker(BasePathPermissionChecker):
1735
1817
1736 def __init__(self, includes, excludes):
1818 def __init__(self, includes, excludes):
1737 self.includes = includes
1819 self.includes = includes
1738 self.excludes = excludes
1820 self.excludes = excludes
1739 self.includes_re = [] if not includes else [
1821 self.includes_re = [] if not includes else [
1740 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1822 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1741 self.excludes_re = [] if not excludes else [
1823 self.excludes_re = [] if not excludes else [
1742 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1824 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1743
1825
1744 @property
1826 @property
1745 def has_full_access(self):
1827 def has_full_access(self):
1746 return '*' in self.includes and not self.excludes
1828 return '*' in self.includes and not self.excludes
1747
1829
1748 def has_access(self, path):
1830 def has_access(self, path):
1749 for regex in self.excludes_re:
1831 for regex in self.excludes_re:
1750 if regex.match(path):
1832 if regex.match(path):
1751 return False
1833 return False
1752 for regex in self.includes_re:
1834 for regex in self.includes_re:
1753 if regex.match(path):
1835 if regex.match(path):
1754 return True
1836 return True
1755 return False
1837 return False
@@ -1,999 +1,1010 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
47 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
47 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
53 """
53 """
54 Git repository backend.
54 Git repository backend.
55 """
55 """
56 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire
65 self.with_wire = with_wire
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75
75
76 @LazyProperty
76 @LazyProperty
77 def bare(self):
77 def bare(self):
78 return self._remote.bare()
78 return self._remote.bare()
79
79
80 @LazyProperty
80 @LazyProperty
81 def head(self):
81 def head(self):
82 return self._remote.head()
82 return self._remote.head()
83
83
84 @LazyProperty
84 @LazyProperty
85 def commit_ids(self):
85 def commit_ids(self):
86 """
86 """
87 Returns list of commit ids, in ascending order. Being lazy
87 Returns list of commit ids, in ascending order. Being lazy
88 attribute allows external tools to inject commit ids from cache.
88 attribute allows external tools to inject commit ids from cache.
89 """
89 """
90 commit_ids = self._get_all_commit_ids()
90 commit_ids = self._get_all_commit_ids()
91 self._rebuild_cache(commit_ids)
91 self._rebuild_cache(commit_ids)
92 return commit_ids
92 return commit_ids
93
93
94 def _rebuild_cache(self, commit_ids):
94 def _rebuild_cache(self, commit_ids):
95 self._commit_ids = dict((commit_id, index)
95 self._commit_ids = dict((commit_id, index)
96 for index, commit_id in enumerate(commit_ids))
96 for index, commit_id in enumerate(commit_ids))
97
97
98 def run_git_command(self, cmd, **opts):
98 def run_git_command(self, cmd, **opts):
99 """
99 """
100 Runs given ``cmd`` as git command and returns tuple
100 Runs given ``cmd`` as git command and returns tuple
101 (stdout, stderr).
101 (stdout, stderr).
102
102
103 :param cmd: git command to be executed
103 :param cmd: git command to be executed
104 :param opts: env options to pass into Subprocess command
104 :param opts: env options to pass into Subprocess command
105 """
105 """
106 if not isinstance(cmd, list):
106 if not isinstance(cmd, list):
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108
108
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 out, err = self._remote.run_git_command(cmd, **opts)
110 out, err = self._remote.run_git_command(cmd, **opts)
111 if err and not skip_stderr_log:
111 if err and not skip_stderr_log:
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 return out, err
113 return out, err
114
114
115 @staticmethod
115 @staticmethod
116 def check_url(url, config):
116 def check_url(url, config):
117 """
117 """
118 Function will check given url and try to verify if it's a valid
118 Function will check given url and try to verify if it's a valid
119 link. Sometimes it may happened that git will issue basic
119 link. Sometimes it may happened that git will issue basic
120 auth request that can cause whole API to hang when used from python
120 auth request that can cause whole API to hang when used from python
121 or other external calls.
121 or other external calls.
122
122
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 when the return code is non 200
124 when the return code is non 200
125 """
125 """
126 # check first if it's not an url
126 # check first if it's not an url
127 if os.path.isdir(url) or url.startswith('file:'):
127 if os.path.isdir(url) or url.startswith('file:'):
128 return True
128 return True
129
129
130 if '+' in url.split('://', 1)[0]:
130 if '+' in url.split('://', 1)[0]:
131 url = url.split('+', 1)[1]
131 url = url.split('+', 1)[1]
132
132
133 # Request the _remote to verify the url
133 # Request the _remote to verify the url
134 return connection.Git.check_url(url, config.serialize())
134 return connection.Git.check_url(url, config.serialize())
135
135
136 @staticmethod
136 @staticmethod
137 def is_valid_repository(path):
137 def is_valid_repository(path):
138 if os.path.isdir(os.path.join(path, '.git')):
138 if os.path.isdir(os.path.join(path, '.git')):
139 return True
139 return True
140 # check case of bare repository
140 # check case of bare repository
141 try:
141 try:
142 GitRepository(path)
142 GitRepository(path)
143 return True
143 return True
144 except VCSError:
144 except VCSError:
145 pass
145 pass
146 return False
146 return False
147
147
148 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
148 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 bare=False):
149 bare=False):
150 if create and os.path.exists(self.path):
150 if create and os.path.exists(self.path):
151 raise RepositoryError(
151 raise RepositoryError(
152 "Cannot create repository at %s, location already exist"
152 "Cannot create repository at %s, location already exist"
153 % self.path)
153 % self.path)
154
154
155 if bare and do_workspace_checkout:
155 if bare and do_workspace_checkout:
156 raise RepositoryError("Cannot update a bare repository")
156 raise RepositoryError("Cannot update a bare repository")
157 try:
157 try:
158
158
159 if src_url:
159 if src_url:
160 # check URL before any actions
160 # check URL before any actions
161 GitRepository.check_url(src_url, self.config)
161 GitRepository.check_url(src_url, self.config)
162
162
163 if create:
163 if create:
164 os.makedirs(self.path, mode=0o755)
164 os.makedirs(self.path, mode=0o755)
165
165
166 if bare:
166 if bare:
167 self._remote.init_bare()
167 self._remote.init_bare()
168 else:
168 else:
169 self._remote.init()
169 self._remote.init()
170
170
171 if src_url and bare:
171 if src_url and bare:
172 # bare repository only allows a fetch and checkout is not allowed
172 # bare repository only allows a fetch and checkout is not allowed
173 self.fetch(src_url, commit_ids=None)
173 self.fetch(src_url, commit_ids=None)
174 elif src_url:
174 elif src_url:
175 self.pull(src_url, commit_ids=None,
175 self.pull(src_url, commit_ids=None,
176 update_after=do_workspace_checkout)
176 update_after=do_workspace_checkout)
177
177
178 else:
178 else:
179 if not self._remote.assert_correct_path():
179 if not self._remote.assert_correct_path():
180 raise RepositoryError(
180 raise RepositoryError(
181 'Path "%s" does not contain a Git repository' %
181 'Path "%s" does not contain a Git repository' %
182 (self.path,))
182 (self.path,))
183
183
184 # TODO: johbo: check if we have to translate the OSError here
184 # TODO: johbo: check if we have to translate the OSError here
185 except OSError as err:
185 except OSError as err:
186 raise RepositoryError(err)
186 raise RepositoryError(err)
187
187
188 def _get_all_commit_ids(self, filters=None):
188 def _get_all_commit_ids(self, filters=None):
189 # we must check if this repo is not empty, since later command
189 # we must check if this repo is not empty, since later command
190 # fails if it is. And it's cheaper to ask than throw the subprocess
190 # fails if it is. And it's cheaper to ask than throw the subprocess
191 # errors
191 # errors
192
192
193 head = self._remote.head(show_exc=False)
193 head = self._remote.head(show_exc=False)
194 if not head:
194 if not head:
195 return []
195 return []
196
196
197 rev_filter = ['--branches', '--tags']
197 rev_filter = ['--branches', '--tags']
198 extra_filter = []
198 extra_filter = []
199
199
200 if filters:
200 if filters:
201 if filters.get('since'):
201 if filters.get('since'):
202 extra_filter.append('--since=%s' % (filters['since']))
202 extra_filter.append('--since=%s' % (filters['since']))
203 if filters.get('until'):
203 if filters.get('until'):
204 extra_filter.append('--until=%s' % (filters['until']))
204 extra_filter.append('--until=%s' % (filters['until']))
205 if filters.get('branch_name'):
205 if filters.get('branch_name'):
206 rev_filter = ['--tags']
206 rev_filter = ['--tags']
207 extra_filter.append(filters['branch_name'])
207 extra_filter.append(filters['branch_name'])
208 rev_filter.extend(extra_filter)
208 rev_filter.extend(extra_filter)
209
209
210 # if filters.get('start') or filters.get('end'):
210 # if filters.get('start') or filters.get('end'):
211 # # skip is offset, max-count is limit
211 # # skip is offset, max-count is limit
212 # if filters.get('start'):
212 # if filters.get('start'):
213 # extra_filter += ' --skip=%s' % filters['start']
213 # extra_filter += ' --skip=%s' % filters['start']
214 # if filters.get('end'):
214 # if filters.get('end'):
215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216
216
217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 try:
218 try:
219 output, __ = self.run_git_command(cmd)
219 output, __ = self.run_git_command(cmd)
220 except RepositoryError:
220 except RepositoryError:
221 # Can be raised for empty repositories
221 # Can be raised for empty repositories
222 return []
222 return []
223 return output.splitlines()
223 return output.splitlines()
224
224
    def _get_commit_id(self, commit_id_or_idx):
        """
        Resolve ``commit_id_or_idx`` into a full commit id.

        Accepts: None/''/'tip'/'HEAD'/'head'/-1 (latest commit), a numeric
        index (int or short all-digit string), a full ref path, a branch or
        tag name, the all-zero "null" sha, or a full sha.

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when nothing matches
        """
        def is_null(value):
            # True only when the input is made up entirely of '0'
            # characters (the null sha): total length equals the
            # number of zeros it contains.
            return len(value) == commit_id_or_idx.count('0')

        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        # symbolic aliases for the most recent commit
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        # ints, short all-digit strings and the null sha are resolved
        # positionally against the cached commit-id list
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                msg = "Commit %s does not exist for %s" % (
                    commit_id_or_idx, self)
                raise CommitDoesNotExistError(msg)

        elif is_bstr:
            # check full path ref, eg. refs/heads/master
            ref_id = self._refs.get(commit_id_or_idx)
            if ref_id:
                return ref_id

            # check branch name
            branch_ids = self.branches.values()  # NOTE(review): unused local
            ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            # check tag name
            ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            # last resort: the string must look like a sha AND be a
            # commit known to this repository
            if (not SHA_PATTERN.match(commit_id_or_idx) or
                    commit_id_or_idx not in self.commit_ids):
                msg = "Commit %s does not exist for %s" % (
                    commit_id_or_idx, self)
                raise CommitDoesNotExistError(msg)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
273
273
274 def get_hook_location(self):
274 def get_hook_location(self):
275 """
275 """
276 returns absolute path to location where hooks are stored
276 returns absolute path to location where hooks are stored
277 """
277 """
278 loc = os.path.join(self.path, 'hooks')
278 loc = os.path.join(self.path, 'hooks')
279 if not self.bare:
279 if not self.bare:
280 loc = os.path.join(self.path, '.git', 'hooks')
280 loc = os.path.join(self.path, '.git', 'hooks')
281 return loc
281 return loc
282
282
283 @LazyProperty
283 @LazyProperty
284 def last_change(self):
284 def last_change(self):
285 """
285 """
286 Returns last change made on this repository as
286 Returns last change made on this repository as
287 `datetime.datetime` object.
287 `datetime.datetime` object.
288 """
288 """
289 try:
289 try:
290 return self.get_commit().date
290 return self.get_commit().date
291 except RepositoryError:
291 except RepositoryError:
292 tzoffset = makedate()[1]
292 tzoffset = makedate()[1]
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294
294
295 def _get_fs_mtime(self):
295 def _get_fs_mtime(self):
296 idx_loc = '' if self.bare else '.git'
296 idx_loc = '' if self.bare else '.git'
297 # fallback to filesystem
297 # fallback to filesystem
298 in_path = os.path.join(self.path, idx_loc, "index")
298 in_path = os.path.join(self.path, idx_loc, "index")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 if os.path.exists(in_path):
300 if os.path.exists(in_path):
301 return os.stat(in_path).st_mtime
301 return os.stat(in_path).st_mtime
302 else:
302 else:
303 return os.stat(he_path).st_mtime
303 return os.stat(he_path).st_mtime
304
304
305 @LazyProperty
305 @LazyProperty
306 def description(self):
306 def description(self):
307 description = self._remote.get_description()
307 description = self._remote.get_description()
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309
309
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 if self.is_empty():
311 if self.is_empty():
312 return OrderedDict()
312 return OrderedDict()
313
313
314 result = []
314 result = []
315 for ref, sha in self._refs.iteritems():
315 for ref, sha in self._refs.iteritems():
316 if ref.startswith(prefix):
316 if ref.startswith(prefix):
317 ref_name = ref
317 ref_name = ref
318 if strip_prefix:
318 if strip_prefix:
319 ref_name = ref[len(prefix):]
319 ref_name = ref[len(prefix):]
320 result.append((safe_unicode(ref_name), sha))
320 result.append((safe_unicode(ref_name), sha))
321
321
322 def get_name(entry):
322 def get_name(entry):
323 return entry[0]
323 return entry[0]
324
324
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326
326
327 def _get_branches(self):
327 def _get_branches(self):
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329
329
330 @LazyProperty
330 @LazyProperty
331 def branches(self):
331 def branches(self):
332 return self._get_branches()
332 return self._get_branches()
333
333
334 @LazyProperty
334 @LazyProperty
335 def branches_closed(self):
335 def branches_closed(self):
336 return {}
336 return {}
337
337
338 @LazyProperty
338 @LazyProperty
339 def bookmarks(self):
339 def bookmarks(self):
340 return {}
340 return {}
341
341
342 @LazyProperty
342 @LazyProperty
343 def branches_all(self):
343 def branches_all(self):
344 all_branches = {}
344 all_branches = {}
345 all_branches.update(self.branches)
345 all_branches.update(self.branches)
346 all_branches.update(self.branches_closed)
346 all_branches.update(self.branches_closed)
347 return all_branches
347 return all_branches
348
348
349 @LazyProperty
349 @LazyProperty
350 def tags(self):
350 def tags(self):
351 return self._get_tags()
351 return self._get_tags()
352
352
353 def _get_tags(self):
353 def _get_tags(self):
354 return self._get_refs_entries(
354 return self._get_refs_entries(
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
356
356
357 def tag(self, name, user, commit_id=None, message=None, date=None,
357 def tag(self, name, user, commit_id=None, message=None, date=None,
358 **kwargs):
358 **kwargs):
359 # TODO: fix this method to apply annotated tags correct with message
359 # TODO: fix this method to apply annotated tags correct with message
360 """
360 """
361 Creates and returns a tag for the given ``commit_id``.
361 Creates and returns a tag for the given ``commit_id``.
362
362
363 :param name: name for new tag
363 :param name: name for new tag
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 :param commit_id: commit id for which new tag would be created
365 :param commit_id: commit id for which new tag would be created
366 :param message: message of the tag's commit
366 :param message: message of the tag's commit
367 :param date: date of tag's commit
367 :param date: date of tag's commit
368
368
369 :raises TagAlreadyExistError: if tag with same name already exists
369 :raises TagAlreadyExistError: if tag with same name already exists
370 """
370 """
371 if name in self.tags:
371 if name in self.tags:
372 raise TagAlreadyExistError("Tag %s already exists" % name)
372 raise TagAlreadyExistError("Tag %s already exists" % name)
373 commit = self.get_commit(commit_id=commit_id)
373 commit = self.get_commit(commit_id=commit_id)
374 message = message or "Added tag %s for commit %s" % (
374 message = message or "Added tag %s for commit %s" % (
375 name, commit.raw_id)
375 name, commit.raw_id)
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377
377
378 self._refs = self._get_refs()
378 self._refs = self._get_refs()
379 self.tags = self._get_tags()
379 self.tags = self._get_tags()
380 return commit
380 return commit
381
381
382 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
383 """
383 """
384 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
385
385
386 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
390
390
391 :raises TagDoesNotExistError: if tag with given name does not exists
391 :raises TagDoesNotExistError: if tag with given name does not exists
392 """
392 """
393 if name not in self.tags:
393 if name not in self.tags:
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 tagpath = vcspath.join(
395 tagpath = vcspath.join(
396 self._remote.get_refs_path(), 'refs', 'tags', name)
396 self._remote.get_refs_path(), 'refs', 'tags', name)
397 try:
397 try:
398 os.remove(tagpath)
398 os.remove(tagpath)
399 self._refs = self._get_refs()
399 self._refs = self._get_refs()
400 self.tags = self._get_tags()
400 self.tags = self._get_tags()
401 except OSError as e:
401 except OSError as e:
402 raise RepositoryError(e.strerror)
402 raise RepositoryError(e.strerror)
403
403
404 def _get_refs(self):
404 def _get_refs(self):
405 return self._remote.get_refs()
405 return self._remote.get_refs()
406
406
407 @LazyProperty
407 @LazyProperty
408 def _refs(self):
408 def _refs(self):
409 return self._get_refs()
409 return self._get_refs()
410
410
411 @property
411 @property
412 def _ref_tree(self):
412 def _ref_tree(self):
413 node = tree = {}
413 node = tree = {}
414 for ref, sha in self._refs.iteritems():
414 for ref, sha in self._refs.iteritems():
415 path = ref.split('/')
415 path = ref.split('/')
416 for bit in path[:-1]:
416 for bit in path[:-1]:
417 node = node.setdefault(bit, {})
417 node = node.setdefault(bit, {})
418 node[path[-1]] = sha
418 node[path[-1]] = sha
419 node = tree
419 node = tree
420 return tree
420 return tree
421
421
422 def get_remote_ref(self, ref_name):
422 def get_remote_ref(self, ref_name):
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 try:
424 try:
425 return self._refs[ref_key]
425 return self._refs[ref_key]
426 except Exception:
426 except Exception:
427 return
427 return
428
428
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: full sha, ref name or other form accepted by
            ``_get_commit_id``
        :param commit_idx: numeric index into the commit list; ignored
            when ``commit_id`` is given
        :param pre_load: optional attributes to pre-load on the commit
        :raises RepositoryError: when the object cannot be resolved
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            # indexes are resolved through the same id-resolution path
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            # NOTE: this except covers both the remote dict access and
            # the _commit_ids lookup below - either may raise KeyError
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
448
448
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate ids into positions in the cached commit-id list;
        # positions stay None when the corresponding id was not given
        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # make the end position inclusive for slicing below
        if end_pos is not None:
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # delegate filtering to `git rev-list` via _get_all_commit_ids
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        # positional window is applied in-process on top of any filtering
        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
532
532
533 def get_diff(
533 def get_diff(
534 self, commit1, commit2, path='', ignore_whitespace=False,
534 self, commit1, commit2, path='', ignore_whitespace=False,
535 context=3, path1=None):
535 context=3, path1=None):
536 """
536 """
537 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 Returns (git like) *diff*, as plain text. Shows changes introduced by
538 ``commit2`` since ``commit1``.
538 ``commit2`` since ``commit1``.
539
539
540 :param commit1: Entry point from which diff is shown. Can be
540 :param commit1: Entry point from which diff is shown. Can be
541 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 ``self.EMPTY_COMMIT`` - in this case, patch showing all
542 the changes since empty state of the repository until ``commit2``
542 the changes since empty state of the repository until ``commit2``
543 :param commit2: Until which commits changes should be shown.
543 :param commit2: Until which commits changes should be shown.
544 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 :param ignore_whitespace: If set to ``True``, would not show whitespace
545 changes. Defaults to ``False``.
545 changes. Defaults to ``False``.
546 :param context: How many lines before/after changed lines should be
546 :param context: How many lines before/after changed lines should be
547 shown. Defaults to ``3``.
547 shown. Defaults to ``3``.
548 """
548 """
549 self._validate_diff_commits(commit1, commit2)
549 self._validate_diff_commits(commit1, commit2)
550 if path1 is not None and path1 != path:
550 if path1 is not None and path1 != path:
551 raise ValueError("Diff of two different paths not supported.")
551 raise ValueError("Diff of two different paths not supported.")
552
552
553 flags = [
553 flags = [
554 '-U%s' % context, '--full-index', '--binary', '-p',
554 '-U%s' % context, '--full-index', '--binary', '-p',
555 '-M', '--abbrev=40']
555 '-M', '--abbrev=40']
556 if ignore_whitespace:
556 if ignore_whitespace:
557 flags.append('-w')
557 flags.append('-w')
558
558
559 if commit1 == self.EMPTY_COMMIT:
559 if commit1 == self.EMPTY_COMMIT:
560 cmd = ['show'] + flags + [commit2.raw_id]
560 cmd = ['show'] + flags + [commit2.raw_id]
561 else:
561 else:
562 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
562 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
563
563
564 if path:
564 if path:
565 cmd.extend(['--', path])
565 cmd.extend(['--', path])
566
566
567 stdout, __ = self.run_git_command(cmd)
567 stdout, __ = self.run_git_command(cmd)
568 # If we used 'show' command, strip first few lines (until actual diff
568 # If we used 'show' command, strip first few lines (until actual diff
569 # starts)
569 # starts)
570 if commit1 == self.EMPTY_COMMIT:
570 if commit1 == self.EMPTY_COMMIT:
571 lines = stdout.splitlines()
571 lines = stdout.splitlines()
572 x = 0
572 x = 0
573 for line in lines:
573 for line in lines:
574 if line.startswith('diff'):
574 if line.startswith('diff'):
575 break
575 break
576 x += 1
576 x += 1
577 # Append new line just like 'diff' command do
577 # Append new line just like 'diff' command do
578 stdout = '\n'.join(lines[x:]) + '\n'
578 stdout = '\n'.join(lines[x:]) + '\n'
579 return GitDiff(stdout)
579 return GitDiff(stdout)
580
580
581 def strip(self, commit_id, branch_name):
581 def strip(self, commit_id, branch_name):
582 commit = self.get_commit(commit_id=commit_id)
582 commit = self.get_commit(commit_id=commit_id)
583 if commit.merge:
583 if commit.merge:
584 raise Exception('Cannot reset to merge commit')
584 raise Exception('Cannot reset to merge commit')
585
585
586 # parent is going to be the new head now
586 # parent is going to be the new head now
587 commit = commit.parents[0]
587 commit = commit.parents[0]
588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
589
589
590 self.commit_ids = self._get_all_commit_ids()
590 self.commit_ids = self._get_all_commit_ids()
591 self._rebuild_cache(self.commit_ids)
591 self._rebuild_cache(self.commit_ids)
592
592
593 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
593 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
594 if commit_id1 == commit_id2:
594 if commit_id1 == commit_id2:
595 return commit_id1
595 return commit_id1
596
596
597 if self != repo2:
597 if self != repo2:
598 commits = self._remote.get_missing_revs(
598 commits = self._remote.get_missing_revs(
599 commit_id1, commit_id2, repo2.path)
599 commit_id1, commit_id2, repo2.path)
600 if commits:
600 if commits:
601 commit = repo2.get_commit(commits[-1])
601 commit = repo2.get_commit(commits[-1])
602 if commit.parents:
602 if commit.parents:
603 ancestor_id = commit.parents[0].raw_id
603 ancestor_id = commit.parents[0].raw_id
604 else:
604 else:
605 ancestor_id = None
605 ancestor_id = None
606 else:
606 else:
607 # no commits from other repo, ancestor_id is the commit_id2
607 # no commits from other repo, ancestor_id is the commit_id2
608 ancestor_id = commit_id2
608 ancestor_id = commit_id2
609 else:
609 else:
610 output, __ = self.run_git_command(
610 output, __ = self.run_git_command(
611 ['merge-base', commit_id1, commit_id2])
611 ['merge-base', commit_id1, commit_id2])
612 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
612 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
613
613
614 return ancestor_id
614 return ancestor_id
615
615
616 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
616 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
617 repo1 = self
617 repo1 = self
618 ancestor_id = None
618 ancestor_id = None
619
619
620 if commit_id1 == commit_id2:
620 if commit_id1 == commit_id2:
621 commits = []
621 commits = []
622 elif repo1 != repo2:
622 elif repo1 != repo2:
623 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
623 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
624 repo2.path)
624 repo2.path)
625 commits = [
625 commits = [
626 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
626 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
627 for commit_id in reversed(missing_ids)]
627 for commit_id in reversed(missing_ids)]
628 else:
628 else:
629 output, __ = repo1.run_git_command(
629 output, __ = repo1.run_git_command(
630 ['log', '--reverse', '--pretty=format: %H', '-s',
630 ['log', '--reverse', '--pretty=format: %H', '-s',
631 '%s..%s' % (commit_id1, commit_id2)])
631 '%s..%s' % (commit_id1, commit_id2)])
632 commits = [
632 commits = [
633 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
633 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
634 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
634 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
635
635
636 return commits
636 return commits
637
637
638 @LazyProperty
638 @LazyProperty
639 def in_memory_commit(self):
639 def in_memory_commit(self):
640 """
640 """
641 Returns ``GitInMemoryCommit`` object for this repository.
641 Returns ``GitInMemoryCommit`` object for this repository.
642 """
642 """
643 return GitInMemoryCommit(self)
643 return GitInMemoryCommit(self)
644
644
645 def pull(self, url, commit_ids=None, update_after=False):
645 def pull(self, url, commit_ids=None, update_after=False):
646 """
646 """
647 Pull changes from external location. Pull is different in GIT
647 Pull changes from external location. Pull is different in GIT
648 that fetch since it's doing a checkout
648 that fetch since it's doing a checkout
649
649
650 :param commit_ids: Optional. Can be set to a list of commit ids
650 :param commit_ids: Optional. Can be set to a list of commit ids
651 which shall be pulled from the other repository.
651 which shall be pulled from the other repository.
652 """
652 """
653 refs = None
653 refs = None
654 if commit_ids is not None:
654 if commit_ids is not None:
655 remote_refs = self._remote.get_remote_refs(url)
655 remote_refs = self._remote.get_remote_refs(url)
656 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
656 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
657 self._remote.pull(url, refs=refs, update_after=update_after)
657 self._remote.pull(url, refs=refs, update_after=update_after)
658 self._remote.invalidate_vcs_cache()
658 self._remote.invalidate_vcs_cache()
659
659
660 def fetch(self, url, commit_ids=None):
660 def fetch(self, url, commit_ids=None):
661 """
661 """
662 Fetch all git objects from external location.
662 Fetch all git objects from external location.
663 """
663 """
664 self._remote.sync_fetch(url, refs=commit_ids)
664 self._remote.sync_fetch(url, refs=commit_ids)
665 self._remote.invalidate_vcs_cache()
665 self._remote.invalidate_vcs_cache()
666
666
667 def push(self, url):
667 def push(self, url):
668 refs = None
668 refs = None
669 self._remote.sync_push(url, refs=refs)
669 self._remote.sync_push(url, refs=refs)
670
670
671 def set_refs(self, ref_name, commit_id):
671 def set_refs(self, ref_name, commit_id):
672 self._remote.set_refs(ref_name, commit_id)
672 self._remote.set_refs(ref_name, commit_id)
673
673
674 def remove_ref(self, ref_name):
674 def remove_ref(self, ref_name):
675 self._remote.remove_ref(ref_name)
675 self._remote.remove_ref(ref_name)
676
676
677 def _update_server_info(self):
677 def _update_server_info(self):
678 """
678 """
679 runs gits update-server-info command in this repo instance
679 runs gits update-server-info command in this repo instance
680 """
680 """
681 self._remote.update_server_info()
681 self._remote.update_server_info()
682
682
683 def _current_branch(self):
683 def _current_branch(self):
684 """
684 """
685 Return the name of the current branch.
685 Return the name of the current branch.
686
686
687 It only works for non bare repositories (i.e. repositories with a
687 It only works for non bare repositories (i.e. repositories with a
688 working copy)
688 working copy)
689 """
689 """
690 if self.bare:
690 if self.bare:
691 raise RepositoryError('Bare git repos do not have active branches')
691 raise RepositoryError('Bare git repos do not have active branches')
692
692
693 if self.is_empty():
693 if self.is_empty():
694 return None
694 return None
695
695
696 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
696 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
697 return stdout.strip()
697 return stdout.strip()
698
698
699 def _checkout(self, branch_name, create=False, force=False):
699 def _checkout(self, branch_name, create=False, force=False):
700 """
700 """
701 Checkout a branch in the working directory.
701 Checkout a branch in the working directory.
702
702
703 It tries to create the branch if create is True, failing if the branch
703 It tries to create the branch if create is True, failing if the branch
704 already exists.
704 already exists.
705
705
706 It only works for non bare repositories (i.e. repositories with a
706 It only works for non bare repositories (i.e. repositories with a
707 working copy)
707 working copy)
708 """
708 """
709 if self.bare:
709 if self.bare:
710 raise RepositoryError('Cannot checkout branches in a bare git repo')
710 raise RepositoryError('Cannot checkout branches in a bare git repo')
711
711
712 cmd = ['checkout']
712 cmd = ['checkout']
713 if force:
713 if force:
714 cmd.append('-f')
714 cmd.append('-f')
715 if create:
715 if create:
716 cmd.append('-b')
716 cmd.append('-b')
717 cmd.append(branch_name)
717 cmd.append(branch_name)
718 self.run_git_command(cmd, fail_on_stderr=False)
718 self.run_git_command(cmd, fail_on_stderr=False)
719
719
720 def _identify(self):
720 def _identify(self):
721 """
721 """
722 Return the current state of the working directory.
722 Return the current state of the working directory.
723 """
723 """
724 if self.bare:
724 if self.bare:
725 raise RepositoryError('Bare git repos do not have active branches')
725 raise RepositoryError('Bare git repos do not have active branches')
726
726
727 if self.is_empty():
727 if self.is_empty():
728 return None
728 return None
729
729
730 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
730 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
731 return stdout.strip()
731 return stdout.strip()
732
732
733 def _local_clone(self, clone_path, branch_name, source_branch=None):
733 def _local_clone(self, clone_path, branch_name, source_branch=None):
734 """
734 """
735 Create a local clone of the current repo.
735 Create a local clone of the current repo.
736 """
736 """
737 # N.B.(skreft): the --branch option is required as otherwise the shallow
737 # N.B.(skreft): the --branch option is required as otherwise the shallow
738 # clone will only fetch the active branch.
738 # clone will only fetch the active branch.
739 cmd = ['clone', '--branch', branch_name,
739 cmd = ['clone', '--branch', branch_name,
740 self.path, os.path.abspath(clone_path)]
740 self.path, os.path.abspath(clone_path)]
741
741
742 self.run_git_command(cmd, fail_on_stderr=False)
742 self.run_git_command(cmd, fail_on_stderr=False)
743
743
744 # if we get the different source branch, make sure we also fetch it for
744 # if we get the different source branch, make sure we also fetch it for
745 # merge conditions
745 # merge conditions
746 if source_branch and source_branch != branch_name:
746 if source_branch and source_branch != branch_name:
747 # check if the ref exists.
747 # check if the ref exists.
748 shadow_repo = GitRepository(os.path.abspath(clone_path))
748 shadow_repo = GitRepository(os.path.abspath(clone_path))
749 if shadow_repo.get_remote_ref(source_branch):
749 if shadow_repo.get_remote_ref(source_branch):
750 cmd = ['fetch', self.path, source_branch]
750 cmd = ['fetch', self.path, source_branch]
751 self.run_git_command(cmd, fail_on_stderr=False)
751 self.run_git_command(cmd, fail_on_stderr=False)
752
752
753 def _local_fetch(self, repository_path, branch_name, use_origin=False):
753 def _local_fetch(self, repository_path, branch_name, use_origin=False):
754 """
754 """
755 Fetch a branch from a local repository.
755 Fetch a branch from a local repository.
756 """
756 """
757 repository_path = os.path.abspath(repository_path)
757 repository_path = os.path.abspath(repository_path)
758 if repository_path == self.path:
758 if repository_path == self.path:
759 raise ValueError('Cannot fetch from the same repository')
759 raise ValueError('Cannot fetch from the same repository')
760
760
761 if use_origin:
761 if use_origin:
762 branch_name = '+{branch}:refs/heads/{branch}'.format(
762 branch_name = '+{branch}:refs/heads/{branch}'.format(
763 branch=branch_name)
763 branch=branch_name)
764
764
765 cmd = ['fetch', '--no-tags', '--update-head-ok',
765 cmd = ['fetch', '--no-tags', '--update-head-ok',
766 repository_path, branch_name]
766 repository_path, branch_name]
767 self.run_git_command(cmd, fail_on_stderr=False)
767 self.run_git_command(cmd, fail_on_stderr=False)
768
768
769 def _local_reset(self, branch_name):
769 def _local_reset(self, branch_name):
770 branch_name = '{}'.format(branch_name)
770 branch_name = '{}'.format(branch_name)
771 cmd = ['reset', '--hard', branch_name, '--']
771 cmd = ['reset', '--hard', branch_name, '--']
772 self.run_git_command(cmd, fail_on_stderr=False)
772 self.run_git_command(cmd, fail_on_stderr=False)
773
773
774 def _last_fetch_heads(self):
774 def _last_fetch_heads(self):
775 """
775 """
776 Return the last fetched heads that need merging.
776 Return the last fetched heads that need merging.
777
777
778 The algorithm is defined at
778 The algorithm is defined at
779 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
779 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
780 """
780 """
781 if not self.bare:
781 if not self.bare:
782 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
782 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
783 else:
783 else:
784 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
784 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
785
785
786 heads = []
786 heads = []
787 with open(fetch_heads_path) as f:
787 with open(fetch_heads_path) as f:
788 for line in f:
788 for line in f:
789 if ' not-for-merge ' in line:
789 if ' not-for-merge ' in line:
790 continue
790 continue
791 line = re.sub('\t.*', '', line, flags=re.DOTALL)
791 line = re.sub('\t.*', '', line, flags=re.DOTALL)
792 heads.append(line)
792 heads.append(line)
793
793
794 return heads
794 return heads
795
795
796 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
796 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
797 return GitRepository(shadow_repository_path)
797 return GitRepository(shadow_repository_path)
798
798
799 def _local_pull(self, repository_path, branch_name, ff_only=True):
799 def _local_pull(self, repository_path, branch_name, ff_only=True):
800 """
800 """
801 Pull a branch from a local repository.
801 Pull a branch from a local repository.
802 """
802 """
803 if self.bare:
803 if self.bare:
804 raise RepositoryError('Cannot pull into a bare git repository')
804 raise RepositoryError('Cannot pull into a bare git repository')
805 # N.B.(skreft): The --ff-only option is to make sure this is a
805 # N.B.(skreft): The --ff-only option is to make sure this is a
806 # fast-forward (i.e., we are only pulling new changes and there are no
806 # fast-forward (i.e., we are only pulling new changes and there are no
807 # conflicts with our current branch)
807 # conflicts with our current branch)
808 # Additionally, that option needs to go before --no-tags, otherwise git
808 # Additionally, that option needs to go before --no-tags, otherwise git
809 # pull complains about it being an unknown flag.
809 # pull complains about it being an unknown flag.
810 cmd = ['pull']
810 cmd = ['pull']
811 if ff_only:
811 if ff_only:
812 cmd.append('--ff-only')
812 cmd.append('--ff-only')
813 cmd.extend(['--no-tags', repository_path, branch_name])
813 cmd.extend(['--no-tags', repository_path, branch_name])
814 self.run_git_command(cmd, fail_on_stderr=False)
814 self.run_git_command(cmd, fail_on_stderr=False)
815
815
816 def _local_merge(self, merge_message, user_name, user_email, heads):
816 def _local_merge(self, merge_message, user_name, user_email, heads):
817 """
817 """
818 Merge the given head into the checked out branch.
818 Merge the given head into the checked out branch.
819
819
820 It will force a merge commit.
820 It will force a merge commit.
821
821
822 Currently it raises an error if the repo is empty, as it is not possible
822 Currently it raises an error if the repo is empty, as it is not possible
823 to create a merge commit in an empty repo.
823 to create a merge commit in an empty repo.
824
824
825 :param merge_message: The message to use for the merge commit.
825 :param merge_message: The message to use for the merge commit.
826 :param heads: the heads to merge.
826 :param heads: the heads to merge.
827 """
827 """
828 if self.bare:
828 if self.bare:
829 raise RepositoryError('Cannot merge into a bare git repository')
829 raise RepositoryError('Cannot merge into a bare git repository')
830
830
831 if not heads:
831 if not heads:
832 return
832 return
833
833
834 if self.is_empty():
834 if self.is_empty():
835 # TODO(skreft): do somehting more robust in this case.
835 # TODO(skreft): do somehting more robust in this case.
836 raise RepositoryError(
836 raise RepositoryError(
837 'Do not know how to merge into empty repositories yet')
837 'Do not know how to merge into empty repositories yet')
838
838
839 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
839 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
840 # commit message. We also specify the user who is doing the merge.
840 # commit message. We also specify the user who is doing the merge.
841 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
841 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
842 '-c', 'user.email=%s' % safe_str(user_email),
842 '-c', 'user.email=%s' % safe_str(user_email),
843 'merge', '--no-ff', '-m', safe_str(merge_message)]
843 'merge', '--no-ff', '-m', safe_str(merge_message)]
844 cmd.extend(heads)
844 cmd.extend(heads)
845 try:
845 try:
846 output = self.run_git_command(cmd, fail_on_stderr=False)
846 output = self.run_git_command(cmd, fail_on_stderr=False)
847 except RepositoryError:
847 except RepositoryError:
848 # Cleanup any merge leftovers
848 # Cleanup any merge leftovers
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850 raise
850 raise
851
851
852 def _local_push(
852 def _local_push(
853 self, source_branch, repository_path, target_branch,
853 self, source_branch, repository_path, target_branch,
854 enable_hooks=False, rc_scm_data=None):
854 enable_hooks=False, rc_scm_data=None):
855 """
855 """
856 Push the source_branch to the given repository and target_branch.
856 Push the source_branch to the given repository and target_branch.
857
857
858 Currently it if the target_branch is not master and the target repo is
858 Currently it if the target_branch is not master and the target repo is
859 empty, the push will work, but then GitRepository won't be able to find
859 empty, the push will work, but then GitRepository won't be able to find
860 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
860 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
861 pointing to master, which does not exist).
861 pointing to master, which does not exist).
862
862
863 It does not run the hooks in the target repo.
863 It does not run the hooks in the target repo.
864 """
864 """
865 # TODO(skreft): deal with the case in which the target repo is empty,
865 # TODO(skreft): deal with the case in which the target repo is empty,
866 # and the target_branch is not master.
866 # and the target_branch is not master.
867 target_repo = GitRepository(repository_path)
867 target_repo = GitRepository(repository_path)
868 if (not target_repo.bare and
868 if (not target_repo.bare and
869 target_repo._current_branch() == target_branch):
869 target_repo._current_branch() == target_branch):
870 # Git prevents pushing to the checked out branch, so simulate it by
870 # Git prevents pushing to the checked out branch, so simulate it by
871 # pulling into the target repository.
871 # pulling into the target repository.
872 target_repo._local_pull(self.path, source_branch)
872 target_repo._local_pull(self.path, source_branch)
873 else:
873 else:
874 cmd = ['push', os.path.abspath(repository_path),
874 cmd = ['push', os.path.abspath(repository_path),
875 '%s:%s' % (source_branch, target_branch)]
875 '%s:%s' % (source_branch, target_branch)]
876 gitenv = {}
876 gitenv = {}
877 if rc_scm_data:
877 if rc_scm_data:
878 gitenv.update({'RC_SCM_DATA': rc_scm_data})
878 gitenv.update({'RC_SCM_DATA': rc_scm_data})
879
879
880 if not enable_hooks:
880 if not enable_hooks:
881 gitenv['RC_SKIP_HOOKS'] = '1'
881 gitenv['RC_SKIP_HOOKS'] = '1'
882 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
882 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
883
883
884 def _get_new_pr_branch(self, source_branch, target_branch):
884 def _get_new_pr_branch(self, source_branch, target_branch):
885 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
885 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
886 pr_branches = []
886 pr_branches = []
887 for branch in self.branches:
887 for branch in self.branches:
888 if branch.startswith(prefix):
888 if branch.startswith(prefix):
889 pr_branches.append(int(branch[len(prefix):]))
889 pr_branches.append(int(branch[len(prefix):]))
890
890
891 if not pr_branches:
891 if not pr_branches:
892 branch_id = 0
892 branch_id = 0
893 else:
893 else:
894 branch_id = max(pr_branches) + 1
894 branch_id = max(pr_branches) + 1
895
895
896 return '%s%d' % (prefix, branch_id)
896 return '%s%d' % (prefix, branch_id)
897
897
898 def _maybe_prepare_merge_workspace(
898 def _maybe_prepare_merge_workspace(
899 self, repo_id, workspace_id, target_ref, source_ref):
899 self, repo_id, workspace_id, target_ref, source_ref):
900 shadow_repository_path = self._get_shadow_repository_path(
900 shadow_repository_path = self._get_shadow_repository_path(
901 repo_id, workspace_id)
901 repo_id, workspace_id)
902 if not os.path.exists(shadow_repository_path):
902 if not os.path.exists(shadow_repository_path):
903 self._local_clone(
903 self._local_clone(
904 shadow_repository_path, target_ref.name, source_ref.name)
904 shadow_repository_path, target_ref.name, source_ref.name)
905 log.debug(
905 log.debug(
906 'Prepared shadow repository in %s', shadow_repository_path)
906 'Prepared shadow repository in %s', shadow_repository_path)
907
907
908 return shadow_repository_path
908 return shadow_repository_path
909
909
910 def _merge_repo(self, repo_id, workspace_id, target_ref,
910 def _merge_repo(self, repo_id, workspace_id, target_ref,
911 source_repo, source_ref, merge_message,
911 source_repo, source_ref, merge_message,
912 merger_name, merger_email, dry_run=False,
912 merger_name, merger_email, dry_run=False,
913 use_rebase=False, close_branch=False):
913 use_rebase=False, close_branch=False):
914
915 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
916 'rebase' if use_rebase else 'merge', dry_run)
914 if target_ref.commit_id != self.branches[target_ref.name]:
917 if target_ref.commit_id != self.branches[target_ref.name]:
915 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
918 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
916 target_ref.commit_id, self.branches[target_ref.name])
919 target_ref.commit_id, self.branches[target_ref.name])
917 return MergeResponse(
920 return MergeResponse(
918 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
921 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
922 metadata={'target_ref': target_ref})
919
923
920 shadow_repository_path = self._maybe_prepare_merge_workspace(
924 shadow_repository_path = self._maybe_prepare_merge_workspace(
921 repo_id, workspace_id, target_ref, source_ref)
925 repo_id, workspace_id, target_ref, source_ref)
922 shadow_repo = self._get_shadow_instance(shadow_repository_path)
926 shadow_repo = self._get_shadow_instance(shadow_repository_path)
923
927
924 # checkout source, if it's different. Otherwise we could not
928 # checkout source, if it's different. Otherwise we could not
925 # fetch proper commits for merge testing
929 # fetch proper commits for merge testing
926 if source_ref.name != target_ref.name:
930 if source_ref.name != target_ref.name:
927 if shadow_repo.get_remote_ref(source_ref.name):
931 if shadow_repo.get_remote_ref(source_ref.name):
928 shadow_repo._checkout(source_ref.name, force=True)
932 shadow_repo._checkout(source_ref.name, force=True)
929
933
930 # checkout target, and fetch changes
934 # checkout target, and fetch changes
931 shadow_repo._checkout(target_ref.name, force=True)
935 shadow_repo._checkout(target_ref.name, force=True)
932
936
933 # fetch/reset pull the target, in case it is changed
937 # fetch/reset pull the target, in case it is changed
934 # this handles even force changes
938 # this handles even force changes
935 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
939 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
936 shadow_repo._local_reset(target_ref.name)
940 shadow_repo._local_reset(target_ref.name)
937
941
938 # Need to reload repo to invalidate the cache, or otherwise we cannot
942 # Need to reload repo to invalidate the cache, or otherwise we cannot
939 # retrieve the last target commit.
943 # retrieve the last target commit.
940 shadow_repo = self._get_shadow_instance(shadow_repository_path)
944 shadow_repo = self._get_shadow_instance(shadow_repository_path)
941 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
945 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
942 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
946 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
943 target_ref, target_ref.commit_id,
947 target_ref, target_ref.commit_id,
944 shadow_repo.branches[target_ref.name])
948 shadow_repo.branches[target_ref.name])
945 return MergeResponse(
949 return MergeResponse(
946 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
950 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
951 metadata={'target_ref': target_ref})
947
952
948 # calculate new branch
953 # calculate new branch
949 pr_branch = shadow_repo._get_new_pr_branch(
954 pr_branch = shadow_repo._get_new_pr_branch(
950 source_ref.name, target_ref.name)
955 source_ref.name, target_ref.name)
951 log.debug('using pull-request merge branch: `%s`', pr_branch)
956 log.debug('using pull-request merge branch: `%s`', pr_branch)
952 # checkout to temp branch, and fetch changes
957 # checkout to temp branch, and fetch changes
953 shadow_repo._checkout(pr_branch, create=True)
958 shadow_repo._checkout(pr_branch, create=True)
954 try:
959 try:
955 shadow_repo._local_fetch(source_repo.path, source_ref.name)
960 shadow_repo._local_fetch(source_repo.path, source_ref.name)
956 except RepositoryError:
961 except RepositoryError:
957 log.exception('Failure when doing local fetch on git shadow repo')
962 log.exception('Failure when doing local fetch on '
963 'shadow repo: %s', shadow_repo)
958 return MergeResponse(
964 return MergeResponse(
959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
965 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
966 metadata={'source_ref': source_ref})
960
967
961 merge_ref = None
968 merge_ref = None
962 merge_failure_reason = MergeFailureReason.NONE
969 merge_failure_reason = MergeFailureReason.NONE
970 metadata = {}
963 try:
971 try:
964 shadow_repo._local_merge(merge_message, merger_name, merger_email,
972 shadow_repo._local_merge(merge_message, merger_name, merger_email,
965 [source_ref.commit_id])
973 [source_ref.commit_id])
966 merge_possible = True
974 merge_possible = True
967
975
968 # Need to reload repo to invalidate the cache, or otherwise we
976 # Need to reload repo to invalidate the cache, or otherwise we
969 # cannot retrieve the merge commit.
977 # cannot retrieve the merge commit.
970 shadow_repo = GitRepository(shadow_repository_path)
978 shadow_repo = GitRepository(shadow_repository_path)
971 merge_commit_id = shadow_repo.branches[pr_branch]
979 merge_commit_id = shadow_repo.branches[pr_branch]
972
980
973 # Set a reference pointing to the merge commit. This reference may
981 # Set a reference pointing to the merge commit. This reference may
974 # be used to easily identify the last successful merge commit in
982 # be used to easily identify the last successful merge commit in
975 # the shadow repository.
983 # the shadow repository.
976 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
984 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
977 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
985 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
978 except RepositoryError:
986 except RepositoryError:
979 log.exception('Failure when doing local merge on git shadow repo')
987 log.exception('Failure when doing local merge on git shadow repo')
980 merge_possible = False
988 merge_possible = False
981 merge_failure_reason = MergeFailureReason.MERGE_FAILED
989 merge_failure_reason = MergeFailureReason.MERGE_FAILED
982
990
983 if merge_possible and not dry_run:
991 if merge_possible and not dry_run:
984 try:
992 try:
985 shadow_repo._local_push(
993 shadow_repo._local_push(
986 pr_branch, self.path, target_ref.name, enable_hooks=True,
994 pr_branch, self.path, target_ref.name, enable_hooks=True,
987 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
995 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
988 merge_succeeded = True
996 merge_succeeded = True
989 except RepositoryError:
997 except RepositoryError:
990 log.exception(
998 log.exception(
991 'Failure when doing local push on git shadow repo')
999 'Failure when doing local push from the shadow '
1000 'repository to the target repository at %s.', self.path)
992 merge_succeeded = False
1001 merge_succeeded = False
993 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1002 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1003 metadata['target'] = 'git shadow repo'
1004 metadata['merge_commit'] = pr_branch
994 else:
1005 else:
995 merge_succeeded = False
1006 merge_succeeded = False
996
1007
997 return MergeResponse(
1008 return MergeResponse(
998 merge_possible, merge_succeeded, merge_ref,
1009 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
999 merge_failure_reason)
1010 metadata=metadata)
@@ -1,924 +1,932 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 do_workspace_checkout=False, with_wire=None, bare=False):
60 do_workspace_checkout=False, with_wire=None, bare=False):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param do_workspace_checkout=False: sets update of working copy after
70 :param do_workspace_checkout=False: sets update of working copy after
71 making a clone
71 making a clone
72 :param bare: not used, compatible with other VCS
72 :param bare: not used, compatible with other VCS
73 """
73 """
74
74
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
77 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
78 # special requirements
78 # special requirements
79 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
80 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
81 self.with_wire = with_wire
81 self.with_wire = with_wire
82
82
83 self._init_repo(create, src_url, do_workspace_checkout)
83 self._init_repo(create, src_url, do_workspace_checkout)
84
84
85 # caches
85 # caches
86 self._commit_ids = {}
86 self._commit_ids = {}
87
87
88 @LazyProperty
88 @LazyProperty
89 def _remote(self):
89 def _remote(self):
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91
91
92 @LazyProperty
92 @LazyProperty
93 def commit_ids(self):
93 def commit_ids(self):
94 """
94 """
95 Returns list of commit ids, in ascending order. Being lazy
95 Returns list of commit ids, in ascending order. Being lazy
96 attribute allows external tools to inject shas from cache.
96 attribute allows external tools to inject shas from cache.
97 """
97 """
98 commit_ids = self._get_all_commit_ids()
98 commit_ids = self._get_all_commit_ids()
99 self._rebuild_cache(commit_ids)
99 self._rebuild_cache(commit_ids)
100 return commit_ids
100 return commit_ids
101
101
102 def _rebuild_cache(self, commit_ids):
102 def _rebuild_cache(self, commit_ids):
103 self._commit_ids = dict((commit_id, index)
103 self._commit_ids = dict((commit_id, index)
104 for index, commit_id in enumerate(commit_ids))
104 for index, commit_id in enumerate(commit_ids))
105
105
106 @LazyProperty
106 @LazyProperty
107 def branches(self):
107 def branches(self):
108 return self._get_branches()
108 return self._get_branches()
109
109
110 @LazyProperty
110 @LazyProperty
111 def branches_closed(self):
111 def branches_closed(self):
112 return self._get_branches(active=False, closed=True)
112 return self._get_branches(active=False, closed=True)
113
113
114 @LazyProperty
114 @LazyProperty
115 def branches_all(self):
115 def branches_all(self):
116 all_branches = {}
116 all_branches = {}
117 all_branches.update(self.branches)
117 all_branches.update(self.branches)
118 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
119 return all_branches
119 return all_branches
120
120
121 def _get_branches(self, active=True, closed=False):
121 def _get_branches(self, active=True, closed=False):
122 """
122 """
123 Gets branches for this repository
123 Gets branches for this repository
124 Returns only not closed active branches by default
124 Returns only not closed active branches by default
125
125
126 :param active: return also active branches
126 :param active: return also active branches
127 :param closed: return also closed branches
127 :param closed: return also closed branches
128
128
129 """
129 """
130 if self.is_empty():
130 if self.is_empty():
131 return {}
131 return {}
132
132
133 def get_name(ctx):
133 def get_name(ctx):
134 return ctx[0]
134 return ctx[0]
135
135
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 self._remote.branches(active, closed).items()]
137 self._remote.branches(active, closed).items()]
138
138
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140
140
141 @LazyProperty
141 @LazyProperty
142 def tags(self):
142 def tags(self):
143 """
143 """
144 Gets tags for this repository
144 Gets tags for this repository
145 """
145 """
146 return self._get_tags()
146 return self._get_tags()
147
147
148 def _get_tags(self):
148 def _get_tags(self):
149 if self.is_empty():
149 if self.is_empty():
150 return {}
150 return {}
151
151
152 def get_name(ctx):
152 def get_name(ctx):
153 return ctx[0]
153 return ctx[0]
154
154
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 self._remote.tags().items()]
156 self._remote.tags().items()]
157
157
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159
159
160 def tag(self, name, user, commit_id=None, message=None, date=None,
160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 **kwargs):
161 **kwargs):
162 """
162 """
163 Creates and returns a tag for the given ``commit_id``.
163 Creates and returns a tag for the given ``commit_id``.
164
164
165 :param name: name for new tag
165 :param name: name for new tag
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param commit_id: commit id for which new tag would be created
167 :param commit_id: commit id for which new tag would be created
168 :param message: message of the tag's commit
168 :param message: message of the tag's commit
169 :param date: date of tag's commit
169 :param date: date of tag's commit
170
170
171 :raises TagAlreadyExistError: if tag with same name already exists
171 :raises TagAlreadyExistError: if tag with same name already exists
172 """
172 """
173 if name in self.tags:
173 if name in self.tags:
174 raise TagAlreadyExistError("Tag %s already exists" % name)
174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 commit = self.get_commit(commit_id=commit_id)
175 commit = self.get_commit(commit_id=commit_id)
176 local = kwargs.setdefault('local', False)
176 local = kwargs.setdefault('local', False)
177
177
178 if message is None:
178 if message is None:
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180
180
181 date, tz = date_to_timestamp_plus_offset(date)
181 date, tz = date_to_timestamp_plus_offset(date)
182
182
183 self._remote.tag(
183 self._remote.tag(
184 name, commit.raw_id, message, local, user, date, tz)
184 name, commit.raw_id, message, local, user, date, tz)
185 self._remote.invalidate_vcs_cache()
185 self._remote.invalidate_vcs_cache()
186
186
187 # Reinitialize tags
187 # Reinitialize tags
188 self.tags = self._get_tags()
188 self.tags = self._get_tags()
189 tag_id = self.tags[name]
189 tag_id = self.tags[name]
190
190
191 return self.get_commit(commit_id=tag_id)
191 return self.get_commit(commit_id=tag_id)
192
192
193 def remove_tag(self, name, user, message=None, date=None):
193 def remove_tag(self, name, user, message=None, date=None):
194 """
194 """
195 Removes tag with the given `name`.
195 Removes tag with the given `name`.
196
196
197 :param name: name of the tag to be removed
197 :param name: name of the tag to be removed
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param message: message of the tag's removal commit
199 :param message: message of the tag's removal commit
200 :param date: date of tag's removal commit
200 :param date: date of tag's removal commit
201
201
202 :raises TagDoesNotExistError: if tag with given name does not exists
202 :raises TagDoesNotExistError: if tag with given name does not exists
203 """
203 """
204 if name not in self.tags:
204 if name not in self.tags:
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 if message is None:
206 if message is None:
207 message = "Removed tag %s" % name
207 message = "Removed tag %s" % name
208 local = False
208 local = False
209
209
210 date, tz = date_to_timestamp_plus_offset(date)
210 date, tz = date_to_timestamp_plus_offset(date)
211
211
212 self._remote.tag(name, nullid, message, local, user, date, tz)
212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 self._remote.invalidate_vcs_cache()
213 self._remote.invalidate_vcs_cache()
214 self.tags = self._get_tags()
214 self.tags = self._get_tags()
215
215
216 @LazyProperty
216 @LazyProperty
217 def bookmarks(self):
217 def bookmarks(self):
218 """
218 """
219 Gets bookmarks for this repository
219 Gets bookmarks for this repository
220 """
220 """
221 return self._get_bookmarks()
221 return self._get_bookmarks()
222
222
223 def _get_bookmarks(self):
223 def _get_bookmarks(self):
224 if self.is_empty():
224 if self.is_empty():
225 return {}
225 return {}
226
226
227 def get_name(ctx):
227 def get_name(ctx):
228 return ctx[0]
228 return ctx[0]
229
229
230 _bookmarks = [
230 _bookmarks = [
231 (safe_unicode(n), hexlify(h)) for n, h in
231 (safe_unicode(n), hexlify(h)) for n, h in
232 self._remote.bookmarks().items()]
232 self._remote.bookmarks().items()]
233
233
234 return OrderedDict(sorted(_bookmarks, key=get_name))
234 return OrderedDict(sorted(_bookmarks, key=get_name))
235
235
236 def _get_all_commit_ids(self):
236 def _get_all_commit_ids(self):
237 return self._remote.get_all_commit_ids('visible')
237 return self._remote.get_all_commit_ids('visible')
238
238
239 def get_diff(
239 def get_diff(
240 self, commit1, commit2, path='', ignore_whitespace=False,
240 self, commit1, commit2, path='', ignore_whitespace=False,
241 context=3, path1=None):
241 context=3, path1=None):
242 """
242 """
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 `commit2` since `commit1`.
244 `commit2` since `commit1`.
245
245
246 :param commit1: Entry point from which diff is shown. Can be
246 :param commit1: Entry point from which diff is shown. Can be
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 the changes since empty state of the repository until `commit2`
248 the changes since empty state of the repository until `commit2`
249 :param commit2: Until which commit changes should be shown.
249 :param commit2: Until which commit changes should be shown.
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 changes. Defaults to ``False``.
251 changes. Defaults to ``False``.
252 :param context: How many lines before/after changed lines should be
252 :param context: How many lines before/after changed lines should be
253 shown. Defaults to ``3``.
253 shown. Defaults to ``3``.
254 """
254 """
255 self._validate_diff_commits(commit1, commit2)
255 self._validate_diff_commits(commit1, commit2)
256 if path1 is not None and path1 != path:
256 if path1 is not None and path1 != path:
257 raise ValueError("Diff of two different paths not supported.")
257 raise ValueError("Diff of two different paths not supported.")
258
258
259 if path:
259 if path:
260 file_filter = [self.path, path]
260 file_filter = [self.path, path]
261 else:
261 else:
262 file_filter = None
262 file_filter = None
263
263
264 diff = self._remote.diff(
264 diff = self._remote.diff(
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 opt_git=True, opt_ignorews=ignore_whitespace,
266 opt_git=True, opt_ignorews=ignore_whitespace,
267 context=context)
267 context=context)
268 return MercurialDiff(diff)
268 return MercurialDiff(diff)
269
269
270 def strip(self, commit_id, branch=None):
270 def strip(self, commit_id, branch=None):
271 self._remote.strip(commit_id, update=False, backup="none")
271 self._remote.strip(commit_id, update=False, backup="none")
272
272
273 self._remote.invalidate_vcs_cache()
273 self._remote.invalidate_vcs_cache()
274 self.commit_ids = self._get_all_commit_ids()
274 self.commit_ids = self._get_all_commit_ids()
275 self._rebuild_cache(self.commit_ids)
275 self._rebuild_cache(self.commit_ids)
276
276
277 def verify(self):
277 def verify(self):
278 verify = self._remote.verify()
278 verify = self._remote.verify()
279
279
280 self._remote.invalidate_vcs_cache()
280 self._remote.invalidate_vcs_cache()
281 return verify
281 return verify
282
282
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 if commit_id1 == commit_id2:
284 if commit_id1 == commit_id2:
285 return commit_id1
285 return commit_id1
286
286
287 ancestors = self._remote.revs_from_revspec(
287 ancestors = self._remote.revs_from_revspec(
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 other_path=repo2.path)
289 other_path=repo2.path)
290 return repo2[ancestors[0]].raw_id if ancestors else None
290 return repo2[ancestors[0]].raw_id if ancestors else None
291
291
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 if commit_id1 == commit_id2:
293 if commit_id1 == commit_id2:
294 commits = []
294 commits = []
295 else:
295 else:
296 if merge:
296 if merge:
297 indexes = self._remote.revs_from_revspec(
297 indexes = self._remote.revs_from_revspec(
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 else:
300 else:
301 indexes = self._remote.revs_from_revspec(
301 indexes = self._remote.revs_from_revspec(
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 commit_id1, other_path=repo2.path)
303 commit_id1, other_path=repo2.path)
304
304
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 for idx in indexes]
306 for idx in indexes]
307
307
308 return commits
308 return commits
309
309
310 @staticmethod
310 @staticmethod
311 def check_url(url, config):
311 def check_url(url, config):
312 """
312 """
313 Function will check given url and try to verify if it's a valid
313 Function will check given url and try to verify if it's a valid
314 link. Sometimes it may happened that mercurial will issue basic
314 link. Sometimes it may happened that mercurial will issue basic
315 auth request that can cause whole API to hang when used from python
315 auth request that can cause whole API to hang when used from python
316 or other external calls.
316 or other external calls.
317
317
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 when the return code is non 200
319 when the return code is non 200
320 """
320 """
321 # check first if it's not an local url
321 # check first if it's not an local url
322 if os.path.isdir(url) or url.startswith('file:'):
322 if os.path.isdir(url) or url.startswith('file:'):
323 return True
323 return True
324
324
325 # Request the _remote to verify the url
325 # Request the _remote to verify the url
326 return connection.Hg.check_url(url, config.serialize())
326 return connection.Hg.check_url(url, config.serialize())
327
327
328 @staticmethod
328 @staticmethod
329 def is_valid_repository(path):
329 def is_valid_repository(path):
330 return os.path.isdir(os.path.join(path, '.hg'))
330 return os.path.isdir(os.path.join(path, '.hg'))
331
331
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 """
333 """
334 Function will check for mercurial repository in given path. If there
334 Function will check for mercurial repository in given path. If there
335 is no repository in that path it will raise an exception unless
335 is no repository in that path it will raise an exception unless
336 `create` parameter is set to True - in that case repository would
336 `create` parameter is set to True - in that case repository would
337 be created.
337 be created.
338
338
339 If `src_url` is given, would try to clone repository from the
339 If `src_url` is given, would try to clone repository from the
340 location at given clone_point. Additionally it'll make update to
340 location at given clone_point. Additionally it'll make update to
341 working copy accordingly to `do_workspace_checkout` flag.
341 working copy accordingly to `do_workspace_checkout` flag.
342 """
342 """
343 if create and os.path.exists(self.path):
343 if create and os.path.exists(self.path):
344 raise RepositoryError(
344 raise RepositoryError(
345 "Cannot create repository at %s, location already exist"
345 "Cannot create repository at %s, location already exist"
346 % self.path)
346 % self.path)
347
347
348 if src_url:
348 if src_url:
349 url = str(self._get_url(src_url))
349 url = str(self._get_url(src_url))
350 MercurialRepository.check_url(url, self.config)
350 MercurialRepository.check_url(url, self.config)
351
351
352 self._remote.clone(url, self.path, do_workspace_checkout)
352 self._remote.clone(url, self.path, do_workspace_checkout)
353
353
354 # Don't try to create if we've already cloned repo
354 # Don't try to create if we've already cloned repo
355 create = False
355 create = False
356
356
357 if create:
357 if create:
358 os.makedirs(self.path, mode=0o755)
358 os.makedirs(self.path, mode=0o755)
359
359
360 self._remote.localrepository(create)
360 self._remote.localrepository(create)
361
361
362 @LazyProperty
362 @LazyProperty
363 def in_memory_commit(self):
363 def in_memory_commit(self):
364 return MercurialInMemoryCommit(self)
364 return MercurialInMemoryCommit(self)
365
365
366 @LazyProperty
366 @LazyProperty
367 def description(self):
367 def description(self):
368 description = self._remote.get_config_value(
368 description = self._remote.get_config_value(
369 'web', 'description', untrusted=True)
369 'web', 'description', untrusted=True)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371
371
372 @LazyProperty
372 @LazyProperty
373 def contact(self):
373 def contact(self):
374 contact = (
374 contact = (
375 self._remote.get_config_value("web", "contact") or
375 self._remote.get_config_value("web", "contact") or
376 self._remote.get_config_value("ui", "username"))
376 self._remote.get_config_value("ui", "username"))
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378
378
379 @LazyProperty
379 @LazyProperty
380 def last_change(self):
380 def last_change(self):
381 """
381 """
382 Returns last change made on this repository as
382 Returns last change made on this repository as
383 `datetime.datetime` object.
383 `datetime.datetime` object.
384 """
384 """
385 try:
385 try:
386 return self.get_commit().date
386 return self.get_commit().date
387 except RepositoryError:
387 except RepositoryError:
388 tzoffset = makedate()[1]
388 tzoffset = makedate()[1]
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390
390
391 def _get_fs_mtime(self):
391 def _get_fs_mtime(self):
392 # fallback to filesystem
392 # fallback to filesystem
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 st_path = os.path.join(self.path, '.hg', "store")
394 st_path = os.path.join(self.path, '.hg', "store")
395 if os.path.exists(cl_path):
395 if os.path.exists(cl_path):
396 return os.stat(cl_path).st_mtime
396 return os.stat(cl_path).st_mtime
397 else:
397 else:
398 return os.stat(st_path).st_mtime
398 return os.stat(st_path).st_mtime
399
399
400 def _get_url(self, url):
400 def _get_url(self, url):
401 """
401 """
402 Returns normalized url. If schema is not given, would fall
402 Returns normalized url. If schema is not given, would fall
403 to filesystem
403 to filesystem
404 (``file:///``) schema.
404 (``file:///``) schema.
405 """
405 """
406 url = url.encode('utf8')
406 url = url.encode('utf8')
407 if url != 'default' and '://' not in url:
407 if url != 'default' and '://' not in url:
408 url = "file:" + urllib.pathname2url(url)
408 url = "file:" + urllib.pathname2url(url)
409 return url
409 return url
410
410
411 def get_hook_location(self):
411 def get_hook_location(self):
412 """
412 """
413 returns absolute path to location where hooks are stored
413 returns absolute path to location where hooks are stored
414 """
414 """
415 return os.path.join(self.path, '.hg', '.hgrc')
415 return os.path.join(self.path, '.hg', '.hgrc')
416
416
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
418 """
418 """
419 Returns ``MercurialCommit`` object representing repository's
419 Returns ``MercurialCommit`` object representing repository's
420 commit at the given `commit_id` or `commit_idx`.
420 commit at the given `commit_id` or `commit_idx`.
421 """
421 """
422 if self.is_empty():
422 if self.is_empty():
423 raise EmptyRepositoryError("There are no commits yet")
423 raise EmptyRepositoryError("There are no commits yet")
424
424
425 if commit_id is not None:
425 if commit_id is not None:
426 self._validate_commit_id(commit_id)
426 self._validate_commit_id(commit_id)
427 try:
427 try:
428 idx = self._commit_ids[commit_id]
428 idx = self._commit_ids[commit_id]
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
430 except KeyError:
430 except KeyError:
431 pass
431 pass
432 elif commit_idx is not None:
432 elif commit_idx is not None:
433 self._validate_commit_idx(commit_idx)
433 self._validate_commit_idx(commit_idx)
434 try:
434 try:
435 id_ = self.commit_ids[commit_idx]
435 id_ = self.commit_ids[commit_idx]
436 if commit_idx < 0:
436 if commit_idx < 0:
437 commit_idx += len(self.commit_ids)
437 commit_idx += len(self.commit_ids)
438 return MercurialCommit(
438 return MercurialCommit(
439 self, id_, commit_idx, pre_load=pre_load)
439 self, id_, commit_idx, pre_load=pre_load)
440 except IndexError:
440 except IndexError:
441 commit_id = commit_idx
441 commit_id = commit_idx
442 else:
442 else:
443 commit_id = "tip"
443 commit_id = "tip"
444
444
445 if isinstance(commit_id, unicode):
445 if isinstance(commit_id, unicode):
446 commit_id = safe_str(commit_id)
446 commit_id = safe_str(commit_id)
447
447
448 try:
448 try:
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
450 except CommitDoesNotExistError:
450 except CommitDoesNotExistError:
451 msg = "Commit %s does not exist for %s" % (
451 msg = "Commit %s does not exist for %s" % (
452 commit_id, self)
452 commit_id, self)
453 raise CommitDoesNotExistError(msg)
453 raise CommitDoesNotExistError(msg)
454
454
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456
456
457 def get_commits(
457 def get_commits(
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 branch_name=None, show_hidden=False, pre_load=None):
459 branch_name=None, show_hidden=False, pre_load=None):
460 """
460 """
461 Returns generator of ``MercurialCommit`` objects from start to end
461 Returns generator of ``MercurialCommit`` objects from start to end
462 (both are inclusive)
462 (both are inclusive)
463
463
464 :param start_id: None, str(commit_id)
464 :param start_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
466 :param start_date: if specified, commits with commit date less than
466 :param start_date: if specified, commits with commit date less than
467 ``start_date`` would be filtered out from returned set
467 ``start_date`` would be filtered out from returned set
468 :param end_date: if specified, commits with commit date greater than
468 :param end_date: if specified, commits with commit date greater than
469 ``end_date`` would be filtered out from returned set
469 ``end_date`` would be filtered out from returned set
470 :param branch_name: if specified, commits not reachable from given
470 :param branch_name: if specified, commits not reachable from given
471 branch would be filtered out from returned set
471 branch would be filtered out from returned set
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 Mercurial evolve
473 Mercurial evolve
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 exist.
475 exist.
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 ``end`` could not be found.
477 ``end`` could not be found.
478 """
478 """
479 # actually we should check now if it's not an empty repo
479 # actually we should check now if it's not an empty repo
480 branch_ancestors = False
480 branch_ancestors = False
481 if self.is_empty():
481 if self.is_empty():
482 raise EmptyRepositoryError("There are no commits yet")
482 raise EmptyRepositoryError("There are no commits yet")
483 self._validate_branch_name(branch_name)
483 self._validate_branch_name(branch_name)
484
484
485 if start_id is not None:
485 if start_id is not None:
486 self._validate_commit_id(start_id)
486 self._validate_commit_id(start_id)
487 c_start = self.get_commit(commit_id=start_id)
487 c_start = self.get_commit(commit_id=start_id)
488 start_pos = self._commit_ids[c_start.raw_id]
488 start_pos = self._commit_ids[c_start.raw_id]
489 else:
489 else:
490 start_pos = None
490 start_pos = None
491
491
492 if end_id is not None:
492 if end_id is not None:
493 self._validate_commit_id(end_id)
493 self._validate_commit_id(end_id)
494 c_end = self.get_commit(commit_id=end_id)
494 c_end = self.get_commit(commit_id=end_id)
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 else:
496 else:
497 end_pos = None
497 end_pos = None
498
498
499 if None not in [start_id, end_id] and start_pos > end_pos:
499 if None not in [start_id, end_id] and start_pos > end_pos:
500 raise RepositoryError(
500 raise RepositoryError(
501 "Start commit '%s' cannot be after end commit '%s'" %
501 "Start commit '%s' cannot be after end commit '%s'" %
502 (start_id, end_id))
502 (start_id, end_id))
503
503
504 if end_pos is not None:
504 if end_pos is not None:
505 end_pos += 1
505 end_pos += 1
506
506
507 commit_filter = []
507 commit_filter = []
508
508
509 if branch_name and not branch_ancestors:
509 if branch_name and not branch_ancestors:
510 commit_filter.append('branch("%s")' % (branch_name,))
510 commit_filter.append('branch("%s")' % (branch_name,))
511 elif branch_name and branch_ancestors:
511 elif branch_name and branch_ancestors:
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513
513
514 if start_date and not end_date:
514 if start_date and not end_date:
515 commit_filter.append('date(">%s")' % (start_date,))
515 commit_filter.append('date(">%s")' % (start_date,))
516 if end_date and not start_date:
516 if end_date and not start_date:
517 commit_filter.append('date("<%s")' % (end_date,))
517 commit_filter.append('date("<%s")' % (end_date,))
518 if start_date and end_date:
518 if start_date and end_date:
519 commit_filter.append(
519 commit_filter.append(
520 'date(">%s") and date("<%s")' % (start_date, end_date))
520 'date(">%s") and date("<%s")' % (start_date, end_date))
521
521
522 if not show_hidden:
522 if not show_hidden:
523 commit_filter.append('not obsolete()')
523 commit_filter.append('not obsolete()')
524 commit_filter.append('not hidden()')
524 commit_filter.append('not hidden()')
525
525
526 # TODO: johbo: Figure out a simpler way for this solution
526 # TODO: johbo: Figure out a simpler way for this solution
527 collection_generator = CollectionGenerator
527 collection_generator = CollectionGenerator
528 if commit_filter:
528 if commit_filter:
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 revisions = self._remote.rev_range([commit_filter])
530 revisions = self._remote.rev_range([commit_filter])
531 collection_generator = MercurialIndexBasedCollectionGenerator
531 collection_generator = MercurialIndexBasedCollectionGenerator
532 else:
532 else:
533 revisions = self.commit_ids
533 revisions = self.commit_ids
534
534
535 if start_pos or end_pos:
535 if start_pos or end_pos:
536 revisions = revisions[start_pos:end_pos]
536 revisions = revisions[start_pos:end_pos]
537
537
538 return collection_generator(self, revisions, pre_load=pre_load)
538 return collection_generator(self, revisions, pre_load=pre_load)
539
539
540 def pull(self, url, commit_ids=None):
540 def pull(self, url, commit_ids=None):
541 """
541 """
542 Pull changes from external location.
542 Pull changes from external location.
543
543
544 :param commit_ids: Optional. Can be set to a list of commit ids
544 :param commit_ids: Optional. Can be set to a list of commit ids
545 which shall be pulled from the other repository.
545 which shall be pulled from the other repository.
546 """
546 """
547 url = self._get_url(url)
547 url = self._get_url(url)
548 self._remote.pull(url, commit_ids=commit_ids)
548 self._remote.pull(url, commit_ids=commit_ids)
549 self._remote.invalidate_vcs_cache()
549 self._remote.invalidate_vcs_cache()
550
550
551 def fetch(self, url, commit_ids=None):
551 def fetch(self, url, commit_ids=None):
552 """
552 """
553 Backward compatibility with GIT fetch==pull
553 Backward compatibility with GIT fetch==pull
554 """
554 """
555 return self.pull(url, commit_ids=commit_ids)
555 return self.pull(url, commit_ids=commit_ids)
556
556
557 def push(self, url):
557 def push(self, url):
558 url = self._get_url(url)
558 url = self._get_url(url)
559 self._remote.sync_push(url)
559 self._remote.sync_push(url)
560
560
561 def _local_clone(self, clone_path):
561 def _local_clone(self, clone_path):
562 """
562 """
563 Create a local clone of the current repo.
563 Create a local clone of the current repo.
564 """
564 """
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 hooks=False)
566 hooks=False)
567
567
568 def _update(self, revision, clean=False):
568 def _update(self, revision, clean=False):
569 """
569 """
570 Update the working copy to the specified revision.
570 Update the working copy to the specified revision.
571 """
571 """
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 self._remote.update(revision, clean=clean)
573 self._remote.update(revision, clean=clean)
574
574
575 def _identify(self):
575 def _identify(self):
576 """
576 """
577 Return the current state of the working directory.
577 Return the current state of the working directory.
578 """
578 """
579 return self._remote.identify().strip().rstrip('+')
579 return self._remote.identify().strip().rstrip('+')
580
580
581 def _heads(self, branch=None):
581 def _heads(self, branch=None):
582 """
582 """
583 Return the commit ids of the repository heads.
583 Return the commit ids of the repository heads.
584 """
584 """
585 return self._remote.heads(branch=branch).strip().split(' ')
585 return self._remote.heads(branch=branch).strip().split(' ')
586
586
587 def _ancestor(self, revision1, revision2):
587 def _ancestor(self, revision1, revision2):
588 """
588 """
589 Return the common ancestor of the two revisions.
589 Return the common ancestor of the two revisions.
590 """
590 """
591 return self._remote.ancestor(revision1, revision2)
591 return self._remote.ancestor(revision1, revision2)
592
592
593 def _local_push(
593 def _local_push(
594 self, revision, repository_path, push_branches=False,
594 self, revision, repository_path, push_branches=False,
595 enable_hooks=False):
595 enable_hooks=False):
596 """
596 """
597 Push the given revision to the specified repository.
597 Push the given revision to the specified repository.
598
598
599 :param push_branches: allow to create branches in the target repo.
599 :param push_branches: allow to create branches in the target repo.
600 """
600 """
601 self._remote.push(
601 self._remote.push(
602 [revision], repository_path, hooks=enable_hooks,
602 [revision], repository_path, hooks=enable_hooks,
603 push_branches=push_branches)
603 push_branches=push_branches)
604
604
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 source_ref, use_rebase=False, dry_run=False):
606 source_ref, use_rebase=False, dry_run=False):
607 """
607 """
608 Merge the given source_revision into the checked out revision.
608 Merge the given source_revision into the checked out revision.
609
609
610 Returns the commit id of the merge and a boolean indicating if the
610 Returns the commit id of the merge and a boolean indicating if the
611 commit needs to be pushed.
611 commit needs to be pushed.
612 """
612 """
613 self._update(target_ref.commit_id)
613 self._update(target_ref.commit_id, clean=True)
614
614
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617
617
618 if ancestor == source_ref.commit_id:
618 if ancestor == source_ref.commit_id:
619 # Nothing to do, the changes were already integrated
619 # Nothing to do, the changes were already integrated
620 return target_ref.commit_id, False
620 return target_ref.commit_id, False
621
621
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 # In this case we should force a commit message
623 # In this case we should force a commit message
624 return source_ref.commit_id, True
624 return source_ref.commit_id, True
625
625
626 if use_rebase:
626 if use_rebase:
627 try:
627 try:
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 target_ref.commit_id)
629 target_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 self._remote.rebase(
631 self._remote.rebase(
632 source=source_ref.commit_id, dest=target_ref.commit_id)
632 source=source_ref.commit_id, dest=target_ref.commit_id)
633 self._remote.invalidate_vcs_cache()
633 self._remote.invalidate_vcs_cache()
634 self._update(bookmark_name)
634 self._update(bookmark_name, clean=True)
635 return self._identify(), True
635 return self._identify(), True
636 except RepositoryError:
636 except RepositoryError:
637 # The rebase-abort may raise another exception which 'hides'
637 # The rebase-abort may raise another exception which 'hides'
638 # the original one, therefore we log it here.
638 # the original one, therefore we log it here.
639 log.exception('Error while rebasing shadow repo during merge.')
639 log.exception('Error while rebasing shadow repo during merge.')
640
640
641 # Cleanup any rebase leftovers
641 # Cleanup any rebase leftovers
642 self._remote.invalidate_vcs_cache()
642 self._remote.invalidate_vcs_cache()
643 self._remote.rebase(abort=True)
643 self._remote.rebase(abort=True)
644 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
645 self._remote.update(clean=True)
645 self._remote.update(clean=True)
646 raise
646 raise
647 else:
647 else:
648 try:
648 try:
649 self._remote.merge(source_ref.commit_id)
649 self._remote.merge(source_ref.commit_id)
650 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
651 self._remote.commit(
651 self._remote.commit(
652 message=safe_str(merge_message),
652 message=safe_str(merge_message),
653 username=safe_str('%s <%s>' % (user_name, user_email)))
653 username=safe_str('%s <%s>' % (user_name, user_email)))
654 self._remote.invalidate_vcs_cache()
654 self._remote.invalidate_vcs_cache()
655 return self._identify(), True
655 return self._identify(), True
656 except RepositoryError:
656 except RepositoryError:
657 # Cleanup any merge leftovers
657 # Cleanup any merge leftovers
658 self._remote.update(clean=True)
658 self._remote.update(clean=True)
659 raise
659 raise
660
660
661 def _local_close(self, target_ref, user_name, user_email,
661 def _local_close(self, target_ref, user_name, user_email,
662 source_ref, close_message=''):
662 source_ref, close_message=''):
663 """
663 """
664 Close the branch of the given source_revision
664 Close the branch of the given source_revision
665
665
666 Returns the commit id of the close and a boolean indicating if the
666 Returns the commit id of the close and a boolean indicating if the
667 commit needs to be pushed.
667 commit needs to be pushed.
668 """
668 """
669 self._update(source_ref.commit_id)
669 self._update(source_ref.commit_id)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 try:
671 try:
672 self._remote.commit(
672 self._remote.commit(
673 message=safe_str(message),
673 message=safe_str(message),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
675 close_branch=True)
675 close_branch=True)
676 self._remote.invalidate_vcs_cache()
676 self._remote.invalidate_vcs_cache()
677 return self._identify(), True
677 return self._identify(), True
678 except RepositoryError:
678 except RepositoryError:
679 # Cleanup any commit leftovers
679 # Cleanup any commit leftovers
680 self._remote.update(clean=True)
680 self._remote.update(clean=True)
681 raise
681 raise
682
682
683 def _is_the_same_branch(self, target_ref, source_ref):
683 def _is_the_same_branch(self, target_ref, source_ref):
684 return (
684 return (
685 self._get_branch_name(target_ref) ==
685 self._get_branch_name(target_ref) ==
686 self._get_branch_name(source_ref))
686 self._get_branch_name(source_ref))
687
687
688 def _get_branch_name(self, ref):
688 def _get_branch_name(self, ref):
689 if ref.type == 'branch':
689 if ref.type == 'branch':
690 return ref.name
690 return ref.name
691 return self._remote.ctx_branch(ref.commit_id)
691 return self._remote.ctx_branch(ref.commit_id)
692
692
693 def _maybe_prepare_merge_workspace(
693 def _maybe_prepare_merge_workspace(
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 shadow_repository_path = self._get_shadow_repository_path(
695 shadow_repository_path = self._get_shadow_repository_path(
696 repo_id, workspace_id)
696 repo_id, workspace_id)
697 if not os.path.exists(shadow_repository_path):
697 if not os.path.exists(shadow_repository_path):
698 self._local_clone(shadow_repository_path)
698 self._local_clone(shadow_repository_path)
699 log.debug(
699 log.debug(
700 'Prepared shadow repository in %s', shadow_repository_path)
700 'Prepared shadow repository in %s', shadow_repository_path)
701
701
702 return shadow_repository_path
702 return shadow_repository_path
703
703
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 source_repo, source_ref, merge_message,
705 source_repo, source_ref, merge_message,
706 merger_name, merger_email, dry_run=False,
706 merger_name, merger_email, dry_run=False,
707 use_rebase=False, close_branch=False):
707 use_rebase=False, close_branch=False):
708
708
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 'rebase' if use_rebase else 'merge', dry_run)
710 'rebase' if use_rebase else 'merge', dry_run)
711 if target_ref.commit_id not in self._heads():
711 if target_ref.commit_id not in self._heads():
712 return MergeResponse(
712 return MergeResponse(
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
714 metadata={'target_ref': target_ref})
714
715
715 try:
716 try:
716 if (target_ref.type == 'branch' and
717 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
717 len(self._heads(target_ref.name)) != 1):
718 heads = ','.join(self._heads(target_ref.name))
718 return MergeResponse(
719 return MergeResponse(
719 False, False, None,
720 False, False, None,
720 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
721 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
722 metadata={'heads': heads})
721 except CommitDoesNotExistError:
723 except CommitDoesNotExistError:
722 log.exception('Failure when looking up branch heads on hg target')
724 log.exception('Failure when looking up branch heads on hg target')
723 return MergeResponse(
725 return MergeResponse(
724 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
726 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
727 metadata={'target_ref': target_ref})
725
728
726 shadow_repository_path = self._maybe_prepare_merge_workspace(
729 shadow_repository_path = self._maybe_prepare_merge_workspace(
727 repo_id, workspace_id, target_ref, source_ref)
730 repo_id, workspace_id, target_ref, source_ref)
728 shadow_repo = self._get_shadow_instance(shadow_repository_path)
731 shadow_repo = self._get_shadow_instance(shadow_repository_path)
729
732
730 log.debug('Pulling in target reference %s', target_ref)
733 log.debug('Pulling in target reference %s', target_ref)
731 self._validate_pull_reference(target_ref)
734 self._validate_pull_reference(target_ref)
732 shadow_repo._local_pull(self.path, target_ref)
735 shadow_repo._local_pull(self.path, target_ref)
736
733 try:
737 try:
734 log.debug('Pulling in source reference %s', source_ref)
738 log.debug('Pulling in source reference %s', source_ref)
735 source_repo._validate_pull_reference(source_ref)
739 source_repo._validate_pull_reference(source_ref)
736 shadow_repo._local_pull(source_repo.path, source_ref)
740 shadow_repo._local_pull(source_repo.path, source_ref)
737 except CommitDoesNotExistError:
741 except CommitDoesNotExistError:
738 log.exception('Failure when doing local pull on hg shadow repo')
742 log.exception('Failure when doing local pull on hg shadow repo')
739 return MergeResponse(
743 return MergeResponse(
740 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
744 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
745 metadata={'source_ref': source_ref})
741
746
742 merge_ref = None
747 merge_ref = None
743 merge_commit_id = None
748 merge_commit_id = None
744 close_commit_id = None
749 close_commit_id = None
745 merge_failure_reason = MergeFailureReason.NONE
750 merge_failure_reason = MergeFailureReason.NONE
751 metadata = {}
746
752
747 # enforce that close branch should be used only in case we source from
753 # enforce that close branch should be used only in case we source from
748 # an actual Branch
754 # an actual Branch
749 close_branch = close_branch and source_ref.type == 'branch'
755 close_branch = close_branch and source_ref.type == 'branch'
750
756
751 # don't allow to close branch if source and target are the same
757 # don't allow to close branch if source and target are the same
752 close_branch = close_branch and source_ref.name != target_ref.name
758 close_branch = close_branch and source_ref.name != target_ref.name
753
759
754 needs_push_on_close = False
760 needs_push_on_close = False
755 if close_branch and not use_rebase and not dry_run:
761 if close_branch and not use_rebase and not dry_run:
756 try:
762 try:
757 close_commit_id, needs_push_on_close = shadow_repo._local_close(
763 close_commit_id, needs_push_on_close = shadow_repo._local_close(
758 target_ref, merger_name, merger_email, source_ref)
764 target_ref, merger_name, merger_email, source_ref)
759 merge_possible = True
765 merge_possible = True
760 except RepositoryError:
766 except RepositoryError:
761 log.exception(
767 log.exception('Failure when doing close branch on '
762 'Failure when doing close branch on hg shadow repo')
768 'shadow repo: %s', shadow_repo)
763 merge_possible = False
769 merge_possible = False
764 merge_failure_reason = MergeFailureReason.MERGE_FAILED
770 merge_failure_reason = MergeFailureReason.MERGE_FAILED
765 else:
771 else:
766 merge_possible = True
772 merge_possible = True
767
773
768 needs_push = False
774 needs_push = False
769 if merge_possible:
775 if merge_possible:
770 try:
776 try:
771 merge_commit_id, needs_push = shadow_repo._local_merge(
777 merge_commit_id, needs_push = shadow_repo._local_merge(
772 target_ref, merge_message, merger_name, merger_email,
778 target_ref, merge_message, merger_name, merger_email,
773 source_ref, use_rebase=use_rebase, dry_run=dry_run)
779 source_ref, use_rebase=use_rebase, dry_run=dry_run)
774 merge_possible = True
780 merge_possible = True
775
781
776 # read the state of the close action, if it
782 # read the state of the close action, if it
777 # maybe required a push
783 # maybe required a push
778 needs_push = needs_push or needs_push_on_close
784 needs_push = needs_push or needs_push_on_close
779
785
780 # Set a bookmark pointing to the merge commit. This bookmark
786 # Set a bookmark pointing to the merge commit. This bookmark
781 # may be used to easily identify the last successful merge
787 # may be used to easily identify the last successful merge
782 # commit in the shadow repository.
788 # commit in the shadow repository.
783 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
789 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
784 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
790 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
785 except SubrepoMergeError:
791 except SubrepoMergeError:
786 log.exception(
792 log.exception(
787 'Subrepo merge error during local merge on hg shadow repo.')
793 'Subrepo merge error during local merge on hg shadow repo.')
788 merge_possible = False
794 merge_possible = False
789 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
795 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
790 needs_push = False
796 needs_push = False
791 except RepositoryError:
797 except RepositoryError:
792 log.exception('Failure when doing local merge on hg shadow repo')
798 log.exception('Failure when doing local merge on hg shadow repo')
793 merge_possible = False
799 merge_possible = False
794 merge_failure_reason = MergeFailureReason.MERGE_FAILED
800 merge_failure_reason = MergeFailureReason.MERGE_FAILED
795 needs_push = False
801 needs_push = False
796
802
797 if merge_possible and not dry_run:
803 if merge_possible and not dry_run:
798 if needs_push:
804 if needs_push:
799 # In case the target is a bookmark, update it, so after pushing
805 # In case the target is a bookmark, update it, so after pushing
800 # the bookmarks is also updated in the target.
806 # the bookmarks is also updated in the target.
801 if target_ref.type == 'book':
807 if target_ref.type == 'book':
802 shadow_repo.bookmark(
808 shadow_repo.bookmark(
803 target_ref.name, revision=merge_commit_id)
809 target_ref.name, revision=merge_commit_id)
804 try:
810 try:
805 shadow_repo_with_hooks = self._get_shadow_instance(
811 shadow_repo_with_hooks = self._get_shadow_instance(
806 shadow_repository_path,
812 shadow_repository_path,
807 enable_hooks=True)
813 enable_hooks=True)
808 # This is the actual merge action, we push from shadow
814 # This is the actual merge action, we push from shadow
809 # into origin.
815 # into origin.
810 # Note: the push_branches option will push any new branch
816 # Note: the push_branches option will push any new branch
811 # defined in the source repository to the target. This may
817 # defined in the source repository to the target. This may
812 # be dangerous as branches are permanent in Mercurial.
818 # be dangerous as branches are permanent in Mercurial.
813 # This feature was requested in issue #441.
819 # This feature was requested in issue #441.
814 shadow_repo_with_hooks._local_push(
820 shadow_repo_with_hooks._local_push(
815 merge_commit_id, self.path, push_branches=True,
821 merge_commit_id, self.path, push_branches=True,
816 enable_hooks=True)
822 enable_hooks=True)
817
823
818 # maybe we also need to push the close_commit_id
824 # maybe we also need to push the close_commit_id
819 if close_commit_id:
825 if close_commit_id:
820 shadow_repo_with_hooks._local_push(
826 shadow_repo_with_hooks._local_push(
821 close_commit_id, self.path, push_branches=True,
827 close_commit_id, self.path, push_branches=True,
822 enable_hooks=True)
828 enable_hooks=True)
823 merge_succeeded = True
829 merge_succeeded = True
824 except RepositoryError:
830 except RepositoryError:
825 log.exception(
831 log.exception(
826 'Failure when doing local push from the shadow '
832 'Failure when doing local push from the shadow '
827 'repository to the target repository.')
833 'repository to the target repository at %s.', self.path)
828 merge_succeeded = False
834 merge_succeeded = False
829 merge_failure_reason = MergeFailureReason.PUSH_FAILED
835 merge_failure_reason = MergeFailureReason.PUSH_FAILED
836 metadata['target'] = 'hg shadow repo'
837 metadata['merge_commit'] = merge_commit_id
830 else:
838 else:
831 merge_succeeded = True
839 merge_succeeded = True
832 else:
840 else:
833 merge_succeeded = False
841 merge_succeeded = False
834
842
835 return MergeResponse(
843 return MergeResponse(
836 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
844 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
845 metadata=metadata)
837
846
838 def _get_shadow_instance(
847 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
839 self, shadow_repository_path, enable_hooks=False):
840 config = self.config.copy()
848 config = self.config.copy()
841 if not enable_hooks:
849 if not enable_hooks:
842 config.clear_section('hooks')
850 config.clear_section('hooks')
843 return MercurialRepository(shadow_repository_path, config)
851 return MercurialRepository(shadow_repository_path, config)
844
852
845 def _validate_pull_reference(self, reference):
853 def _validate_pull_reference(self, reference):
846 if not (reference.name in self.bookmarks or
854 if not (reference.name in self.bookmarks or
847 reference.name in self.branches or
855 reference.name in self.branches or
848 self.get_commit(reference.commit_id)):
856 self.get_commit(reference.commit_id)):
849 raise CommitDoesNotExistError(
857 raise CommitDoesNotExistError(
850 'Unknown branch, bookmark or commit id')
858 'Unknown branch, bookmark or commit id')
851
859
852 def _local_pull(self, repository_path, reference):
860 def _local_pull(self, repository_path, reference):
853 """
861 """
854 Fetch a branch, bookmark or commit from a local repository.
862 Fetch a branch, bookmark or commit from a local repository.
855 """
863 """
856 repository_path = os.path.abspath(repository_path)
864 repository_path = os.path.abspath(repository_path)
857 if repository_path == self.path:
865 if repository_path == self.path:
858 raise ValueError('Cannot pull from the same repository')
866 raise ValueError('Cannot pull from the same repository')
859
867
860 reference_type_to_option_name = {
868 reference_type_to_option_name = {
861 'book': 'bookmark',
869 'book': 'bookmark',
862 'branch': 'branch',
870 'branch': 'branch',
863 }
871 }
864 option_name = reference_type_to_option_name.get(
872 option_name = reference_type_to_option_name.get(
865 reference.type, 'revision')
873 reference.type, 'revision')
866
874
867 if option_name == 'revision':
875 if option_name == 'revision':
868 ref = reference.commit_id
876 ref = reference.commit_id
869 else:
877 else:
870 ref = reference.name
878 ref = reference.name
871
879
872 options = {option_name: [ref]}
880 options = {option_name: [ref]}
873 self._remote.pull_cmd(repository_path, hooks=False, **options)
881 self._remote.pull_cmd(repository_path, hooks=False, **options)
874 self._remote.invalidate_vcs_cache()
882 self._remote.invalidate_vcs_cache()
875
883
876 def bookmark(self, bookmark, revision=None):
884 def bookmark(self, bookmark, revision=None):
877 if isinstance(bookmark, unicode):
885 if isinstance(bookmark, unicode):
878 bookmark = safe_str(bookmark)
886 bookmark = safe_str(bookmark)
879 self._remote.bookmark(bookmark, revision=revision)
887 self._remote.bookmark(bookmark, revision=revision)
880 self._remote.invalidate_vcs_cache()
888 self._remote.invalidate_vcs_cache()
881
889
882 def get_path_permissions(self, username):
890 def get_path_permissions(self, username):
883 hgacl_file = os.path.join(self.path, '.hg/hgacl')
891 hgacl_file = os.path.join(self.path, '.hg/hgacl')
884
892
885 def read_patterns(suffix):
893 def read_patterns(suffix):
886 svalue = None
894 svalue = None
887 try:
895 try:
888 svalue = hgacl.get('narrowhgacl', username + suffix)
896 svalue = hgacl.get('narrowhgacl', username + suffix)
889 except configparser.NoOptionError:
897 except configparser.NoOptionError:
890 try:
898 try:
891 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
899 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
892 except configparser.NoOptionError:
900 except configparser.NoOptionError:
893 pass
901 pass
894 if not svalue:
902 if not svalue:
895 return None
903 return None
896 result = ['/']
904 result = ['/']
897 for pattern in svalue.split():
905 for pattern in svalue.split():
898 result.append(pattern)
906 result.append(pattern)
899 if '*' not in pattern and '?' not in pattern:
907 if '*' not in pattern and '?' not in pattern:
900 result.append(pattern + '/*')
908 result.append(pattern + '/*')
901 return result
909 return result
902
910
903 if os.path.exists(hgacl_file):
911 if os.path.exists(hgacl_file):
904 try:
912 try:
905 hgacl = configparser.RawConfigParser()
913 hgacl = configparser.RawConfigParser()
906 hgacl.read(hgacl_file)
914 hgacl.read(hgacl_file)
907
915
908 includes = read_patterns('.includes')
916 includes = read_patterns('.includes')
909 excludes = read_patterns('.excludes')
917 excludes = read_patterns('.excludes')
910 return BasePathPermissionChecker.create_from_patterns(
918 return BasePathPermissionChecker.create_from_patterns(
911 includes, excludes)
919 includes, excludes)
912 except BaseException as e:
920 except BaseException as e:
913 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
921 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
914 hgacl_file, self.name, e)
922 hgacl_file, self.name, e)
915 raise exceptions.RepositoryRequirementError(msg)
923 raise exceptions.RepositoryRequirementError(msg)
916 else:
924 else:
917 return None
925 return None
918
926
919
927
920 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
928 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
921
929
922 def _commit_factory(self, commit_id):
930 def _commit_factory(self, commit_id):
923 return self.repo.get_commit(
931 return self.repo.get_commit(
924 commit_idx=commit_id, pre_load=self.pre_load)
932 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,1737 +1,1694 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
65 # Data structure to hold the response data when updating commits during a pull
65 # Data structure to hold the response data when updating commits during a pull
66 # request update.
66 # request update.
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 'executed', 'reason', 'new', 'old', 'changes',
68 'executed', 'reason', 'new', 'old', 'changes',
69 'source_changed', 'target_changed'])
69 'source_changed', 'target_changed'])
70
70
71
71
72 class PullRequestModel(BaseModel):
72 class PullRequestModel(BaseModel):
73
73
74 cls = PullRequest
74 cls = PullRequest
75
75
76 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
76 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
77
77
78 MERGE_STATUS_MESSAGES = {
79 MergeFailureReason.NONE: lazy_ugettext(
80 'This pull request can be automatically merged.'),
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 'This pull request cannot be merged because of an unhandled'
83 ' exception.'),
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 'This pull request cannot be merged because of merge conflicts.'),
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 'This pull request could not be merged because push to target'
88 ' failed.'),
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 'This pull request cannot be merged because the target is not a'
91 ' head.'),
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 'This pull request cannot be merged because the source contains'
94 ' more branches than the target.'),
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 'This pull request cannot be merged because the target has'
97 ' multiple heads.'),
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 'This pull request cannot be merged because the target repository'
100 ' is locked.'),
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 'This pull request cannot be merged because the target or the '
103 'source reference is missing.'),
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 'This pull request cannot be merged because the target '
106 'reference is missing.'),
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 'This pull request cannot be merged because the source '
109 'reference is missing.'),
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 'This pull request cannot be merged because of conflicts related '
112 'to sub repositories.'),
113 }
114
115 UPDATE_STATUS_MESSAGES = {
78 UPDATE_STATUS_MESSAGES = {
116 UpdateFailureReason.NONE: lazy_ugettext(
79 UpdateFailureReason.NONE: lazy_ugettext(
117 'Pull request update successful.'),
80 'Pull request update successful.'),
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
81 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 'Pull request update failed because of an unknown error.'),
82 'Pull request update failed because of an unknown error.'),
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
83 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 'No update needed because the source and target have not changed.'),
84 'No update needed because the source and target have not changed.'),
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
85 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 'Pull request cannot be updated because the reference type is '
86 'Pull request cannot be updated because the reference type is '
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
87 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
88 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 'This pull request cannot be updated because the target '
89 'This pull request cannot be updated because the target '
127 'reference is missing.'),
90 'reference is missing.'),
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
91 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 'This pull request cannot be updated because the source '
92 'This pull request cannot be updated because the source '
130 'reference is missing.'),
93 'reference is missing.'),
131 }
94 }
132
95
133 def __get_pull_request(self, pull_request):
96 def __get_pull_request(self, pull_request):
134 return self._get_instance((
97 return self._get_instance((
135 PullRequest, PullRequestVersion), pull_request)
98 PullRequest, PullRequestVersion), pull_request)
136
99
137 def _check_perms(self, perms, pull_request, user, api=False):
100 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
101 if not api:
139 return h.HasRepoPermissionAny(*perms)(
102 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
103 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
104 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
105 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
106 user=user, repo_name=pull_request.target_repo.repo_name)
144
107
145 def check_user_read(self, pull_request, user, api=False):
108 def check_user_read(self, pull_request, user, api=False):
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 return self._check_perms(_perms, pull_request, user, api)
110 return self._check_perms(_perms, pull_request, user, api)
148
111
149 def check_user_merge(self, pull_request, user, api=False):
112 def check_user_merge(self, pull_request, user, api=False):
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 return self._check_perms(_perms, pull_request, user, api)
114 return self._check_perms(_perms, pull_request, user, api)
152
115
153 def check_user_update(self, pull_request, user, api=False):
116 def check_user_update(self, pull_request, user, api=False):
154 owner = user.user_id == pull_request.user_id
117 owner = user.user_id == pull_request.user_id
155 return self.check_user_merge(pull_request, user, api) or owner
118 return self.check_user_merge(pull_request, user, api) or owner
156
119
157 def check_user_delete(self, pull_request, user):
120 def check_user_delete(self, pull_request, user):
158 owner = user.user_id == pull_request.user_id
121 owner = user.user_id == pull_request.user_id
159 _perms = ('repository.admin',)
122 _perms = ('repository.admin',)
160 return self._check_perms(_perms, pull_request, user) or owner
123 return self._check_perms(_perms, pull_request, user) or owner
161
124
162 def check_user_change_status(self, pull_request, user, api=False):
125 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
126 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
127 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
128 return self.check_user_update(pull_request, user, api) or reviewer
166
129
167 def check_user_comment(self, pull_request, user):
130 def check_user_comment(self, pull_request, user):
168 owner = user.user_id == pull_request.user_id
131 owner = user.user_id == pull_request.user_id
169 return self.check_user_read(pull_request, user) or owner
132 return self.check_user_read(pull_request, user) or owner
170
133
171 def get(self, pull_request):
134 def get(self, pull_request):
172 return self.__get_pull_request(pull_request)
135 return self.__get_pull_request(pull_request)
173
136
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
137 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
138 opened_by=None, order_by=None,
176 order_dir='desc'):
139 order_dir='desc'):
177 repo = None
140 repo = None
178 if repo_name:
141 if repo_name:
179 repo = self._get_repo(repo_name)
142 repo = self._get_repo(repo_name)
180
143
181 q = PullRequest.query()
144 q = PullRequest.query()
182
145
183 # source or target
146 # source or target
184 if repo and source:
147 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
148 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
149 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
150 q = q.filter(PullRequest.target_repo == repo)
188
151
189 # closed,opened
152 # closed,opened
190 if statuses:
153 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
154 q = q.filter(PullRequest.status.in_(statuses))
192
155
193 # opened by filter
156 # opened by filter
194 if opened_by:
157 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
158 q = q.filter(PullRequest.user_id.in_(opened_by))
196
159
197 if order_by:
160 if order_by:
198 order_map = {
161 order_map = {
199 'name_raw': PullRequest.pull_request_id,
162 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
163 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
164 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
165 'target_repo': PullRequest.target_repo_id
203 }
166 }
204 if order_dir == 'asc':
167 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
168 q = q.order_by(order_map[order_by].asc())
206 else:
169 else:
207 q = q.order_by(order_map[order_by].desc())
170 q = q.order_by(order_map[order_by].desc())
208
171
209 return q
172 return q
210
173
211 def count_all(self, repo_name, source=False, statuses=None,
174 def count_all(self, repo_name, source=False, statuses=None,
212 opened_by=None):
175 opened_by=None):
213 """
176 """
214 Count the number of pull requests for a specific repository.
177 Count the number of pull requests for a specific repository.
215
178
216 :param repo_name: target or source repo
179 :param repo_name: target or source repo
217 :param source: boolean flag to specify if repo_name refers to source
180 :param source: boolean flag to specify if repo_name refers to source
218 :param statuses: list of pull request statuses
181 :param statuses: list of pull request statuses
219 :param opened_by: author user of the pull request
182 :param opened_by: author user of the pull request
220 :returns: int number of pull requests
183 :returns: int number of pull requests
221 """
184 """
222 q = self._prepare_get_all_query(
185 q = self._prepare_get_all_query(
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
186 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224
187
225 return q.count()
188 return q.count()
226
189
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
190 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
191 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
192 """
230 Get all pull requests for a specific repository.
193 Get all pull requests for a specific repository.
231
194
232 :param repo_name: target or source repo
195 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
196 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
197 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
198 :param opened_by: author user of the pull request
236 :param offset: pagination offset
199 :param offset: pagination offset
237 :param length: length of returned list
200 :param length: length of returned list
238 :param order_by: order of the returned list
201 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
202 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
203 :returns: list of pull requests
241 """
204 """
242 q = self._prepare_get_all_query(
205 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
206 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
207 order_by=order_by, order_dir=order_dir)
245
208
246 if length:
209 if length:
247 pull_requests = q.limit(length).offset(offset).all()
210 pull_requests = q.limit(length).offset(offset).all()
248 else:
211 else:
249 pull_requests = q.all()
212 pull_requests = q.all()
250
213
251 return pull_requests
214 return pull_requests
252
215
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
216 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
217 opened_by=None):
255 """
218 """
256 Count the number of pull requests for a specific repository that are
219 Count the number of pull requests for a specific repository that are
257 awaiting review.
220 awaiting review.
258
221
259 :param repo_name: target or source repo
222 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
223 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
224 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
225 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
226 :returns: int number of pull requests
264 """
227 """
265 pull_requests = self.get_awaiting_review(
228 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
229 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
230
268 return len(pull_requests)
231 return len(pull_requests)
269
232
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
233 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
234 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
235 order_by=None, order_dir='desc'):
273 """
236 """
274 Get all pull requests for a specific repository that are awaiting
237 Get all pull requests for a specific repository that are awaiting
275 review.
238 review.
276
239
277 :param repo_name: target or source repo
240 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
241 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
242 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
243 :param opened_by: author user of the pull request
281 :param offset: pagination offset
244 :param offset: pagination offset
282 :param length: length of returned list
245 :param length: length of returned list
283 :param order_by: order of the returned list
246 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
247 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
248 :returns: list of pull requests
286 """
249 """
287 pull_requests = self.get_all(
250 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
251 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
252 order_by=order_by, order_dir=order_dir)
290
253
291 _filtered_pull_requests = []
254 _filtered_pull_requests = []
292 for pr in pull_requests:
255 for pr in pull_requests:
293 status = pr.calculated_review_status()
256 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
257 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
258 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
259 _filtered_pull_requests.append(pr)
297 if length:
260 if length:
298 return _filtered_pull_requests[offset:offset+length]
261 return _filtered_pull_requests[offset:offset+length]
299 else:
262 else:
300 return _filtered_pull_requests
263 return _filtered_pull_requests
301
264
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
265 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
266 opened_by=None, user_id=None):
304 """
267 """
305 Count the number of pull requests for a specific repository that are
268 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
269 awaiting review from a specific user.
307
270
308 :param repo_name: target or source repo
271 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
272 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
273 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
274 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
275 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
276 :returns: int number of pull requests
314 """
277 """
315 pull_requests = self.get_awaiting_my_review(
278 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
279 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
280 user_id=user_id)
318
281
319 return len(pull_requests)
282 return len(pull_requests)
320
283
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
284 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
285 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
286 length=None, order_by=None, order_dir='desc'):
324 """
287 """
325 Get all pull requests for a specific repository that are awaiting
288 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
289 review from a specific user.
327
290
328 :param repo_name: target or source repo
291 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
292 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
293 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
294 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
295 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
296 :param offset: pagination offset
334 :param length: length of returned list
297 :param length: length of returned list
335 :param order_by: order of the returned list
298 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
299 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
300 :returns: list of pull requests
338 """
301 """
339 pull_requests = self.get_all(
302 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
303 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
304 order_by=order_by, order_dir=order_dir)
342
305
343 _my = PullRequestModel().get_not_reviewed(user_id)
306 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
307 my_participation = []
345 for pr in pull_requests:
308 for pr in pull_requests:
346 if pr in _my:
309 if pr in _my:
347 my_participation.append(pr)
310 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
311 _filtered_pull_requests = my_participation
349 if length:
312 if length:
350 return _filtered_pull_requests[offset:offset+length]
313 return _filtered_pull_requests[offset:offset+length]
351 else:
314 else:
352 return _filtered_pull_requests
315 return _filtered_pull_requests
353
316
354 def get_not_reviewed(self, user_id):
317 def get_not_reviewed(self, user_id):
355 return [
318 return [
356 x.pull_request for x in PullRequestReviewers.query().filter(
319 x.pull_request for x in PullRequestReviewers.query().filter(
357 PullRequestReviewers.user_id == user_id).all()
320 PullRequestReviewers.user_id == user_id).all()
358 ]
321 ]
359
322
360 def _prepare_participating_query(self, user_id=None, statuses=None,
323 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
324 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
325 q = PullRequest.query()
363 if user_id:
326 if user_id:
364 reviewers_subquery = Session().query(
327 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
328 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
329 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
330 user_filter = or_(
368 PullRequest.user_id == user_id,
331 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
332 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
333 )
371 q = PullRequest.query().filter(user_filter)
334 q = PullRequest.query().filter(user_filter)
372
335
373 # closed,opened
336 # closed,opened
374 if statuses:
337 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
338 q = q.filter(PullRequest.status.in_(statuses))
376
339
377 if order_by:
340 if order_by:
378 order_map = {
341 order_map = {
379 'name_raw': PullRequest.pull_request_id,
342 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
343 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
344 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
345 'target_repo': PullRequest.target_repo_id
383 }
346 }
384 if order_dir == 'asc':
347 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
348 q = q.order_by(order_map[order_by].asc())
386 else:
349 else:
387 q = q.order_by(order_map[order_by].desc())
350 q = q.order_by(order_map[order_by].desc())
388
351
389 return q
352 return q
390
353
391 def count_im_participating_in(self, user_id=None, statuses=None):
354 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
355 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
356 return q.count()
394
357
395 def get_im_participating_in(
358 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
359 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
360 length=None, order_by=None, order_dir='desc'):
398 """
361 """
399 Get all Pull requests that i'm participating in, or i have opened
362 Get all Pull requests that i'm participating in, or i have opened
400 """
363 """
401
364
402 q = self._prepare_participating_query(
365 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
366 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
367 order_dir=order_dir)
405
368
406 if length:
369 if length:
407 pull_requests = q.limit(length).offset(offset).all()
370 pull_requests = q.limit(length).offset(offset).all()
408 else:
371 else:
409 pull_requests = q.all()
372 pull_requests = q.all()
410
373
411 return pull_requests
374 return pull_requests
412
375
413 def get_versions(self, pull_request):
376 def get_versions(self, pull_request):
414 """
377 """
415 returns version of pull request sorted by ID descending
378 returns version of pull request sorted by ID descending
416 """
379 """
417 return PullRequestVersion.query()\
380 return PullRequestVersion.query()\
418 .filter(PullRequestVersion.pull_request == pull_request)\
381 .filter(PullRequestVersion.pull_request == pull_request)\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
382 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 .all()
383 .all()
421
384
422 def get_pr_version(self, pull_request_id, version=None):
385 def get_pr_version(self, pull_request_id, version=None):
423 at_version = None
386 at_version = None
424
387
425 if version and version == 'latest':
388 if version and version == 'latest':
426 pull_request_ver = PullRequest.get(pull_request_id)
389 pull_request_ver = PullRequest.get(pull_request_id)
427 pull_request_obj = pull_request_ver
390 pull_request_obj = pull_request_ver
428 _org_pull_request_obj = pull_request_obj
391 _org_pull_request_obj = pull_request_obj
429 at_version = 'latest'
392 at_version = 'latest'
430 elif version:
393 elif version:
431 pull_request_ver = PullRequestVersion.get_or_404(version)
394 pull_request_ver = PullRequestVersion.get_or_404(version)
432 pull_request_obj = pull_request_ver
395 pull_request_obj = pull_request_ver
433 _org_pull_request_obj = pull_request_ver.pull_request
396 _org_pull_request_obj = pull_request_ver.pull_request
434 at_version = pull_request_ver.pull_request_version_id
397 at_version = pull_request_ver.pull_request_version_id
435 else:
398 else:
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
399 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 pull_request_id)
400 pull_request_id)
438
401
439 pull_request_display_obj = PullRequest.get_pr_display_object(
402 pull_request_display_obj = PullRequest.get_pr_display_object(
440 pull_request_obj, _org_pull_request_obj)
403 pull_request_obj, _org_pull_request_obj)
441
404
442 return _org_pull_request_obj, pull_request_obj, \
405 return _org_pull_request_obj, pull_request_obj, \
443 pull_request_display_obj, at_version
406 pull_request_display_obj, at_version
444
407
445 def create(self, created_by, source_repo, source_ref, target_repo,
408 def create(self, created_by, source_repo, source_ref, target_repo,
446 target_ref, revisions, reviewers, title, description=None,
409 target_ref, revisions, reviewers, title, description=None,
447 description_renderer=None,
410 description_renderer=None,
448 reviewer_data=None, translator=None, auth_user=None):
411 reviewer_data=None, translator=None, auth_user=None):
449 translator = translator or get_current_request().translate
412 translator = translator or get_current_request().translate
450
413
451 created_by_user = self._get_user(created_by)
414 created_by_user = self._get_user(created_by)
452 auth_user = auth_user or created_by_user.AuthUser()
415 auth_user = auth_user or created_by_user.AuthUser()
453 source_repo = self._get_repo(source_repo)
416 source_repo = self._get_repo(source_repo)
454 target_repo = self._get_repo(target_repo)
417 target_repo = self._get_repo(target_repo)
455
418
456 pull_request = PullRequest()
419 pull_request = PullRequest()
457 pull_request.source_repo = source_repo
420 pull_request.source_repo = source_repo
458 pull_request.source_ref = source_ref
421 pull_request.source_ref = source_ref
459 pull_request.target_repo = target_repo
422 pull_request.target_repo = target_repo
460 pull_request.target_ref = target_ref
423 pull_request.target_ref = target_ref
461 pull_request.revisions = revisions
424 pull_request.revisions = revisions
462 pull_request.title = title
425 pull_request.title = title
463 pull_request.description = description
426 pull_request.description = description
464 pull_request.description_renderer = description_renderer
427 pull_request.description_renderer = description_renderer
465 pull_request.author = created_by_user
428 pull_request.author = created_by_user
466 pull_request.reviewer_data = reviewer_data
429 pull_request.reviewer_data = reviewer_data
467
430
468 Session().add(pull_request)
431 Session().add(pull_request)
469 Session().flush()
432 Session().flush()
470
433
471 reviewer_ids = set()
434 reviewer_ids = set()
472 # members / reviewers
435 # members / reviewers
473 for reviewer_object in reviewers:
436 for reviewer_object in reviewers:
474 user_id, reasons, mandatory, rules = reviewer_object
437 user_id, reasons, mandatory, rules = reviewer_object
475 user = self._get_user(user_id)
438 user = self._get_user(user_id)
476
439
477 # skip duplicates
440 # skip duplicates
478 if user.user_id in reviewer_ids:
441 if user.user_id in reviewer_ids:
479 continue
442 continue
480
443
481 reviewer_ids.add(user.user_id)
444 reviewer_ids.add(user.user_id)
482
445
483 reviewer = PullRequestReviewers()
446 reviewer = PullRequestReviewers()
484 reviewer.user = user
447 reviewer.user = user
485 reviewer.pull_request = pull_request
448 reviewer.pull_request = pull_request
486 reviewer.reasons = reasons
449 reviewer.reasons = reasons
487 reviewer.mandatory = mandatory
450 reviewer.mandatory = mandatory
488
451
489 # NOTE(marcink): pick only first rule for now
452 # NOTE(marcink): pick only first rule for now
490 rule_id = list(rules)[0] if rules else None
453 rule_id = list(rules)[0] if rules else None
491 rule = RepoReviewRule.get(rule_id) if rule_id else None
454 rule = RepoReviewRule.get(rule_id) if rule_id else None
492 if rule:
455 if rule:
493 review_group = rule.user_group_vote_rule(user_id)
456 review_group = rule.user_group_vote_rule(user_id)
494 # we check if this particular reviewer is member of a voting group
457 # we check if this particular reviewer is member of a voting group
495 if review_group:
458 if review_group:
496 # NOTE(marcink):
459 # NOTE(marcink):
497 # can be that user is member of more but we pick the first same,
460 # can be that user is member of more but we pick the first same,
498 # same as default reviewers algo
461 # same as default reviewers algo
499 review_group = review_group[0]
462 review_group = review_group[0]
500
463
501 rule_data = {
464 rule_data = {
502 'rule_name':
465 'rule_name':
503 rule.review_rule_name,
466 rule.review_rule_name,
504 'rule_user_group_entry_id':
467 'rule_user_group_entry_id':
505 review_group.repo_review_rule_users_group_id,
468 review_group.repo_review_rule_users_group_id,
506 'rule_user_group_name':
469 'rule_user_group_name':
507 review_group.users_group.users_group_name,
470 review_group.users_group.users_group_name,
508 'rule_user_group_members':
471 'rule_user_group_members':
509 [x.user.username for x in review_group.users_group.members],
472 [x.user.username for x in review_group.users_group.members],
510 'rule_user_group_members_id':
473 'rule_user_group_members_id':
511 [x.user.user_id for x in review_group.users_group.members],
474 [x.user.user_id for x in review_group.users_group.members],
512 }
475 }
513 # e.g {'vote_rule': -1, 'mandatory': True}
476 # e.g {'vote_rule': -1, 'mandatory': True}
514 rule_data.update(review_group.rule_data())
477 rule_data.update(review_group.rule_data())
515
478
516 reviewer.rule_data = rule_data
479 reviewer.rule_data = rule_data
517
480
518 Session().add(reviewer)
481 Session().add(reviewer)
519 Session().flush()
482 Session().flush()
520
483
521 # Set approval status to "Under Review" for all commits which are
484 # Set approval status to "Under Review" for all commits which are
522 # part of this pull request.
485 # part of this pull request.
523 ChangesetStatusModel().set_status(
486 ChangesetStatusModel().set_status(
524 repo=target_repo,
487 repo=target_repo,
525 status=ChangesetStatus.STATUS_UNDER_REVIEW,
488 status=ChangesetStatus.STATUS_UNDER_REVIEW,
526 user=created_by_user,
489 user=created_by_user,
527 pull_request=pull_request
490 pull_request=pull_request
528 )
491 )
529 # we commit early at this point. This has to do with a fact
492 # we commit early at this point. This has to do with a fact
530 # that before queries do some row-locking. And because of that
493 # that before queries do some row-locking. And because of that
531 # we need to commit and finish transation before below validate call
494 # we need to commit and finish transation before below validate call
532 # that for large repos could be long resulting in long row locks
495 # that for large repos could be long resulting in long row locks
533 Session().commit()
496 Session().commit()
534
497
535 # prepare workspace, and run initial merge simulation
498 # prepare workspace, and run initial merge simulation
536 MergeCheck.validate(
499 MergeCheck.validate(
537 pull_request, auth_user=auth_user, translator=translator)
500 pull_request, auth_user=auth_user, translator=translator)
538
501
539 self.notify_reviewers(pull_request, reviewer_ids)
502 self.notify_reviewers(pull_request, reviewer_ids)
540 self._trigger_pull_request_hook(
503 self._trigger_pull_request_hook(
541 pull_request, created_by_user, 'create')
504 pull_request, created_by_user, 'create')
542
505
543 creation_data = pull_request.get_api_data(with_merge_state=False)
506 creation_data = pull_request.get_api_data(with_merge_state=False)
544 self._log_audit_action(
507 self._log_audit_action(
545 'repo.pull_request.create', {'data': creation_data},
508 'repo.pull_request.create', {'data': creation_data},
546 auth_user, pull_request)
509 auth_user, pull_request)
547
510
548 return pull_request
511 return pull_request
549
512
550 def _trigger_pull_request_hook(self, pull_request, user, action):
513 def _trigger_pull_request_hook(self, pull_request, user, action):
551 pull_request = self.__get_pull_request(pull_request)
514 pull_request = self.__get_pull_request(pull_request)
552 target_scm = pull_request.target_repo.scm_instance()
515 target_scm = pull_request.target_repo.scm_instance()
553 if action == 'create':
516 if action == 'create':
554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
517 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
555 elif action == 'merge':
518 elif action == 'merge':
556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
519 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
557 elif action == 'close':
520 elif action == 'close':
558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
521 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
559 elif action == 'review_status_change':
522 elif action == 'review_status_change':
560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
523 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
561 elif action == 'update':
524 elif action == 'update':
562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
525 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
563 else:
526 else:
564 return
527 return
565
528
566 trigger_hook(
529 trigger_hook(
567 username=user.username,
530 username=user.username,
568 repo_name=pull_request.target_repo.repo_name,
531 repo_name=pull_request.target_repo.repo_name,
569 repo_alias=target_scm.alias,
532 repo_alias=target_scm.alias,
570 pull_request=pull_request)
533 pull_request=pull_request)
571
534
572 def _get_commit_ids(self, pull_request):
535 def _get_commit_ids(self, pull_request):
573 """
536 """
574 Return the commit ids of the merged pull request.
537 Return the commit ids of the merged pull request.
575
538
576 This method is not dealing correctly yet with the lack of autoupdates
539 This method is not dealing correctly yet with the lack of autoupdates
577 nor with the implicit target updates.
540 nor with the implicit target updates.
578 For example: if a commit in the source repo is already in the target it
541 For example: if a commit in the source repo is already in the target it
579 will be reported anyways.
542 will be reported anyways.
580 """
543 """
581 merge_rev = pull_request.merge_rev
544 merge_rev = pull_request.merge_rev
582 if merge_rev is None:
545 if merge_rev is None:
583 raise ValueError('This pull request was not merged yet')
546 raise ValueError('This pull request was not merged yet')
584
547
585 commit_ids = list(pull_request.revisions)
548 commit_ids = list(pull_request.revisions)
586 if merge_rev not in commit_ids:
549 if merge_rev not in commit_ids:
587 commit_ids.append(merge_rev)
550 commit_ids.append(merge_rev)
588
551
589 return commit_ids
552 return commit_ids
590
553
591 def merge_repo(self, pull_request, user, extras):
554 def merge_repo(self, pull_request, user, extras):
592 log.debug("Merging pull request %s", pull_request.pull_request_id)
555 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 extras['user_agent'] = 'internal-merge'
556 extras['user_agent'] = 'internal-merge'
594 merge_state = self._merge_pull_request(pull_request, user, extras)
557 merge_state = self._merge_pull_request(pull_request, user, extras)
595 if merge_state.executed:
558 if merge_state.executed:
596 log.debug(
559 log.debug("Merge was successful, updating the pull request comments.")
597 "Merge was successful, updating the pull request comments.")
598 self._comment_and_close_pr(pull_request, user, merge_state)
560 self._comment_and_close_pr(pull_request, user, merge_state)
599
561
600 self._log_audit_action(
562 self._log_audit_action(
601 'repo.pull_request.merge',
563 'repo.pull_request.merge',
602 {'merge_state': merge_state.__dict__},
564 {'merge_state': merge_state.__dict__},
603 user, pull_request)
565 user, pull_request)
604
566
605 else:
567 else:
606 log.warn("Merge failed, not updating the pull request.")
568 log.warn("Merge failed, not updating the pull request.")
607 return merge_state
569 return merge_state
608
570
609 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
571 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
610 target_vcs = pull_request.target_repo.scm_instance()
572 target_vcs = pull_request.target_repo.scm_instance()
611 source_vcs = pull_request.source_repo.scm_instance()
573 source_vcs = pull_request.source_repo.scm_instance()
612
574
613 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
575 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
614 pr_id=pull_request.pull_request_id,
576 pr_id=pull_request.pull_request_id,
615 pr_title=pull_request.title,
577 pr_title=pull_request.title,
616 source_repo=source_vcs.name,
578 source_repo=source_vcs.name,
617 source_ref_name=pull_request.source_ref_parts.name,
579 source_ref_name=pull_request.source_ref_parts.name,
618 target_repo=target_vcs.name,
580 target_repo=target_vcs.name,
619 target_ref_name=pull_request.target_ref_parts.name,
581 target_ref_name=pull_request.target_ref_parts.name,
620 )
582 )
621
583
622 workspace_id = self._workspace_id(pull_request)
584 workspace_id = self._workspace_id(pull_request)
623 repo_id = pull_request.target_repo.repo_id
585 repo_id = pull_request.target_repo.repo_id
624 use_rebase = self._use_rebase_for_merging(pull_request)
586 use_rebase = self._use_rebase_for_merging(pull_request)
625 close_branch = self._close_branch_before_merging(pull_request)
587 close_branch = self._close_branch_before_merging(pull_request)
626
588
627 target_ref = self._refresh_reference(
589 target_ref = self._refresh_reference(
628 pull_request.target_ref_parts, target_vcs)
590 pull_request.target_ref_parts, target_vcs)
629
591
630 callback_daemon, extras = prepare_callback_daemon(
592 callback_daemon, extras = prepare_callback_daemon(
631 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
593 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
632 host=vcs_settings.HOOKS_HOST,
594 host=vcs_settings.HOOKS_HOST,
633 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
595 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
634
596
635 with callback_daemon:
597 with callback_daemon:
636 # TODO: johbo: Implement a clean way to run a config_override
598 # TODO: johbo: Implement a clean way to run a config_override
637 # for a single call.
599 # for a single call.
638 target_vcs.config.set(
600 target_vcs.config.set(
639 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
601 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
640
602
641 user_name = user.short_contact
603 user_name = user.short_contact
642 merge_state = target_vcs.merge(
604 merge_state = target_vcs.merge(
643 repo_id, workspace_id, target_ref, source_vcs,
605 repo_id, workspace_id, target_ref, source_vcs,
644 pull_request.source_ref_parts,
606 pull_request.source_ref_parts,
645 user_name=user_name, user_email=user.email,
607 user_name=user_name, user_email=user.email,
646 message=message, use_rebase=use_rebase,
608 message=message, use_rebase=use_rebase,
647 close_branch=close_branch)
609 close_branch=close_branch)
648 return merge_state
610 return merge_state
649
611
650 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
612 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
651 pull_request.merge_rev = merge_state.merge_ref.commit_id
613 pull_request.merge_rev = merge_state.merge_ref.commit_id
652 pull_request.updated_on = datetime.datetime.now()
614 pull_request.updated_on = datetime.datetime.now()
653 close_msg = close_msg or 'Pull request merged and closed'
615 close_msg = close_msg or 'Pull request merged and closed'
654
616
655 CommentsModel().create(
617 CommentsModel().create(
656 text=safe_unicode(close_msg),
618 text=safe_unicode(close_msg),
657 repo=pull_request.target_repo.repo_id,
619 repo=pull_request.target_repo.repo_id,
658 user=user.user_id,
620 user=user.user_id,
659 pull_request=pull_request.pull_request_id,
621 pull_request=pull_request.pull_request_id,
660 f_path=None,
622 f_path=None,
661 line_no=None,
623 line_no=None,
662 closing_pr=True
624 closing_pr=True
663 )
625 )
664
626
665 Session().add(pull_request)
627 Session().add(pull_request)
666 Session().flush()
628 Session().flush()
667 # TODO: paris: replace invalidation with less radical solution
629 # TODO: paris: replace invalidation with less radical solution
668 ScmModel().mark_for_invalidation(
630 ScmModel().mark_for_invalidation(
669 pull_request.target_repo.repo_name)
631 pull_request.target_repo.repo_name)
670 self._trigger_pull_request_hook(pull_request, user, 'merge')
632 self._trigger_pull_request_hook(pull_request, user, 'merge')
671
633
672 def has_valid_update_type(self, pull_request):
634 def has_valid_update_type(self, pull_request):
673 source_ref_type = pull_request.source_ref_parts.type
635 source_ref_type = pull_request.source_ref_parts.type
674 return source_ref_type in ['book', 'branch', 'tag']
636 return source_ref_type in ['book', 'branch', 'tag']
675
637
676 def update_commits(self, pull_request):
638 def update_commits(self, pull_request):
677 """
639 """
678 Get the updated list of commits for the pull request
640 Get the updated list of commits for the pull request
679 and return the new pull request version and the list
641 and return the new pull request version and the list
680 of commits processed by this update action
642 of commits processed by this update action
681 """
643 """
682 pull_request = self.__get_pull_request(pull_request)
644 pull_request = self.__get_pull_request(pull_request)
683 source_ref_type = pull_request.source_ref_parts.type
645 source_ref_type = pull_request.source_ref_parts.type
684 source_ref_name = pull_request.source_ref_parts.name
646 source_ref_name = pull_request.source_ref_parts.name
685 source_ref_id = pull_request.source_ref_parts.commit_id
647 source_ref_id = pull_request.source_ref_parts.commit_id
686
648
687 target_ref_type = pull_request.target_ref_parts.type
649 target_ref_type = pull_request.target_ref_parts.type
688 target_ref_name = pull_request.target_ref_parts.name
650 target_ref_name = pull_request.target_ref_parts.name
689 target_ref_id = pull_request.target_ref_parts.commit_id
651 target_ref_id = pull_request.target_ref_parts.commit_id
690
652
691 if not self.has_valid_update_type(pull_request):
653 if not self.has_valid_update_type(pull_request):
692 log.debug(
654 log.debug(
693 "Skipping update of pull request %s due to ref type: %s",
655 "Skipping update of pull request %s due to ref type: %s",
694 pull_request, source_ref_type)
656 pull_request, source_ref_type)
695 return UpdateResponse(
657 return UpdateResponse(
696 executed=False,
658 executed=False,
697 reason=UpdateFailureReason.WRONG_REF_TYPE,
659 reason=UpdateFailureReason.WRONG_REF_TYPE,
698 old=pull_request, new=None, changes=None,
660 old=pull_request, new=None, changes=None,
699 source_changed=False, target_changed=False)
661 source_changed=False, target_changed=False)
700
662
701 # source repo
663 # source repo
702 source_repo = pull_request.source_repo.scm_instance()
664 source_repo = pull_request.source_repo.scm_instance()
703 try:
665 try:
704 source_commit = source_repo.get_commit(commit_id=source_ref_name)
666 source_commit = source_repo.get_commit(commit_id=source_ref_name)
705 except CommitDoesNotExistError:
667 except CommitDoesNotExistError:
706 return UpdateResponse(
668 return UpdateResponse(
707 executed=False,
669 executed=False,
708 reason=UpdateFailureReason.MISSING_SOURCE_REF,
670 reason=UpdateFailureReason.MISSING_SOURCE_REF,
709 old=pull_request, new=None, changes=None,
671 old=pull_request, new=None, changes=None,
710 source_changed=False, target_changed=False)
672 source_changed=False, target_changed=False)
711
673
712 source_changed = source_ref_id != source_commit.raw_id
674 source_changed = source_ref_id != source_commit.raw_id
713
675
714 # target repo
676 # target repo
715 target_repo = pull_request.target_repo.scm_instance()
677 target_repo = pull_request.target_repo.scm_instance()
716 try:
678 try:
717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
679 target_commit = target_repo.get_commit(commit_id=target_ref_name)
718 except CommitDoesNotExistError:
680 except CommitDoesNotExistError:
719 return UpdateResponse(
681 return UpdateResponse(
720 executed=False,
682 executed=False,
721 reason=UpdateFailureReason.MISSING_TARGET_REF,
683 reason=UpdateFailureReason.MISSING_TARGET_REF,
722 old=pull_request, new=None, changes=None,
684 old=pull_request, new=None, changes=None,
723 source_changed=False, target_changed=False)
685 source_changed=False, target_changed=False)
724 target_changed = target_ref_id != target_commit.raw_id
686 target_changed = target_ref_id != target_commit.raw_id
725
687
726 if not (source_changed or target_changed):
688 if not (source_changed or target_changed):
727 log.debug("Nothing changed in pull request %s", pull_request)
689 log.debug("Nothing changed in pull request %s", pull_request)
728 return UpdateResponse(
690 return UpdateResponse(
729 executed=False,
691 executed=False,
730 reason=UpdateFailureReason.NO_CHANGE,
692 reason=UpdateFailureReason.NO_CHANGE,
731 old=pull_request, new=None, changes=None,
693 old=pull_request, new=None, changes=None,
732 source_changed=target_changed, target_changed=source_changed)
694 source_changed=target_changed, target_changed=source_changed)
733
695
734 change_in_found = 'target repo' if target_changed else 'source repo'
696 change_in_found = 'target repo' if target_changed else 'source repo'
735 log.debug('Updating pull request because of change in %s detected',
697 log.debug('Updating pull request because of change in %s detected',
736 change_in_found)
698 change_in_found)
737
699
738 # Finally there is a need for an update, in case of source change
700 # Finally there is a need for an update, in case of source change
739 # we create a new version, else just an update
701 # we create a new version, else just an update
740 if source_changed:
702 if source_changed:
741 pull_request_version = self._create_version_from_snapshot(pull_request)
703 pull_request_version = self._create_version_from_snapshot(pull_request)
742 self._link_comments_to_version(pull_request_version)
704 self._link_comments_to_version(pull_request_version)
743 else:
705 else:
744 try:
706 try:
745 ver = pull_request.versions[-1]
707 ver = pull_request.versions[-1]
746 except IndexError:
708 except IndexError:
747 ver = None
709 ver = None
748
710
749 pull_request.pull_request_version_id = \
711 pull_request.pull_request_version_id = \
750 ver.pull_request_version_id if ver else None
712 ver.pull_request_version_id if ver else None
751 pull_request_version = pull_request
713 pull_request_version = pull_request
752
714
753 try:
715 try:
754 if target_ref_type in ('tag', 'branch', 'book'):
716 if target_ref_type in ('tag', 'branch', 'book'):
755 target_commit = target_repo.get_commit(target_ref_name)
717 target_commit = target_repo.get_commit(target_ref_name)
756 else:
718 else:
757 target_commit = target_repo.get_commit(target_ref_id)
719 target_commit = target_repo.get_commit(target_ref_id)
758 except CommitDoesNotExistError:
720 except CommitDoesNotExistError:
759 return UpdateResponse(
721 return UpdateResponse(
760 executed=False,
722 executed=False,
761 reason=UpdateFailureReason.MISSING_TARGET_REF,
723 reason=UpdateFailureReason.MISSING_TARGET_REF,
762 old=pull_request, new=None, changes=None,
724 old=pull_request, new=None, changes=None,
763 source_changed=source_changed, target_changed=target_changed)
725 source_changed=source_changed, target_changed=target_changed)
764
726
765 # re-compute commit ids
727 # re-compute commit ids
766 old_commit_ids = pull_request.revisions
728 old_commit_ids = pull_request.revisions
767 pre_load = ["author", "branch", "date", "message"]
729 pre_load = ["author", "branch", "date", "message"]
768 commit_ranges = target_repo.compare(
730 commit_ranges = target_repo.compare(
769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
731 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
770 pre_load=pre_load)
732 pre_load=pre_load)
771
733
772 ancestor = target_repo.get_common_ancestor(
734 ancestor = target_repo.get_common_ancestor(
773 target_commit.raw_id, source_commit.raw_id, source_repo)
735 target_commit.raw_id, source_commit.raw_id, source_repo)
774
736
775 pull_request.source_ref = '%s:%s:%s' % (
737 pull_request.source_ref = '%s:%s:%s' % (
776 source_ref_type, source_ref_name, source_commit.raw_id)
738 source_ref_type, source_ref_name, source_commit.raw_id)
777 pull_request.target_ref = '%s:%s:%s' % (
739 pull_request.target_ref = '%s:%s:%s' % (
778 target_ref_type, target_ref_name, ancestor)
740 target_ref_type, target_ref_name, ancestor)
779
741
780 pull_request.revisions = [
742 pull_request.revisions = [
781 commit.raw_id for commit in reversed(commit_ranges)]
743 commit.raw_id for commit in reversed(commit_ranges)]
782 pull_request.updated_on = datetime.datetime.now()
744 pull_request.updated_on = datetime.datetime.now()
783 Session().add(pull_request)
745 Session().add(pull_request)
784 new_commit_ids = pull_request.revisions
746 new_commit_ids = pull_request.revisions
785
747
786 old_diff_data, new_diff_data = self._generate_update_diffs(
748 old_diff_data, new_diff_data = self._generate_update_diffs(
787 pull_request, pull_request_version)
749 pull_request, pull_request_version)
788
750
789 # calculate commit and file changes
751 # calculate commit and file changes
790 changes = self._calculate_commit_id_changes(
752 changes = self._calculate_commit_id_changes(
791 old_commit_ids, new_commit_ids)
753 old_commit_ids, new_commit_ids)
792 file_changes = self._calculate_file_changes(
754 file_changes = self._calculate_file_changes(
793 old_diff_data, new_diff_data)
755 old_diff_data, new_diff_data)
794
756
795 # set comments as outdated if DIFFS changed
757 # set comments as outdated if DIFFS changed
796 CommentsModel().outdate_comments(
758 CommentsModel().outdate_comments(
797 pull_request, old_diff_data=old_diff_data,
759 pull_request, old_diff_data=old_diff_data,
798 new_diff_data=new_diff_data)
760 new_diff_data=new_diff_data)
799
761
800 commit_changes = (changes.added or changes.removed)
762 commit_changes = (changes.added or changes.removed)
801 file_node_changes = (
763 file_node_changes = (
802 file_changes.added or file_changes.modified or file_changes.removed)
764 file_changes.added or file_changes.modified or file_changes.removed)
803 pr_has_changes = commit_changes or file_node_changes
765 pr_has_changes = commit_changes or file_node_changes
804
766
805 # Add an automatic comment to the pull request, in case
767 # Add an automatic comment to the pull request, in case
806 # anything has changed
768 # anything has changed
807 if pr_has_changes:
769 if pr_has_changes:
808 update_comment = CommentsModel().create(
770 update_comment = CommentsModel().create(
809 text=self._render_update_message(changes, file_changes),
771 text=self._render_update_message(changes, file_changes),
810 repo=pull_request.target_repo,
772 repo=pull_request.target_repo,
811 user=pull_request.author,
773 user=pull_request.author,
812 pull_request=pull_request,
774 pull_request=pull_request,
813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
775 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
814
776
815 # Update status to "Under Review" for added commits
777 # Update status to "Under Review" for added commits
816 for commit_id in changes.added:
778 for commit_id in changes.added:
817 ChangesetStatusModel().set_status(
779 ChangesetStatusModel().set_status(
818 repo=pull_request.source_repo,
780 repo=pull_request.source_repo,
819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
781 status=ChangesetStatus.STATUS_UNDER_REVIEW,
820 comment=update_comment,
782 comment=update_comment,
821 user=pull_request.author,
783 user=pull_request.author,
822 pull_request=pull_request,
784 pull_request=pull_request,
823 revision=commit_id)
785 revision=commit_id)
824
786
825 log.debug(
787 log.debug(
826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
788 'Updated pull request %s, added_ids: %s, common_ids: %s, '
827 'removed_ids: %s', pull_request.pull_request_id,
789 'removed_ids: %s', pull_request.pull_request_id,
828 changes.added, changes.common, changes.removed)
790 changes.added, changes.common, changes.removed)
829 log.debug(
791 log.debug(
830 'Updated pull request with the following file changes: %s',
792 'Updated pull request with the following file changes: %s',
831 file_changes)
793 file_changes)
832
794
833 log.info(
795 log.info(
834 "Updated pull request %s from commit %s to commit %s, "
796 "Updated pull request %s from commit %s to commit %s, "
835 "stored new version %s of this pull request.",
797 "stored new version %s of this pull request.",
836 pull_request.pull_request_id, source_ref_id,
798 pull_request.pull_request_id, source_ref_id,
837 pull_request.source_ref_parts.commit_id,
799 pull_request.source_ref_parts.commit_id,
838 pull_request_version.pull_request_version_id)
800 pull_request_version.pull_request_version_id)
839 Session().commit()
801 Session().commit()
840 self._trigger_pull_request_hook(
802 self._trigger_pull_request_hook(
841 pull_request, pull_request.author, 'update')
803 pull_request, pull_request.author, 'update')
842
804
843 return UpdateResponse(
805 return UpdateResponse(
844 executed=True, reason=UpdateFailureReason.NONE,
806 executed=True, reason=UpdateFailureReason.NONE,
845 old=pull_request, new=pull_request_version, changes=changes,
807 old=pull_request, new=pull_request_version, changes=changes,
846 source_changed=source_changed, target_changed=target_changed)
808 source_changed=source_changed, target_changed=target_changed)
847
809
848 def _create_version_from_snapshot(self, pull_request):
810 def _create_version_from_snapshot(self, pull_request):
849 version = PullRequestVersion()
811 version = PullRequestVersion()
850 version.title = pull_request.title
812 version.title = pull_request.title
851 version.description = pull_request.description
813 version.description = pull_request.description
852 version.status = pull_request.status
814 version.status = pull_request.status
853 version.created_on = datetime.datetime.now()
815 version.created_on = datetime.datetime.now()
854 version.updated_on = pull_request.updated_on
816 version.updated_on = pull_request.updated_on
855 version.user_id = pull_request.user_id
817 version.user_id = pull_request.user_id
856 version.source_repo = pull_request.source_repo
818 version.source_repo = pull_request.source_repo
857 version.source_ref = pull_request.source_ref
819 version.source_ref = pull_request.source_ref
858 version.target_repo = pull_request.target_repo
820 version.target_repo = pull_request.target_repo
859 version.target_ref = pull_request.target_ref
821 version.target_ref = pull_request.target_ref
860
822
861 version._last_merge_source_rev = pull_request._last_merge_source_rev
823 version._last_merge_source_rev = pull_request._last_merge_source_rev
862 version._last_merge_target_rev = pull_request._last_merge_target_rev
824 version._last_merge_target_rev = pull_request._last_merge_target_rev
863 version.last_merge_status = pull_request.last_merge_status
825 version.last_merge_status = pull_request.last_merge_status
864 version.shadow_merge_ref = pull_request.shadow_merge_ref
826 version.shadow_merge_ref = pull_request.shadow_merge_ref
865 version.merge_rev = pull_request.merge_rev
827 version.merge_rev = pull_request.merge_rev
866 version.reviewer_data = pull_request.reviewer_data
828 version.reviewer_data = pull_request.reviewer_data
867
829
868 version.revisions = pull_request.revisions
830 version.revisions = pull_request.revisions
869 version.pull_request = pull_request
831 version.pull_request = pull_request
870 Session().add(version)
832 Session().add(version)
871 Session().flush()
833 Session().flush()
872
834
873 return version
835 return version
874
836
875 def _generate_update_diffs(self, pull_request, pull_request_version):
837 def _generate_update_diffs(self, pull_request, pull_request_version):
876
838
877 diff_context = (
839 diff_context = (
878 self.DIFF_CONTEXT +
840 self.DIFF_CONTEXT +
879 CommentsModel.needed_extra_diff_context())
841 CommentsModel.needed_extra_diff_context())
880 hide_whitespace_changes = False
842 hide_whitespace_changes = False
881 source_repo = pull_request_version.source_repo
843 source_repo = pull_request_version.source_repo
882 source_ref_id = pull_request_version.source_ref_parts.commit_id
844 source_ref_id = pull_request_version.source_ref_parts.commit_id
883 target_ref_id = pull_request_version.target_ref_parts.commit_id
845 target_ref_id = pull_request_version.target_ref_parts.commit_id
884 old_diff = self._get_diff_from_pr_or_version(
846 old_diff = self._get_diff_from_pr_or_version(
885 source_repo, source_ref_id, target_ref_id,
847 source_repo, source_ref_id, target_ref_id,
886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
848 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
887
849
888 source_repo = pull_request.source_repo
850 source_repo = pull_request.source_repo
889 source_ref_id = pull_request.source_ref_parts.commit_id
851 source_ref_id = pull_request.source_ref_parts.commit_id
890 target_ref_id = pull_request.target_ref_parts.commit_id
852 target_ref_id = pull_request.target_ref_parts.commit_id
891
853
892 new_diff = self._get_diff_from_pr_or_version(
854 new_diff = self._get_diff_from_pr_or_version(
893 source_repo, source_ref_id, target_ref_id,
855 source_repo, source_ref_id, target_ref_id,
894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
856 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
895
857
896 old_diff_data = diffs.DiffProcessor(old_diff)
858 old_diff_data = diffs.DiffProcessor(old_diff)
897 old_diff_data.prepare()
859 old_diff_data.prepare()
898 new_diff_data = diffs.DiffProcessor(new_diff)
860 new_diff_data = diffs.DiffProcessor(new_diff)
899 new_diff_data.prepare()
861 new_diff_data.prepare()
900
862
901 return old_diff_data, new_diff_data
863 return old_diff_data, new_diff_data
902
864
903 def _link_comments_to_version(self, pull_request_version):
865 def _link_comments_to_version(self, pull_request_version):
904 """
866 """
905 Link all unlinked comments of this pull request to the given version.
867 Link all unlinked comments of this pull request to the given version.
906
868
907 :param pull_request_version: The `PullRequestVersion` to which
869 :param pull_request_version: The `PullRequestVersion` to which
908 the comments shall be linked.
870 the comments shall be linked.
909
871
910 """
872 """
911 pull_request = pull_request_version.pull_request
873 pull_request = pull_request_version.pull_request
912 comments = ChangesetComment.query()\
874 comments = ChangesetComment.query()\
913 .filter(
875 .filter(
914 # TODO: johbo: Should we query for the repo at all here?
876 # TODO: johbo: Should we query for the repo at all here?
915 # Pending decision on how comments of PRs are to be related
877 # Pending decision on how comments of PRs are to be related
916 # to either the source repo, the target repo or no repo at all.
878 # to either the source repo, the target repo or no repo at all.
917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
879 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
918 ChangesetComment.pull_request == pull_request,
880 ChangesetComment.pull_request == pull_request,
919 ChangesetComment.pull_request_version == None)\
881 ChangesetComment.pull_request_version == None)\
920 .order_by(ChangesetComment.comment_id.asc())
882 .order_by(ChangesetComment.comment_id.asc())
921
883
922 # TODO: johbo: Find out why this breaks if it is done in a bulk
884 # TODO: johbo: Find out why this breaks if it is done in a bulk
923 # operation.
885 # operation.
924 for comment in comments:
886 for comment in comments:
925 comment.pull_request_version_id = (
887 comment.pull_request_version_id = (
926 pull_request_version.pull_request_version_id)
888 pull_request_version.pull_request_version_id)
927 Session().add(comment)
889 Session().add(comment)
928
890
929 def _calculate_commit_id_changes(self, old_ids, new_ids):
891 def _calculate_commit_id_changes(self, old_ids, new_ids):
930 added = [x for x in new_ids if x not in old_ids]
892 added = [x for x in new_ids if x not in old_ids]
931 common = [x for x in new_ids if x in old_ids]
893 common = [x for x in new_ids if x in old_ids]
932 removed = [x for x in old_ids if x not in new_ids]
894 removed = [x for x in old_ids if x not in new_ids]
933 total = new_ids
895 total = new_ids
934 return ChangeTuple(added, common, removed, total)
896 return ChangeTuple(added, common, removed, total)
935
897
936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
898 def _calculate_file_changes(self, old_diff_data, new_diff_data):
937
899
938 old_files = OrderedDict()
900 old_files = OrderedDict()
939 for diff_data in old_diff_data.parsed_diff:
901 for diff_data in old_diff_data.parsed_diff:
940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
902 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
941
903
942 added_files = []
904 added_files = []
943 modified_files = []
905 modified_files = []
944 removed_files = []
906 removed_files = []
945 for diff_data in new_diff_data.parsed_diff:
907 for diff_data in new_diff_data.parsed_diff:
946 new_filename = diff_data['filename']
908 new_filename = diff_data['filename']
947 new_hash = md5_safe(diff_data['raw_diff'])
909 new_hash = md5_safe(diff_data['raw_diff'])
948
910
949 old_hash = old_files.get(new_filename)
911 old_hash = old_files.get(new_filename)
950 if not old_hash:
912 if not old_hash:
951 # file is not present in old diff, means it's added
913 # file is not present in old diff, means it's added
952 added_files.append(new_filename)
914 added_files.append(new_filename)
953 else:
915 else:
954 if new_hash != old_hash:
916 if new_hash != old_hash:
955 modified_files.append(new_filename)
917 modified_files.append(new_filename)
956 # now remove a file from old, since we have seen it already
918 # now remove a file from old, since we have seen it already
957 del old_files[new_filename]
919 del old_files[new_filename]
958
920
959 # removed files is when there are present in old, but not in NEW,
921 # removed files is when there are present in old, but not in NEW,
960 # since we remove old files that are present in new diff, left-overs
922 # since we remove old files that are present in new diff, left-overs
961 # if any should be the removed files
923 # if any should be the removed files
962 removed_files.extend(old_files.keys())
924 removed_files.extend(old_files.keys())
963
925
964 return FileChangeTuple(added_files, modified_files, removed_files)
926 return FileChangeTuple(added_files, modified_files, removed_files)
965
927
966 def _render_update_message(self, changes, file_changes):
928 def _render_update_message(self, changes, file_changes):
967 """
929 """
968 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
930 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
969 so it's always looking the same disregarding on which default
931 so it's always looking the same disregarding on which default
970 renderer system is using.
932 renderer system is using.
971
933
972 :param changes: changes named tuple
934 :param changes: changes named tuple
973 :param file_changes: file changes named tuple
935 :param file_changes: file changes named tuple
974
936
975 """
937 """
976 new_status = ChangesetStatus.get_status_lbl(
938 new_status = ChangesetStatus.get_status_lbl(
977 ChangesetStatus.STATUS_UNDER_REVIEW)
939 ChangesetStatus.STATUS_UNDER_REVIEW)
978
940
979 changed_files = (
941 changed_files = (
980 file_changes.added + file_changes.modified + file_changes.removed)
942 file_changes.added + file_changes.modified + file_changes.removed)
981
943
982 params = {
944 params = {
983 'under_review_label': new_status,
945 'under_review_label': new_status,
984 'added_commits': changes.added,
946 'added_commits': changes.added,
985 'removed_commits': changes.removed,
947 'removed_commits': changes.removed,
986 'changed_files': changed_files,
948 'changed_files': changed_files,
987 'added_files': file_changes.added,
949 'added_files': file_changes.added,
988 'modified_files': file_changes.modified,
950 'modified_files': file_changes.modified,
989 'removed_files': file_changes.removed,
951 'removed_files': file_changes.removed,
990 }
952 }
991 renderer = RstTemplateRenderer()
953 renderer = RstTemplateRenderer()
992 return renderer.render('pull_request_update.mako', **params)
954 return renderer.render('pull_request_update.mako', **params)
993
955
994 def edit(self, pull_request, title, description, description_renderer, user):
956 def edit(self, pull_request, title, description, description_renderer, user):
995 pull_request = self.__get_pull_request(pull_request)
957 pull_request = self.__get_pull_request(pull_request)
996 old_data = pull_request.get_api_data(with_merge_state=False)
958 old_data = pull_request.get_api_data(with_merge_state=False)
997 if pull_request.is_closed():
959 if pull_request.is_closed():
998 raise ValueError('This pull request is closed')
960 raise ValueError('This pull request is closed')
999 if title:
961 if title:
1000 pull_request.title = title
962 pull_request.title = title
1001 pull_request.description = description
963 pull_request.description = description
1002 pull_request.updated_on = datetime.datetime.now()
964 pull_request.updated_on = datetime.datetime.now()
1003 pull_request.description_renderer = description_renderer
965 pull_request.description_renderer = description_renderer
1004 Session().add(pull_request)
966 Session().add(pull_request)
1005 self._log_audit_action(
967 self._log_audit_action(
1006 'repo.pull_request.edit', {'old_data': old_data},
968 'repo.pull_request.edit', {'old_data': old_data},
1007 user, pull_request)
969 user, pull_request)
1008
970
1009 def update_reviewers(self, pull_request, reviewer_data, user):
971 def update_reviewers(self, pull_request, reviewer_data, user):
1010 """
972 """
1011 Update the reviewers in the pull request
973 Update the reviewers in the pull request
1012
974
1013 :param pull_request: the pr to update
975 :param pull_request: the pr to update
1014 :param reviewer_data: list of tuples
976 :param reviewer_data: list of tuples
1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
977 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1016 """
978 """
1017 pull_request = self.__get_pull_request(pull_request)
979 pull_request = self.__get_pull_request(pull_request)
1018 if pull_request.is_closed():
980 if pull_request.is_closed():
1019 raise ValueError('This pull request is closed')
981 raise ValueError('This pull request is closed')
1020
982
1021 reviewers = {}
983 reviewers = {}
1022 for user_id, reasons, mandatory, rules in reviewer_data:
984 for user_id, reasons, mandatory, rules in reviewer_data:
1023 if isinstance(user_id, (int, basestring)):
985 if isinstance(user_id, (int, basestring)):
1024 user_id = self._get_user(user_id).user_id
986 user_id = self._get_user(user_id).user_id
1025 reviewers[user_id] = {
987 reviewers[user_id] = {
1026 'reasons': reasons, 'mandatory': mandatory}
988 'reasons': reasons, 'mandatory': mandatory}
1027
989
1028 reviewers_ids = set(reviewers.keys())
990 reviewers_ids = set(reviewers.keys())
1029 current_reviewers = PullRequestReviewers.query()\
991 current_reviewers = PullRequestReviewers.query()\
1030 .filter(PullRequestReviewers.pull_request ==
992 .filter(PullRequestReviewers.pull_request ==
1031 pull_request).all()
993 pull_request).all()
1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
994 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1033
995
1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
996 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
997 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1036
998
1037 log.debug("Adding %s reviewers", ids_to_add)
999 log.debug("Adding %s reviewers", ids_to_add)
1038 log.debug("Removing %s reviewers", ids_to_remove)
1000 log.debug("Removing %s reviewers", ids_to_remove)
1039 changed = False
1001 changed = False
1040 for uid in ids_to_add:
1002 for uid in ids_to_add:
1041 changed = True
1003 changed = True
1042 _usr = self._get_user(uid)
1004 _usr = self._get_user(uid)
1043 reviewer = PullRequestReviewers()
1005 reviewer = PullRequestReviewers()
1044 reviewer.user = _usr
1006 reviewer.user = _usr
1045 reviewer.pull_request = pull_request
1007 reviewer.pull_request = pull_request
1046 reviewer.reasons = reviewers[uid]['reasons']
1008 reviewer.reasons = reviewers[uid]['reasons']
1047 # NOTE(marcink): mandatory shouldn't be changed now
1009 # NOTE(marcink): mandatory shouldn't be changed now
1048 # reviewer.mandatory = reviewers[uid]['reasons']
1010 # reviewer.mandatory = reviewers[uid]['reasons']
1049 Session().add(reviewer)
1011 Session().add(reviewer)
1050 self._log_audit_action(
1012 self._log_audit_action(
1051 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1013 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1052 user, pull_request)
1014 user, pull_request)
1053
1015
1054 for uid in ids_to_remove:
1016 for uid in ids_to_remove:
1055 changed = True
1017 changed = True
1056 reviewers = PullRequestReviewers.query()\
1018 reviewers = PullRequestReviewers.query()\
1057 .filter(PullRequestReviewers.user_id == uid,
1019 .filter(PullRequestReviewers.user_id == uid,
1058 PullRequestReviewers.pull_request == pull_request)\
1020 PullRequestReviewers.pull_request == pull_request)\
1059 .all()
1021 .all()
1060 # use .all() in case we accidentally added the same person twice
1022 # use .all() in case we accidentally added the same person twice
1061 # this CAN happen due to the lack of DB checks
1023 # this CAN happen due to the lack of DB checks
1062 for obj in reviewers:
1024 for obj in reviewers:
1063 old_data = obj.get_dict()
1025 old_data = obj.get_dict()
1064 Session().delete(obj)
1026 Session().delete(obj)
1065 self._log_audit_action(
1027 self._log_audit_action(
1066 'repo.pull_request.reviewer.delete',
1028 'repo.pull_request.reviewer.delete',
1067 {'old_data': old_data}, user, pull_request)
1029 {'old_data': old_data}, user, pull_request)
1068
1030
1069 if changed:
1031 if changed:
1070 pull_request.updated_on = datetime.datetime.now()
1032 pull_request.updated_on = datetime.datetime.now()
1071 Session().add(pull_request)
1033 Session().add(pull_request)
1072
1034
1073 self.notify_reviewers(pull_request, ids_to_add)
1035 self.notify_reviewers(pull_request, ids_to_add)
1074 return ids_to_add, ids_to_remove
1036 return ids_to_add, ids_to_remove
1075
1037
1076 def get_url(self, pull_request, request=None, permalink=False):
1038 def get_url(self, pull_request, request=None, permalink=False):
1077 if not request:
1039 if not request:
1078 request = get_current_request()
1040 request = get_current_request()
1079
1041
1080 if permalink:
1042 if permalink:
1081 return request.route_url(
1043 return request.route_url(
1082 'pull_requests_global',
1044 'pull_requests_global',
1083 pull_request_id=pull_request.pull_request_id,)
1045 pull_request_id=pull_request.pull_request_id,)
1084 else:
1046 else:
1085 return request.route_url('pullrequest_show',
1047 return request.route_url('pullrequest_show',
1086 repo_name=safe_str(pull_request.target_repo.repo_name),
1048 repo_name=safe_str(pull_request.target_repo.repo_name),
1087 pull_request_id=pull_request.pull_request_id,)
1049 pull_request_id=pull_request.pull_request_id,)
1088
1050
1089 def get_shadow_clone_url(self, pull_request, request=None):
1051 def get_shadow_clone_url(self, pull_request, request=None):
1090 """
1052 """
1091 Returns qualified url pointing to the shadow repository. If this pull
1053 Returns qualified url pointing to the shadow repository. If this pull
1092 request is closed there is no shadow repository and ``None`` will be
1054 request is closed there is no shadow repository and ``None`` will be
1093 returned.
1055 returned.
1094 """
1056 """
1095 if pull_request.is_closed():
1057 if pull_request.is_closed():
1096 return None
1058 return None
1097 else:
1059 else:
1098 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1060 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1099 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1061 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1100
1062
1101 def notify_reviewers(self, pull_request, reviewers_ids):
1063 def notify_reviewers(self, pull_request, reviewers_ids):
1102 # notification to reviewers
1064 # notification to reviewers
1103 if not reviewers_ids:
1065 if not reviewers_ids:
1104 return
1066 return
1105
1067
1106 pull_request_obj = pull_request
1068 pull_request_obj = pull_request
1107 # get the current participants of this pull request
1069 # get the current participants of this pull request
1108 recipients = reviewers_ids
1070 recipients = reviewers_ids
1109 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1071 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1110
1072
1111 pr_source_repo = pull_request_obj.source_repo
1073 pr_source_repo = pull_request_obj.source_repo
1112 pr_target_repo = pull_request_obj.target_repo
1074 pr_target_repo = pull_request_obj.target_repo
1113
1075
1114 pr_url = h.route_url('pullrequest_show',
1076 pr_url = h.route_url('pullrequest_show',
1115 repo_name=pr_target_repo.repo_name,
1077 repo_name=pr_target_repo.repo_name,
1116 pull_request_id=pull_request_obj.pull_request_id,)
1078 pull_request_id=pull_request_obj.pull_request_id,)
1117
1079
1118 # set some variables for email notification
1080 # set some variables for email notification
1119 pr_target_repo_url = h.route_url(
1081 pr_target_repo_url = h.route_url(
1120 'repo_summary', repo_name=pr_target_repo.repo_name)
1082 'repo_summary', repo_name=pr_target_repo.repo_name)
1121
1083
1122 pr_source_repo_url = h.route_url(
1084 pr_source_repo_url = h.route_url(
1123 'repo_summary', repo_name=pr_source_repo.repo_name)
1085 'repo_summary', repo_name=pr_source_repo.repo_name)
1124
1086
1125 # pull request specifics
1087 # pull request specifics
1126 pull_request_commits = [
1088 pull_request_commits = [
1127 (x.raw_id, x.message)
1089 (x.raw_id, x.message)
1128 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1090 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1129
1091
1130 kwargs = {
1092 kwargs = {
1131 'user': pull_request.author,
1093 'user': pull_request.author,
1132 'pull_request': pull_request_obj,
1094 'pull_request': pull_request_obj,
1133 'pull_request_commits': pull_request_commits,
1095 'pull_request_commits': pull_request_commits,
1134
1096
1135 'pull_request_target_repo': pr_target_repo,
1097 'pull_request_target_repo': pr_target_repo,
1136 'pull_request_target_repo_url': pr_target_repo_url,
1098 'pull_request_target_repo_url': pr_target_repo_url,
1137
1099
1138 'pull_request_source_repo': pr_source_repo,
1100 'pull_request_source_repo': pr_source_repo,
1139 'pull_request_source_repo_url': pr_source_repo_url,
1101 'pull_request_source_repo_url': pr_source_repo_url,
1140
1102
1141 'pull_request_url': pr_url,
1103 'pull_request_url': pr_url,
1142 }
1104 }
1143
1105
1144 # pre-generate the subject for notification itself
1106 # pre-generate the subject for notification itself
1145 (subject,
1107 (subject,
1146 _h, _e, # we don't care about those
1108 _h, _e, # we don't care about those
1147 body_plaintext) = EmailNotificationModel().render_email(
1109 body_plaintext) = EmailNotificationModel().render_email(
1148 notification_type, **kwargs)
1110 notification_type, **kwargs)
1149
1111
1150 # create notification objects, and emails
1112 # create notification objects, and emails
1151 NotificationModel().create(
1113 NotificationModel().create(
1152 created_by=pull_request.author,
1114 created_by=pull_request.author,
1153 notification_subject=subject,
1115 notification_subject=subject,
1154 notification_body=body_plaintext,
1116 notification_body=body_plaintext,
1155 notification_type=notification_type,
1117 notification_type=notification_type,
1156 recipients=recipients,
1118 recipients=recipients,
1157 email_kwargs=kwargs,
1119 email_kwargs=kwargs,
1158 )
1120 )
1159
1121
1160 def delete(self, pull_request, user):
1122 def delete(self, pull_request, user):
1161 pull_request = self.__get_pull_request(pull_request)
1123 pull_request = self.__get_pull_request(pull_request)
1162 old_data = pull_request.get_api_data(with_merge_state=False)
1124 old_data = pull_request.get_api_data(with_merge_state=False)
1163 self._cleanup_merge_workspace(pull_request)
1125 self._cleanup_merge_workspace(pull_request)
1164 self._log_audit_action(
1126 self._log_audit_action(
1165 'repo.pull_request.delete', {'old_data': old_data},
1127 'repo.pull_request.delete', {'old_data': old_data},
1166 user, pull_request)
1128 user, pull_request)
1167 Session().delete(pull_request)
1129 Session().delete(pull_request)
1168
1130
1169 def close_pull_request(self, pull_request, user):
1131 def close_pull_request(self, pull_request, user):
1170 pull_request = self.__get_pull_request(pull_request)
1132 pull_request = self.__get_pull_request(pull_request)
1171 self._cleanup_merge_workspace(pull_request)
1133 self._cleanup_merge_workspace(pull_request)
1172 pull_request.status = PullRequest.STATUS_CLOSED
1134 pull_request.status = PullRequest.STATUS_CLOSED
1173 pull_request.updated_on = datetime.datetime.now()
1135 pull_request.updated_on = datetime.datetime.now()
1174 Session().add(pull_request)
1136 Session().add(pull_request)
1175 self._trigger_pull_request_hook(
1137 self._trigger_pull_request_hook(
1176 pull_request, pull_request.author, 'close')
1138 pull_request, pull_request.author, 'close')
1177
1139
1178 pr_data = pull_request.get_api_data(with_merge_state=False)
1140 pr_data = pull_request.get_api_data(with_merge_state=False)
1179 self._log_audit_action(
1141 self._log_audit_action(
1180 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1142 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1181
1143
1182 def close_pull_request_with_comment(
1144 def close_pull_request_with_comment(
1183 self, pull_request, user, repo, message=None, auth_user=None):
1145 self, pull_request, user, repo, message=None, auth_user=None):
1184
1146
1185 pull_request_review_status = pull_request.calculated_review_status()
1147 pull_request_review_status = pull_request.calculated_review_status()
1186
1148
1187 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1149 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1188 # approved only if we have voting consent
1150 # approved only if we have voting consent
1189 status = ChangesetStatus.STATUS_APPROVED
1151 status = ChangesetStatus.STATUS_APPROVED
1190 else:
1152 else:
1191 status = ChangesetStatus.STATUS_REJECTED
1153 status = ChangesetStatus.STATUS_REJECTED
1192 status_lbl = ChangesetStatus.get_status_lbl(status)
1154 status_lbl = ChangesetStatus.get_status_lbl(status)
1193
1155
1194 default_message = (
1156 default_message = (
1195 'Closing with status change {transition_icon} {status}.'
1157 'Closing with status change {transition_icon} {status}.'
1196 ).format(transition_icon='>', status=status_lbl)
1158 ).format(transition_icon='>', status=status_lbl)
1197 text = message or default_message
1159 text = message or default_message
1198
1160
1199 # create a comment, and link it to new status
1161 # create a comment, and link it to new status
1200 comment = CommentsModel().create(
1162 comment = CommentsModel().create(
1201 text=text,
1163 text=text,
1202 repo=repo.repo_id,
1164 repo=repo.repo_id,
1203 user=user.user_id,
1165 user=user.user_id,
1204 pull_request=pull_request.pull_request_id,
1166 pull_request=pull_request.pull_request_id,
1205 status_change=status_lbl,
1167 status_change=status_lbl,
1206 status_change_type=status,
1168 status_change_type=status,
1207 closing_pr=True,
1169 closing_pr=True,
1208 auth_user=auth_user,
1170 auth_user=auth_user,
1209 )
1171 )
1210
1172
1211 # calculate old status before we change it
1173 # calculate old status before we change it
1212 old_calculated_status = pull_request.calculated_review_status()
1174 old_calculated_status = pull_request.calculated_review_status()
1213 ChangesetStatusModel().set_status(
1175 ChangesetStatusModel().set_status(
1214 repo.repo_id,
1176 repo.repo_id,
1215 status,
1177 status,
1216 user.user_id,
1178 user.user_id,
1217 comment=comment,
1179 comment=comment,
1218 pull_request=pull_request.pull_request_id
1180 pull_request=pull_request.pull_request_id
1219 )
1181 )
1220
1182
1221 Session().flush()
1183 Session().flush()
1222 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1184 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1223 # we now calculate the status of pull request again, and based on that
1185 # we now calculate the status of pull request again, and based on that
1224 # calculation trigger status change. This might happen in cases
1186 # calculation trigger status change. This might happen in cases
1225 # that non-reviewer admin closes a pr, which means his vote doesn't
1187 # that non-reviewer admin closes a pr, which means his vote doesn't
1226 # change the status, while if he's a reviewer this might change it.
1188 # change the status, while if he's a reviewer this might change it.
1227 calculated_status = pull_request.calculated_review_status()
1189 calculated_status = pull_request.calculated_review_status()
1228 if old_calculated_status != calculated_status:
1190 if old_calculated_status != calculated_status:
1229 self._trigger_pull_request_hook(
1191 self._trigger_pull_request_hook(
1230 pull_request, user, 'review_status_change')
1192 pull_request, user, 'review_status_change')
1231
1193
1232 # finally close the PR
1194 # finally close the PR
1233 PullRequestModel().close_pull_request(
1195 PullRequestModel().close_pull_request(
1234 pull_request.pull_request_id, user)
1196 pull_request.pull_request_id, user)
1235
1197
1236 return comment, status
1198 return comment, status
1237
1199
1238 def merge_status(self, pull_request, translator=None,
1200 def merge_status(self, pull_request, translator=None,
1239 force_shadow_repo_refresh=False):
1201 force_shadow_repo_refresh=False):
1240 _ = translator or get_current_request().translate
1202 _ = translator or get_current_request().translate
1241
1203
1242 if not self._is_merge_enabled(pull_request):
1204 if not self._is_merge_enabled(pull_request):
1243 return False, _('Server-side pull request merging is disabled.')
1205 return False, _('Server-side pull request merging is disabled.')
1244 if pull_request.is_closed():
1206 if pull_request.is_closed():
1245 return False, _('This pull request is closed.')
1207 return False, _('This pull request is closed.')
1246 merge_possible, msg = self._check_repo_requirements(
1208 merge_possible, msg = self._check_repo_requirements(
1247 target=pull_request.target_repo, source=pull_request.source_repo,
1209 target=pull_request.target_repo, source=pull_request.source_repo,
1248 translator=_)
1210 translator=_)
1249 if not merge_possible:
1211 if not merge_possible:
1250 return merge_possible, msg
1212 return merge_possible, msg
1251
1213
1252 try:
1214 try:
1253 resp = self._try_merge(
1215 resp = self._try_merge(
1254 pull_request,
1216 pull_request,
1255 force_shadow_repo_refresh=force_shadow_repo_refresh)
1217 force_shadow_repo_refresh=force_shadow_repo_refresh)
1256 log.debug("Merge response: %s", resp)
1218 log.debug("Merge response: %s", resp)
1257 status = resp.possible, self.merge_status_message(
1219 status = resp.possible, resp.merge_status_message
1258 resp.failure_reason)
1259 except NotImplementedError:
1220 except NotImplementedError:
1260 status = False, _('Pull request merging is not supported.')
1221 status = False, _('Pull request merging is not supported.')
1261
1222
1262 return status
1223 return status
1263
1224
1264 def _check_repo_requirements(self, target, source, translator):
1225 def _check_repo_requirements(self, target, source, translator):
1265 """
1226 """
1266 Check if `target` and `source` have compatible requirements.
1227 Check if `target` and `source` have compatible requirements.
1267
1228
1268 Currently this is just checking for largefiles.
1229 Currently this is just checking for largefiles.
1269 """
1230 """
1270 _ = translator
1231 _ = translator
1271 target_has_largefiles = self._has_largefiles(target)
1232 target_has_largefiles = self._has_largefiles(target)
1272 source_has_largefiles = self._has_largefiles(source)
1233 source_has_largefiles = self._has_largefiles(source)
1273 merge_possible = True
1234 merge_possible = True
1274 message = u''
1235 message = u''
1275
1236
1276 if target_has_largefiles != source_has_largefiles:
1237 if target_has_largefiles != source_has_largefiles:
1277 merge_possible = False
1238 merge_possible = False
1278 if source_has_largefiles:
1239 if source_has_largefiles:
1279 message = _(
1240 message = _(
1280 'Target repository large files support is disabled.')
1241 'Target repository large files support is disabled.')
1281 else:
1242 else:
1282 message = _(
1243 message = _(
1283 'Source repository large files support is disabled.')
1244 'Source repository large files support is disabled.')
1284
1245
1285 return merge_possible, message
1246 return merge_possible, message
1286
1247
1287 def _has_largefiles(self, repo):
1248 def _has_largefiles(self, repo):
1288 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1249 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1289 'extensions', 'largefiles')
1250 'extensions', 'largefiles')
1290 return largefiles_ui and largefiles_ui[0].active
1251 return largefiles_ui and largefiles_ui[0].active
1291
1252
1292 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1253 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1293 """
1254 """
1294 Try to merge the pull request and return the merge status.
1255 Try to merge the pull request and return the merge status.
1295 """
1256 """
1296 log.debug(
1257 log.debug(
1297 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1258 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1298 pull_request.pull_request_id, force_shadow_repo_refresh)
1259 pull_request.pull_request_id, force_shadow_repo_refresh)
1299 target_vcs = pull_request.target_repo.scm_instance()
1260 target_vcs = pull_request.target_repo.scm_instance()
1300
1301 # Refresh the target reference.
1261 # Refresh the target reference.
1302 try:
1262 try:
1303 target_ref = self._refresh_reference(
1263 target_ref = self._refresh_reference(
1304 pull_request.target_ref_parts, target_vcs)
1264 pull_request.target_ref_parts, target_vcs)
1305 except CommitDoesNotExistError:
1265 except CommitDoesNotExistError:
1306 merge_state = MergeResponse(
1266 merge_state = MergeResponse(
1307 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1267 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1268 metadata={'target_ref': pull_request.target_ref_parts})
1308 return merge_state
1269 return merge_state
1309
1270
1310 target_locked = pull_request.target_repo.locked
1271 target_locked = pull_request.target_repo.locked
1311 if target_locked and target_locked[0]:
1272 if target_locked and target_locked[0]:
1312 log.debug("The target repository is locked.")
1273 locked_by = 'user:{}'.format(target_locked[0])
1274 log.debug("The target repository is locked by %s.", locked_by)
1313 merge_state = MergeResponse(
1275 merge_state = MergeResponse(
1314 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1276 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1277 metadata={'locked_by': locked_by})
1315 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1278 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1316 pull_request, target_ref):
1279 pull_request, target_ref):
1317 log.debug("Refreshing the merge status of the repository.")
1280 log.debug("Refreshing the merge status of the repository.")
1318 merge_state = self._refresh_merge_state(
1281 merge_state = self._refresh_merge_state(
1319 pull_request, target_vcs, target_ref)
1282 pull_request, target_vcs, target_ref)
1320 else:
1283 else:
1321 possible = pull_request.\
1284 possible = pull_request.\
1322 last_merge_status == MergeFailureReason.NONE
1285 last_merge_status == MergeFailureReason.NONE
1323 merge_state = MergeResponse(
1286 merge_state = MergeResponse(
1324 possible, False, None, pull_request.last_merge_status)
1287 possible, False, None, pull_request.last_merge_status)
1325
1288
1326 return merge_state
1289 return merge_state
1327
1290
1328 def _refresh_reference(self, reference, vcs_repository):
1291 def _refresh_reference(self, reference, vcs_repository):
1329 if reference.type in ('branch', 'book'):
1292 if reference.type in ('branch', 'book'):
1330 name_or_id = reference.name
1293 name_or_id = reference.name
1331 else:
1294 else:
1332 name_or_id = reference.commit_id
1295 name_or_id = reference.commit_id
1333 refreshed_commit = vcs_repository.get_commit(name_or_id)
1296 refreshed_commit = vcs_repository.get_commit(name_or_id)
1334 refreshed_reference = Reference(
1297 refreshed_reference = Reference(
1335 reference.type, reference.name, refreshed_commit.raw_id)
1298 reference.type, reference.name, refreshed_commit.raw_id)
1336 return refreshed_reference
1299 return refreshed_reference
1337
1300
1338 def _needs_merge_state_refresh(self, pull_request, target_reference):
1301 def _needs_merge_state_refresh(self, pull_request, target_reference):
1339 return not(
1302 return not(
1340 pull_request.revisions and
1303 pull_request.revisions and
1341 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1304 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1342 target_reference.commit_id == pull_request._last_merge_target_rev)
1305 target_reference.commit_id == pull_request._last_merge_target_rev)
1343
1306
1344 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1307 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1345 workspace_id = self._workspace_id(pull_request)
1308 workspace_id = self._workspace_id(pull_request)
1346 source_vcs = pull_request.source_repo.scm_instance()
1309 source_vcs = pull_request.source_repo.scm_instance()
1347 repo_id = pull_request.target_repo.repo_id
1310 repo_id = pull_request.target_repo.repo_id
1348 use_rebase = self._use_rebase_for_merging(pull_request)
1311 use_rebase = self._use_rebase_for_merging(pull_request)
1349 close_branch = self._close_branch_before_merging(pull_request)
1312 close_branch = self._close_branch_before_merging(pull_request)
1350 merge_state = target_vcs.merge(
1313 merge_state = target_vcs.merge(
1351 repo_id, workspace_id,
1314 repo_id, workspace_id,
1352 target_reference, source_vcs, pull_request.source_ref_parts,
1315 target_reference, source_vcs, pull_request.source_ref_parts,
1353 dry_run=True, use_rebase=use_rebase,
1316 dry_run=True, use_rebase=use_rebase,
1354 close_branch=close_branch)
1317 close_branch=close_branch)
1355
1318
1356 # Do not store the response if there was an unknown error.
1319 # Do not store the response if there was an unknown error.
1357 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1320 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1358 pull_request._last_merge_source_rev = \
1321 pull_request._last_merge_source_rev = \
1359 pull_request.source_ref_parts.commit_id
1322 pull_request.source_ref_parts.commit_id
1360 pull_request._last_merge_target_rev = target_reference.commit_id
1323 pull_request._last_merge_target_rev = target_reference.commit_id
1361 pull_request.last_merge_status = merge_state.failure_reason
1324 pull_request.last_merge_status = merge_state.failure_reason
1362 pull_request.shadow_merge_ref = merge_state.merge_ref
1325 pull_request.shadow_merge_ref = merge_state.merge_ref
1363 Session().add(pull_request)
1326 Session().add(pull_request)
1364 Session().commit()
1327 Session().commit()
1365
1328
1366 return merge_state
1329 return merge_state
1367
1330
1368 def _workspace_id(self, pull_request):
1331 def _workspace_id(self, pull_request):
1369 workspace_id = 'pr-%s' % pull_request.pull_request_id
1332 workspace_id = 'pr-%s' % pull_request.pull_request_id
1370 return workspace_id
1333 return workspace_id
1371
1334
1372 def merge_status_message(self, status_code):
1373 """
1374 Return a human friendly error message for the given merge status code.
1375 """
1376 return self.MERGE_STATUS_MESSAGES[status_code]
1377
1378 def generate_repo_data(self, repo, commit_id=None, branch=None,
1335 def generate_repo_data(self, repo, commit_id=None, branch=None,
1379 bookmark=None, translator=None):
1336 bookmark=None, translator=None):
1380 from rhodecode.model.repo import RepoModel
1337 from rhodecode.model.repo import RepoModel
1381
1338
1382 all_refs, selected_ref = \
1339 all_refs, selected_ref = \
1383 self._get_repo_pullrequest_sources(
1340 self._get_repo_pullrequest_sources(
1384 repo.scm_instance(), commit_id=commit_id,
1341 repo.scm_instance(), commit_id=commit_id,
1385 branch=branch, bookmark=bookmark, translator=translator)
1342 branch=branch, bookmark=bookmark, translator=translator)
1386
1343
1387 refs_select2 = []
1344 refs_select2 = []
1388 for element in all_refs:
1345 for element in all_refs:
1389 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1346 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1390 refs_select2.append({'text': element[1], 'children': children})
1347 refs_select2.append({'text': element[1], 'children': children})
1391
1348
1392 return {
1349 return {
1393 'user': {
1350 'user': {
1394 'user_id': repo.user.user_id,
1351 'user_id': repo.user.user_id,
1395 'username': repo.user.username,
1352 'username': repo.user.username,
1396 'firstname': repo.user.first_name,
1353 'firstname': repo.user.first_name,
1397 'lastname': repo.user.last_name,
1354 'lastname': repo.user.last_name,
1398 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1355 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1399 },
1356 },
1400 'name': repo.repo_name,
1357 'name': repo.repo_name,
1401 'link': RepoModel().get_url(repo),
1358 'link': RepoModel().get_url(repo),
1402 'description': h.chop_at_smart(repo.description_safe, '\n'),
1359 'description': h.chop_at_smart(repo.description_safe, '\n'),
1403 'refs': {
1360 'refs': {
1404 'all_refs': all_refs,
1361 'all_refs': all_refs,
1405 'selected_ref': selected_ref,
1362 'selected_ref': selected_ref,
1406 'select2_refs': refs_select2
1363 'select2_refs': refs_select2
1407 }
1364 }
1408 }
1365 }
1409
1366
1410 def generate_pullrequest_title(self, source, source_ref, target):
1367 def generate_pullrequest_title(self, source, source_ref, target):
1411 return u'{source}#{at_ref} to {target}'.format(
1368 return u'{source}#{at_ref} to {target}'.format(
1412 source=source,
1369 source=source,
1413 at_ref=source_ref,
1370 at_ref=source_ref,
1414 target=target,
1371 target=target,
1415 )
1372 )
1416
1373
1417 def _cleanup_merge_workspace(self, pull_request):
1374 def _cleanup_merge_workspace(self, pull_request):
1418 # Merging related cleanup
1375 # Merging related cleanup
1419 repo_id = pull_request.target_repo.repo_id
1376 repo_id = pull_request.target_repo.repo_id
1420 target_scm = pull_request.target_repo.scm_instance()
1377 target_scm = pull_request.target_repo.scm_instance()
1421 workspace_id = self._workspace_id(pull_request)
1378 workspace_id = self._workspace_id(pull_request)
1422
1379
1423 try:
1380 try:
1424 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1381 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1425 except NotImplementedError:
1382 except NotImplementedError:
1426 pass
1383 pass
1427
1384
1428 def _get_repo_pullrequest_sources(
1385 def _get_repo_pullrequest_sources(
1429 self, repo, commit_id=None, branch=None, bookmark=None,
1386 self, repo, commit_id=None, branch=None, bookmark=None,
1430 translator=None):
1387 translator=None):
1431 """
1388 """
1432 Return a structure with repo's interesting commits, suitable for
1389 Return a structure with repo's interesting commits, suitable for
1433 the selectors in pullrequest controller
1390 the selectors in pullrequest controller
1434
1391
1435 :param commit_id: a commit that must be in the list somehow
1392 :param commit_id: a commit that must be in the list somehow
1436 and selected by default
1393 and selected by default
1437 :param branch: a branch that must be in the list and selected
1394 :param branch: a branch that must be in the list and selected
1438 by default - even if closed
1395 by default - even if closed
1439 :param bookmark: a bookmark that must be in the list and selected
1396 :param bookmark: a bookmark that must be in the list and selected
1440 """
1397 """
1441 _ = translator or get_current_request().translate
1398 _ = translator or get_current_request().translate
1442
1399
1443 commit_id = safe_str(commit_id) if commit_id else None
1400 commit_id = safe_str(commit_id) if commit_id else None
1444 branch = safe_str(branch) if branch else None
1401 branch = safe_str(branch) if branch else None
1445 bookmark = safe_str(bookmark) if bookmark else None
1402 bookmark = safe_str(bookmark) if bookmark else None
1446
1403
1447 selected = None
1404 selected = None
1448
1405
1449 # order matters: first source that has commit_id in it will be selected
1406 # order matters: first source that has commit_id in it will be selected
1450 sources = []
1407 sources = []
1451 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1408 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1452 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1409 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1453
1410
1454 if commit_id:
1411 if commit_id:
1455 ref_commit = (h.short_id(commit_id), commit_id)
1412 ref_commit = (h.short_id(commit_id), commit_id)
1456 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1413 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1457
1414
1458 sources.append(
1415 sources.append(
1459 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1416 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1460 )
1417 )
1461
1418
1462 groups = []
1419 groups = []
1463 for group_key, ref_list, group_name, match in sources:
1420 for group_key, ref_list, group_name, match in sources:
1464 group_refs = []
1421 group_refs = []
1465 for ref_name, ref_id in ref_list:
1422 for ref_name, ref_id in ref_list:
1466 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1423 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1467 group_refs.append((ref_key, ref_name))
1424 group_refs.append((ref_key, ref_name))
1468
1425
1469 if not selected:
1426 if not selected:
1470 if set([commit_id, match]) & set([ref_id, ref_name]):
1427 if set([commit_id, match]) & set([ref_id, ref_name]):
1471 selected = ref_key
1428 selected = ref_key
1472
1429
1473 if group_refs:
1430 if group_refs:
1474 groups.append((group_refs, group_name))
1431 groups.append((group_refs, group_name))
1475
1432
1476 if not selected:
1433 if not selected:
1477 ref = commit_id or branch or bookmark
1434 ref = commit_id or branch or bookmark
1478 if ref:
1435 if ref:
1479 raise CommitDoesNotExistError(
1436 raise CommitDoesNotExistError(
1480 'No commit refs could be found matching: %s' % ref)
1437 'No commit refs could be found matching: %s' % ref)
1481 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1438 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1482 selected = 'branch:%s:%s' % (
1439 selected = 'branch:%s:%s' % (
1483 repo.DEFAULT_BRANCH_NAME,
1440 repo.DEFAULT_BRANCH_NAME,
1484 repo.branches[repo.DEFAULT_BRANCH_NAME]
1441 repo.branches[repo.DEFAULT_BRANCH_NAME]
1485 )
1442 )
1486 elif repo.commit_ids:
1443 elif repo.commit_ids:
1487 # make the user select in this case
1444 # make the user select in this case
1488 selected = None
1445 selected = None
1489 else:
1446 else:
1490 raise EmptyRepositoryError()
1447 raise EmptyRepositoryError()
1491 return groups, selected
1448 return groups, selected
1492
1449
1493 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1450 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1494 hide_whitespace_changes, diff_context):
1451 hide_whitespace_changes, diff_context):
1495
1452
1496 return self._get_diff_from_pr_or_version(
1453 return self._get_diff_from_pr_or_version(
1497 source_repo, source_ref_id, target_ref_id,
1454 source_repo, source_ref_id, target_ref_id,
1498 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1455 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1499
1456
1500 def _get_diff_from_pr_or_version(
1457 def _get_diff_from_pr_or_version(
1501 self, source_repo, source_ref_id, target_ref_id,
1458 self, source_repo, source_ref_id, target_ref_id,
1502 hide_whitespace_changes, diff_context):
1459 hide_whitespace_changes, diff_context):
1503
1460
1504 target_commit = source_repo.get_commit(
1461 target_commit = source_repo.get_commit(
1505 commit_id=safe_str(target_ref_id))
1462 commit_id=safe_str(target_ref_id))
1506 source_commit = source_repo.get_commit(
1463 source_commit = source_repo.get_commit(
1507 commit_id=safe_str(source_ref_id))
1464 commit_id=safe_str(source_ref_id))
1508 if isinstance(source_repo, Repository):
1465 if isinstance(source_repo, Repository):
1509 vcs_repo = source_repo.scm_instance()
1466 vcs_repo = source_repo.scm_instance()
1510 else:
1467 else:
1511 vcs_repo = source_repo
1468 vcs_repo = source_repo
1512
1469
1513 # TODO: johbo: In the context of an update, we cannot reach
1470 # TODO: johbo: In the context of an update, we cannot reach
1514 # the old commit anymore with our normal mechanisms. It needs
1471 # the old commit anymore with our normal mechanisms. It needs
1515 # some sort of special support in the vcs layer to avoid this
1472 # some sort of special support in the vcs layer to avoid this
1516 # workaround.
1473 # workaround.
1517 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1474 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1518 vcs_repo.alias == 'git'):
1475 vcs_repo.alias == 'git'):
1519 source_commit.raw_id = safe_str(source_ref_id)
1476 source_commit.raw_id = safe_str(source_ref_id)
1520
1477
1521 log.debug('calculating diff between '
1478 log.debug('calculating diff between '
1522 'source_ref:%s and target_ref:%s for repo `%s`',
1479 'source_ref:%s and target_ref:%s for repo `%s`',
1523 target_ref_id, source_ref_id,
1480 target_ref_id, source_ref_id,
1524 safe_unicode(vcs_repo.path))
1481 safe_unicode(vcs_repo.path))
1525
1482
1526 vcs_diff = vcs_repo.get_diff(
1483 vcs_diff = vcs_repo.get_diff(
1527 commit1=target_commit, commit2=source_commit,
1484 commit1=target_commit, commit2=source_commit,
1528 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1485 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1529 return vcs_diff
1486 return vcs_diff
1530
1487
1531 def _is_merge_enabled(self, pull_request):
1488 def _is_merge_enabled(self, pull_request):
1532 return self._get_general_setting(
1489 return self._get_general_setting(
1533 pull_request, 'rhodecode_pr_merge_enabled')
1490 pull_request, 'rhodecode_pr_merge_enabled')
1534
1491
1535 def _use_rebase_for_merging(self, pull_request):
1492 def _use_rebase_for_merging(self, pull_request):
1536 repo_type = pull_request.target_repo.repo_type
1493 repo_type = pull_request.target_repo.repo_type
1537 if repo_type == 'hg':
1494 if repo_type == 'hg':
1538 return self._get_general_setting(
1495 return self._get_general_setting(
1539 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1496 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1540 elif repo_type == 'git':
1497 elif repo_type == 'git':
1541 return self._get_general_setting(
1498 return self._get_general_setting(
1542 pull_request, 'rhodecode_git_use_rebase_for_merging')
1499 pull_request, 'rhodecode_git_use_rebase_for_merging')
1543
1500
1544 return False
1501 return False
1545
1502
1546 def _close_branch_before_merging(self, pull_request):
1503 def _close_branch_before_merging(self, pull_request):
1547 repo_type = pull_request.target_repo.repo_type
1504 repo_type = pull_request.target_repo.repo_type
1548 if repo_type == 'hg':
1505 if repo_type == 'hg':
1549 return self._get_general_setting(
1506 return self._get_general_setting(
1550 pull_request, 'rhodecode_hg_close_branch_before_merging')
1507 pull_request, 'rhodecode_hg_close_branch_before_merging')
1551 elif repo_type == 'git':
1508 elif repo_type == 'git':
1552 return self._get_general_setting(
1509 return self._get_general_setting(
1553 pull_request, 'rhodecode_git_close_branch_before_merging')
1510 pull_request, 'rhodecode_git_close_branch_before_merging')
1554
1511
1555 return False
1512 return False
1556
1513
1557 def _get_general_setting(self, pull_request, settings_key, default=False):
1514 def _get_general_setting(self, pull_request, settings_key, default=False):
1558 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1515 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1559 settings = settings_model.get_general_settings()
1516 settings = settings_model.get_general_settings()
1560 return settings.get(settings_key, default)
1517 return settings.get(settings_key, default)
1561
1518
1562 def _log_audit_action(self, action, action_data, user, pull_request):
1519 def _log_audit_action(self, action, action_data, user, pull_request):
1563 audit_logger.store(
1520 audit_logger.store(
1564 action=action,
1521 action=action,
1565 action_data=action_data,
1522 action_data=action_data,
1566 user=user,
1523 user=user,
1567 repo=pull_request.target_repo)
1524 repo=pull_request.target_repo)
1568
1525
1569 def get_reviewer_functions(self):
1526 def get_reviewer_functions(self):
1570 """
1527 """
1571 Fetches functions for validation and fetching default reviewers.
1528 Fetches functions for validation and fetching default reviewers.
1572 If available we use the EE package, else we fallback to CE
1529 If available we use the EE package, else we fallback to CE
1573 package functions
1530 package functions
1574 """
1531 """
1575 try:
1532 try:
1576 from rc_reviewers.utils import get_default_reviewers_data
1533 from rc_reviewers.utils import get_default_reviewers_data
1577 from rc_reviewers.utils import validate_default_reviewers
1534 from rc_reviewers.utils import validate_default_reviewers
1578 except ImportError:
1535 except ImportError:
1579 from rhodecode.apps.repository.utils import get_default_reviewers_data
1536 from rhodecode.apps.repository.utils import get_default_reviewers_data
1580 from rhodecode.apps.repository.utils import validate_default_reviewers
1537 from rhodecode.apps.repository.utils import validate_default_reviewers
1581
1538
1582 return get_default_reviewers_data, validate_default_reviewers
1539 return get_default_reviewers_data, validate_default_reviewers
1583
1540
1584
1541
1585 class MergeCheck(object):
1542 class MergeCheck(object):
1586 """
1543 """
1587 Perform Merge Checks and returns a check object which stores information
1544 Perform Merge Checks and returns a check object which stores information
1588 about merge errors, and merge conditions
1545 about merge errors, and merge conditions
1589 """
1546 """
1590 TODO_CHECK = 'todo'
1547 TODO_CHECK = 'todo'
1591 PERM_CHECK = 'perm'
1548 PERM_CHECK = 'perm'
1592 REVIEW_CHECK = 'review'
1549 REVIEW_CHECK = 'review'
1593 MERGE_CHECK = 'merge'
1550 MERGE_CHECK = 'merge'
1594
1551
1595 def __init__(self):
1552 def __init__(self):
1596 self.review_status = None
1553 self.review_status = None
1597 self.merge_possible = None
1554 self.merge_possible = None
1598 self.merge_msg = ''
1555 self.merge_msg = ''
1599 self.failed = None
1556 self.failed = None
1600 self.errors = []
1557 self.errors = []
1601 self.error_details = OrderedDict()
1558 self.error_details = OrderedDict()
1602
1559
1603 def push_error(self, error_type, message, error_key, details):
1560 def push_error(self, error_type, message, error_key, details):
1604 self.failed = True
1561 self.failed = True
1605 self.errors.append([error_type, message])
1562 self.errors.append([error_type, message])
1606 self.error_details[error_key] = dict(
1563 self.error_details[error_key] = dict(
1607 details=details,
1564 details=details,
1608 error_type=error_type,
1565 error_type=error_type,
1609 message=message
1566 message=message
1610 )
1567 )
1611
1568
1612 @classmethod
1569 @classmethod
1613 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1570 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1614 force_shadow_repo_refresh=False):
1571 force_shadow_repo_refresh=False):
1615 _ = translator
1572 _ = translator
1616 merge_check = cls()
1573 merge_check = cls()
1617
1574
1618 # permissions to merge
1575 # permissions to merge
1619 user_allowed_to_merge = PullRequestModel().check_user_merge(
1576 user_allowed_to_merge = PullRequestModel().check_user_merge(
1620 pull_request, auth_user)
1577 pull_request, auth_user)
1621 if not user_allowed_to_merge:
1578 if not user_allowed_to_merge:
1622 log.debug("MergeCheck: cannot merge, approval is pending.")
1579 log.debug("MergeCheck: cannot merge, approval is pending.")
1623
1580
1624 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1581 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1625 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1582 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1626 if fail_early:
1583 if fail_early:
1627 return merge_check
1584 return merge_check
1628
1585
1629 # permission to merge into the target branch
1586 # permission to merge into the target branch
1630 target_commit_id = pull_request.target_ref_parts.commit_id
1587 target_commit_id = pull_request.target_ref_parts.commit_id
1631 if pull_request.target_ref_parts.type == 'branch':
1588 if pull_request.target_ref_parts.type == 'branch':
1632 branch_name = pull_request.target_ref_parts.name
1589 branch_name = pull_request.target_ref_parts.name
1633 else:
1590 else:
1634 # for mercurial we can always figure out the branch from the commit
1591 # for mercurial we can always figure out the branch from the commit
1635 # in case of bookmark
1592 # in case of bookmark
1636 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1593 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1637 branch_name = target_commit.branch
1594 branch_name = target_commit.branch
1638
1595
1639 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1596 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1640 pull_request.target_repo.repo_name, branch_name)
1597 pull_request.target_repo.repo_name, branch_name)
1641 if branch_perm and branch_perm == 'branch.none':
1598 if branch_perm and branch_perm == 'branch.none':
1642 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1599 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1643 branch_name, rule)
1600 branch_name, rule)
1644 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1601 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1645 if fail_early:
1602 if fail_early:
1646 return merge_check
1603 return merge_check
1647
1604
1648 # review status, must be always present
1605 # review status, must be always present
1649 review_status = pull_request.calculated_review_status()
1606 review_status = pull_request.calculated_review_status()
1650 merge_check.review_status = review_status
1607 merge_check.review_status = review_status
1651
1608
1652 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1609 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1653 if not status_approved:
1610 if not status_approved:
1654 log.debug("MergeCheck: cannot merge, approval is pending.")
1611 log.debug("MergeCheck: cannot merge, approval is pending.")
1655
1612
1656 msg = _('Pull request reviewer approval is pending.')
1613 msg = _('Pull request reviewer approval is pending.')
1657
1614
1658 merge_check.push_error(
1615 merge_check.push_error(
1659 'warning', msg, cls.REVIEW_CHECK, review_status)
1616 'warning', msg, cls.REVIEW_CHECK, review_status)
1660
1617
1661 if fail_early:
1618 if fail_early:
1662 return merge_check
1619 return merge_check
1663
1620
1664 # left over TODOs
1621 # left over TODOs
1665 todos = CommentsModel().get_unresolved_todos(pull_request)
1622 todos = CommentsModel().get_unresolved_todos(pull_request)
1666 if todos:
1623 if todos:
1667 log.debug("MergeCheck: cannot merge, {} "
1624 log.debug("MergeCheck: cannot merge, {} "
1668 "unresolved todos left.".format(len(todos)))
1625 "unresolved todos left.".format(len(todos)))
1669
1626
1670 if len(todos) == 1:
1627 if len(todos) == 1:
1671 msg = _('Cannot merge, {} TODO still not resolved.').format(
1628 msg = _('Cannot merge, {} TODO still not resolved.').format(
1672 len(todos))
1629 len(todos))
1673 else:
1630 else:
1674 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1631 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1675 len(todos))
1632 len(todos))
1676
1633
1677 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1634 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1678
1635
1679 if fail_early:
1636 if fail_early:
1680 return merge_check
1637 return merge_check
1681
1638
1682 # merge possible, here is the filesystem simulation + shadow repo
1639 # merge possible, here is the filesystem simulation + shadow repo
1683 merge_status, msg = PullRequestModel().merge_status(
1640 merge_status, msg = PullRequestModel().merge_status(
1684 pull_request, translator=translator,
1641 pull_request, translator=translator,
1685 force_shadow_repo_refresh=force_shadow_repo_refresh)
1642 force_shadow_repo_refresh=force_shadow_repo_refresh)
1686 merge_check.merge_possible = merge_status
1643 merge_check.merge_possible = merge_status
1687 merge_check.merge_msg = msg
1644 merge_check.merge_msg = msg
1688 if not merge_status:
1645 if not merge_status:
1689 log.debug(
1646 log.debug(
1690 "MergeCheck: cannot merge, pull request merge not possible.")
1647 "MergeCheck: cannot merge, pull request merge not possible.")
1691 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1648 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1692
1649
1693 if fail_early:
1650 if fail_early:
1694 return merge_check
1651 return merge_check
1695
1652
1696 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1653 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1697 return merge_check
1654 return merge_check
1698
1655
1699 @classmethod
1656 @classmethod
1700 def get_merge_conditions(cls, pull_request, translator):
1657 def get_merge_conditions(cls, pull_request, translator):
1701 _ = translator
1658 _ = translator
1702 merge_details = {}
1659 merge_details = {}
1703
1660
1704 model = PullRequestModel()
1661 model = PullRequestModel()
1705 use_rebase = model._use_rebase_for_merging(pull_request)
1662 use_rebase = model._use_rebase_for_merging(pull_request)
1706
1663
1707 if use_rebase:
1664 if use_rebase:
1708 merge_details['merge_strategy'] = dict(
1665 merge_details['merge_strategy'] = dict(
1709 details={},
1666 details={},
1710 message=_('Merge strategy: rebase')
1667 message=_('Merge strategy: rebase')
1711 )
1668 )
1712 else:
1669 else:
1713 merge_details['merge_strategy'] = dict(
1670 merge_details['merge_strategy'] = dict(
1714 details={},
1671 details={},
1715 message=_('Merge strategy: explicit merge commit')
1672 message=_('Merge strategy: explicit merge commit')
1716 )
1673 )
1717
1674
1718 close_branch = model._close_branch_before_merging(pull_request)
1675 close_branch = model._close_branch_before_merging(pull_request)
1719 if close_branch:
1676 if close_branch:
1720 repo_type = pull_request.target_repo.repo_type
1677 repo_type = pull_request.target_repo.repo_type
1721 if repo_type == 'hg':
1678 if repo_type == 'hg':
1722 close_msg = _('Source branch will be closed after merge.')
1679 close_msg = _('Source branch will be closed after merge.')
1723 elif repo_type == 'git':
1680 elif repo_type == 'git':
1724 close_msg = _('Source branch will be deleted after merge.')
1681 close_msg = _('Source branch will be deleted after merge.')
1725
1682
1726 merge_details['close_branch'] = dict(
1683 merge_details['close_branch'] = dict(
1727 details={},
1684 details={},
1728 message=close_msg
1685 message=close_msg
1729 )
1686 )
1730
1687
1731 return merge_details
1688 return merge_details
1732
1689
1733 ChangeTuple = collections.namedtuple(
1690 ChangeTuple = collections.namedtuple(
1734 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1691 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1735
1692
1736 FileChangeTuple = collections.namedtuple(
1693 FileChangeTuple = collections.namedtuple(
1737 'FileChangeTuple', ['added', 'modified', 'removed'])
1694 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,871 +1,910 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 merge_resp = MergeResponse(
54 False, False, None, MergeFailureReason.UNKNOWN,
55 metadata={'exception': 'MockError'})
53 self.merge_patcher = mock.patch.object(
56 self.merge_patcher = mock.patch.object(
54 BackendClass, 'merge', return_value=MergeResponse(
57 BackendClass, 'merge', return_value=merge_resp)
55 False, False, None, MergeFailureReason.UNKNOWN))
56 self.workspace_remove_patcher = mock.patch.object(
58 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
59 BackendClass, 'cleanup_merge_workspace')
58
60
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
63 self.comment_patcher = mock.patch(
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
65 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
66 self.notification_patcher = mock.patch(
65 'rhodecode.model.notification.NotificationModel.create')
67 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
68 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
69 self.helper_patcher = mock.patch(
68 'rhodecode.lib.helpers.route_path')
70 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
71 self.helper_patcher.start()
70
72
71 self.hook_patcher = mock.patch.object(PullRequestModel,
73 self.hook_patcher = mock.patch.object(PullRequestModel,
72 '_trigger_pull_request_hook')
74 '_trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
75 self.hook_mock = self.hook_patcher.start()
74
76
75 self.invalidation_patcher = mock.patch(
77 self.invalidation_patcher = mock.patch(
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
79 self.invalidation_mock = self.invalidation_patcher.start()
78
80
79 self.pull_request = pr_util.create_pull_request(
81 self.pull_request = pr_util.create_pull_request(
80 mergeable=True, name_suffix=u'ąć')
82 mergeable=True, name_suffix=u'ąć')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 self.repo_id = self.pull_request.target_repo.repo_id
86 self.repo_id = self.pull_request.target_repo.repo_id
85
87
86 @request.addfinalizer
88 @request.addfinalizer
87 def cleanup_pull_request():
89 def cleanup_pull_request():
88 calls = [mock.call(
90 calls = [mock.call(
89 self.pull_request, self.pull_request.author, 'create')]
91 self.pull_request, self.pull_request.author, 'create')]
90 self.hook_mock.assert_has_calls(calls)
92 self.hook_mock.assert_has_calls(calls)
91
93
92 self.workspace_remove_patcher.stop()
94 self.workspace_remove_patcher.stop()
93 self.merge_patcher.stop()
95 self.merge_patcher.stop()
94 self.comment_patcher.stop()
96 self.comment_patcher.stop()
95 self.notification_patcher.stop()
97 self.notification_patcher.stop()
96 self.helper_patcher.stop()
98 self.helper_patcher.stop()
97 self.hook_patcher.stop()
99 self.hook_patcher.stop()
98 self.invalidation_patcher.stop()
100 self.invalidation_patcher.stop()
99
101
100 return self.pull_request
102 return self.pull_request
101
103
102 def test_get_all(self, pull_request):
104 def test_get_all(self, pull_request):
103 prs = PullRequestModel().get_all(pull_request.target_repo)
105 prs = PullRequestModel().get_all(pull_request.target_repo)
104 assert isinstance(prs, list)
106 assert isinstance(prs, list)
105 assert len(prs) == 1
107 assert len(prs) == 1
106
108
107 def test_count_all(self, pull_request):
109 def test_count_all(self, pull_request):
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 assert pr_count == 1
111 assert pr_count == 1
110
112
111 def test_get_awaiting_review(self, pull_request):
113 def test_get_awaiting_review(self, pull_request):
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 assert isinstance(prs, list)
115 assert isinstance(prs, list)
114 assert len(prs) == 1
116 assert len(prs) == 1
115
117
116 def test_count_awaiting_review(self, pull_request):
118 def test_count_awaiting_review(self, pull_request):
117 pr_count = PullRequestModel().count_awaiting_review(
119 pr_count = PullRequestModel().count_awaiting_review(
118 pull_request.target_repo)
120 pull_request.target_repo)
119 assert pr_count == 1
121 assert pr_count == 1
120
122
121 def test_get_awaiting_my_review(self, pull_request):
123 def test_get_awaiting_my_review(self, pull_request):
122 PullRequestModel().update_reviewers(
124 PullRequestModel().update_reviewers(
123 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, [])],
124 pull_request.author)
126 pull_request.author)
125 prs = PullRequestModel().get_awaiting_my_review(
127 prs = PullRequestModel().get_awaiting_my_review(
126 pull_request.target_repo, user_id=pull_request.author.user_id)
128 pull_request.target_repo, user_id=pull_request.author.user_id)
127 assert isinstance(prs, list)
129 assert isinstance(prs, list)
128 assert len(prs) == 1
130 assert len(prs) == 1
129
131
130 def test_count_awaiting_my_review(self, pull_request):
132 def test_count_awaiting_my_review(self, pull_request):
131 PullRequestModel().update_reviewers(
133 PullRequestModel().update_reviewers(
132 pull_request, [(pull_request.author, ['author'], False, [])],
134 pull_request, [(pull_request.author, ['author'], False, [])],
133 pull_request.author)
135 pull_request.author)
134 pr_count = PullRequestModel().count_awaiting_my_review(
136 pr_count = PullRequestModel().count_awaiting_my_review(
135 pull_request.target_repo, user_id=pull_request.author.user_id)
137 pull_request.target_repo, user_id=pull_request.author.user_id)
136 assert pr_count == 1
138 assert pr_count == 1
137
139
138 def test_delete_calls_cleanup_merge(self, pull_request):
140 def test_delete_calls_cleanup_merge(self, pull_request):
139 repo_id = pull_request.target_repo.repo_id
141 repo_id = pull_request.target_repo.repo_id
140 PullRequestModel().delete(pull_request, pull_request.author)
142 PullRequestModel().delete(pull_request, pull_request.author)
141
143
142 self.workspace_remove_mock.assert_called_once_with(
144 self.workspace_remove_mock.assert_called_once_with(
143 repo_id, self.workspace_id)
145 repo_id, self.workspace_id)
144
146
145 def test_close_calls_cleanup_and_hook(self, pull_request):
147 def test_close_calls_cleanup_and_hook(self, pull_request):
146 PullRequestModel().close_pull_request(
148 PullRequestModel().close_pull_request(
147 pull_request, pull_request.author)
149 pull_request, pull_request.author)
148 repo_id = pull_request.target_repo.repo_id
150 repo_id = pull_request.target_repo.repo_id
149
151
150 self.workspace_remove_mock.assert_called_once_with(
152 self.workspace_remove_mock.assert_called_once_with(
151 repo_id, self.workspace_id)
153 repo_id, self.workspace_id)
152 self.hook_mock.assert_called_with(
154 self.hook_mock.assert_called_with(
153 self.pull_request, self.pull_request.author, 'close')
155 self.pull_request, self.pull_request.author, 'close')
154
156
155 def test_merge_status(self, pull_request):
157 def test_merge_status(self, pull_request):
156 self.merge_mock.return_value = MergeResponse(
158 self.merge_mock.return_value = MergeResponse(
157 True, False, None, MergeFailureReason.NONE)
159 True, False, None, MergeFailureReason.NONE)
158
160
159 assert pull_request._last_merge_source_rev is None
161 assert pull_request._last_merge_source_rev is None
160 assert pull_request._last_merge_target_rev is None
162 assert pull_request._last_merge_target_rev is None
161 assert pull_request.last_merge_status is None
163 assert pull_request.last_merge_status is None
162
164
163 status, msg = PullRequestModel().merge_status(pull_request)
165 status, msg = PullRequestModel().merge_status(pull_request)
164 assert status is True
166 assert status is True
165 assert msg.eval() == 'This pull request can be automatically merged.'
167 assert msg == 'This pull request can be automatically merged.'
166 self.merge_mock.assert_called_with(
168 self.merge_mock.assert_called_with(
167 self.repo_id, self.workspace_id,
169 self.repo_id, self.workspace_id,
168 pull_request.target_ref_parts,
170 pull_request.target_ref_parts,
169 pull_request.source_repo.scm_instance(),
171 pull_request.source_repo.scm_instance(),
170 pull_request.source_ref_parts, dry_run=True,
172 pull_request.source_ref_parts, dry_run=True,
171 use_rebase=False, close_branch=False)
173 use_rebase=False, close_branch=False)
172
174
173 assert pull_request._last_merge_source_rev == self.source_commit
175 assert pull_request._last_merge_source_rev == self.source_commit
174 assert pull_request._last_merge_target_rev == self.target_commit
176 assert pull_request._last_merge_target_rev == self.target_commit
175 assert pull_request.last_merge_status is MergeFailureReason.NONE
177 assert pull_request.last_merge_status is MergeFailureReason.NONE
176
178
177 self.merge_mock.reset_mock()
179 self.merge_mock.reset_mock()
178 status, msg = PullRequestModel().merge_status(pull_request)
180 status, msg = PullRequestModel().merge_status(pull_request)
179 assert status is True
181 assert status is True
180 assert msg.eval() == 'This pull request can be automatically merged.'
182 assert msg == 'This pull request can be automatically merged.'
181 assert self.merge_mock.called is False
183 assert self.merge_mock.called is False
182
184
183 def test_merge_status_known_failure(self, pull_request):
185 def test_merge_status_known_failure(self, pull_request):
184 self.merge_mock.return_value = MergeResponse(
186 self.merge_mock.return_value = MergeResponse(
185 False, False, None, MergeFailureReason.MERGE_FAILED)
187 False, False, None, MergeFailureReason.MERGE_FAILED)
186
188
187 assert pull_request._last_merge_source_rev is None
189 assert pull_request._last_merge_source_rev is None
188 assert pull_request._last_merge_target_rev is None
190 assert pull_request._last_merge_target_rev is None
189 assert pull_request.last_merge_status is None
191 assert pull_request.last_merge_status is None
190
192
191 status, msg = PullRequestModel().merge_status(pull_request)
193 status, msg = PullRequestModel().merge_status(pull_request)
192 assert status is False
194 assert status is False
193 assert (
195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
194 msg.eval() ==
195 'This pull request cannot be merged because of merge conflicts.')
196 self.merge_mock.assert_called_with(
196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
197 self.repo_id, self.workspace_id,
198 pull_request.target_ref_parts,
198 pull_request.target_ref_parts,
199 pull_request.source_repo.scm_instance(),
199 pull_request.source_repo.scm_instance(),
200 pull_request.source_ref_parts, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
201 use_rebase=False, close_branch=False)
201 use_rebase=False, close_branch=False)
202
202
203 assert pull_request._last_merge_source_rev == self.source_commit
203 assert pull_request._last_merge_source_rev == self.source_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
205 assert (
205 assert (
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207
207
208 self.merge_mock.reset_mock()
208 self.merge_mock.reset_mock()
209 status, msg = PullRequestModel().merge_status(pull_request)
209 status, msg = PullRequestModel().merge_status(pull_request)
210 assert status is False
210 assert status is False
211 assert (
211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
212 msg.eval() ==
213 'This pull request cannot be merged because of merge conflicts.')
214 assert self.merge_mock.called is False
212 assert self.merge_mock.called is False
215
213
216 def test_merge_status_unknown_failure(self, pull_request):
214 def test_merge_status_unknown_failure(self, pull_request):
217 self.merge_mock.return_value = MergeResponse(
215 self.merge_mock.return_value = MergeResponse(
218 False, False, None, MergeFailureReason.UNKNOWN)
216 False, False, None, MergeFailureReason.UNKNOWN,
217 metadata={'exception': 'MockError'})
219
218
220 assert pull_request._last_merge_source_rev is None
219 assert pull_request._last_merge_source_rev is None
221 assert pull_request._last_merge_target_rev is None
220 assert pull_request._last_merge_target_rev is None
222 assert pull_request.last_merge_status is None
221 assert pull_request.last_merge_status is None
223
222
224 status, msg = PullRequestModel().merge_status(pull_request)
223 status, msg = PullRequestModel().merge_status(pull_request)
225 assert status is False
224 assert status is False
226 assert msg.eval() == (
225 assert msg == (
227 'This pull request cannot be merged because of an unhandled'
226 'This pull request cannot be merged because of an unhandled exception. '
228 ' exception.')
227 'MockError')
229 self.merge_mock.assert_called_with(
228 self.merge_mock.assert_called_with(
230 self.repo_id, self.workspace_id,
229 self.repo_id, self.workspace_id,
231 pull_request.target_ref_parts,
230 pull_request.target_ref_parts,
232 pull_request.source_repo.scm_instance(),
231 pull_request.source_repo.scm_instance(),
233 pull_request.source_ref_parts, dry_run=True,
232 pull_request.source_ref_parts, dry_run=True,
234 use_rebase=False, close_branch=False)
233 use_rebase=False, close_branch=False)
235
234
236 assert pull_request._last_merge_source_rev is None
235 assert pull_request._last_merge_source_rev is None
237 assert pull_request._last_merge_target_rev is None
236 assert pull_request._last_merge_target_rev is None
238 assert pull_request.last_merge_status is None
237 assert pull_request.last_merge_status is None
239
238
240 self.merge_mock.reset_mock()
239 self.merge_mock.reset_mock()
241 status, msg = PullRequestModel().merge_status(pull_request)
240 status, msg = PullRequestModel().merge_status(pull_request)
242 assert status is False
241 assert status is False
243 assert msg.eval() == (
242 assert msg == (
244 'This pull request cannot be merged because of an unhandled'
243 'This pull request cannot be merged because of an unhandled exception. '
245 ' exception.')
244 'MockError')
246 assert self.merge_mock.called is True
245 assert self.merge_mock.called is True
247
246
248 def test_merge_status_when_target_is_locked(self, pull_request):
247 def test_merge_status_when_target_is_locked(self, pull_request):
249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
250 status, msg = PullRequestModel().merge_status(pull_request)
249 status, msg = PullRequestModel().merge_status(pull_request)
251 assert status is False
250 assert status is False
252 assert msg.eval() == (
251 assert msg == (
253 'This pull request cannot be merged because the target repository'
252 'This pull request cannot be merged because the target repository '
254 ' is locked.')
253 'is locked by user:1.')
255
254
256 def test_merge_status_requirements_check_target(self, pull_request):
255 def test_merge_status_requirements_check_target(self, pull_request):
257
256
258 def has_largefiles(self, repo):
257 def has_largefiles(self, repo):
259 return repo == pull_request.source_repo
258 return repo == pull_request.source_repo
260
259
261 patcher = mock.patch.object(
260 patcher = mock.patch.object(
262 PullRequestModel, '_has_largefiles', has_largefiles)
261 PullRequestModel, '_has_largefiles', has_largefiles)
263 with patcher:
262 with patcher:
264 status, msg = PullRequestModel().merge_status(pull_request)
263 status, msg = PullRequestModel().merge_status(pull_request)
265
264
266 assert status is False
265 assert status is False
267 assert msg == 'Target repository large files support is disabled.'
266 assert msg == 'Target repository large files support is disabled.'
268
267
269 def test_merge_status_requirements_check_source(self, pull_request):
268 def test_merge_status_requirements_check_source(self, pull_request):
270
269
271 def has_largefiles(self, repo):
270 def has_largefiles(self, repo):
272 return repo == pull_request.target_repo
271 return repo == pull_request.target_repo
273
272
274 patcher = mock.patch.object(
273 patcher = mock.patch.object(
275 PullRequestModel, '_has_largefiles', has_largefiles)
274 PullRequestModel, '_has_largefiles', has_largefiles)
276 with patcher:
275 with patcher:
277 status, msg = PullRequestModel().merge_status(pull_request)
276 status, msg = PullRequestModel().merge_status(pull_request)
278
277
279 assert status is False
278 assert status is False
280 assert msg == 'Source repository large files support is disabled.'
279 assert msg == 'Source repository large files support is disabled.'
281
280
282 def test_merge(self, pull_request, merge_extras):
281 def test_merge(self, pull_request, merge_extras):
283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
282 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
284 merge_ref = Reference(
283 merge_ref = Reference(
285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
284 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
286 self.merge_mock.return_value = MergeResponse(
285 self.merge_mock.return_value = MergeResponse(
287 True, True, merge_ref, MergeFailureReason.NONE)
286 True, True, merge_ref, MergeFailureReason.NONE)
288
287
289 merge_extras['repository'] = pull_request.target_repo.repo_name
288 merge_extras['repository'] = pull_request.target_repo.repo_name
290 PullRequestModel().merge_repo(
289 PullRequestModel().merge_repo(
291 pull_request, pull_request.author, extras=merge_extras)
290 pull_request, pull_request.author, extras=merge_extras)
292
291
293 message = (
292 message = (
294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
293 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
295 u'\n\n {pr_title}'.format(
294 u'\n\n {pr_title}'.format(
296 pr_id=pull_request.pull_request_id,
295 pr_id=pull_request.pull_request_id,
297 source_repo=safe_unicode(
296 source_repo=safe_unicode(
298 pull_request.source_repo.scm_instance().name),
297 pull_request.source_repo.scm_instance().name),
299 source_ref_name=pull_request.source_ref_parts.name,
298 source_ref_name=pull_request.source_ref_parts.name,
300 pr_title=safe_unicode(pull_request.title)
299 pr_title=safe_unicode(pull_request.title)
301 )
300 )
302 )
301 )
303 self.merge_mock.assert_called_with(
302 self.merge_mock.assert_called_with(
304 self.repo_id, self.workspace_id,
303 self.repo_id, self.workspace_id,
305 pull_request.target_ref_parts,
304 pull_request.target_ref_parts,
306 pull_request.source_repo.scm_instance(),
305 pull_request.source_repo.scm_instance(),
307 pull_request.source_ref_parts,
306 pull_request.source_ref_parts,
308 user_name=user.short_contact, user_email=user.email, message=message,
307 user_name=user.short_contact, user_email=user.email, message=message,
309 use_rebase=False, close_branch=False
308 use_rebase=False, close_branch=False
310 )
309 )
311 self.invalidation_mock.assert_called_once_with(
310 self.invalidation_mock.assert_called_once_with(
312 pull_request.target_repo.repo_name)
311 pull_request.target_repo.repo_name)
313
312
314 self.hook_mock.assert_called_with(
313 self.hook_mock.assert_called_with(
315 self.pull_request, self.pull_request.author, 'merge')
314 self.pull_request, self.pull_request.author, 'merge')
316
315
317 pull_request = PullRequest.get(pull_request.pull_request_id)
316 pull_request = PullRequest.get(pull_request.pull_request_id)
318 assert (
317 assert (
319 pull_request.merge_rev ==
318 pull_request.merge_rev ==
320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
319 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321
320
322 def test_merge_failed(self, pull_request, merge_extras):
321 def test_merge_failed(self, pull_request, merge_extras):
323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
322 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 merge_ref = Reference(
323 merge_ref = Reference(
325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
324 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 self.merge_mock.return_value = MergeResponse(
325 self.merge_mock.return_value = MergeResponse(
327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
326 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
328
327
329 merge_extras['repository'] = pull_request.target_repo.repo_name
328 merge_extras['repository'] = pull_request.target_repo.repo_name
330 PullRequestModel().merge_repo(
329 PullRequestModel().merge_repo(
331 pull_request, pull_request.author, extras=merge_extras)
330 pull_request, pull_request.author, extras=merge_extras)
332
331
333 message = (
332 message = (
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
333 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 u'\n\n {pr_title}'.format(
334 u'\n\n {pr_title}'.format(
336 pr_id=pull_request.pull_request_id,
335 pr_id=pull_request.pull_request_id,
337 source_repo=safe_unicode(
336 source_repo=safe_unicode(
338 pull_request.source_repo.scm_instance().name),
337 pull_request.source_repo.scm_instance().name),
339 source_ref_name=pull_request.source_ref_parts.name,
338 source_ref_name=pull_request.source_ref_parts.name,
340 pr_title=safe_unicode(pull_request.title)
339 pr_title=safe_unicode(pull_request.title)
341 )
340 )
342 )
341 )
343 self.merge_mock.assert_called_with(
342 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
343 self.repo_id, self.workspace_id,
345 pull_request.target_ref_parts,
344 pull_request.target_ref_parts,
346 pull_request.source_repo.scm_instance(),
345 pull_request.source_repo.scm_instance(),
347 pull_request.source_ref_parts,
346 pull_request.source_ref_parts,
348 user_name=user.short_contact, user_email=user.email, message=message,
347 user_name=user.short_contact, user_email=user.email, message=message,
349 use_rebase=False, close_branch=False
348 use_rebase=False, close_branch=False
350 )
349 )
351
350
352 pull_request = PullRequest.get(pull_request.pull_request_id)
351 pull_request = PullRequest.get(pull_request.pull_request_id)
353 assert self.invalidation_mock.called is False
352 assert self.invalidation_mock.called is False
354 assert pull_request.merge_rev is None
353 assert pull_request.merge_rev is None
355
354
356 def test_get_commit_ids(self, pull_request):
355 def test_get_commit_ids(self, pull_request):
357 # The PR has been not merget yet, so expect an exception
356 # The PR has been not merget yet, so expect an exception
358 with pytest.raises(ValueError):
357 with pytest.raises(ValueError):
359 PullRequestModel()._get_commit_ids(pull_request)
358 PullRequestModel()._get_commit_ids(pull_request)
360
359
361 # Merge revision is in the revisions list
360 # Merge revision is in the revisions list
362 pull_request.merge_rev = pull_request.revisions[0]
361 pull_request.merge_rev = pull_request.revisions[0]
363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
362 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
364 assert commit_ids == pull_request.revisions
363 assert commit_ids == pull_request.revisions
365
364
366 # Merge revision is not in the revisions list
365 # Merge revision is not in the revisions list
367 pull_request.merge_rev = 'f000' * 10
366 pull_request.merge_rev = 'f000' * 10
368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
367 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
368 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
370
369
371 def test_get_diff_from_pr_version(self, pull_request):
370 def test_get_diff_from_pr_version(self, pull_request):
372 source_repo = pull_request.source_repo
371 source_repo = pull_request.source_repo
373 source_ref_id = pull_request.source_ref_parts.commit_id
372 source_ref_id = pull_request.source_ref_parts.commit_id
374 target_ref_id = pull_request.target_ref_parts.commit_id
373 target_ref_id = pull_request.target_ref_parts.commit_id
375 diff = PullRequestModel()._get_diff_from_pr_or_version(
374 diff = PullRequestModel()._get_diff_from_pr_or_version(
376 source_repo, source_ref_id, target_ref_id,
375 source_repo, source_ref_id, target_ref_id,
377 hide_whitespace_changes=False, diff_context=6)
376 hide_whitespace_changes=False, diff_context=6)
378 assert 'file_1' in diff.raw
377 assert 'file_1' in diff.raw
379
378
380 def test_generate_title_returns_unicode(self):
379 def test_generate_title_returns_unicode(self):
381 title = PullRequestModel().generate_pullrequest_title(
380 title = PullRequestModel().generate_pullrequest_title(
382 source='source-dummy',
381 source='source-dummy',
383 source_ref='source-ref-dummy',
382 source_ref='source-ref-dummy',
384 target='target-dummy',
383 target='target-dummy',
385 )
384 )
386 assert type(title) == unicode
385 assert type(title) == unicode
387
386
388
387
389 @pytest.mark.usefixtures('config_stub')
388 @pytest.mark.usefixtures('config_stub')
390 class TestIntegrationMerge(object):
389 class TestIntegrationMerge(object):
391 @pytest.mark.parametrize('extra_config', (
390 @pytest.mark.parametrize('extra_config', (
392 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
393 ))
392 ))
394 def test_merge_triggers_push_hooks(
393 def test_merge_triggers_push_hooks(
395 self, pr_util, user_admin, capture_rcextensions, merge_extras,
394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
396 extra_config):
395 extra_config):
397
396
398 pull_request = pr_util.create_pull_request(
397 pull_request = pr_util.create_pull_request(
399 approved=True, mergeable=True)
398 approved=True, mergeable=True)
400 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
401 merge_extras['repository'] = pull_request.target_repo.repo_name
400 merge_extras['repository'] = pull_request.target_repo.repo_name
402 Session().commit()
401 Session().commit()
403
402
404 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
405 merge_state = PullRequestModel().merge_repo(
404 merge_state = PullRequestModel().merge_repo(
406 pull_request, user_admin, extras=merge_extras)
405 pull_request, user_admin, extras=merge_extras)
407
406
408 assert merge_state.executed
407 assert merge_state.executed
409 assert '_pre_push_hook' in capture_rcextensions
408 assert '_pre_push_hook' in capture_rcextensions
410 assert '_push_hook' in capture_rcextensions
409 assert '_push_hook' in capture_rcextensions
411
410
412 def test_merge_can_be_rejected_by_pre_push_hook(
411 def test_merge_can_be_rejected_by_pre_push_hook(
413 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 self, pr_util, user_admin, capture_rcextensions, merge_extras):
414 pull_request = pr_util.create_pull_request(
413 pull_request = pr_util.create_pull_request(
415 approved=True, mergeable=True)
414 approved=True, mergeable=True)
416 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
417 merge_extras['repository'] = pull_request.target_repo.repo_name
416 merge_extras['repository'] = pull_request.target_repo.repo_name
418 Session().commit()
417 Session().commit()
419
418
420 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
421 pre_pull.side_effect = RepositoryError("Disallow push!")
420 pre_pull.side_effect = RepositoryError("Disallow push!")
422 merge_status = PullRequestModel().merge_repo(
421 merge_status = PullRequestModel().merge_repo(
423 pull_request, user_admin, extras=merge_extras)
422 pull_request, user_admin, extras=merge_extras)
424
423
425 assert not merge_status.executed
424 assert not merge_status.executed
426 assert 'pre_push' not in capture_rcextensions
425 assert 'pre_push' not in capture_rcextensions
427 assert 'post_push' not in capture_rcextensions
426 assert 'post_push' not in capture_rcextensions
428
427
429 def test_merge_fails_if_target_is_locked(
428 def test_merge_fails_if_target_is_locked(
430 self, pr_util, user_regular, merge_extras):
429 self, pr_util, user_regular, merge_extras):
431 pull_request = pr_util.create_pull_request(
430 pull_request = pr_util.create_pull_request(
432 approved=True, mergeable=True)
431 approved=True, mergeable=True)
433 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
434 pull_request.target_repo.locked = locked_by
433 pull_request.target_repo.locked = locked_by
435 # TODO: johbo: Check if this can work based on the database, currently
434 # TODO: johbo: Check if this can work based on the database, currently
436 # all data is pre-computed, that's why just updating the DB is not
435 # all data is pre-computed, that's why just updating the DB is not
437 # enough.
436 # enough.
438 merge_extras['locked_by'] = locked_by
437 merge_extras['locked_by'] = locked_by
439 merge_extras['repository'] = pull_request.target_repo.repo_name
438 merge_extras['repository'] = pull_request.target_repo.repo_name
440 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
441 Session().commit()
440 Session().commit()
442 merge_status = PullRequestModel().merge_repo(
441 merge_status = PullRequestModel().merge_repo(
443 pull_request, user_regular, extras=merge_extras)
442 pull_request, user_regular, extras=merge_extras)
444 assert not merge_status.executed
443 assert not merge_status.executed
445
444
446
445
447 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
448 (False, 1, 0),
447 (False, 1, 0),
449 (True, 0, 1),
448 (True, 0, 1),
450 ])
449 ])
451 def test_outdated_comments(
450 def test_outdated_comments(
452 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
453 pull_request = pr_util.create_pull_request()
452 pull_request = pr_util.create_pull_request()
454 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453 pr_util.create_inline_comment(file_path='not_in_updated_diff')
455
454
456 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
457 pr_util.add_one_commit()
456 pr_util.add_one_commit()
458 assert_inline_comments(
457 assert_inline_comments(
459 pull_request, visible=inlines_count, outdated=outdated_count)
458 pull_request, visible=inlines_count, outdated=outdated_count)
460 outdated_comment_mock.assert_called_with(pull_request)
459 outdated_comment_mock.assert_called_with(pull_request)
461
460
462
461
462 @pytest.mark.parametrize('mr_type, expected_msg', [
463 (MergeFailureReason.NONE,
464 'This pull request can be automatically merged.'),
465 (MergeFailureReason.UNKNOWN,
466 'This pull request cannot be merged because of an unhandled exception. CRASH'),
467 (MergeFailureReason.MERGE_FAILED,
468 'This pull request cannot be merged because of merge conflicts.'),
469 (MergeFailureReason.PUSH_FAILED,
470 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
471 (MergeFailureReason.TARGET_IS_NOT_HEAD,
472 'This pull request cannot be merged because the target `ref_name` is not a head.'),
473 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
474 'This pull request cannot be merged because the source contains more branches than the target.'),
475 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
476 'This pull request cannot be merged because the target has multiple heads: `a,b,c`.'),
477 (MergeFailureReason.TARGET_IS_LOCKED,
478 'This pull request cannot be merged because the target repository is locked by user:123.'),
479 (MergeFailureReason.MISSING_TARGET_REF,
480 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
481 (MergeFailureReason.MISSING_SOURCE_REF,
482 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
483 (MergeFailureReason.SUBREPO_MERGE_FAILED,
484 'This pull request cannot be merged because of conflicts related to sub repositories.'),
485
486 ])
487 def test_merge_response_message(mr_type, expected_msg):
488 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
489 metadata = {
490 'exception': "CRASH",
491 'target': 'some-repo',
492 'merge_commit': 'merge_commit',
493 'target_ref': merge_ref,
494 'source_ref': merge_ref,
495 'heads': ','.join(['a', 'b', 'c']),
496 'locked_by': 'user:123'}
497
498 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
499 assert merge_response.merge_status_message == expected_msg
500
501
463 @pytest.fixture
502 @pytest.fixture
464 def merge_extras(user_regular):
503 def merge_extras(user_regular):
465 """
504 """
466 Context for the vcs operation when running a merge.
505 Context for the vcs operation when running a merge.
467 """
506 """
468 extras = {
507 extras = {
469 'ip': '127.0.0.1',
508 'ip': '127.0.0.1',
470 'username': user_regular.username,
509 'username': user_regular.username,
471 'user_id': user_regular.user_id,
510 'user_id': user_regular.user_id,
472 'action': 'push',
511 'action': 'push',
473 'repository': 'fake_target_repo_name',
512 'repository': 'fake_target_repo_name',
474 'scm': 'git',
513 'scm': 'git',
475 'config': 'fake_config_ini_path',
514 'config': 'fake_config_ini_path',
476 'repo_store': '',
515 'repo_store': '',
477 'make_lock': None,
516 'make_lock': None,
478 'locked_by': [None, None, None],
517 'locked_by': [None, None, None],
479 'server_url': 'http://test.example.com:5000',
518 'server_url': 'http://test.example.com:5000',
480 'hooks': ['push', 'pull'],
519 'hooks': ['push', 'pull'],
481 'is_shadow_repo': False,
520 'is_shadow_repo': False,
482 }
521 }
483 return extras
522 return extras
484
523
485
524
486 @pytest.mark.usefixtures('config_stub')
525 @pytest.mark.usefixtures('config_stub')
487 class TestUpdateCommentHandling(object):
526 class TestUpdateCommentHandling(object):
488
527
489 @pytest.fixture(autouse=True, scope='class')
528 @pytest.fixture(autouse=True, scope='class')
490 def enable_outdated_comments(self, request, baseapp):
529 def enable_outdated_comments(self, request, baseapp):
491 config_patch = mock.patch.dict(
530 config_patch = mock.patch.dict(
492 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
531 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
493 config_patch.start()
532 config_patch.start()
494
533
495 @request.addfinalizer
534 @request.addfinalizer
496 def cleanup():
535 def cleanup():
497 config_patch.stop()
536 config_patch.stop()
498
537
499 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
538 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
500 commits = [
539 commits = [
501 {'message': 'a'},
540 {'message': 'a'},
502 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
541 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
503 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
542 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
504 ]
543 ]
505 pull_request = pr_util.create_pull_request(
544 pull_request = pr_util.create_pull_request(
506 commits=commits, target_head='a', source_head='b', revisions=['b'])
545 commits=commits, target_head='a', source_head='b', revisions=['b'])
507 pr_util.create_inline_comment(file_path='file_b')
546 pr_util.create_inline_comment(file_path='file_b')
508 pr_util.add_one_commit(head='c')
547 pr_util.add_one_commit(head='c')
509
548
510 assert_inline_comments(pull_request, visible=1, outdated=0)
549 assert_inline_comments(pull_request, visible=1, outdated=0)
511
550
512 def test_comment_stays_unflagged_on_change_above(self, pr_util):
551 def test_comment_stays_unflagged_on_change_above(self, pr_util):
513 original_content = ''.join(
552 original_content = ''.join(
514 ['line {}\n'.format(x) for x in range(1, 11)])
553 ['line {}\n'.format(x) for x in range(1, 11)])
515 updated_content = 'new_line_at_top\n' + original_content
554 updated_content = 'new_line_at_top\n' + original_content
516 commits = [
555 commits = [
517 {'message': 'a'},
556 {'message': 'a'},
518 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
557 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
519 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
558 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
520 ]
559 ]
521 pull_request = pr_util.create_pull_request(
560 pull_request = pr_util.create_pull_request(
522 commits=commits, target_head='a', source_head='b', revisions=['b'])
561 commits=commits, target_head='a', source_head='b', revisions=['b'])
523
562
524 with outdated_comments_patcher():
563 with outdated_comments_patcher():
525 comment = pr_util.create_inline_comment(
564 comment = pr_util.create_inline_comment(
526 line_no=u'n8', file_path='file_b')
565 line_no=u'n8', file_path='file_b')
527 pr_util.add_one_commit(head='c')
566 pr_util.add_one_commit(head='c')
528
567
529 assert_inline_comments(pull_request, visible=1, outdated=0)
568 assert_inline_comments(pull_request, visible=1, outdated=0)
530 assert comment.line_no == u'n9'
569 assert comment.line_no == u'n9'
531
570
532 def test_comment_stays_unflagged_on_change_below(self, pr_util):
571 def test_comment_stays_unflagged_on_change_below(self, pr_util):
533 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
572 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
534 updated_content = original_content + 'new_line_at_end\n'
573 updated_content = original_content + 'new_line_at_end\n'
535 commits = [
574 commits = [
536 {'message': 'a'},
575 {'message': 'a'},
537 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
576 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
538 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
577 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
539 ]
578 ]
540 pull_request = pr_util.create_pull_request(
579 pull_request = pr_util.create_pull_request(
541 commits=commits, target_head='a', source_head='b', revisions=['b'])
580 commits=commits, target_head='a', source_head='b', revisions=['b'])
542 pr_util.create_inline_comment(file_path='file_b')
581 pr_util.create_inline_comment(file_path='file_b')
543 pr_util.add_one_commit(head='c')
582 pr_util.add_one_commit(head='c')
544
583
545 assert_inline_comments(pull_request, visible=1, outdated=0)
584 assert_inline_comments(pull_request, visible=1, outdated=0)
546
585
547 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
586 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
548 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
587 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
549 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
588 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
550 change_lines = list(base_lines)
589 change_lines = list(base_lines)
551 change_lines.insert(6, 'line 6a added\n')
590 change_lines.insert(6, 'line 6a added\n')
552
591
553 # Changes on the last line of sight
592 # Changes on the last line of sight
554 update_lines = list(change_lines)
593 update_lines = list(change_lines)
555 update_lines[0] = 'line 1 changed\n'
594 update_lines[0] = 'line 1 changed\n'
556 update_lines[-1] = 'line 12 changed\n'
595 update_lines[-1] = 'line 12 changed\n'
557
596
558 def file_b(lines):
597 def file_b(lines):
559 return FileNode('file_b', ''.join(lines))
598 return FileNode('file_b', ''.join(lines))
560
599
561 commits = [
600 commits = [
562 {'message': 'a', 'added': [file_b(base_lines)]},
601 {'message': 'a', 'added': [file_b(base_lines)]},
563 {'message': 'b', 'changed': [file_b(change_lines)]},
602 {'message': 'b', 'changed': [file_b(change_lines)]},
564 {'message': 'c', 'changed': [file_b(update_lines)]},
603 {'message': 'c', 'changed': [file_b(update_lines)]},
565 ]
604 ]
566
605
567 pull_request = pr_util.create_pull_request(
606 pull_request = pr_util.create_pull_request(
568 commits=commits, target_head='a', source_head='b', revisions=['b'])
607 commits=commits, target_head='a', source_head='b', revisions=['b'])
569 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
608 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
570
609
571 with outdated_comments_patcher():
610 with outdated_comments_patcher():
572 pr_util.add_one_commit(head='c')
611 pr_util.add_one_commit(head='c')
573 assert_inline_comments(pull_request, visible=0, outdated=1)
612 assert_inline_comments(pull_request, visible=0, outdated=1)
574
613
575 @pytest.mark.parametrize("change, content", [
614 @pytest.mark.parametrize("change, content", [
576 ('changed', 'changed\n'),
615 ('changed', 'changed\n'),
577 ('removed', ''),
616 ('removed', ''),
578 ], ids=['changed', 'removed'])
617 ], ids=['changed', 'removed'])
579 def test_comment_flagged_on_change(self, pr_util, change, content):
618 def test_comment_flagged_on_change(self, pr_util, change, content):
580 commits = [
619 commits = [
581 {'message': 'a'},
620 {'message': 'a'},
582 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
621 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
583 {'message': 'c', change: [FileNode('file_b', content)]},
622 {'message': 'c', change: [FileNode('file_b', content)]},
584 ]
623 ]
585 pull_request = pr_util.create_pull_request(
624 pull_request = pr_util.create_pull_request(
586 commits=commits, target_head='a', source_head='b', revisions=['b'])
625 commits=commits, target_head='a', source_head='b', revisions=['b'])
587 pr_util.create_inline_comment(file_path='file_b')
626 pr_util.create_inline_comment(file_path='file_b')
588
627
589 with outdated_comments_patcher():
628 with outdated_comments_patcher():
590 pr_util.add_one_commit(head='c')
629 pr_util.add_one_commit(head='c')
591 assert_inline_comments(pull_request, visible=0, outdated=1)
630 assert_inline_comments(pull_request, visible=0, outdated=1)
592
631
593
632
594 @pytest.mark.usefixtures('config_stub')
633 @pytest.mark.usefixtures('config_stub')
595 class TestUpdateChangedFiles(object):
634 class TestUpdateChangedFiles(object):
596
635
597 def test_no_changes_on_unchanged_diff(self, pr_util):
636 def test_no_changes_on_unchanged_diff(self, pr_util):
598 commits = [
637 commits = [
599 {'message': 'a'},
638 {'message': 'a'},
600 {'message': 'b',
639 {'message': 'b',
601 'added': [FileNode('file_b', 'test_content b\n')]},
640 'added': [FileNode('file_b', 'test_content b\n')]},
602 {'message': 'c',
641 {'message': 'c',
603 'added': [FileNode('file_c', 'test_content c\n')]},
642 'added': [FileNode('file_c', 'test_content c\n')]},
604 ]
643 ]
605 # open a PR from a to b, adding file_b
644 # open a PR from a to b, adding file_b
606 pull_request = pr_util.create_pull_request(
645 pull_request = pr_util.create_pull_request(
607 commits=commits, target_head='a', source_head='b', revisions=['b'],
646 commits=commits, target_head='a', source_head='b', revisions=['b'],
608 name_suffix='per-file-review')
647 name_suffix='per-file-review')
609
648
610 # modify PR adding new file file_c
649 # modify PR adding new file file_c
611 pr_util.add_one_commit(head='c')
650 pr_util.add_one_commit(head='c')
612
651
613 assert_pr_file_changes(
652 assert_pr_file_changes(
614 pull_request,
653 pull_request,
615 added=['file_c'],
654 added=['file_c'],
616 modified=[],
655 modified=[],
617 removed=[])
656 removed=[])
618
657
619 def test_modify_and_undo_modification_diff(self, pr_util):
658 def test_modify_and_undo_modification_diff(self, pr_util):
620 commits = [
659 commits = [
621 {'message': 'a'},
660 {'message': 'a'},
622 {'message': 'b',
661 {'message': 'b',
623 'added': [FileNode('file_b', 'test_content b\n')]},
662 'added': [FileNode('file_b', 'test_content b\n')]},
624 {'message': 'c',
663 {'message': 'c',
625 'changed': [FileNode('file_b', 'test_content b modified\n')]},
664 'changed': [FileNode('file_b', 'test_content b modified\n')]},
626 {'message': 'd',
665 {'message': 'd',
627 'changed': [FileNode('file_b', 'test_content b\n')]},
666 'changed': [FileNode('file_b', 'test_content b\n')]},
628 ]
667 ]
629 # open a PR from a to b, adding file_b
668 # open a PR from a to b, adding file_b
630 pull_request = pr_util.create_pull_request(
669 pull_request = pr_util.create_pull_request(
631 commits=commits, target_head='a', source_head='b', revisions=['b'],
670 commits=commits, target_head='a', source_head='b', revisions=['b'],
632 name_suffix='per-file-review')
671 name_suffix='per-file-review')
633
672
634 # modify PR modifying file file_b
673 # modify PR modifying file file_b
635 pr_util.add_one_commit(head='c')
674 pr_util.add_one_commit(head='c')
636
675
637 assert_pr_file_changes(
676 assert_pr_file_changes(
638 pull_request,
677 pull_request,
639 added=[],
678 added=[],
640 modified=['file_b'],
679 modified=['file_b'],
641 removed=[])
680 removed=[])
642
681
643 # move the head again to d, which rollbacks change,
682 # move the head again to d, which rollbacks change,
644 # meaning we should indicate no changes
683 # meaning we should indicate no changes
645 pr_util.add_one_commit(head='d')
684 pr_util.add_one_commit(head='d')
646
685
647 assert_pr_file_changes(
686 assert_pr_file_changes(
648 pull_request,
687 pull_request,
649 added=[],
688 added=[],
650 modified=[],
689 modified=[],
651 removed=[])
690 removed=[])
652
691
653 def test_updated_all_files_in_pr(self, pr_util):
692 def test_updated_all_files_in_pr(self, pr_util):
654 commits = [
693 commits = [
655 {'message': 'a'},
694 {'message': 'a'},
656 {'message': 'b', 'added': [
695 {'message': 'b', 'added': [
657 FileNode('file_a', 'test_content a\n'),
696 FileNode('file_a', 'test_content a\n'),
658 FileNode('file_b', 'test_content b\n'),
697 FileNode('file_b', 'test_content b\n'),
659 FileNode('file_c', 'test_content c\n')]},
698 FileNode('file_c', 'test_content c\n')]},
660 {'message': 'c', 'changed': [
699 {'message': 'c', 'changed': [
661 FileNode('file_a', 'test_content a changed\n'),
700 FileNode('file_a', 'test_content a changed\n'),
662 FileNode('file_b', 'test_content b changed\n'),
701 FileNode('file_b', 'test_content b changed\n'),
663 FileNode('file_c', 'test_content c changed\n')]},
702 FileNode('file_c', 'test_content c changed\n')]},
664 ]
703 ]
665 # open a PR from a to b, changing 3 files
704 # open a PR from a to b, changing 3 files
666 pull_request = pr_util.create_pull_request(
705 pull_request = pr_util.create_pull_request(
667 commits=commits, target_head='a', source_head='b', revisions=['b'],
706 commits=commits, target_head='a', source_head='b', revisions=['b'],
668 name_suffix='per-file-review')
707 name_suffix='per-file-review')
669
708
670 pr_util.add_one_commit(head='c')
709 pr_util.add_one_commit(head='c')
671
710
672 assert_pr_file_changes(
711 assert_pr_file_changes(
673 pull_request,
712 pull_request,
674 added=[],
713 added=[],
675 modified=['file_a', 'file_b', 'file_c'],
714 modified=['file_a', 'file_b', 'file_c'],
676 removed=[])
715 removed=[])
677
716
678 def test_updated_and_removed_all_files_in_pr(self, pr_util):
717 def test_updated_and_removed_all_files_in_pr(self, pr_util):
679 commits = [
718 commits = [
680 {'message': 'a'},
719 {'message': 'a'},
681 {'message': 'b', 'added': [
720 {'message': 'b', 'added': [
682 FileNode('file_a', 'test_content a\n'),
721 FileNode('file_a', 'test_content a\n'),
683 FileNode('file_b', 'test_content b\n'),
722 FileNode('file_b', 'test_content b\n'),
684 FileNode('file_c', 'test_content c\n')]},
723 FileNode('file_c', 'test_content c\n')]},
685 {'message': 'c', 'removed': [
724 {'message': 'c', 'removed': [
686 FileNode('file_a', 'test_content a changed\n'),
725 FileNode('file_a', 'test_content a changed\n'),
687 FileNode('file_b', 'test_content b changed\n'),
726 FileNode('file_b', 'test_content b changed\n'),
688 FileNode('file_c', 'test_content c changed\n')]},
727 FileNode('file_c', 'test_content c changed\n')]},
689 ]
728 ]
690 # open a PR from a to b, removing 3 files
729 # open a PR from a to b, removing 3 files
691 pull_request = pr_util.create_pull_request(
730 pull_request = pr_util.create_pull_request(
692 commits=commits, target_head='a', source_head='b', revisions=['b'],
731 commits=commits, target_head='a', source_head='b', revisions=['b'],
693 name_suffix='per-file-review')
732 name_suffix='per-file-review')
694
733
695 pr_util.add_one_commit(head='c')
734 pr_util.add_one_commit(head='c')
696
735
697 assert_pr_file_changes(
736 assert_pr_file_changes(
698 pull_request,
737 pull_request,
699 added=[],
738 added=[],
700 modified=[],
739 modified=[],
701 removed=['file_a', 'file_b', 'file_c'])
740 removed=['file_a', 'file_b', 'file_c'])
702
741
703
742
704 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
743 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
705 model = PullRequestModel()
744 model = PullRequestModel()
706 pull_request = pr_util.create_pull_request()
745 pull_request = pr_util.create_pull_request()
707 pr_util.update_source_repository()
746 pr_util.update_source_repository()
708
747
709 model.update_commits(pull_request)
748 model.update_commits(pull_request)
710
749
711 # Expect that it has a version entry now
750 # Expect that it has a version entry now
712 assert len(model.get_versions(pull_request)) == 1
751 assert len(model.get_versions(pull_request)) == 1
713
752
714
753
715 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
754 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
716 pull_request = pr_util.create_pull_request()
755 pull_request = pr_util.create_pull_request()
717 model = PullRequestModel()
756 model = PullRequestModel()
718 model.update_commits(pull_request)
757 model.update_commits(pull_request)
719
758
720 # Expect that it still has no versions
759 # Expect that it still has no versions
721 assert len(model.get_versions(pull_request)) == 0
760 assert len(model.get_versions(pull_request)) == 0
722
761
723
762
724 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
763 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
725 model = PullRequestModel()
764 model = PullRequestModel()
726 pull_request = pr_util.create_pull_request()
765 pull_request = pr_util.create_pull_request()
727 comment = pr_util.create_comment()
766 comment = pr_util.create_comment()
728 pr_util.update_source_repository()
767 pr_util.update_source_repository()
729
768
730 model.update_commits(pull_request)
769 model.update_commits(pull_request)
731
770
732 # Expect that the comment is linked to the pr version now
771 # Expect that the comment is linked to the pr version now
733 assert comment.pull_request_version == model.get_versions(pull_request)[0]
772 assert comment.pull_request_version == model.get_versions(pull_request)[0]
734
773
735
774
736 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
775 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
737 model = PullRequestModel()
776 model = PullRequestModel()
738 pull_request = pr_util.create_pull_request()
777 pull_request = pr_util.create_pull_request()
739 pr_util.update_source_repository()
778 pr_util.update_source_repository()
740 pr_util.update_source_repository()
779 pr_util.update_source_repository()
741
780
742 model.update_commits(pull_request)
781 model.update_commits(pull_request)
743
782
744 # Expect to find a new comment about the change
783 # Expect to find a new comment about the change
745 expected_message = textwrap.dedent(
784 expected_message = textwrap.dedent(
746 """\
785 """\
747 Pull request updated. Auto status change to |under_review|
786 Pull request updated. Auto status change to |under_review|
748
787
749 .. role:: added
788 .. role:: added
750 .. role:: removed
789 .. role:: removed
751 .. parsed-literal::
790 .. parsed-literal::
752
791
753 Changed commits:
792 Changed commits:
754 * :added:`1 added`
793 * :added:`1 added`
755 * :removed:`0 removed`
794 * :removed:`0 removed`
756
795
757 Changed files:
796 Changed files:
758 * `A file_2 <#a_c--92ed3b5f07b4>`_
797 * `A file_2 <#a_c--92ed3b5f07b4>`_
759
798
760 .. |under_review| replace:: *"Under Review"*"""
799 .. |under_review| replace:: *"Under Review"*"""
761 )
800 )
762 pull_request_comments = sorted(
801 pull_request_comments = sorted(
763 pull_request.comments, key=lambda c: c.modified_at)
802 pull_request.comments, key=lambda c: c.modified_at)
764 update_comment = pull_request_comments[-1]
803 update_comment = pull_request_comments[-1]
765 assert update_comment.text == expected_message
804 assert update_comment.text == expected_message
766
805
767
806
768 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
807 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
769 pull_request = pr_util.create_pull_request()
808 pull_request = pr_util.create_pull_request()
770
809
771 # Avoiding default values
810 # Avoiding default values
772 pull_request.status = PullRequest.STATUS_CLOSED
811 pull_request.status = PullRequest.STATUS_CLOSED
773 pull_request._last_merge_source_rev = "0" * 40
812 pull_request._last_merge_source_rev = "0" * 40
774 pull_request._last_merge_target_rev = "1" * 40
813 pull_request._last_merge_target_rev = "1" * 40
775 pull_request.last_merge_status = 1
814 pull_request.last_merge_status = 1
776 pull_request.merge_rev = "2" * 40
815 pull_request.merge_rev = "2" * 40
777
816
778 # Remember automatic values
817 # Remember automatic values
779 created_on = pull_request.created_on
818 created_on = pull_request.created_on
780 updated_on = pull_request.updated_on
819 updated_on = pull_request.updated_on
781
820
782 # Create a new version of the pull request
821 # Create a new version of the pull request
783 version = PullRequestModel()._create_version_from_snapshot(pull_request)
822 version = PullRequestModel()._create_version_from_snapshot(pull_request)
784
823
785 # Check attributes
824 # Check attributes
786 assert version.title == pr_util.create_parameters['title']
825 assert version.title == pr_util.create_parameters['title']
787 assert version.description == pr_util.create_parameters['description']
826 assert version.description == pr_util.create_parameters['description']
788 assert version.status == PullRequest.STATUS_CLOSED
827 assert version.status == PullRequest.STATUS_CLOSED
789
828
790 # versions get updated created_on
829 # versions get updated created_on
791 assert version.created_on != created_on
830 assert version.created_on != created_on
792
831
793 assert version.updated_on == updated_on
832 assert version.updated_on == updated_on
794 assert version.user_id == pull_request.user_id
833 assert version.user_id == pull_request.user_id
795 assert version.revisions == pr_util.create_parameters['revisions']
834 assert version.revisions == pr_util.create_parameters['revisions']
796 assert version.source_repo == pr_util.source_repository
835 assert version.source_repo == pr_util.source_repository
797 assert version.source_ref == pr_util.create_parameters['source_ref']
836 assert version.source_ref == pr_util.create_parameters['source_ref']
798 assert version.target_repo == pr_util.target_repository
837 assert version.target_repo == pr_util.target_repository
799 assert version.target_ref == pr_util.create_parameters['target_ref']
838 assert version.target_ref == pr_util.create_parameters['target_ref']
800 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
839 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
801 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
840 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
802 assert version.last_merge_status == pull_request.last_merge_status
841 assert version.last_merge_status == pull_request.last_merge_status
803 assert version.merge_rev == pull_request.merge_rev
842 assert version.merge_rev == pull_request.merge_rev
804 assert version.pull_request == pull_request
843 assert version.pull_request == pull_request
805
844
806
845
807 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
846 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
808 version1 = pr_util.create_version_of_pull_request()
847 version1 = pr_util.create_version_of_pull_request()
809 comment_linked = pr_util.create_comment(linked_to=version1)
848 comment_linked = pr_util.create_comment(linked_to=version1)
810 comment_unlinked = pr_util.create_comment()
849 comment_unlinked = pr_util.create_comment()
811 version2 = pr_util.create_version_of_pull_request()
850 version2 = pr_util.create_version_of_pull_request()
812
851
813 PullRequestModel()._link_comments_to_version(version2)
852 PullRequestModel()._link_comments_to_version(version2)
814
853
815 # Expect that only the new comment is linked to version2
854 # Expect that only the new comment is linked to version2
816 assert (
855 assert (
817 comment_unlinked.pull_request_version_id ==
856 comment_unlinked.pull_request_version_id ==
818 version2.pull_request_version_id)
857 version2.pull_request_version_id)
819 assert (
858 assert (
820 comment_linked.pull_request_version_id ==
859 comment_linked.pull_request_version_id ==
821 version1.pull_request_version_id)
860 version1.pull_request_version_id)
822 assert (
861 assert (
823 comment_unlinked.pull_request_version_id !=
862 comment_unlinked.pull_request_version_id !=
824 comment_linked.pull_request_version_id)
863 comment_linked.pull_request_version_id)
825
864
826
865
827 def test_calculate_commits():
866 def test_calculate_commits():
828 old_ids = [1, 2, 3]
867 old_ids = [1, 2, 3]
829 new_ids = [1, 3, 4, 5]
868 new_ids = [1, 3, 4, 5]
830 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
869 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
831 assert change.added == [4, 5]
870 assert change.added == [4, 5]
832 assert change.common == [1, 3]
871 assert change.common == [1, 3]
833 assert change.removed == [2]
872 assert change.removed == [2]
834 assert change.total == [1, 3, 4, 5]
873 assert change.total == [1, 3, 4, 5]
835
874
836
875
837 def assert_inline_comments(pull_request, visible=None, outdated=None):
876 def assert_inline_comments(pull_request, visible=None, outdated=None):
838 if visible is not None:
877 if visible is not None:
839 inline_comments = CommentsModel().get_inline_comments(
878 inline_comments = CommentsModel().get_inline_comments(
840 pull_request.target_repo.repo_id, pull_request=pull_request)
879 pull_request.target_repo.repo_id, pull_request=pull_request)
841 inline_cnt = CommentsModel().get_inline_comments_count(
880 inline_cnt = CommentsModel().get_inline_comments_count(
842 inline_comments)
881 inline_comments)
843 assert inline_cnt == visible
882 assert inline_cnt == visible
844 if outdated is not None:
883 if outdated is not None:
845 outdated_comments = CommentsModel().get_outdated_comments(
884 outdated_comments = CommentsModel().get_outdated_comments(
846 pull_request.target_repo.repo_id, pull_request)
885 pull_request.target_repo.repo_id, pull_request)
847 assert len(outdated_comments) == outdated
886 assert len(outdated_comments) == outdated
848
887
849
888
850 def assert_pr_file_changes(
889 def assert_pr_file_changes(
851 pull_request, added=None, modified=None, removed=None):
890 pull_request, added=None, modified=None, removed=None):
852 pr_versions = PullRequestModel().get_versions(pull_request)
891 pr_versions = PullRequestModel().get_versions(pull_request)
853 # always use first version, ie original PR to calculate changes
892 # always use first version, ie original PR to calculate changes
854 pull_request_version = pr_versions[0]
893 pull_request_version = pr_versions[0]
855 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
894 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
856 pull_request, pull_request_version)
895 pull_request, pull_request_version)
857 file_changes = PullRequestModel()._calculate_file_changes(
896 file_changes = PullRequestModel()._calculate_file_changes(
858 old_diff_data, new_diff_data)
897 old_diff_data, new_diff_data)
859
898
860 assert added == file_changes.added, \
899 assert added == file_changes.added, \
861 'expected added:%s vs value:%s' % (added, file_changes.added)
900 'expected added:%s vs value:%s' % (added, file_changes.added)
862 assert modified == file_changes.modified, \
901 assert modified == file_changes.modified, \
863 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
902 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
864 assert removed == file_changes.removed, \
903 assert removed == file_changes.removed, \
865 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
904 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
866
905
867
906
868 def outdated_comments_patcher(use_outdated=True):
907 def outdated_comments_patcher(use_outdated=True):
869 return mock.patch.object(
908 return mock.patch.object(
870 CommentsModel, 'use_outdated_comments',
909 CommentsModel, 'use_outdated_comments',
871 return_value=use_outdated)
910 return_value=use_outdated)
@@ -1,1887 +1,1886 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 def _split_comma(value):
71 def _split_comma(value):
72 return value.split(',')
72 return value.split(',')
73
73
74
74
75 def pytest_addoption(parser):
75 def pytest_addoption(parser):
76 parser.addoption(
76 parser.addoption(
77 '--keep-tmp-path', action='store_true',
77 '--keep-tmp-path', action='store_true',
78 help="Keep the test temporary directories")
78 help="Keep the test temporary directories")
79 parser.addoption(
79 parser.addoption(
80 '--backends', action='store', type=_split_comma,
80 '--backends', action='store', type=_split_comma,
81 default=['git', 'hg', 'svn'],
81 default=['git', 'hg', 'svn'],
82 help="Select which backends to test for backend specific tests.")
82 help="Select which backends to test for backend specific tests.")
83 parser.addoption(
83 parser.addoption(
84 '--dbs', action='store', type=_split_comma,
84 '--dbs', action='store', type=_split_comma,
85 default=['sqlite'],
85 default=['sqlite'],
86 help="Select which database to test for database specific tests. "
86 help="Select which database to test for database specific tests. "
87 "Possible options are sqlite,postgres,mysql")
87 "Possible options are sqlite,postgres,mysql")
88 parser.addoption(
88 parser.addoption(
89 '--appenlight', '--ae', action='store_true',
89 '--appenlight', '--ae', action='store_true',
90 help="Track statistics in appenlight.")
90 help="Track statistics in appenlight.")
91 parser.addoption(
91 parser.addoption(
92 '--appenlight-api-key', '--ae-key',
92 '--appenlight-api-key', '--ae-key',
93 help="API key for Appenlight.")
93 help="API key for Appenlight.")
94 parser.addoption(
94 parser.addoption(
95 '--appenlight-url', '--ae-url',
95 '--appenlight-url', '--ae-url',
96 default="https://ae.rhodecode.com",
96 default="https://ae.rhodecode.com",
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 parser.addoption(
98 parser.addoption(
99 '--sqlite-connection-string', action='store',
99 '--sqlite-connection-string', action='store',
100 default='', help="Connection string for the dbs tests with SQLite")
100 default='', help="Connection string for the dbs tests with SQLite")
101 parser.addoption(
101 parser.addoption(
102 '--postgres-connection-string', action='store',
102 '--postgres-connection-string', action='store',
103 default='', help="Connection string for the dbs tests with Postgres")
103 default='', help="Connection string for the dbs tests with Postgres")
104 parser.addoption(
104 parser.addoption(
105 '--mysql-connection-string', action='store',
105 '--mysql-connection-string', action='store',
106 default='', help="Connection string for the dbs tests with MySQL")
106 default='', help="Connection string for the dbs tests with MySQL")
107 parser.addoption(
107 parser.addoption(
108 '--repeat', type=int, default=100,
108 '--repeat', type=int, default=100,
109 help="Number of repetitions in performance tests.")
109 help="Number of repetitions in performance tests.")
110
110
111
111
112 def pytest_configure(config):
112 def pytest_configure(config):
113 from rhodecode.config import patches
113 from rhodecode.config import patches
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # nottest marked, compare nose, used for transition from nose to pytest
117 # nottest marked, compare nose, used for transition from nose to pytest
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.definition.get_closest_marker('backends').args
142 backends = metafunc.definition.get_closest_marker('backends').args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.config import rcextensions
157 from rhodecode.config import rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = rcextensions
160 rhodecode.EXTENSIONS = rcextensions
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162
162
163 @request.addfinalizer
163 @request.addfinalizer
164 def cleanup():
164 def cleanup():
165 rhodecode.EXTENSIONS = old_extensions
165 rhodecode.EXTENSIONS = old_extensions
166
166
167
167
168 @pytest.fixture
168 @pytest.fixture
169 def capture_rcextensions():
169 def capture_rcextensions():
170 """
170 """
171 Returns the recorded calls to entry points in rcextensions.
171 Returns the recorded calls to entry points in rcextensions.
172 """
172 """
173 calls = rhodecode.EXTENSIONS.calls
173 calls = rhodecode.EXTENSIONS.calls
174 calls.clear()
174 calls.clear()
175 # Note: At this moment, it is still the empty dict, but that will
175 # Note: At this moment, it is still the empty dict, but that will
176 # be filled during the test run and since it is a reference this
176 # be filled during the test run and since it is a reference this
177 # is enough to make it work.
177 # is enough to make it work.
178 return calls
178 return calls
179
179
180
180
181 @pytest.fixture(scope='session')
181 @pytest.fixture(scope='session')
182 def http_environ_session():
182 def http_environ_session():
183 """
183 """
184 Allow to use "http_environ" in session scope.
184 Allow to use "http_environ" in session scope.
185 """
185 """
186 return plain_http_environ()
186 return plain_http_environ()
187
187
188
188
189 def plain_http_host_stub():
189 def plain_http_host_stub():
190 """
190 """
191 Value of HTTP_HOST in the test run.
191 Value of HTTP_HOST in the test run.
192 """
192 """
193 return 'example.com:80'
193 return 'example.com:80'
194
194
195
195
196 @pytest.fixture
196 @pytest.fixture
197 def http_host_stub():
197 def http_host_stub():
198 """
198 """
199 Value of HTTP_HOST in the test run.
199 Value of HTTP_HOST in the test run.
200 """
200 """
201 return plain_http_host_stub()
201 return plain_http_host_stub()
202
202
203
203
204 def plain_http_host_only_stub():
204 def plain_http_host_only_stub():
205 """
205 """
206 Value of HTTP_HOST in the test run.
206 Value of HTTP_HOST in the test run.
207 """
207 """
208 return plain_http_host_stub().split(':')[0]
208 return plain_http_host_stub().split(':')[0]
209
209
210
210
211 @pytest.fixture
211 @pytest.fixture
212 def http_host_only_stub():
212 def http_host_only_stub():
213 """
213 """
214 Value of HTTP_HOST in the test run.
214 Value of HTTP_HOST in the test run.
215 """
215 """
216 return plain_http_host_only_stub()
216 return plain_http_host_only_stub()
217
217
218
218
219 def plain_http_environ():
219 def plain_http_environ():
220 """
220 """
221 HTTP extra environ keys.
221 HTTP extra environ keys.
222
222
223 User by the test application and as well for setting up the pylons
223 User by the test application and as well for setting up the pylons
224 environment. In the case of the fixture "app" it should be possible
224 environment. In the case of the fixture "app" it should be possible
225 to override this for a specific test case.
225 to override this for a specific test case.
226 """
226 """
227 return {
227 return {
228 'SERVER_NAME': plain_http_host_only_stub(),
228 'SERVER_NAME': plain_http_host_only_stub(),
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 'HTTP_HOST': plain_http_host_stub(),
230 'HTTP_HOST': plain_http_host_stub(),
231 'HTTP_USER_AGENT': 'rc-test-agent',
231 'HTTP_USER_AGENT': 'rc-test-agent',
232 'REQUEST_METHOD': 'GET'
232 'REQUEST_METHOD': 'GET'
233 }
233 }
234
234
235
235
236 @pytest.fixture
236 @pytest.fixture
237 def http_environ():
237 def http_environ():
238 """
238 """
239 HTTP extra environ keys.
239 HTTP extra environ keys.
240
240
241 User by the test application and as well for setting up the pylons
241 User by the test application and as well for setting up the pylons
242 environment. In the case of the fixture "app" it should be possible
242 environment. In the case of the fixture "app" it should be possible
243 to override this for a specific test case.
243 to override this for a specific test case.
244 """
244 """
245 return plain_http_environ()
245 return plain_http_environ()
246
246
247
247
248 @pytest.fixture(scope='session')
248 @pytest.fixture(scope='session')
249 def baseapp(ini_config, vcsserver, http_environ_session):
249 def baseapp(ini_config, vcsserver, http_environ_session):
250 from rhodecode.lib.pyramid_utils import get_app_config
250 from rhodecode.lib.pyramid_utils import get_app_config
251 from rhodecode.config.middleware import make_pyramid_app
251 from rhodecode.config.middleware import make_pyramid_app
252
252
253 print("Using the RhodeCode configuration:{}".format(ini_config))
253 print("Using the RhodeCode configuration:{}".format(ini_config))
254 pyramid.paster.setup_logging(ini_config)
254 pyramid.paster.setup_logging(ini_config)
255
255
256 settings = get_app_config(ini_config)
256 settings = get_app_config(ini_config)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
258
258
259 return app
259 return app
260
260
261
261
262 @pytest.fixture(scope='function')
262 @pytest.fixture(scope='function')
263 def app(request, config_stub, baseapp, http_environ):
263 def app(request, config_stub, baseapp, http_environ):
264 app = CustomTestApp(
264 app = CustomTestApp(
265 baseapp,
265 baseapp,
266 extra_environ=http_environ)
266 extra_environ=http_environ)
267 if request.cls:
267 if request.cls:
268 request.cls.app = app
268 request.cls.app = app
269 return app
269 return app
270
270
271
271
272 @pytest.fixture(scope='session')
272 @pytest.fixture(scope='session')
273 def app_settings(baseapp, ini_config):
273 def app_settings(baseapp, ini_config):
274 """
274 """
275 Settings dictionary used to create the app.
275 Settings dictionary used to create the app.
276
276
277 Parses the ini file and passes the result through the sanitize and apply
277 Parses the ini file and passes the result through the sanitize and apply
278 defaults mechanism in `rhodecode.config.middleware`.
278 defaults mechanism in `rhodecode.config.middleware`.
279 """
279 """
280 return baseapp.config.get_settings()
280 return baseapp.config.get_settings()
281
281
282
282
283 @pytest.fixture(scope='session')
283 @pytest.fixture(scope='session')
284 def db_connection(ini_settings):
284 def db_connection(ini_settings):
285 # Initialize the database connection.
285 # Initialize the database connection.
286 config_utils.initialize_database(ini_settings)
286 config_utils.initialize_database(ini_settings)
287
287
288
288
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290
290
291
291
292 def _autologin_user(app, *args):
292 def _autologin_user(app, *args):
293 session = login_user_session(app, *args)
293 session = login_user_session(app, *args)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 return LoginData(csrf_token, session['rhodecode_user'])
295 return LoginData(csrf_token, session['rhodecode_user'])
296
296
297
297
298 @pytest.fixture
298 @pytest.fixture
299 def autologin_user(app):
299 def autologin_user(app):
300 """
300 """
301 Utility fixture which makes sure that the admin user is logged in
301 Utility fixture which makes sure that the admin user is logged in
302 """
302 """
303 return _autologin_user(app)
303 return _autologin_user(app)
304
304
305
305
306 @pytest.fixture
306 @pytest.fixture
307 def autologin_regular_user(app):
307 def autologin_regular_user(app):
308 """
308 """
309 Utility fixture which makes sure that the regular user is logged in
309 Utility fixture which makes sure that the regular user is logged in
310 """
310 """
311 return _autologin_user(
311 return _autologin_user(
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313
313
314
314
315 @pytest.fixture(scope='function')
315 @pytest.fixture(scope='function')
316 def csrf_token(request, autologin_user):
316 def csrf_token(request, autologin_user):
317 return autologin_user.csrf_token
317 return autologin_user.csrf_token
318
318
319
319
320 @pytest.fixture(scope='function')
320 @pytest.fixture(scope='function')
321 def xhr_header(request):
321 def xhr_header(request):
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323
323
324
324
325 @pytest.fixture
325 @pytest.fixture
326 def real_crypto_backend(monkeypatch):
326 def real_crypto_backend(monkeypatch):
327 """
327 """
328 Switch the production crypto backend on for this test.
328 Switch the production crypto backend on for this test.
329
329
330 During the test run the crypto backend is replaced with a faster
330 During the test run the crypto backend is replaced with a faster
331 implementation based on the MD5 algorithm.
331 implementation based on the MD5 algorithm.
332 """
332 """
333 monkeypatch.setattr(rhodecode, 'is_test', False)
333 monkeypatch.setattr(rhodecode, 'is_test', False)
334
334
335
335
336 @pytest.fixture(scope='class')
336 @pytest.fixture(scope='class')
337 def index_location(request, baseapp):
337 def index_location(request, baseapp):
338 index_location = baseapp.config.get_settings()['search.location']
338 index_location = baseapp.config.get_settings()['search.location']
339 if request.cls:
339 if request.cls:
340 request.cls.index_location = index_location
340 request.cls.index_location = index_location
341 return index_location
341 return index_location
342
342
343
343
344 @pytest.fixture(scope='session', autouse=True)
344 @pytest.fixture(scope='session', autouse=True)
345 def tests_tmp_path(request):
345 def tests_tmp_path(request):
346 """
346 """
347 Create temporary directory to be used during the test session.
347 Create temporary directory to be used during the test session.
348 """
348 """
349 if not os.path.exists(TESTS_TMP_PATH):
349 if not os.path.exists(TESTS_TMP_PATH):
350 os.makedirs(TESTS_TMP_PATH)
350 os.makedirs(TESTS_TMP_PATH)
351
351
352 if not request.config.getoption('--keep-tmp-path'):
352 if not request.config.getoption('--keep-tmp-path'):
353 @request.addfinalizer
353 @request.addfinalizer
354 def remove_tmp_path():
354 def remove_tmp_path():
355 shutil.rmtree(TESTS_TMP_PATH)
355 shutil.rmtree(TESTS_TMP_PATH)
356
356
357 return TESTS_TMP_PATH
357 return TESTS_TMP_PATH
358
358
359
359
360 @pytest.fixture
360 @pytest.fixture
361 def test_repo_group(request):
361 def test_repo_group(request):
362 """
362 """
363 Create a temporary repository group, and destroy it after
363 Create a temporary repository group, and destroy it after
364 usage automatically
364 usage automatically
365 """
365 """
366 fixture = Fixture()
366 fixture = Fixture()
367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
368 repo_group = fixture.create_repo_group(repogroupid)
368 repo_group = fixture.create_repo_group(repogroupid)
369
369
370 def _cleanup():
370 def _cleanup():
371 fixture.destroy_repo_group(repogroupid)
371 fixture.destroy_repo_group(repogroupid)
372
372
373 request.addfinalizer(_cleanup)
373 request.addfinalizer(_cleanup)
374 return repo_group
374 return repo_group
375
375
376
376
377 @pytest.fixture
377 @pytest.fixture
378 def test_user_group(request):
378 def test_user_group(request):
379 """
379 """
380 Create a temporary user group, and destroy it after
380 Create a temporary user group, and destroy it after
381 usage automatically
381 usage automatically
382 """
382 """
383 fixture = Fixture()
383 fixture = Fixture()
384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
385 user_group = fixture.create_user_group(usergroupid)
385 user_group = fixture.create_user_group(usergroupid)
386
386
387 def _cleanup():
387 def _cleanup():
388 fixture.destroy_user_group(user_group)
388 fixture.destroy_user_group(user_group)
389
389
390 request.addfinalizer(_cleanup)
390 request.addfinalizer(_cleanup)
391 return user_group
391 return user_group
392
392
393
393
394 @pytest.fixture(scope='session')
394 @pytest.fixture(scope='session')
395 def test_repo(request):
395 def test_repo(request):
396 container = TestRepoContainer()
396 container = TestRepoContainer()
397 request.addfinalizer(container._cleanup)
397 request.addfinalizer(container._cleanup)
398 return container
398 return container
399
399
400
400
401 class TestRepoContainer(object):
401 class TestRepoContainer(object):
402 """
402 """
403 Container for test repositories which are used read only.
403 Container for test repositories which are used read only.
404
404
405 Repositories will be created on demand and re-used during the lifetime
405 Repositories will be created on demand and re-used during the lifetime
406 of this object.
406 of this object.
407
407
408 Usage to get the svn test repository "minimal"::
408 Usage to get the svn test repository "minimal"::
409
409
410 test_repo = TestContainer()
410 test_repo = TestContainer()
411 repo = test_repo('minimal', 'svn')
411 repo = test_repo('minimal', 'svn')
412
412
413 """
413 """
414
414
415 dump_extractors = {
415 dump_extractors = {
416 'git': utils.extract_git_repo_from_dump,
416 'git': utils.extract_git_repo_from_dump,
417 'hg': utils.extract_hg_repo_from_dump,
417 'hg': utils.extract_hg_repo_from_dump,
418 'svn': utils.extract_svn_repo_from_dump,
418 'svn': utils.extract_svn_repo_from_dump,
419 }
419 }
420
420
421 def __init__(self):
421 def __init__(self):
422 self._cleanup_repos = []
422 self._cleanup_repos = []
423 self._fixture = Fixture()
423 self._fixture = Fixture()
424 self._repos = {}
424 self._repos = {}
425
425
426 def __call__(self, dump_name, backend_alias, config=None):
426 def __call__(self, dump_name, backend_alias, config=None):
427 key = (dump_name, backend_alias)
427 key = (dump_name, backend_alias)
428 if key not in self._repos:
428 if key not in self._repos:
429 repo = self._create_repo(dump_name, backend_alias, config)
429 repo = self._create_repo(dump_name, backend_alias, config)
430 self._repos[key] = repo.repo_id
430 self._repos[key] = repo.repo_id
431 return Repository.get(self._repos[key])
431 return Repository.get(self._repos[key])
432
432
433 def _create_repo(self, dump_name, backend_alias, config):
433 def _create_repo(self, dump_name, backend_alias, config):
434 repo_name = '%s-%s' % (backend_alias, dump_name)
434 repo_name = '%s-%s' % (backend_alias, dump_name)
435 backend_class = get_backend(backend_alias)
435 backend_class = get_backend(backend_alias)
436 dump_extractor = self.dump_extractors[backend_alias]
436 dump_extractor = self.dump_extractors[backend_alias]
437 repo_path = dump_extractor(dump_name, repo_name)
437 repo_path = dump_extractor(dump_name, repo_name)
438
438
439 vcs_repo = backend_class(repo_path, config=config)
439 vcs_repo = backend_class(repo_path, config=config)
440 repo2db_mapper({repo_name: vcs_repo})
440 repo2db_mapper({repo_name: vcs_repo})
441
441
442 repo = RepoModel().get_by_repo_name(repo_name)
442 repo = RepoModel().get_by_repo_name(repo_name)
443 self._cleanup_repos.append(repo_name)
443 self._cleanup_repos.append(repo_name)
444 return repo
444 return repo
445
445
446 def _cleanup(self):
446 def _cleanup(self):
447 for repo_name in reversed(self._cleanup_repos):
447 for repo_name in reversed(self._cleanup_repos):
448 self._fixture.destroy_repo(repo_name)
448 self._fixture.destroy_repo(repo_name)
449
449
450
450
451 def backend_base(request, backend_alias, baseapp, test_repo):
451 def backend_base(request, backend_alias, baseapp, test_repo):
452 if backend_alias not in request.config.getoption('--backends'):
452 if backend_alias not in request.config.getoption('--backends'):
453 pytest.skip("Backend %s not selected." % (backend_alias, ))
453 pytest.skip("Backend %s not selected." % (backend_alias, ))
454
454
455 utils.check_xfail_backends(request.node, backend_alias)
455 utils.check_xfail_backends(request.node, backend_alias)
456 utils.check_skip_backends(request.node, backend_alias)
456 utils.check_skip_backends(request.node, backend_alias)
457
457
458 repo_name = 'vcs_test_%s' % (backend_alias, )
458 repo_name = 'vcs_test_%s' % (backend_alias, )
459 backend = Backend(
459 backend = Backend(
460 alias=backend_alias,
460 alias=backend_alias,
461 repo_name=repo_name,
461 repo_name=repo_name,
462 test_name=request.node.name,
462 test_name=request.node.name,
463 test_repo_container=test_repo)
463 test_repo_container=test_repo)
464 request.addfinalizer(backend.cleanup)
464 request.addfinalizer(backend.cleanup)
465 return backend
465 return backend
466
466
467
467
468 @pytest.fixture
468 @pytest.fixture
469 def backend(request, backend_alias, baseapp, test_repo):
469 def backend(request, backend_alias, baseapp, test_repo):
470 """
470 """
471 Parametrized fixture which represents a single backend implementation.
471 Parametrized fixture which represents a single backend implementation.
472
472
473 It respects the option `--backends` to focus the test run on specific
473 It respects the option `--backends` to focus the test run on specific
474 backend implementations.
474 backend implementations.
475
475
476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
477 for specific backends. This is intended as a utility for incremental
477 for specific backends. This is intended as a utility for incremental
478 development of a new backend implementation.
478 development of a new backend implementation.
479 """
479 """
480 return backend_base(request, backend_alias, baseapp, test_repo)
480 return backend_base(request, backend_alias, baseapp, test_repo)
481
481
482
482
483 @pytest.fixture
483 @pytest.fixture
484 def backend_git(request, baseapp, test_repo):
484 def backend_git(request, baseapp, test_repo):
485 return backend_base(request, 'git', baseapp, test_repo)
485 return backend_base(request, 'git', baseapp, test_repo)
486
486
487
487
488 @pytest.fixture
488 @pytest.fixture
489 def backend_hg(request, baseapp, test_repo):
489 def backend_hg(request, baseapp, test_repo):
490 return backend_base(request, 'hg', baseapp, test_repo)
490 return backend_base(request, 'hg', baseapp, test_repo)
491
491
492
492
493 @pytest.fixture
493 @pytest.fixture
494 def backend_svn(request, baseapp, test_repo):
494 def backend_svn(request, baseapp, test_repo):
495 return backend_base(request, 'svn', baseapp, test_repo)
495 return backend_base(request, 'svn', baseapp, test_repo)
496
496
497
497
498 @pytest.fixture
498 @pytest.fixture
499 def backend_random(backend_git):
499 def backend_random(backend_git):
500 """
500 """
501 Use this to express that your tests need "a backend.
501 Use this to express that your tests need "a backend.
502
502
503 A few of our tests need a backend, so that we can run the code. This
503 A few of our tests need a backend, so that we can run the code. This
504 fixture is intended to be used for such cases. It will pick one of the
504 fixture is intended to be used for such cases. It will pick one of the
505 backends and run the tests.
505 backends and run the tests.
506
506
507 The fixture `backend` would run the test multiple times for each
507 The fixture `backend` would run the test multiple times for each
508 available backend which is a pure waste of time if the test is
508 available backend which is a pure waste of time if the test is
509 independent of the backend type.
509 independent of the backend type.
510 """
510 """
511 # TODO: johbo: Change this to pick a random backend
511 # TODO: johbo: Change this to pick a random backend
512 return backend_git
512 return backend_git
513
513
514
514
515 @pytest.fixture
515 @pytest.fixture
516 def backend_stub(backend_git):
516 def backend_stub(backend_git):
517 """
517 """
518 Use this to express that your tests need a backend stub
518 Use this to express that your tests need a backend stub
519
519
520 TODO: mikhail: Implement a real stub logic instead of returning
520 TODO: mikhail: Implement a real stub logic instead of returning
521 a git backend
521 a git backend
522 """
522 """
523 return backend_git
523 return backend_git
524
524
525
525
526 @pytest.fixture
526 @pytest.fixture
527 def repo_stub(backend_stub):
527 def repo_stub(backend_stub):
528 """
528 """
529 Use this to express that your tests need a repository stub
529 Use this to express that your tests need a repository stub
530 """
530 """
531 return backend_stub.create_repo()
531 return backend_stub.create_repo()
532
532
533
533
534 class Backend(object):
534 class Backend(object):
535 """
535 """
536 Represents the test configuration for one supported backend
536 Represents the test configuration for one supported backend
537
537
538 Provides easy access to different test repositories based on
538 Provides easy access to different test repositories based on
539 `__getitem__`. Such repositories will only be created once per test
539 `__getitem__`. Such repositories will only be created once per test
540 session.
540 session.
541 """
541 """
542
542
543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
544 _master_repo = None
544 _master_repo = None
545 _commit_ids = {}
545 _commit_ids = {}
546
546
547 def __init__(self, alias, repo_name, test_name, test_repo_container):
547 def __init__(self, alias, repo_name, test_name, test_repo_container):
548 self.alias = alias
548 self.alias = alias
549 self.repo_name = repo_name
549 self.repo_name = repo_name
550 self._cleanup_repos = []
550 self._cleanup_repos = []
551 self._test_name = test_name
551 self._test_name = test_name
552 self._test_repo_container = test_repo_container
552 self._test_repo_container = test_repo_container
553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
554 # Fixture will survive in the end.
554 # Fixture will survive in the end.
555 self._fixture = Fixture()
555 self._fixture = Fixture()
556
556
557 def __getitem__(self, key):
557 def __getitem__(self, key):
558 return self._test_repo_container(key, self.alias)
558 return self._test_repo_container(key, self.alias)
559
559
560 def create_test_repo(self, key, config=None):
560 def create_test_repo(self, key, config=None):
561 return self._test_repo_container(key, self.alias, config)
561 return self._test_repo_container(key, self.alias, config)
562
562
563 @property
563 @property
564 def repo(self):
564 def repo(self):
565 """
565 """
566 Returns the "current" repository. This is the vcs_test repo or the
566 Returns the "current" repository. This is the vcs_test repo or the
567 last repo which has been created with `create_repo`.
567 last repo which has been created with `create_repo`.
568 """
568 """
569 from rhodecode.model.db import Repository
569 from rhodecode.model.db import Repository
570 return Repository.get_by_repo_name(self.repo_name)
570 return Repository.get_by_repo_name(self.repo_name)
571
571
572 @property
572 @property
573 def default_branch_name(self):
573 def default_branch_name(self):
574 VcsRepository = get_backend(self.alias)
574 VcsRepository = get_backend(self.alias)
575 return VcsRepository.DEFAULT_BRANCH_NAME
575 return VcsRepository.DEFAULT_BRANCH_NAME
576
576
577 @property
577 @property
578 def default_head_id(self):
578 def default_head_id(self):
579 """
579 """
580 Returns the default head id of the underlying backend.
580 Returns the default head id of the underlying backend.
581
581
582 This will be the default branch name in case the backend does have a
582 This will be the default branch name in case the backend does have a
583 default branch. In the other cases it will point to a valid head
583 default branch. In the other cases it will point to a valid head
584 which can serve as the base to create a new commit on top of it.
584 which can serve as the base to create a new commit on top of it.
585 """
585 """
586 vcsrepo = self.repo.scm_instance()
586 vcsrepo = self.repo.scm_instance()
587 head_id = (
587 head_id = (
588 vcsrepo.DEFAULT_BRANCH_NAME or
588 vcsrepo.DEFAULT_BRANCH_NAME or
589 vcsrepo.commit_ids[-1])
589 vcsrepo.commit_ids[-1])
590 return head_id
590 return head_id
591
591
592 @property
592 @property
593 def commit_ids(self):
593 def commit_ids(self):
594 """
594 """
595 Returns the list of commits for the last created repository
595 Returns the list of commits for the last created repository
596 """
596 """
597 return self._commit_ids
597 return self._commit_ids
598
598
599 def create_master_repo(self, commits):
599 def create_master_repo(self, commits):
600 """
600 """
601 Create a repository and remember it as a template.
601 Create a repository and remember it as a template.
602
602
603 This allows to easily create derived repositories to construct
603 This allows to easily create derived repositories to construct
604 more complex scenarios for diff, compare and pull requests.
604 more complex scenarios for diff, compare and pull requests.
605
605
606 Returns a commit map which maps from commit message to raw_id.
606 Returns a commit map which maps from commit message to raw_id.
607 """
607 """
608 self._master_repo = self.create_repo(commits=commits)
608 self._master_repo = self.create_repo(commits=commits)
609 return self._commit_ids
609 return self._commit_ids
610
610
611 def create_repo(
611 def create_repo(
612 self, commits=None, number_of_commits=0, heads=None,
612 self, commits=None, number_of_commits=0, heads=None,
613 name_suffix=u'', bare=False, **kwargs):
613 name_suffix=u'', bare=False, **kwargs):
614 """
614 """
615 Create a repository and record it for later cleanup.
615 Create a repository and record it for later cleanup.
616
616
617 :param commits: Optional. A sequence of dict instances.
617 :param commits: Optional. A sequence of dict instances.
618 Will add a commit per entry to the new repository.
618 Will add a commit per entry to the new repository.
619 :param number_of_commits: Optional. If set to a number, this number of
619 :param number_of_commits: Optional. If set to a number, this number of
620 commits will be added to the new repository.
620 commits will be added to the new repository.
621 :param heads: Optional. Can be set to a sequence of of commit
621 :param heads: Optional. Can be set to a sequence of of commit
622 names which shall be pulled in from the master repository.
622 names which shall be pulled in from the master repository.
623 :param name_suffix: adds special suffix to generated repo name
623 :param name_suffix: adds special suffix to generated repo name
624 :param bare: set a repo as bare (no checkout)
624 :param bare: set a repo as bare (no checkout)
625 """
625 """
626 self.repo_name = self._next_repo_name() + name_suffix
626 self.repo_name = self._next_repo_name() + name_suffix
627 repo = self._fixture.create_repo(
627 repo = self._fixture.create_repo(
628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
629 self._cleanup_repos.append(repo.repo_name)
629 self._cleanup_repos.append(repo.repo_name)
630
630
631 commits = commits or [
631 commits = commits or [
632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
633 for x in range(number_of_commits)]
633 for x in range(number_of_commits)]
634 self._add_commits_to_repo(repo.scm_instance(), commits)
634 self._add_commits_to_repo(repo.scm_instance(), commits)
635 if heads:
635 if heads:
636 self.pull_heads(repo, heads)
636 self.pull_heads(repo, heads)
637
637
638 return repo
638 return repo
639
639
640 def pull_heads(self, repo, heads):
640 def pull_heads(self, repo, heads):
641 """
641 """
642 Make sure that repo contains all commits mentioned in `heads`
642 Make sure that repo contains all commits mentioned in `heads`
643 """
643 """
644 vcsmaster = self._master_repo.scm_instance()
644 vcsmaster = self._master_repo.scm_instance()
645 vcsrepo = repo.scm_instance()
645 vcsrepo = repo.scm_instance()
646 vcsrepo.config.clear_section('hooks')
646 vcsrepo.config.clear_section('hooks')
647 commit_ids = [self._commit_ids[h] for h in heads]
647 commit_ids = [self._commit_ids[h] for h in heads]
648 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
648 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
649
649
650 def create_fork(self):
650 def create_fork(self):
651 repo_to_fork = self.repo_name
651 repo_to_fork = self.repo_name
652 self.repo_name = self._next_repo_name()
652 self.repo_name = self._next_repo_name()
653 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
653 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
654 self._cleanup_repos.append(self.repo_name)
654 self._cleanup_repos.append(self.repo_name)
655 return repo
655 return repo
656
656
657 def new_repo_name(self, suffix=u''):
657 def new_repo_name(self, suffix=u''):
658 self.repo_name = self._next_repo_name() + suffix
658 self.repo_name = self._next_repo_name() + suffix
659 self._cleanup_repos.append(self.repo_name)
659 self._cleanup_repos.append(self.repo_name)
660 return self.repo_name
660 return self.repo_name
661
661
662 def _next_repo_name(self):
662 def _next_repo_name(self):
663 return u"%s_%s" % (
663 return u"%s_%s" % (
664 self.invalid_repo_name.sub(u'_', self._test_name),
664 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
665 len(self._cleanup_repos))
666
665
667 def ensure_file(self, filename, content='Test content\n'):
666 def ensure_file(self, filename, content='Test content\n'):
668 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
667 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
669 commits = [
668 commits = [
670 {'added': [
669 {'added': [
671 FileNode(filename, content=content),
670 FileNode(filename, content=content),
672 ]},
671 ]},
673 ]
672 ]
674 self._add_commits_to_repo(self.repo.scm_instance(), commits)
673 self._add_commits_to_repo(self.repo.scm_instance(), commits)
675
674
676 def enable_downloads(self):
675 def enable_downloads(self):
677 repo = self.repo
676 repo = self.repo
678 repo.enable_downloads = True
677 repo.enable_downloads = True
679 Session().add(repo)
678 Session().add(repo)
680 Session().commit()
679 Session().commit()
681
680
682 def cleanup(self):
681 def cleanup(self):
683 for repo_name in reversed(self._cleanup_repos):
682 for repo_name in reversed(self._cleanup_repos):
684 self._fixture.destroy_repo(repo_name)
683 self._fixture.destroy_repo(repo_name)
685
684
    def _add_commits_to_repo(self, repo, commits):
        """
        Add `commits` to `repo` and remember the resulting commit ids.

        For Git repositories additionally creates one ref per commit under
        ``refs/test-refs/`` so the commits can be fetched from a remote
        repository.
        """
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)
701
700
702 def _create_refs(self, repo, refs):
701 def _create_refs(self, repo, refs):
703 for ref_name in refs:
702 for ref_name in refs:
704 repo.set_refs(ref_name, refs[ref_name])
703 repo.set_refs(ref_name, refs[ref_name])
705
704
706
705
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Build a :class:`VcsBackend` for `backend_alias` with cleanup attached.

    Skips the test when the backend was not selected via ``--backends`` and
    honours per-test skip/xfail backend markers.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
723
722
724
723
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
737
736
738
737
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized variant of `vcsbackend` fixed to Git."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
742
741
743
742
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized variant of `vcsbackend` fixed to Mercurial."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
747
746
748
747
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Non-parametrized variant of `vcsbackend` fixed to Subversion."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
752
751
753
752
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
762
761
763
762
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    # Characters not allowed in generated repository names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        # Repositories created by this instance; removed in cleanup().
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Expose the prepared test repositories by name as vcs instances.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a fresh repository, optionally cloned from `_clone_repo` and
        pre-filled with `commits` (or `number_of_commits` autogenerated
        ones). The repository is registered for removal in :meth:`cleanup`.
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository cloned from `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        # Remove every repository created through this backend instance.
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve a fresh directory for a repository and make it the
        current repo path."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Unique name built from the sanitized test name plus a counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit `filename` with `content` to `repo`."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` to the current repository; only allowed on
        repositories created by this fixture."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
841
840
842
841
def _add_commits_to_repo(vcs_repo, commits):
    """
    Commit each entry of `commits` into `vcs_repo`.

    Each commit dict may carry ``message``, ``author``, ``date``, ``branch``,
    ``parents`` (messages of earlier commits in this list) and file
    operations under ``added``/``changed``/``removed``.

    :return: mapping of commit message to created commit id; empty dict when
        `commits` is empty.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # Resolve parents by message via the ids collected in earlier
        # iterations.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # Ensure each commit touches at least one file, so the backend
        # actually records a new commit.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
879
878
880
879
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository for the duration of a test; the
    started server processes are terminated when the test finishes.
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server
890
889
891
890
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the last served repository is reachable; None until
    # serve() has been called.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """
        Start an ``svnserve`` daemon serving `vcsrepo` on localhost and
        remember its URL in :attr:`url`.

        :raises TypeError: if `vcsrepo` is not a Subversion repository.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate all started server processes and reap them."""
        for proc in self._cleanup_servers:
            proc.terminate()
        # Wait for the terminated processes to actually exit; without this
        # they would linger as zombies until the test process itself dies.
        for proc in self._cleanup_servers:
            proc.wait()
917
916
918
917
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    # Deletes the pull request and stops all patchers after the test.
    request.addfinalizer(util.cleanup)

    return util
934
933
935
934
class PRTestUtility(object):
    """
    Manages the lifecycle of a single test pull request: creation, updates,
    comments, status votes and cleanup.
    """

    # State of the currently managed pull request; populated by
    # create_pull_request().
    pull_request = None
    pull_request_id = None
    # Patchers started lazily; stopped again in cleanup().
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (at most once per instance) a pull request from freshly
        created source/target repositories and return it.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # Default three-commit setup: PR brings 'c2' on top of 'c1'.
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Record an approved status vote from every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the managed pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a 'branch:<name>:<commit_id>' reference string for the
        # commit identified by its message.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Default reviewer tuples: (username, reasons, mandatory, rules).
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        """Pull `head` (default 'c3') into the source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Pull one more head into the source repo, update the pull request
        and return the id of the newly added commit."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo, update the pull
        request and return the removed commit id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general comment on the pull request, optionally linking
        existing comments to the pull request version `linked_to`."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment at `line_no` of `file_path`, optionally
        linking existing comments to version `linked_to`."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it into a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the pull request for each of `reviewers`."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch the vcs settings so the PR merge feature reports `value`."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1118
1117
1119
1118
@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user
1127
1126
1128
1127
@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user
1136
1135
1137
1136
@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility
1146
1145
1147
1146
1148 # TODO: johbo: Split this up into utilities per domain or something similar
1147 # TODO: johbo: Split this up into utilities per domain or something similar
1149 class UserUtility(object):
1148 class UserUtility(object):
1150
1149
    def __init__(self, test_name="test"):
        """Initialize the utility; `test_name` seeds generated entity names."""
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Registries of ids for everything created through this utility,
        # used to remove the objects again during cleanup.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []
1165
1164
1166 def _sanitize_name(self, name):
1165 def _sanitize_name(self, name):
1167 for char in ['[', ']']:
1166 for char in ['[', ']']:
1168 name = name.replace(char, '_')
1167 name = name.replace(char, '_')
1169 return name
1168 return name
1170
1169
    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repository group owned by `owner`; registered for
        cleanup unless `auto_cleanup` is False."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group
1181
1180
    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a repository of `repo_type` owned by `owner`, optionally
        inside repo group `parent`; registered for cleanup unless
        `auto_cleanup` is False."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository
1193
1192
    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a generated unique name; extra `kwargs` are
        passed through to the fixture."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user
1202
1201
def create_additional_user_email(self, user, email):
    """Attach an extra e-mail address to *user* and return the record."""
    return self.fixture.create_additional_user_email(user=user, email=email)
1206
1205
def create_user_with_group(self):
    """Create one user plus a user group containing only that user."""
    member = self.create_user()
    group = self.create_user_group(members=[member])
    return member, group
1211
1210
def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                      auto_cleanup=True, **kwargs):
    """Create a user group, optionally pre-populated with *members*.

    Additional keyword arguments are forwarded to the fixture.  With
    ``auto_cleanup`` the group id is remembered for :meth:`cleanup`.
    """
    name = "{prefix}_usergroup_{count}".format(
        prefix=self._test_name, count=len(self.user_group_ids))
    group = self.fixture.create_user_group(name, cur_user=owner, **kwargs)

    if auto_cleanup:
        self.user_group_ids.append(group.users_group_id)
    for member in (members or []):
        UserGroupModel().add_user_to_group(group, member)
    return group
1226
1225
def grant_user_permission(self, user_name, permission_name):
    # Disable inheritance of default permissions so a per-user grant can
    # take effect, and record the pair so cleanup() revokes it later.
    # NOTE(review): this method only records the grant -- the actual
    # permission assignment appears to happen elsewhere; confirm callers.
    self._inherit_default_user_permissions(user_name, False)
    self.user_permissions.append((user_name, permission_name))
1230
1229
def grant_user_permission_to_repo_group(
        self, repo_group, user, permission_name):
    """Grant *user* a permission on *repo_group*; tracked for cleanup."""
    perm = RepoGroupModel().grant_user_permission(
        repo_group, user, permission_name)
    self.user_repo_group_permission_ids.append(
        (repo_group.group_id, user.user_id))
    return perm
1238
1237
def grant_user_group_permission_to_repo_group(
        self, repo_group, user_group, permission_name):
    """Grant *user_group* a permission on *repo_group*; tracked for cleanup."""
    perm = RepoGroupModel().grant_user_group_permission(
        repo_group, user_group, permission_name)
    self.user_group_repo_group_permission_ids.append(
        (repo_group.group_id, user_group.users_group_id))
    return perm
1246
1245
def grant_user_permission_to_repo(
        self, repo, user, permission_name):
    """Grant *user* a permission on *repo*; tracked for cleanup."""
    perm = RepoModel().grant_user_permission(
        repo, user, permission_name)
    self.user_repo_permission_ids.append(
        (repo.repo_id, user.user_id))
    return perm
1254
1253
def grant_user_group_permission_to_repo(
        self, repo, user_group, permission_name):
    """Grant *user_group* a permission on *repo*; tracked for cleanup."""
    perm = RepoModel().grant_user_group_permission(
        repo, user_group, permission_name)
    self.user_group_repo_permission_ids.append(
        (repo.repo_id, user_group.users_group_id))
    return perm
1262
1261
def grant_user_permission_to_user_group(
        self, target_user_group, user, permission_name):
    """Grant *user* a permission on *target_user_group*; tracked for cleanup."""
    perm = UserGroupModel().grant_user_permission(
        target_user_group, user, permission_name)
    self.user_user_group_permission_ids.append(
        (target_user_group.users_group_id, user.user_id))
    return perm
1270
1269
def grant_user_group_permission_to_user_group(
        self, target_user_group, user_group, permission_name):
    """Grant *user_group* a permission on *target_user_group*; tracked for cleanup."""
    perm = UserGroupModel().grant_user_group_permission(
        target_user_group, user_group, permission_name)
    self.user_group_user_group_permission_ids.append(
        (target_user_group.users_group_id, user_group.users_group_id))
    return perm
1278
1277
def revoke_user_permission(self, user_name, permission_name):
    """Revoke a global permission and restore default-permission inheritance."""
    self._inherit_default_user_permissions(user_name, True)
    UserModel().revoke_perm(user_name, permission_name)
1282
1281
def _inherit_default_user_permissions(self, user_name, value):
    """Set and persist the ``inherit_default_permissions`` flag of a user."""
    db_user = UserModel().get_by_username(user_name)
    db_user.inherit_default_permissions = value
    Session().add(db_user)
    Session().commit()
1288
1287
def cleanup(self):
    # Teardown order matters: permissions reference the objects below,
    # repositories must be destroyed before the repo groups containing
    # them, and users go last because other objects reference owners.
    self._cleanup_permissions()
    self._cleanup_repos()
    self._cleanup_repo_groups()
    self._cleanup_user_groups()
    self._cleanup_users()
1295
1294
def _cleanup_permissions(self):
    """Revoke every permission that was granted through this utility.

    Iterating an empty list is a no-op, so no emptiness guards are
    needed (the original ``if self.user_permissions:`` was redundant).
    """
    for user_name, permission_name in self.user_permissions:
        self.revoke_user_permission(user_name, permission_name)

    for permission in self.user_repo_permission_ids:
        RepoModel().revoke_user_permission(*permission)

    for permission in self.user_group_repo_permission_ids:
        RepoModel().revoke_user_group_permission(*permission)

    for permission in self.user_repo_group_permission_ids:
        RepoGroupModel().revoke_user_permission(*permission)

    for permission in self.user_group_repo_group_permission_ids:
        RepoGroupModel().revoke_user_group_permission(*permission)

    for permission in self.user_user_group_permission_ids:
        UserGroupModel().revoke_user_permission(*permission)

    for permission in self.user_group_user_group_permission_ids:
        UserGroupModel().revoke_user_group_permission(*permission)
1318
1317
def _cleanup_repo_groups(self):
    """Destroy created repo groups, most deeply nested paths first.

    The Python-2-only ``cmp``/``sorted(cmp=...)`` comparator was replaced
    by an equivalent ``key=``/``reverse=True`` sort (descending by the
    number of path segments), which behaves identically and also works
    on Python 3.
    """
    def _group_depth(repo_group_id):
        # Groups that were already removed resolve to None -> depth 0.
        group = RepoGroup.get(repo_group_id)
        return len(group.group_name.split('/')) if group else 0

    sorted_repo_group_ids = sorted(
        self.repo_group_ids, key=_group_depth, reverse=True)
    for repo_group_id in sorted_repo_group_ids:
        self.fixture.destroy_repo_group(repo_group_id)
1336
1335
def _cleanup_repos(self):
    """Destroy created repositories in ascending id order."""
    for repo_id in sorted(self.repos_ids):
        self.fixture.destroy_repo(repo_id)
1341
1340
def _cleanup_user_groups(self):
    """Destroy created user groups, most deeply nested names first.

    As in ``_cleanup_repo_groups``, the Python-2-only ``cmp`` comparator
    was replaced by an equivalent ``key=``/``reverse=True`` sort.
    """
    def _group_depth(user_group_id):
        # Groups that were already removed resolve to None -> depth 0.
        group = UserGroup.get(user_group_id)
        return (len(group.users_group_name.split('/'))
                if group else 0)

    sorted_user_group_ids = sorted(
        self.user_group_ids, key=_group_depth, reverse=True)
    for user_group_id in sorted_user_group_ids:
        self.fixture.destroy_user_group(user_group_id)
1361
1360
def _cleanup_users(self):
    # Remove every user account that was created through this utility.
    for user_id in self.user_ids:
        self.fixture.destroy_user(user_id)
1365
1364
1366
1365
# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Attach the remote traceback to the report when the exception has one.

    VCSServer stores its remote traceback on the exception instance as
    the attribute `_vcs_server_traceback`.
    """
    outcome = yield
    test_report = outcome.get_result()
    excinfo = call.excinfo
    if excinfo is not None:
        _add_vcsserver_remote_traceback(test_report, excinfo.value)
1381
1380
1382
1381
1383 def _add_vcsserver_remote_traceback(report, exc):
1382 def _add_vcsserver_remote_traceback(report, exc):
1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1383 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1385
1384
1386 if vcsserver_traceback:
1385 if vcsserver_traceback:
1387 section = 'VCSServer remote traceback ' + report.when
1386 section = 'VCSServer remote traceback ' + report.when
1388 report.sections.append((section, vcsserver_traceback))
1387 report.sections.append((section, vcsserver_traceback))
1389
1388
1390
1389
@pytest.fixture(scope='session')
def testrun():
    # Session-wide identification of this test run: a unique id plus the
    # start time both as ISO string and as integer epoch seconds.
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1398
1397
1399
1398
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        # NOTE(review): getfuncargvalue is the pre-pytest-3 spelling of
        # getfixturevalue -- keep in mind when upgrading pytest.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Sample memory of the VCSServer process (after a GC run) and of the
    # test process itself before the test body executes.
    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

        test_process = psutil.Process()
        mem = test_process.memory_info()
        client.tag_before('test.rss', mem.rss)
        client.tag_before('test.vms', mem.vms)

        client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Take the "after" samples once the test is done and ship all
        # collected stats to Appenlight in one request.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1473
1472
1474
1473
class AppenlightClient(object):
    """Buffers tagged test statistics and pushes them to Appenlight.

    Entries are queued locally through :meth:`collect` and only sent over
    HTTP when :meth:`send_stats` is called.  Derives from ``object``
    explicitly (new-style class, consistent with the other utilities in
    this module).
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # Value of `tag` sampled before the measured action.
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # Value of `tag` sampled after the measured action.
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in the configured default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Flush all collected entries to the Appenlight endpoint.

        Raises an Exception when the server does not answer with HTTP 200.
        """
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                # Best effort: the matching "after" tag may be missing or
                # the values may not support subtraction.
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        # Idiomatic comparison (was: `if not response.status_code == 200`).
        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1544
1543
1545
1544
@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1554
1553
1555
1554
class GistUtility(object):
    """Creates gists through the fixture layer and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for later destruction."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1569
1568
1570
1569
@pytest.fixture
def enabled_backends(request):
    """A copy of the backend names enabled via the test configuration."""
    return request.config.option.backends[:]
1575
1574
1576
1575
@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1585
1584
1586
1585
class SettingsUtility(object):
    """Creates RhodeCode ui/app-setting rows and deletes them on cleanup."""

    def __init__(self):
        # Ids of rows created through this utility, one list per table.
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui entry; a key is derived if not given."""
        # hashlib requires bytes on Python 3; encoding keeps the Python 2
        # digest identical for the ascii inputs used in tests.
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id).encode('utf-8')).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui entry; a key is derived if not given."""
        # See create_repo_rhodecode_ui for the rationale of .encode().
        key = key or hashlib.sha1(
            '{}{}'.format(section, value).encode('utf-8')).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every setting row that was created with ``cleanup=True``."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        # A single commit flushes all pending deletes at once.
        Session().commit()
1667
1666
1668
1667
@pytest.fixture
def no_notifications(request):
    """Silence notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1675
1674
1676
1675
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    # Value comes from the `--repeat` command line option.
    return request.config.getoption('--repeat')
1686
1685
1687
1686
@pytest.fixture
def rhodecode_fixtures():
    """Provide a fresh ``Fixture`` helper instance for each test."""
    return Fixture()
1691
1690
1692
1691
@pytest.fixture
def context_stub():
    """Stub context object (a pyramid ``DummyResource``)."""
    return pyramid.testing.DummyResource()
1700
1699
1701
1700
@pytest.fixture
def request_stub():
    """Stub request object, bootstrapped with an https scheme."""
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1710
1709
1711
1710
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.

    Tears the pyramid test configuration down again when the test ends.
    """
    from rhodecode.lib.base import bootstrap_config
    configurator = bootstrap_config(request=request_stub)
    # tearDown() takes no required arguments, so it can be the finalizer
    request.addfinalizer(pyramid.testing.tearDown)
    return configurator
1725
1724
1726
1725
@pytest.fixture
def StubIntegrationType():
    """Register and return a minimal integration type used only by tests."""

    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            # events delivered via send_event, recorded for test assertions
            self.sent_events = []

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            # field order matters for colander schema children
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )

            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1763
1762
@pytest.fixture
def stub_integration_settings():
    """Sample settings matching the stub integration type's schema."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1770
1769
1771
1770
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Enabled stub integration bound to a single repo; deleted at teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1785
1784
1786
1785
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Stub integration on a repo group (direct children only)."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1800
1799
1801
1800
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
    StubIntegrationType, stub_integration_settings):
    """Stub integration on a repo group, applied recursively to all repos."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1815
1814
1816
1815
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Stub integration with global scope (no repo, no repo group)."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1830
1829
1831
1830
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Stub integration with global scope, limited to root-level repos."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1845
1844
1846
1845
@pytest.fixture
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC datetime."""
    def _factory(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
1853
1852
1854
1853
@pytest.fixture
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for a test and restore it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1862
1861
1863
1862
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped ``Fixture`` helper instance."""
    return Fixture()
1867
1866
1868
1867
@pytest.fixture
def repo_groups(request):
    """
    Create a zombie/parent/child repo group hierarchy for tests.

    Returns the three groups and destroys them at teardown, children
    before their parent.
    """
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    # sanity-check the created hierarchy before handing it to the test
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    def cleanup():
        # child must go before its parent; order is significant
        for group in (zombie_group, child_group, parent_group):
            fixture.destroy_repo_group(group)
    request.addfinalizer(cleanup)

    return zombie_group, parent_group, child_group
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now