##// END OF EJS Templates
pull-requests: fix the way pull requests calculate common ancestors.
marcink -
r4346:4dcd6440 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,47 b''
# -*- coding: utf-8 -*-

import logging

# Explicit imports instead of the previous `from sqlalchemy import *`
# wildcard (which also shadowed the separate `BigInteger` import that
# was never used).
from sqlalchemy import Column, MetaData, Unicode

from alembic.migration import MigrationContext
from alembic.operations import Operations

from rhodecode.lib.dbmigrate.versions import _reset_base
from rhodecode.model import init_model_encryption


log = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_19_0_0 as db

    init_model_encryption(db)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    # Persist the common-ancestor commit on pull requests (and their
    # versions) so it does not have to be recalculated on every access.
    pull_requests = db.PullRequest.__table__
    with op.batch_alter_table(pull_requests.name) as batch_op:
        new_column = Column('common_ancestor_id', Unicode(255), nullable=True)
        batch_op.add_column(new_column)

    pull_request_version = db.PullRequestVersion.__table__
    with op.batch_alter_table(pull_request_version.name) as batch_op:
        new_column = Column('common_ancestor_id', Unicode(255), nullable=True)
        batch_op.add_column(new_column)


def downgrade(migrate_engine):
    # Intentionally a no-op: the added columns are nullable and harmless to
    # keep. Binding the metadata follows the sqlalchemy-migrate convention.
    meta = MetaData()
    meta.bind = migrate_engine


def fixups(models, _SESSION):
    # No data fixups required for this schema change.
    pass
@@ -1,60 +1,60 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
from collections import OrderedDict

import sys
import platform

# Version is read from the adjacent VERSION file, e.g. "4.19.0".
VERSION = tuple(open(os.path.join(
    os.path.dirname(__file__), 'VERSION')).read().split('.'))

# Supported VCS backends, keyed by their short identifier.
BACKENDS = OrderedDict([
    ('hg', 'Mercurial repository'),
    ('git', 'Git repository'),
    ('svn', 'Subversion repository'),
])


CELERY_ENABLED = False
CELERY_EAGER = False

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

__version__ = '.'.join(str(part) for part in VERSION[:3])
__dbversion__ = 107  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
@@ -1,1018 +1,1018 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
@jsonrpc_method()
def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
                     merge_state=Optional(False)):
    """
    Get a pull request based on the given ID.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID from where
        the pull request was opened.
    :type repoid: str or int
    :param pullrequestid: ID of the requested pull request.
    :type pullrequestid: int
    :param merge_state: Optional calculate merge state for each repository.
        This could result in longer time to fetch the data
    :type merge_state: bool

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
        {
            "pull_request_id": "<pull_request_id>",
            "url": "<url>",
            "title": "<title>",
            "description": "<description>",
            "status" : "<status>",
            "created_on": "<date_time_created>",
            "updated_on": "<date_time_updated>",
            "versions": "<number_of_versions_of_pr>",
            "commit_ids": [
                ...
                "<commit_id>",
                "<commit_id>",
                ...
            ],
            "review_status": "<review_status>",
            "mergeable": {
                "status": "<bool>",
                "message": "<message>",
            },
            "source": {
                "clone_url": "<clone_url>",
                "repository": "<repository_name>",
                "reference":
                {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "target": {
                "clone_url": "<clone_url>",
                "repository": "<repository_name>",
                "reference":
                {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "merge": {
                "clone_url": "<clone_url>",
                "reference":
                {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "author": <user_obj>,
            "reviewers": [
                ...
                {
                    "user": "<user_obj>",
                    "review_status": "<review_status>",
                }
                ...
            ]
        },
        "error": null
    """

    pull_request = get_pull_request_or_error(pullrequestid)

    # Resolving the repo also validates an explicitly passed repoid.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    # NOTE(marcink): only calculate and return merge state if the pr state is
    # 'created', otherwise we can lock the repo on calculation of merge state
    # while update/merge is happening.
    state_is_created = pull_request.pull_request_state == pull_request.STATE_CREATED
    with_merge_state = Optional.extract(merge_state, binary=True) and state_is_created
    return pull_request.get_api_data(with_merge_state=with_merge_state)
147 147
148 148
@jsonrpc_method()
def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
                      merge_state=Optional(False)):
    """
    Get all pull requests from the repository specified in `repoid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param status: Only return pull requests with the specified status.
        Valid options are.
        * ``new`` (default)
        * ``open``
        * ``closed``
    :type status: str
    :param merge_state: Optional calculate merge state for each repository.
        This could result in longer time to fetch the data
    :type merge_state: bool

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
        [
            ...
            {
                "pull_request_id": "<pull_request_id>",
                "url": "<url>",
                "title" : "<title>",
                "description": "<description>",
                "status": "<status>",
                "created_on": "<date_time_created>",
                "updated_on": "<date_time_updated>",
                "commit_ids": [
                    ...
                    "<commit_id>",
                    "<commit_id>",
                    ...
                ],
                "review_status": "<review_status>",
                "mergeable": {
                    "status": "<bool>",
                    "message": "<message>",
                },
                "source": {
                    "clone_url": "<clone_url>",
                    "reference":
                    {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "target": {
                    "clone_url": "<clone_url>",
                    "reference":
                    {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "merge": {
                    "clone_url": "<clone_url>",
                    "reference":
                    {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "author": <user_obj>,
                "reviewers": [
                    ...
                    {
                        "user": "<user_obj>",
                        "review_status": "<review_status>",
                    }
                    ...
                ]
            }
            ...
        ],
        "error": null

    """
    repo = get_repo_or_error(repoid)

    # Non-admins need at least read permission on the repository.
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    wanted_status = Optional.extract(status)
    with_merge_state = Optional.extract(merge_state, binary=True)
    pull_requests = PullRequestModel().get_all(
        repo, statuses=[wanted_status], order_by='id', order_dir='desc')
    return [
        pull_request.get_api_data(with_merge_state=with_merge_state)
        for pull_request in pull_requests
    ]
250 250
251 251
@jsonrpc_method()
def merge_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Merge the pull request specified by `pullrequestid` into its target
    repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID of the
        target repository to which the |pr| is to be merged.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request which shall be merged.
    :type pullrequestid: int
    :param userid: Merge the pull request as this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "executed": "<bool>",
            "failure_reason": "<int>",
            "merge_status_message": "<str>",
            "merge_commit_id": "<merge_commit_id>",
            "possible": "<bool>",
            "merge_ref": {
                "commit_id": "<commit_id>",
                "type": "<type>",
                "name": "<name>"
            }
        },
        "error": null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Acting as a different user requires super-admin or repo-admin rights.
    auth_user = apiuser
    if not isinstance(userid, Optional):
        can_act_as_other = (
            has_superadmin_permission(apiuser) or
            HasRepoPermissionAnyApi('repository.admin')(
                user=apiuser, repo_name=repo.repo_name))
        if not can_act_as_other:
            raise JSONRPCError('userid is not the same as your user')
        apiuser = get_user_or_error(userid)
        auth_user = apiuser.AuthUser()

    if pull_request.pull_request_state != PullRequest.STATE_CREATED:
        raise JSONRPCError(
            'Operation forbidden because pull request is in state {}, '
            'only state {} is allowed.'.format(
                pull_request.pull_request_state, PullRequest.STATE_CREATED))

    # Run the merge pre-checks while the PR is marked as updating, so no
    # concurrent update/merge can interleave with the validation.
    with pull_request.set_state(PullRequest.STATE_UPDATING):
        check = MergeCheck.validate(pull_request, auth_user=auth_user,
                                    translator=request.translate)
        merge_possible = not check.failed

        if not merge_possible:
            reasons = ','.join(
                request.translate(error_msg)
                for __, error_msg in check.errors)
            raise JSONRPCError(
                'merge not possible for following reasons: {}'.format(reasons))

    target_repo = pull_request.target_repo
    extras = vcs_operation_context(
        request.environ, repo_name=target_repo.repo_name,
        username=auth_user.username, action='push',
        scm=target_repo.repo_type)
    with pull_request.set_state(PullRequest.STATE_UPDATING):
        merge_response = PullRequestModel().merge_repo(
            pull_request, apiuser, extras=extras)
        if merge_response.executed:
            PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)

            Session().commit()

    # In previous versions the merge response directly contained the merge
    # commit id. It is now contained in the merge reference object. To be
    # backwards compatible we have to extract it again.
    merge_response = merge_response.asdict()
    merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id

    return merge_response
345 345
346 346
@jsonrpc_method()
def get_pull_request_comments(
        request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Get all comments of pull request specified with the `pullrequestid`

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : [
            {
                "comment_author": {
                    "active": true,
                    "full_name_or_username": "Tom Gore",
                    "username": "admin"
                },
                "comment_created_on": "2017-01-02T18:43:45.533",
                "comment_f_path": null,
                "comment_id": 25,
                "comment_lineno": null,
                "comment_status": {
                    "status": "under_review",
                    "status_lbl": "Under Review"
                },
                "comment_text": "Example text",
                "comment_type": null,
                "pull_request_version": null,
                "comment_commit_id": None,
                "comment_pull_request_id": <pull_request_id>
            }
        ],
        error : null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    (pull_request_latest,
     pull_request_at_ver,
     pull_request_display_obj,
     at_version) = PullRequestModel().get_pr_version(
        pull_request.pull_request_id, version=None)

    # Map version database ids to human-friendly sequence numbers (1-based).
    versions = pull_request_display_obj.versions()
    version_numbers = {
        version.pull_request_version_id: number
        for number, version in enumerate(versions, 1)
    }

    # GENERAL COMMENTS with versions #
    query = CommentsModel()._all_general_comments_of_pull_request(pull_request)
    query = query.order_by(ChangesetComment.comment_id.asc())
    general_comments = query.all()

    # INLINE COMMENTS with versions #
    query = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
    query = query.order_by(ChangesetComment.comment_id.asc())
    inline_comments = query.all()

    data = []
    for comment in inline_comments + general_comments:
        comment_data = comment.get_api_data()

        pr_version_id = None
        if comment.pull_request_version_id:
            pr_version_id = 'v{}'.format(
                version_numbers[comment.pull_request_version_id])

        # sanitize some entries

        comment_data['pull_request_version'] = pr_version_id
        comment_data['comment_author'] = {
            'username': comment_data['comment_author'].username,
            'full_name_or_username': comment_data['comment_author'].full_name_or_username,
            'active': comment_data['comment_author'].active,
        }

        if comment_data['comment_status']:
            comment_data['comment_status'] = {
                'status': comment_data['comment_status'][0].status,
                'status_lbl': comment_data['comment_status'][0].status_lbl,
            }
        else:
            comment_data['comment_status'] = {}

        data.append(comment_data)
    return data
450 450
451 451
@jsonrpc_method()
def comment_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        message=Optional(None), commit_id=Optional(None), status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Comment on the pull request specified with the `pullrequestid`,
    in the |repo| specified by the `repoid`, and optionally change the
    review status.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int
    :param commit_id: Specify the commit_id for which to set a comment. If
        given commit_id is different than latest in the PR status
        change won't be performed.
    :type commit_id: str
    :param message: The text content of the comment.
    :type message: str
    :param status: (**Optional**) Set the approval status of the pull
        request. One of: 'not_reviewed', 'approved', 'rejected',
        'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "pull_request_id": "<Integer>",
            "comment_id": "<Integer>",
            "status": {"given": <given_status>,
                       "was_changed": <bool status_was_actually_changed> },
        },
        error : null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Acting as a different user requires super-admin or repo-admin rights.
    auth_user = apiuser
    if not isinstance(userid, Optional):
        if (has_superadmin_permission(apiuser) or
                HasRepoPermissionAnyApi('repository.admin')(
                    user=apiuser, repo_name=repo.repo_name)):
            apiuser = get_user_or_error(userid)
            auth_user = apiuser.AuthUser()
        else:
            raise JSONRPCError('userid is not the same as your user')

    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` comment failed, pull request is closed' % (
                pullrequestid,))

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        raise JSONRPCError('repository `%s` does not exist' % (repoid,))

    message = Optional.extract(message)
    status = Optional.extract(status)
    commit_id = Optional.extract(commit_id)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    if not message and not status:
        raise JSONRPCError(
            'Both message and status parameters are missing. '
            'At least one is required.')

    if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
            status is not None):
        raise JSONRPCError('Unknown comment status: `%s`' % status)

    if commit_id and commit_id not in pull_request.revisions:
        raise JSONRPCError(
            'Invalid commit_id `%s` for this pull request.' % commit_id)

    allowed_to_change_status = PullRequestModel().check_user_change_status(
        pull_request, apiuser)

    # if commit_id is passed re-validated if user is allowed to change status
    # based on latest commit_id from the PR
    if commit_id:
        commit_position = pull_request.revisions.index(commit_id)
        if commit_position != 0:
            allowed_to_change_status = False

    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this pull request.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    # When only a status change is requested, synthesize the comment text.
    text = message
    status_label = ChangesetStatus.get_status_lbl(status)
    if status and allowed_to_change_status:
        st_message = ('Status change %(transition_icon)s %(status)s'
                      % {'transition_icon': '>', 'status': status_label})
        text = message or st_message

    rc_config = SettingsModel().get_all_settings()
    renderer = rc_config.get('rhodecode_markup_renderer', 'rst')

    status_change = status and allowed_to_change_status
    comment = CommentsModel().create(
        text=text,
        repo=pull_request.target_repo.repo_id,
        user=apiuser.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        status_change=(status_label if status_change else None),
        status_change_type=(status if status_change else None),
        closing_pr=False,
        renderer=renderer,
        comment_type=comment_type,
        resolves_comment_id=resolves_comment_id,
        auth_user=auth_user,
        extra_recipients=extra_recipients,
        send_email=send_email
    )

    if allowed_to_change_status and status:
        # Remember the review status before this change, so we can detect a
        # transition after the new status is applied.
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            pull_request.target_repo.repo_id,
            status,
            apiuser.user_id,
            comment,
            pull_request=pull_request.pull_request_id
        )
        Session().flush()

    Session().commit()

    PullRequestModel().trigger_pull_request_hook(
        pull_request, apiuser, 'comment',
        data={'comment': comment})

    if allowed_to_change_status and status:
        # we now calculate the status of pull request, and based on that
        # calculation we set the commits status
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            PullRequestModel().trigger_pull_request_hook(
                pull_request, apiuser, 'review_status_change',
                data={'status': calculated_status})

    data = {
        'pull_request_id': pull_request.pull_request_id,
        'comment_id': comment.comment_id if comment else None,
        'status': {'given': status, 'was_changed': status_change},
    }
    return data
632 632
633 633
@jsonrpc_method()
def create_pull_request(
        request, apiuser, source_repo, target_repo, source_ref, target_ref,
        owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
        description_renderer=Optional(''), reviewers=Optional(None)):
    """
    Creates a new pull request.

    Accepts refs in the following formats:

        * branch:<branch_name>:<sha>
        * branch:<branch_name>
        * bookmark:<bookmark_name>:<sha> (Mercurial only)
        * bookmark:<bookmark_name> (Mercurial only)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param source_repo: Set the source repository name.
    :type source_repo: str
    :param target_repo: Set the target repository name.
    :type target_repo: str
    :param source_ref: Set the source ref name.
    :type source_ref: str
    :param target_ref: Set the target ref name.
    :type target_ref: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param title: Optionally Set the pull request title, it's generated otherwise
    :type title: str
    :param description: Set the pull request description.
    :type description: Optional(str)
    :type description_renderer: Optional(str)
    :param description_renderer: Set pull request renderer for the description.
        It should be 'rst', 'markdown' or 'plain'. If not given, the default
        system renderer will be used.
    :param reviewers: Set the new pull request reviewers list.
        Reviewer defined by review rules will be added automatically to the
        defined list.
    :type reviewers: Optional(list)
        Accepts username strings or objects of the format:

            [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
    """

    source_db_repo = get_repo_or_error(source_repo)
    target_db_repo = get_repo_or_error(target_repo)

    # Non-admins need at least read permission on the source repository.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)

    owner = validate_set_owner_permissions(apiuser, owner)

    full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
    full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)

    source_commit = get_commit_or_error(full_source_ref, source_db_repo)
    target_commit = get_commit_or_error(full_target_ref, target_db_repo)

    reviewer_objects = Optional.extract(reviewers) or []

    # serialize and validate passed in given reviewers
    if reviewer_objects:
        schema = ReviewerListSchema()
        try:
            reviewer_objects = schema.deserialize(reviewer_objects)
        except Invalid as err:
            raise JSONRPCValidationError(colander_exc=err)

        # validate users
        for reviewer_object in reviewer_objects:
            user = get_user_or_error(reviewer_object['username'])
            reviewer_object['user_id'] = user.user_id

    get_default_reviewers_data, validate_default_reviewers = \
        PullRequestModel().get_reviewer_functions()

    # recalculate reviewers logic, to make sure we can validate this
    default_reviewers_data = get_default_reviewers_data(
        owner, source_db_repo,
        source_commit, target_db_repo, target_commit)

    # now MERGE our given with the calculated
    reviewer_objects = default_reviewers_data['reviewers'] + reviewer_objects

    try:
        reviewers = validate_default_reviewers(
            reviewer_objects, default_reviewers_data)
    except ValueError as e:
        raise JSONRPCError('Reviewers Validation: {}'.format(e))

    title = Optional.extract(title)
    if not title:
        title_source_ref = source_ref.split(':', 2)[1]
        title = PullRequestModel().generate_pullrequest_title(
            source=source_repo,
            source_ref=title_source_ref,
            target=target_repo
        )

    # The diff info computed for default reviewers already carries the
    # common ancestor and the commit range; reuse it instead of asking
    # the VCS backends again.
    diff_info = default_reviewers_data['diff_info']
    common_ancestor_id = diff_info['ancestor']
    commits = diff_info['commits']

    if not common_ancestor_id:
        raise JSONRPCError('no common ancestor found')

    if not commits:
        raise JSONRPCError('no commits found')

    # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
    revisions = [commit.raw_id for commit in reversed(commits)]

    # recalculate target ref based on ancestor
    target_ref_type, target_ref_name, __ = full_target_ref.split(':')
    full_target_ref = ':'.join((target_ref_type, target_ref_name, common_ancestor_id))

    # fetch renderer, if set fallback to plain in case of PR
    rc_config = SettingsModel().get_all_settings()
    default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
    description = Optional.extract(description)
    description_renderer = Optional.extract(description_renderer) or default_system_renderer

    pull_request = PullRequestModel().create(
        created_by=owner.user_id,
        source_repo=source_repo,
        source_ref=full_source_ref,
        target_repo=target_repo,
        target_ref=full_target_ref,
        common_ancestor_id=common_ancestor_id,
        revisions=revisions,
        reviewers=reviewers,
        title=title,
        description=description,
        description_renderer=description_renderer,
        reviewer_data=default_reviewers_data,
        auth_user=apiuser
    )

    Session().commit()
    data = {
        'msg': 'Created new pull request `{}`'.format(title),
        'pull_request_id': pull_request.pull_request_id,
    }
    return data
778 778
779 779
780 780 @jsonrpc_method()
781 781 def update_pull_request(
782 782 request, apiuser, pullrequestid, repoid=Optional(None),
783 783 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
784 784 reviewers=Optional(None), update_commits=Optional(None)):
785 785 """
786 786 Updates a pull request.
787 787
788 788 :param apiuser: This is filled automatically from the |authtoken|.
789 789 :type apiuser: AuthUser
790 790 :param repoid: Optional repository name or repository ID.
791 791 :type repoid: str or int
792 792 :param pullrequestid: The pull request ID.
793 793 :type pullrequestid: int
794 794 :param title: Set the pull request title.
795 795 :type title: str
796 796 :param description: Update pull request description.
797 797 :type description: Optional(str)
798 798 :type description_renderer: Optional(str)
799 799 :param description_renderer: Update pull request renderer for the description.
800 800 It should be 'rst', 'markdown' or 'plain'
801 801 :param reviewers: Update pull request reviewers list with new value.
802 802 :type reviewers: Optional(list)
803 803 Accepts username strings or objects of the format:
804 804
805 805 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
806 806
807 807 :param update_commits: Trigger update of commits for this pull request
808 808 :type: update_commits: Optional(bool)
809 809
810 810 Example output:
811 811
812 812 .. code-block:: bash
813 813
814 814 id : <id_given_in_input>
815 815 result : {
816 816 "msg": "Updated pull request `63`",
817 817 "pull_request": <pull_request_object>,
818 818 "updated_reviewers": {
819 819 "added": [
820 820 "username"
821 821 ],
822 822 "removed": []
823 823 },
824 824 "updated_commits": {
825 825 "added": [
826 826 "<sha1_hash>"
827 827 ],
828 828 "common": [
829 829 "<sha1_hash>",
830 830 "<sha1_hash>",
831 831 ],
832 832 "removed": []
833 833 }
834 834 }
835 835 error : null
836 836 """
837 837
838 838 pull_request = get_pull_request_or_error(pullrequestid)
839 839 if Optional.extract(repoid):
840 840 repo = get_repo_or_error(repoid)
841 841 else:
842 842 repo = pull_request.target_repo
843 843
844 844 if not PullRequestModel().check_user_update(
845 845 pull_request, apiuser, api=True):
846 846 raise JSONRPCError(
847 847 'pull request `%s` update failed, no permission to update.' % (
848 848 pullrequestid,))
849 849 if pull_request.is_closed():
850 850 raise JSONRPCError(
851 851 'pull request `%s` update failed, pull request is closed' % (
852 852 pullrequestid,))
853 853
854 854 reviewer_objects = Optional.extract(reviewers) or []
855 855
856 856 if reviewer_objects:
857 857 schema = ReviewerListSchema()
858 858 try:
859 859 reviewer_objects = schema.deserialize(reviewer_objects)
860 860 except Invalid as err:
861 861 raise JSONRPCValidationError(colander_exc=err)
862 862
863 863 # validate users
864 864 for reviewer_object in reviewer_objects:
865 865 user = get_user_or_error(reviewer_object['username'])
866 866 reviewer_object['user_id'] = user.user_id
867 867
868 868 get_default_reviewers_data, get_validated_reviewers = \
869 869 PullRequestModel().get_reviewer_functions()
870 870
871 871 # re-use stored rules
872 872 reviewer_rules = pull_request.reviewer_data
873 873 try:
874 874 reviewers = get_validated_reviewers(
875 875 reviewer_objects, reviewer_rules)
876 876 except ValueError as e:
877 877 raise JSONRPCError('Reviewers Validation: {}'.format(e))
878 878 else:
879 879 reviewers = []
880 880
881 881 title = Optional.extract(title)
882 882 description = Optional.extract(description)
883 883 description_renderer = Optional.extract(description_renderer)
884 884
885 885 if title or description:
886 886 PullRequestModel().edit(
887 887 pull_request,
888 888 title or pull_request.title,
889 889 description or pull_request.description,
890 890 description_renderer or pull_request.description_renderer,
891 891 apiuser)
892 892 Session().commit()
893 893
894 894 commit_changes = {"added": [], "common": [], "removed": []}
895 895 if str2bool(Optional.extract(update_commits)):
896 896
897 897 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
898 898 raise JSONRPCError(
899 899 'Operation forbidden because pull request is in state {}, '
900 900 'only state {} is allowed.'.format(
901 901 pull_request.pull_request_state, PullRequest.STATE_CREATED))
902 902
903 903 with pull_request.set_state(PullRequest.STATE_UPDATING):
904 904 if PullRequestModel().has_valid_update_type(pull_request):
905 905 db_user = apiuser.get_instance()
906 906 update_response = PullRequestModel().update_commits(
907 907 pull_request, db_user)
908 908 commit_changes = update_response.changes or commit_changes
909 909 Session().commit()
910 910
911 911 reviewers_changes = {"added": [], "removed": []}
912 912 if reviewers:
913 913 old_calculated_status = pull_request.calculated_review_status()
914 914 added_reviewers, removed_reviewers = \
915 915 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
916 916
917 917 reviewers_changes['added'] = sorted(
918 918 [get_user_or_error(n).username for n in added_reviewers])
919 919 reviewers_changes['removed'] = sorted(
920 920 [get_user_or_error(n).username for n in removed_reviewers])
921 921 Session().commit()
922 922
923 923 # trigger status changed if change in reviewers changes the status
924 924 calculated_status = pull_request.calculated_review_status()
925 925 if old_calculated_status != calculated_status:
926 926 PullRequestModel().trigger_pull_request_hook(
927 927 pull_request, apiuser, 'review_status_change',
928 928 data={'status': calculated_status})
929 929
930 930 data = {
931 931 'msg': 'Updated pull request `{}`'.format(
932 932 pull_request.pull_request_id),
933 933 'pull_request': pull_request.get_api_data(),
934 934 'updated_commits': commit_changes,
935 935 'updated_reviewers': reviewers_changes
936 936 }
937 937
938 938 return data
939 939
940 940
941 941 @jsonrpc_method()
942 942 def close_pull_request(
943 943 request, apiuser, pullrequestid, repoid=Optional(None),
944 944 userid=Optional(OAttr('apiuser')), message=Optional('')):
945 945 """
946 946 Close the pull request specified by `pullrequestid`.
947 947
948 948 :param apiuser: This is filled automatically from the |authtoken|.
949 949 :type apiuser: AuthUser
950 950 :param repoid: Repository name or repository ID to which the pull
951 951 request belongs.
952 952 :type repoid: str or int
953 953 :param pullrequestid: ID of the pull request to be closed.
954 954 :type pullrequestid: int
955 955 :param userid: Close the pull request as this user.
956 956 :type userid: Optional(str or int)
957 957 :param message: Optional message to close the Pull Request with. If not
958 958 specified it will be generated automatically.
959 959 :type message: Optional(str)
960 960
961 961 Example output:
962 962
963 963 .. code-block:: bash
964 964
965 965 "id": <id_given_in_input>,
966 966 "result": {
967 967 "pull_request_id": "<int>",
968 968 "close_status": "<str:status_lbl>,
969 969 "closed": "<bool>"
970 970 },
971 971 "error": null
972 972
973 973 """
974 974 _ = request.translate
975 975
976 976 pull_request = get_pull_request_or_error(pullrequestid)
977 977 if Optional.extract(repoid):
978 978 repo = get_repo_or_error(repoid)
979 979 else:
980 980 repo = pull_request.target_repo
981 981
982 982 if not isinstance(userid, Optional):
983 983 if (has_superadmin_permission(apiuser) or
984 984 HasRepoPermissionAnyApi('repository.admin')(
985 985 user=apiuser, repo_name=repo.repo_name)):
986 986 apiuser = get_user_or_error(userid)
987 987 else:
988 988 raise JSONRPCError('userid is not the same as your user')
989 989
990 990 if pull_request.is_closed():
991 991 raise JSONRPCError(
992 992 'pull request `%s` is already closed' % (pullrequestid,))
993 993
994 994 # only owner or admin or person with write permissions
995 995 allowed_to_close = PullRequestModel().check_user_update(
996 996 pull_request, apiuser, api=True)
997 997
998 998 if not allowed_to_close:
999 999 raise JSONRPCError(
1000 1000 'pull request `%s` close failed, no permission to close.' % (
1001 1001 pullrequestid,))
1002 1002
1003 1003 # message we're using to close the PR, else it's automatically generated
1004 1004 message = Optional.extract(message)
1005 1005
1006 1006 # finally close the PR, with proper message comment
1007 1007 comment, status = PullRequestModel().close_pull_request_with_comment(
1008 1008 pull_request, apiuser, repo, message=message, auth_user=apiuser)
1009 1009 status_lbl = ChangesetStatus.get_status_lbl(status)
1010 1010
1011 1011 Session().commit()
1012 1012
1013 1013 data = {
1014 1014 'pull_request_id': pull_request.pull_request_id,
1015 1015 'close_status': status_lbl,
1016 1016 'closed': True,
1017 1017 }
1018 1018 return data
@@ -1,666 +1,667 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import lxml.html
24 24
25 25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 26 from rhodecode.tests import assert_session_flash
27 27 from rhodecode.tests.utils import AssertResponse, commit_change
28 28
29 29
30 30 def route_path(name, params=None, **kwargs):
31 31 import urllib
32 32
33 33 base_url = {
34 34 'repo_compare_select': '/{repo_name}/compare',
35 35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
36 36 }[name].format(**kwargs)
37 37
38 38 if params:
39 39 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
40 40 return base_url
41 41
42 42
43 43 @pytest.mark.usefixtures("autologin_user", "app")
44 44 class TestCompareView(object):
45 45
46 46 def test_compare_index_is_reached_at_least_once(self, backend):
47 47 repo = backend.repo
48 48 self.app.get(
49 49 route_path('repo_compare_select', repo_name=repo.repo_name))
50 50
51 51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 53 # Preparing the following repository structure:
54 54 #
55 55 # Origin repository has two commits:
56 56 #
57 57 # 0 1
58 58 # A -- D
59 59 #
60 60 # The fork of it has a few more commits and "D" has a commit index
61 61 # which does not exist in origin.
62 62 #
63 63 # 0 1 2 3 4
64 64 # A -- -- -- D -- E
65 65 # \- B -- C
66 66 #
67 67
68 68 fork = backend.create_repo()
69 69
70 70 # prepare fork
71 71 commit0 = commit_change(
72 72 fork.repo_name, filename='file1', content='A',
73 73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74 74
75 75 commit1 = commit_change(
76 76 fork.repo_name, filename='file1', content='B',
77 77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78 78
79 79 commit_change( # commit 2
80 80 fork.repo_name, filename='file1', content='C',
81 81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82 82
83 83 commit3 = commit_change(
84 84 fork.repo_name, filename='file1', content='D',
85 85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86 86
87 87 commit4 = commit_change(
88 88 fork.repo_name, filename='file1', content='E',
89 89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90 90
91 91 # prepare origin repository, taking just the history up to D
92 92 origin = backend.create_repo()
93 93
94 94 origin_repo = origin.scm_instance(cache=False)
95 95 origin_repo.config.clear_section('hooks')
96 96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97 97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
98 98
99 99 # Verify test fixture setup
100 100 # This does not work for git
101 101 if backend.alias != 'git':
102 102 assert 5 == len(fork.scm_instance().commit_ids)
103 103 assert 2 == len(origin_repo.commit_ids)
104 104
105 105 # Comparing the revisions
106 106 response = self.app.get(
107 107 route_path('repo_compare',
108 108 repo_name=origin.repo_name,
109 109 source_ref_type="rev", source_ref=commit3.raw_id,
110 110 target_ref_type="rev", target_ref=commit4.raw_id,
111 111 params=dict(merge='1', target_repo=fork.repo_name)
112 112 ))
113 113
114 114 compare_page = ComparePage(response)
115 115 compare_page.contains_commits([commit4])
116 116
117 117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
118 118 def test_compare_forks_on_branch_extra_commits(self, backend):
119 119 repo1 = backend.create_repo()
120 120
121 121 # commit something !
122 122 commit0 = commit_change(
123 123 repo1.repo_name, filename='file1', content='line1\n',
124 124 message='commit1', vcs_type=backend.alias, parent=None,
125 125 newfile=True)
126 126
127 127 # fork this repo
128 128 repo2 = backend.create_fork()
129 129
130 130 # add two extra commit into fork
131 131 commit1 = commit_change(
132 132 repo2.repo_name, filename='file1', content='line1\nline2\n',
133 133 message='commit2', vcs_type=backend.alias, parent=commit0)
134 134
135 135 commit2 = commit_change(
136 136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
137 137 message='commit3', vcs_type=backend.alias, parent=commit1)
138 138
139 139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
140 140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
141 141
142 142 response = self.app.get(
143 143 route_path('repo_compare',
144 144 repo_name=repo1.repo_name,
145 145 source_ref_type="branch", source_ref=commit_id2,
146 146 target_ref_type="branch", target_ref=commit_id1,
147 147 params=dict(merge='1', target_repo=repo2.repo_name)
148 148 ))
149 149
150 150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
151 151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
152 152
153 153 compare_page = ComparePage(response)
154 154 compare_page.contains_change_summary(1, 2, 0)
155 155 compare_page.contains_commits([commit1, commit2])
156 156
157 157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
158 158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
159 159
160 160 # Swap is removed when comparing branches since it's a PR feature and
161 161 # it is then a preview mode
162 162 compare_page.swap_is_hidden()
163 163 compare_page.target_source_are_disabled()
164 164
165 165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
167 167 repo1 = backend.create_repo()
168 168
169 169 # commit something !
170 170 commit0 = commit_change(
171 171 repo1.repo_name, filename='file1', content='line1\n',
172 172 message='commit1', vcs_type=backend.alias, parent=None,
173 173 newfile=True)
174 174
175 175 # fork this repo
176 176 repo2 = backend.create_fork()
177 177
178 178 # now commit something to origin repo
179 179 commit_change(
180 180 repo1.repo_name, filename='file2', content='line1file2\n',
181 181 message='commit2', vcs_type=backend.alias, parent=commit0,
182 182 newfile=True)
183 183
184 184 # add two extra commit into fork
185 185 commit1 = commit_change(
186 186 repo2.repo_name, filename='file1', content='line1\nline2\n',
187 187 message='commit2', vcs_type=backend.alias, parent=commit0)
188 188
189 189 commit2 = commit_change(
190 190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
191 191 message='commit3', vcs_type=backend.alias, parent=commit1)
192 192
193 193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
194 194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
195 195
196 196 response = self.app.get(
197 197 route_path('repo_compare',
198 198 repo_name=repo1.repo_name,
199 199 source_ref_type="branch", source_ref=commit_id2,
200 200 target_ref_type="branch", target_ref=commit_id1,
201 201 params=dict(merge='1', target_repo=repo2.repo_name),
202 202 ))
203 203
204 204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
205 205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
206 206
207 207 compare_page = ComparePage(response)
208 208 compare_page.contains_change_summary(1, 2, 0)
209 209 compare_page.contains_commits([commit1, commit2])
210 210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
211 211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
212 212
213 213 # Swap is removed when comparing branches since it's a PR feature and
214 214 # it is then a preview mode
215 215 compare_page.swap_is_hidden()
216 216 compare_page.target_source_are_disabled()
217 217
218 218 @pytest.mark.xfail_backends("svn")
219 219 # TODO(marcink): no svn support for compare two seperate repos
220 220 def test_compare_of_unrelated_forks(self, backend):
221 221 orig = backend.create_repo(number_of_commits=1)
222 222 fork = backend.create_repo(number_of_commits=1)
223 223
224 224 response = self.app.get(
225 225 route_path('repo_compare',
226 226 repo_name=orig.repo_name,
227 227 source_ref_type="rev", source_ref="tip",
228 228 target_ref_type="rev", target_ref="tip",
229 229 params=dict(merge='1', target_repo=fork.repo_name),
230 230 ),
231 231 status=302)
232 232 response = response.follow()
233 233 response.mustcontain("Repositories unrelated.")
234 234
235 235 @pytest.mark.xfail_backends("svn")
236 236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
237 237
238 238 # repo1:
239 239 # commit0:
240 240 # commit1:
241 241 # repo1-fork- in which we will cherry pick bottom commits
242 242 # commit0:
243 243 # commit1:
244 244 # commit2: x
245 245 # commit3: x
246 246 # commit4: x
247 247 # commit5:
248 248 # make repo1, and commit1+commit2
249 249
250 250 repo1 = backend.create_repo()
251 251
252 252 # commit something !
253 253 commit0 = commit_change(
254 254 repo1.repo_name, filename='file1', content='line1\n',
255 255 message='commit1', vcs_type=backend.alias, parent=None,
256 256 newfile=True)
257 257 commit1 = commit_change(
258 258 repo1.repo_name, filename='file1', content='line1\nline2\n',
259 259 message='commit2', vcs_type=backend.alias, parent=commit0)
260 260
261 261 # fork this repo
262 262 repo2 = backend.create_fork()
263 263
264 264 # now make commit3-6
265 265 commit2 = commit_change(
266 266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
267 267 message='commit3', vcs_type=backend.alias, parent=commit1)
268 268 commit3 = commit_change(
269 269 repo1.repo_name, filename='file1',
270 270 content='line1\nline2\nline3\nline4\n', message='commit4',
271 271 vcs_type=backend.alias, parent=commit2)
272 272 commit4 = commit_change(
273 273 repo1.repo_name, filename='file1',
274 274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
275 275 vcs_type=backend.alias, parent=commit3)
276 276 commit_change( # commit 5
277 277 repo1.repo_name, filename='file1',
278 278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
279 279 message='commit6', vcs_type=backend.alias, parent=commit4)
280 280
281 281 response = self.app.get(
282 282 route_path('repo_compare',
283 283 repo_name=repo2.repo_name,
284 284 # parent of commit2, in target repo2
285 285 source_ref_type="rev", source_ref=commit1.raw_id,
286 286 target_ref_type="rev", target_ref=commit4.raw_id,
287 287 params=dict(merge='1', target_repo=repo1.repo_name),
288 288 ))
289 289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
290 290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
291 291
292 292 # files
293 293 compare_page = ComparePage(response)
294 294 compare_page.contains_change_summary(1, 3, 0)
295 295 compare_page.contains_commits([commit2, commit3, commit4])
296 296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
297 297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
298 298
299 299 @pytest.mark.xfail_backends("svn")
300 300 def test_compare_cherry_pick_commits_from_top(self, backend):
301 301 # repo1:
302 302 # commit0:
303 303 # commit1:
304 304 # repo1-fork- in which we will cherry pick bottom commits
305 305 # commit0:
306 306 # commit1:
307 307 # commit2:
308 308 # commit3: x
309 309 # commit4: x
310 310 # commit5: x
311 311
312 312 # make repo1, and commit1+commit2
313 313 repo1 = backend.create_repo()
314 314
315 315 # commit something !
316 316 commit0 = commit_change(
317 317 repo1.repo_name, filename='file1', content='line1\n',
318 318 message='commit1', vcs_type=backend.alias, parent=None,
319 319 newfile=True)
320 320 commit1 = commit_change(
321 321 repo1.repo_name, filename='file1', content='line1\nline2\n',
322 322 message='commit2', vcs_type=backend.alias, parent=commit0)
323 323
324 324 # fork this repo
325 325 backend.create_fork()
326 326
327 327 # now make commit3-6
328 328 commit2 = commit_change(
329 329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
330 330 message='commit3', vcs_type=backend.alias, parent=commit1)
331 331 commit3 = commit_change(
332 332 repo1.repo_name, filename='file1',
333 333 content='line1\nline2\nline3\nline4\n', message='commit4',
334 334 vcs_type=backend.alias, parent=commit2)
335 335 commit4 = commit_change(
336 336 repo1.repo_name, filename='file1',
337 337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
338 338 vcs_type=backend.alias, parent=commit3)
339 339 commit5 = commit_change(
340 340 repo1.repo_name, filename='file1',
341 341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
342 342 message='commit6', vcs_type=backend.alias, parent=commit4)
343 343
344 344 response = self.app.get(
345 345 route_path('repo_compare',
346 346 repo_name=repo1.repo_name,
347 347 # parent of commit3, not in source repo2
348 348 source_ref_type="rev", source_ref=commit2.raw_id,
349 349 target_ref_type="rev", target_ref=commit5.raw_id,
350 350 params=dict(merge='1'),))
351 351
352 352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
353 353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
354 354
355 355 compare_page = ComparePage(response)
356 356 compare_page.contains_change_summary(1, 3, 0)
357 357 compare_page.contains_commits([commit3, commit4, commit5])
358 358
359 359 # files
360 360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
361 361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
362 362
363 363 @pytest.mark.xfail_backends("svn")
364 364 def test_compare_remote_branches(self, backend):
365 365 repo1 = backend.repo
366 366 repo2 = backend.create_fork()
367 367
368 368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
370 370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
372 372
373 373 response = self.app.get(
374 374 route_path('repo_compare',
375 375 repo_name=repo1.repo_name,
376 376 source_ref_type="rev", source_ref=commit_id1,
377 377 target_ref_type="rev", target_ref=commit_id2,
378 378 params=dict(merge='1', target_repo=repo2.repo_name),
379 379 ))
380 380
381 381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383 383
384 384 compare_page = ComparePage(response)
385 385
386 386 # outgoing commits between those commits
387 387 compare_page.contains_commits(
388 388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389 389
390 390 # files
391 391 compare_page.contains_file_links_and_anchors([
392 392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
393 393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
394 394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
395 395 ])
396 396
397 397 @pytest.mark.xfail_backends("svn")
398 398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 399 repo1 = backend.create_repo()
400 400 r1_name = repo1.repo_name
401 401
402 402 commit0 = commit_change(
403 403 repo=r1_name, filename='file1',
404 404 content='line1', message='commit1', vcs_type=backend.alias,
405 405 newfile=True)
406 406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407 407
408 408 # fork the repo1
409 409 repo2 = backend.create_fork()
410 410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411 411
412 412 self.r2_id = repo2.repo_id
413 413 r2_name = repo2.repo_name
414 414
415 415 commit1 = commit_change(
416 416 repo=r2_name, filename='file1-fork',
417 417 content='file1-line1-from-fork', message='commit1-fork',
418 418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 419 newfile=True)
420 420
421 421 commit2 = commit_change(
422 422 repo=r2_name, filename='file2-fork',
423 423 content='file2-line1-from-fork', message='commit2-fork',
424 424 vcs_type=backend.alias, parent=commit1,
425 425 newfile=True)
426 426
427 427 commit_change( # commit 3
428 428 repo=r2_name, filename='file3-fork',
429 429 content='file3-line1-from-fork', message='commit3-fork',
430 430 vcs_type=backend.alias, parent=commit2, newfile=True)
431 431
432 432 # compare !
433 433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435 435
436 436 response = self.app.get(
437 437 route_path('repo_compare',
438 438 repo_name=r2_name,
439 439 source_ref_type="branch", source_ref=commit_id1,
440 440 target_ref_type="branch", target_ref=commit_id2,
441 441 params=dict(merge='1', target_repo=r1_name),
442 442 ))
443 443
444 444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
445 445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
446 446 response.mustcontain('No files')
447 447 response.mustcontain('No commits in this compare')
448 448
449 449 commit0 = commit_change(
450 450 repo=r1_name, filename='file2',
451 451 content='line1-added-after-fork', message='commit2-parent',
452 452 vcs_type=backend.alias, parent=None, newfile=True)
453 453
454 454 # compare !
455 455 response = self.app.get(
456 456 route_path('repo_compare',
457 457 repo_name=r2_name,
458 458 source_ref_type="branch", source_ref=commit_id1,
459 459 target_ref_type="branch", target_ref=commit_id2,
460 460 params=dict(merge='1', target_repo=r1_name),
461 461 ))
462 462
463 463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
464 464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
465 465
466 466 response.mustcontain("""commit2-parent""")
467 467 response.mustcontain("""line1-added-after-fork""")
468 468 compare_page = ComparePage(response)
469 469 compare_page.contains_change_summary(1, 1, 0)
470 470
471 471 @pytest.mark.xfail_backends("svn")
472 472 def test_compare_commits(self, backend, xhr_header):
473 473 commit0 = backend.repo.get_commit(commit_idx=0)
474 474 commit1 = backend.repo.get_commit(commit_idx=1)
475 475
476 476 response = self.app.get(
477 477 route_path('repo_compare',
478 478 repo_name=backend.repo_name,
479 479 source_ref_type="rev", source_ref=commit0.raw_id,
480 480 target_ref_type="rev", target_ref=commit1.raw_id,
481 481 params=dict(merge='1')
482 482 ),
483 483 extra_environ=xhr_header, )
484 484
485 485 # outgoing commits between those commits
486 486 compare_page = ComparePage(response)
487 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
487 compare_page.contains_commits(commits=[commit1])
488 488
489 489 def test_errors_when_comparing_unknown_source_repo(self, backend):
490 490 repo = backend.repo
491 491 badrepo = 'badrepo'
492 492
493 493 response = self.app.get(
494 494 route_path('repo_compare',
495 495 repo_name=badrepo,
496 496 source_ref_type="rev", source_ref='tip',
497 497 target_ref_type="rev", target_ref='tip',
498 498 params=dict(merge='1', target_repo=repo.repo_name)
499 499 ),
500 500 status=404)
501 501
502 502 def test_errors_when_comparing_unknown_target_repo(self, backend):
503 503 repo = backend.repo
504 504 badrepo = 'badrepo'
505 505
506 506 response = self.app.get(
507 507 route_path('repo_compare',
508 508 repo_name=repo.repo_name,
509 509 source_ref_type="rev", source_ref='tip',
510 510 target_ref_type="rev", target_ref='tip',
511 511 params=dict(merge='1', target_repo=badrepo),
512 512 ),
513 513 status=302)
514 514 redirected = response.follow()
515 515 redirected.mustcontain(
516 516 'Could not find the target repo: `{}`'.format(badrepo))
517 517
518 518 def test_compare_not_in_preview_mode(self, backend_stub):
519 519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
520 520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
521 521
522 522 response = self.app.get(
523 523 route_path('repo_compare',
524 524 repo_name=backend_stub.repo_name,
525 525 source_ref_type="rev", source_ref=commit0.raw_id,
526 526 target_ref_type="rev", target_ref=commit1.raw_id,
527 527 ))
528 528
529 529 # outgoing commits between those commits
530 530 compare_page = ComparePage(response)
531 531 compare_page.swap_is_visible()
532 532 compare_page.target_source_are_enabled()
533 533
534 534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
535 535 orig = backend_hg.create_repo(number_of_commits=1)
536 536 fork = backend_hg.create_fork()
537 537
538 538 settings_util.create_repo_rhodecode_ui(
539 539 orig, 'extensions', value='', key='largefiles', active=False)
540 540 settings_util.create_repo_rhodecode_ui(
541 541 fork, 'extensions', value='', key='largefiles', active=True)
542 542
543 543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
544 544 'MercurialRepository.compare')
545 545 with mock.patch(compare_module) as compare_mock:
546 546 compare_mock.side_effect = RepositoryRequirementError()
547 547
548 548 response = self.app.get(
549 549 route_path('repo_compare',
550 550 repo_name=orig.repo_name,
551 551 source_ref_type="rev", source_ref="tip",
552 552 target_ref_type="rev", target_ref="tip",
553 553 params=dict(merge='1', target_repo=fork.repo_name),
554 554 ),
555 555 status=302)
556 556
557 557 assert_session_flash(
558 558 response,
559 559 'Could not compare repos with different large file settings')
560 560
561 561
562 562 @pytest.mark.usefixtures("autologin_user")
563 563 class TestCompareControllerSvn(object):
564 564
565 565 def test_supports_references_with_path(self, app, backend_svn):
566 566 repo = backend_svn['svn-simple-layout']
567 567 commit_id = repo.get_commit(commit_idx=-1).raw_id
568 568 response = app.get(
569 569 route_path('repo_compare',
570 570 repo_name=repo.repo_name,
571 571 source_ref_type="tag",
572 572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
573 573 target_ref_type="tag",
574 574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
575 575 params=dict(merge='1'),
576 576 ),
577 577 status=200)
578 578
579 579 # Expecting no commits, since both paths are at the same revision
580 580 response.mustcontain('No commits in this compare')
581 581
582 582 # Should find only one file changed when comparing those two tags
583 583 response.mustcontain('example.py')
584 584 compare_page = ComparePage(response)
585 585 compare_page.contains_change_summary(1, 5, 1)
586 586
587 587 def test_shows_commits_if_different_ids(self, app, backend_svn):
588 588 repo = backend_svn['svn-simple-layout']
589 589 source_id = repo.get_commit(commit_idx=-6).raw_id
590 590 target_id = repo.get_commit(commit_idx=-1).raw_id
591 591 response = app.get(
592 592 route_path('repo_compare',
593 593 repo_name=repo.repo_name,
594 594 source_ref_type="tag",
595 595 source_ref="%s@%s" % ('tags/v0.1', source_id),
596 596 target_ref_type="tag",
597 597 target_ref="%s@%s" % ('tags/v0.2', target_id),
598 598 params=dict(merge='1')
599 599 ),
600 600 status=200)
601 601
602 602 # It should show commits
603 603 assert 'No commits in this compare' not in response.body
604 604
605 605 # Should find only one file changed when comparing those two tags
606 606 response.mustcontain('example.py')
607 607 compare_page = ComparePage(response)
608 608 compare_page.contains_change_summary(1, 5, 1)
609 609
610 610
611 611 class ComparePage(AssertResponse):
612 612 """
613 613 Abstracts the page template from the tests
614 614 """
615 615
616 616 def contains_file_links_and_anchors(self, files):
617 617 doc = lxml.html.fromstring(self.response.body)
618 618 for filename, file_id in files:
619 619 self.contains_one_anchor(file_id)
620 620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
621 621 assert len(diffblock) == 2
622 622 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
623 623
624 624 def contains_change_summary(self, files_changed, inserted, deleted):
625 625 template = (
626 626 '{files_changed} file{plural} changed: '
627 627 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
628 628 self.response.mustcontain(template.format(
629 629 files_changed=files_changed,
630 630 plural="s" if files_changed > 1 else "",
631 631 inserted=inserted,
632 632 deleted=deleted))
633 633
634 634 def contains_commits(self, commits, ancestors=None):
635 635 response = self.response
636 636
637 637 for commit in commits:
638 638 # Expecting to see the commit message in an element which
639 639 # has the ID "c-{commit.raw_id}"
640 640 self.element_contains('#c-' + commit.raw_id, commit.message)
641 641 self.contains_one_link(
642 642 'r%s:%s' % (commit.idx, commit.short_id),
643 643 self._commit_url(commit))
644
644 645 if ancestors:
645 646 response.mustcontain('Ancestor')
646 647 for ancestor in ancestors:
647 648 self.contains_one_link(
648 649 ancestor.short_id, self._commit_url(ancestor))
649 650
650 651 def _commit_url(self, commit):
651 652 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
652 653
653 654 def swap_is_hidden(self):
654 655 assert '<a id="btn-swap"' not in self.response.text
655 656
656 657 def swap_is_visible(self):
657 658 assert '<a id="btn-swap"' in self.response.text
658 659
659 660 def target_source_are_disabled(self):
660 661 response = self.response
661 662 response.mustcontain("var enable_fields = false;")
662 663 response.mustcontain('.select2("enable", enable_fields)')
663 664
664 665 def target_source_are_enabled(self):
665 666 response = self.response
666 667 response.mustcontain("var enable_fields = true;")
@@ -1,79 +1,87 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.lib import helpers as h
22 22 from rhodecode.lib.utils2 import safe_int
23 from rhodecode.model.pull_request import get_diff_info
24
25 REVIEWER_API_VERSION = 'V3'
23 26
24 27
25 28 def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
26 29 """
27 30 Returns json struct of a reviewer for frontend
28 31
29 32 :param user: the reviewer
30 33 :param reasons: list of strings of why they are reviewers
31 34 :param mandatory: bool, to set user as mandatory
32 35 """
33 36
34 37 return {
35 38 'user_id': user.user_id,
36 39 'reasons': reasons or [],
37 40 'rules': rules or [],
38 41 'mandatory': mandatory,
39 42 'user_group': user_group,
40 43 'username': user.username,
41 44 'first_name': user.first_name,
42 45 'last_name': user.last_name,
43 46 'user_link': h.link_to_user(user),
44 47 'gravatar_link': h.gravatar_url(user.email, 14),
45 48 }
46 49
47 50
48 51 def get_default_reviewers_data(
49 52 current_user, source_repo, source_commit, target_repo, target_commit):
53 """
54 Return json for default reviewers of a repository
55 """
50 56
51 """ Return json for default reviewers of a repository """
57 diff_info = get_diff_info(
58 source_repo, source_commit.raw_id, target_repo, target_commit.raw_id)
52 59
53 60 reasons = ['Default reviewer', 'Repository owner']
54 61 json_reviewers = [reviewer_as_json(
55 62 user=target_repo.user, reasons=reasons, mandatory=False, rules=None)]
56 63
57 64 return {
58 'api_ver': 'v1', # define version for later possible schema upgrade
65 'api_ver': REVIEWER_API_VERSION, # define version for later possible schema upgrade
66 'diff_info': diff_info,
59 67 'reviewers': json_reviewers,
60 68 'rules': {},
61 69 'rules_data': {},
62 70 }
63 71
64 72
65 73 def validate_default_reviewers(review_members, reviewer_rules):
66 74 """
67 75 Function to validate submitted reviewers against the saved rules
68 76
69 77 """
70 78 reviewers = []
71 79 reviewer_by_id = {}
72 80 for r in review_members:
73 81 reviewer_user_id = safe_int(r['user_id'])
74 82 entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['rules'])
75 83
76 84 reviewer_by_id[reviewer_user_id] = entry
77 85 reviewers.append(entry)
78 86
79 87 return reviewers
@@ -1,1512 +1,1520 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, EmptyRepositoryError)
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 ChangesetComment, ChangesetStatus, Repository)
47 from rhodecode.model.db import (
48 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat., we use for OLD PRs a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.pull_request_state,
112 112 pr.work_in_progress, pr.target_repo.repo_name),
113 113 'name_raw': pr.pull_request_id,
114 114 'status': _render('pullrequest_status',
115 115 pr.calculated_review_status()),
116 116 'title': _render('pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'state': pr.pull_request_state,
125 125 'author': _render('pullrequest_author',
126 126 pr.author.full_contact, ),
127 127 'author_raw': pr.author.full_name,
128 128 'comments': _render('pullrequest_comments', len(comments)),
129 129 'comments_raw': len(comments),
130 130 'closed': pr.is_closed(),
131 131 })
132 132
133 133 data = ({
134 134 'draw': draw,
135 135 'data': data,
136 136 'recordsTotal': pull_requests_total_count,
137 137 'recordsFiltered': pull_requests_total_count,
138 138 })
139 139 return data
140 140
141 141 @LoginRequired()
142 142 @HasRepoPermissionAnyDecorator(
143 143 'repository.read', 'repository.write', 'repository.admin')
144 144 @view_config(
145 145 route_name='pullrequest_show_all', request_method='GET',
146 146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 147 def pull_request_list(self):
148 148 c = self.load_default_context()
149 149
150 150 req_get = self.request.GET
151 151 c.source = str2bool(req_get.get('source'))
152 152 c.closed = str2bool(req_get.get('closed'))
153 153 c.my = str2bool(req_get.get('my'))
154 154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156 156
157 157 c.active = 'open'
158 158 if c.my:
159 159 c.active = 'my'
160 160 if c.closed:
161 161 c.active = 'closed'
162 162 if c.awaiting_review and not c.source:
163 163 c.active = 'awaiting'
164 164 if c.source and not c.awaiting_review:
165 165 c.active = 'source'
166 166 if c.awaiting_my_review:
167 167 c.active = 'awaiting_my'
168 168
169 169 return self._get_template_context(c)
170 170
171 171 @LoginRequired()
172 172 @HasRepoPermissionAnyDecorator(
173 173 'repository.read', 'repository.write', 'repository.admin')
174 174 @view_config(
175 175 route_name='pullrequest_show_all_data', request_method='GET',
176 176 renderer='json_ext', xhr=True)
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 ancestor_commit,
213 214 source_ref_id, target_ref_id,
214 215 target_commit, source_commit, diff_limit, file_limit,
215 216 fulldiff, hide_whitespace_changes, diff_context):
216 217
218 target_ref_id = ancestor_commit.raw_id
217 219 vcs_diff = PullRequestModel().get_diff(
218 220 source_repo, source_ref_id, target_ref_id,
219 221 hide_whitespace_changes, diff_context)
220 222
221 223 diff_processor = diffs.DiffProcessor(
222 224 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 225 file_limit=file_limit, show_full_diff=fulldiff)
224 226
225 227 _parsed = diff_processor.prepare()
226 228
227 229 diffset = codeblocks.DiffSet(
228 230 repo_name=self.db_repo_name,
229 231 source_repo_name=source_repo_name,
230 232 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 233 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 234 )
233 235 diffset = self.path_filter.render_patchset_filtered(
234 236 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235 237
236 238 return diffset
237 239
238 240 def _get_range_diffset(self, source_scm, source_repo,
239 241 commit1, commit2, diff_limit, file_limit,
240 242 fulldiff, hide_whitespace_changes, diff_context):
241 243 vcs_diff = source_scm.get_diff(
242 244 commit1, commit2,
243 245 ignore_whitespace=hide_whitespace_changes,
244 246 context=diff_context)
245 247
246 248 diff_processor = diffs.DiffProcessor(
247 249 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 250 file_limit=file_limit, show_full_diff=fulldiff)
249 251
250 252 _parsed = diff_processor.prepare()
251 253
252 254 diffset = codeblocks.DiffSet(
253 255 repo_name=source_repo.repo_name,
254 256 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 257 target_node_getter=codeblocks.diffset_node_getter(commit2))
256 258
257 259 diffset = self.path_filter.render_patchset_filtered(
258 260 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259 261
260 262 return diffset
261 263
262 264 @LoginRequired()
263 265 @HasRepoPermissionAnyDecorator(
264 266 'repository.read', 'repository.write', 'repository.admin')
265 267 @view_config(
266 268 route_name='pullrequest_show', request_method='GET',
267 269 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 270 def pull_request_show(self):
269 271 _ = self.request.translate
270 272 c = self.load_default_context()
271 273
272 274 pull_request = PullRequest.get_or_404(
273 275 self.request.matchdict['pull_request_id'])
274 276 pull_request_id = pull_request.pull_request_id
275 277
276 278 c.state_progressing = pull_request.is_state_changing()
277 279
278 280 _new_state = {
279 281 'created': PullRequest.STATE_CREATED,
280 282 }.get(self.request.GET.get('force_state'))
283
281 284 if c.is_super_admin and _new_state:
282 285 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 286 h.flash(
284 287 _('Pull Request state was force changed to `{}`').format(_new_state),
285 288 category='success')
286 289 Session().commit()
287 290
288 291 raise HTTPFound(h.route_path(
289 292 'pullrequest_show', repo_name=self.db_repo_name,
290 293 pull_request_id=pull_request_id))
291 294
292 295 version = self.request.GET.get('version')
293 296 from_version = self.request.GET.get('from_version') or version
294 297 merge_checks = self.request.GET.get('merge_checks')
295 298 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296 299
297 300 # fetch global flags of ignore ws or context lines
298 301 diff_context = diffs.get_diff_context(self.request)
299 302 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300 303
301 304 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302 305
303 306 (pull_request_latest,
304 307 pull_request_at_ver,
305 308 pull_request_display_obj,
306 309 at_version) = PullRequestModel().get_pr_version(
307 310 pull_request_id, version=version)
308 311 pr_closed = pull_request_latest.is_closed()
309 312
310 313 if pr_closed and (version or from_version):
311 314 # not allow to browse versions
312 315 raise HTTPFound(h.route_path(
313 316 'pullrequest_show', repo_name=self.db_repo_name,
314 317 pull_request_id=pull_request_id))
315 318
316 319 versions = pull_request_display_obj.versions()
317 320 # used to store per-commit range diffs
318 321 c.changes = collections.OrderedDict()
319 322 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320 323
321 324 c.at_version = at_version
322 325 c.at_version_num = (at_version
323 326 if at_version and at_version != 'latest'
324 327 else None)
325 328 c.at_version_pos = ChangesetComment.get_index_from_version(
326 329 c.at_version_num, versions)
327 330
328 331 (prev_pull_request_latest,
329 332 prev_pull_request_at_ver,
330 333 prev_pull_request_display_obj,
331 334 prev_at_version) = PullRequestModel().get_pr_version(
332 335 pull_request_id, version=from_version)
333 336
334 337 c.from_version = prev_at_version
335 338 c.from_version_num = (prev_at_version
336 339 if prev_at_version and prev_at_version != 'latest'
337 340 else None)
338 341 c.from_version_pos = ChangesetComment.get_index_from_version(
339 342 c.from_version_num, versions)
340 343
341 344 # define if we're in COMPARE mode or VIEW at version mode
342 345 compare = at_version != prev_at_version
343 346
344 347 # pull_requests repo_name we opened it against
345 348 # ie. target_repo must match
346 349 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 350 raise HTTPNotFound()
348 351
349 352 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 353 pull_request_at_ver)
351 354
352 355 c.pull_request = pull_request_display_obj
353 356 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 357 c.pull_request_latest = pull_request_latest
355 358
356 359 if compare or (at_version and not at_version == 'latest'):
357 360 c.allowed_to_change_status = False
358 361 c.allowed_to_update = False
359 362 c.allowed_to_merge = False
360 363 c.allowed_to_delete = False
361 364 c.allowed_to_comment = False
362 365 c.allowed_to_close = False
363 366 else:
364 367 can_change_status = PullRequestModel().check_user_change_status(
365 368 pull_request_at_ver, self._rhodecode_user)
366 369 c.allowed_to_change_status = can_change_status and not pr_closed
367 370
368 371 c.allowed_to_update = PullRequestModel().check_user_update(
369 372 pull_request_latest, self._rhodecode_user) and not pr_closed
370 373 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 374 pull_request_latest, self._rhodecode_user) and not pr_closed
372 375 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 376 pull_request_latest, self._rhodecode_user) and not pr_closed
374 377 c.allowed_to_comment = not pr_closed
375 378 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376 379
377 380 c.forbid_adding_reviewers = False
378 381 c.forbid_author_to_review = False
379 382 c.forbid_commit_author_to_review = False
380 383
381 384 if pull_request_latest.reviewer_data and \
382 385 'rules' in pull_request_latest.reviewer_data:
383 386 rules = pull_request_latest.reviewer_data['rules'] or {}
384 387 try:
385 388 c.forbid_adding_reviewers = rules.get(
386 389 'forbid_adding_reviewers')
387 390 c.forbid_author_to_review = rules.get(
388 391 'forbid_author_to_review')
389 392 c.forbid_commit_author_to_review = rules.get(
390 393 'forbid_commit_author_to_review')
391 394 except Exception:
392 395 pass
393 396
394 397 # check merge capabilities
395 398 _merge_check = MergeCheck.validate(
396 399 pull_request_latest, auth_user=self._rhodecode_user,
397 400 translator=self.request.translate,
398 401 force_shadow_repo_refresh=force_refresh)
399 402
400 403 c.pr_merge_errors = _merge_check.error_details
401 404 c.pr_merge_possible = not _merge_check.failed
402 405 c.pr_merge_message = _merge_check.merge_msg
403 406 c.pr_merge_source_commit = _merge_check.source_commit
404 407 c.pr_merge_target_commit = _merge_check.target_commit
405 408
406 409 c.pr_merge_info = MergeCheck.get_merge_conditions(
407 410 pull_request_latest, translator=self.request.translate)
408 411
409 412 c.pull_request_review_status = _merge_check.review_status
410 413 if merge_checks:
411 414 self.request.override_renderer = \
412 415 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
413 416 return self._get_template_context(c)
414 417
415 418 comments_model = CommentsModel()
416 419
417 420 # reviewers and statuses
418 421 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
419 422 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
420 423
421 424 # GENERAL COMMENTS with versions #
422 425 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
423 426 q = q.order_by(ChangesetComment.comment_id.asc())
424 427 general_comments = q
425 428
426 429 # pick comments we want to render at current version
427 430 c.comment_versions = comments_model.aggregate_comments(
428 431 general_comments, versions, c.at_version_num)
429 432 c.comments = c.comment_versions[c.at_version_num]['until']
430 433
431 434 # INLINE COMMENTS with versions #
432 435 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
433 436 q = q.order_by(ChangesetComment.comment_id.asc())
434 437 inline_comments = q
435 438
436 439 c.inline_versions = comments_model.aggregate_comments(
437 440 inline_comments, versions, c.at_version_num, inline=True)
438 441
439 442 # TODOs
440 443 c.unresolved_comments = CommentsModel() \
441 444 .get_pull_request_unresolved_todos(pull_request)
442 445 c.resolved_comments = CommentsModel() \
443 446 .get_pull_request_resolved_todos(pull_request)
444 447
445 448 # inject latest version
446 449 latest_ver = PullRequest.get_pr_display_object(
447 450 pull_request_latest, pull_request_latest)
448 451
449 452 c.versions = versions + [latest_ver]
450 453
451 454 # if we use version, then do not show later comments
452 455 # than current version
453 456 display_inline_comments = collections.defaultdict(
454 457 lambda: collections.defaultdict(list))
455 458 for co in inline_comments:
456 459 if c.at_version_num:
457 460 # pick comments that are at least UPTO given version, so we
458 461 # don't render comments for higher version
459 462 should_render = co.pull_request_version_id and \
460 463 co.pull_request_version_id <= c.at_version_num
461 464 else:
462 465 # showing all, for 'latest'
463 466 should_render = True
464 467
465 468 if should_render:
466 469 display_inline_comments[co.f_path][co.line_no].append(co)
467 470
468 471 # load diff data into template context, if we use compare mode then
469 472 # diff is calculated based on changes between versions of PR
470 473
471 474 source_repo = pull_request_at_ver.source_repo
472 475 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
473 476
474 477 target_repo = pull_request_at_ver.target_repo
475 478 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
476 479
477 480 if compare:
478 481 # in compare switch the diff base to latest commit from prev version
479 482 target_ref_id = prev_pull_request_display_obj.revisions[0]
480 483
481 484 # despite opening commits for bookmarks/branches/tags, we always
482 485 # convert this to rev to prevent changes after bookmark or branch change
483 486 c.source_ref_type = 'rev'
484 487 c.source_ref = source_ref_id
485 488
486 489 c.target_ref_type = 'rev'
487 490 c.target_ref = target_ref_id
488 491
489 492 c.source_repo = source_repo
490 493 c.target_repo = target_repo
491 494
492 495 c.commit_ranges = []
493 496 source_commit = EmptyCommit()
494 497 target_commit = EmptyCommit()
495 498 c.missing_requirements = False
496 499
497 500 source_scm = source_repo.scm_instance()
498 501 target_scm = target_repo.scm_instance()
499 502
500 503 shadow_scm = None
501 504 try:
502 505 shadow_scm = pull_request_latest.get_shadow_repo()
503 506 except Exception:
504 507 log.debug('Failed to get shadow repo', exc_info=True)
505 508 # try first the existing source_repo, and then shadow
506 509 # repo if we can obtain one
507 510 commits_source_repo = source_scm
508 511 if shadow_scm:
509 512 commits_source_repo = shadow_scm
510 513
511 514 c.commits_source_repo = commits_source_repo
512 515 c.ancestor = None # set it to None, to hide it from PR view
513 516
514 517 # empty version means latest, so we keep this to prevent
515 518 # double caching
516 519 version_normalized = version or 'latest'
517 520 from_version_normalized = from_version or 'latest'
518 521
519 522 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
520 523 cache_file_path = diff_cache_exist(
521 524 cache_path, 'pull_request', pull_request_id, version_normalized,
522 525 from_version_normalized, source_ref_id, target_ref_id,
523 526 hide_whitespace_changes, diff_context, c.fulldiff)
524 527
525 528 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
526 529 force_recache = self.get_recache_flag()
527 530
528 531 cached_diff = None
529 532 if caching_enabled:
530 533 cached_diff = load_cached_diff(cache_file_path)
531 534
532 535 has_proper_commit_cache = (
533 536 cached_diff and cached_diff.get('commits')
534 537 and len(cached_diff.get('commits', [])) == 5
535 538 and cached_diff.get('commits')[0]
536 539 and cached_diff.get('commits')[3])
537 540
538 541 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
539 542 diff_commit_cache = \
540 543 (ancestor_commit, commit_cache, missing_requirements,
541 544 source_commit, target_commit) = cached_diff['commits']
542 545 else:
543 546 # NOTE(marcink): we reach potentially unreachable errors when a PR has
544 547 # merge errors resulting in potentially hidden commits in the shadow repo.
545 548 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
546 549 and _merge_check.merge_response
547 550 maybe_unreachable = maybe_unreachable \
548 551 and _merge_check.merge_response.metadata.get('unresolved_files')
549 552 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
550 553 diff_commit_cache = \
551 554 (ancestor_commit, commit_cache, missing_requirements,
552 555 source_commit, target_commit) = self.get_commits(
553 556 commits_source_repo,
554 557 pull_request_at_ver,
555 558 source_commit,
556 559 source_ref_id,
557 560 source_scm,
558 561 target_commit,
559 562 target_ref_id,
560 target_scm, maybe_unreachable=maybe_unreachable)
563 target_scm,
564 maybe_unreachable=maybe_unreachable)
561 565
562 566 # register our commit range
563 567 for comm in commit_cache.values():
564 568 c.commit_ranges.append(comm)
565 569
566 570 c.missing_requirements = missing_requirements
567 571 c.ancestor_commit = ancestor_commit
568 572 c.statuses = source_repo.statuses(
569 573 [x.raw_id for x in c.commit_ranges])
570 574
571 575 # auto collapse if we have more than limit
572 576 collapse_limit = diffs.DiffProcessor._collapse_commits_over
573 577 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
574 578 c.compare_mode = compare
575 579
576 580 # diff_limit is the old behavior, will cut off the whole diff
577 581 # if the limit is applied otherwise will just hide the
578 582 # big files from the front-end
579 583 diff_limit = c.visual.cut_off_limit_diff
580 584 file_limit = c.visual.cut_off_limit_file
581 585
582 586 c.missing_commits = False
583 587 if (c.missing_requirements
584 588 or isinstance(source_commit, EmptyCommit)
585 589 or source_commit == target_commit):
586 590
587 591 c.missing_commits = True
588 592 else:
589 593 c.inline_comments = display_inline_comments
590 594
591 595 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
592 596 if not force_recache and has_proper_diff_cache:
593 597 c.diffset = cached_diff['diff']
594 (ancestor_commit, commit_cache, missing_requirements,
595 source_commit, target_commit) = cached_diff['commits']
596 598 else:
597 599 c.diffset = self._get_diffset(
598 600 c.source_repo.repo_name, commits_source_repo,
601 c.ancestor_commit,
599 602 source_ref_id, target_ref_id,
600 603 target_commit, source_commit,
601 604 diff_limit, file_limit, c.fulldiff,
602 605 hide_whitespace_changes, diff_context)
603 606
604 607 # save cached diff
605 608 if caching_enabled:
606 609 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
607 610
608 611 c.limited_diff = c.diffset.limited_diff
609 612
610 613 # calculate removed files that are bound to comments
611 614 comment_deleted_files = [
612 615 fname for fname in display_inline_comments
613 616 if fname not in c.diffset.file_stats]
614 617
615 618 c.deleted_files_comments = collections.defaultdict(dict)
616 619 for fname, per_line_comments in display_inline_comments.items():
617 620 if fname in comment_deleted_files:
618 621 c.deleted_files_comments[fname]['stats'] = 0
619 622 c.deleted_files_comments[fname]['comments'] = list()
620 623 for lno, comments in per_line_comments.items():
621 624 c.deleted_files_comments[fname]['comments'].extend(comments)
622 625
623 626 # maybe calculate the range diff
624 627 if c.range_diff_on:
625 628 # TODO(marcink): set whitespace/context
626 629 context_lcl = 3
627 630 ign_whitespace_lcl = False
628 631
629 632 for commit in c.commit_ranges:
630 633 commit2 = commit
631 634 commit1 = commit.first_parent
632 635
633 636 range_diff_cache_file_path = diff_cache_exist(
634 637 cache_path, 'diff', commit.raw_id,
635 638 ign_whitespace_lcl, context_lcl, c.fulldiff)
636 639
637 640 cached_diff = None
638 641 if caching_enabled:
639 642 cached_diff = load_cached_diff(range_diff_cache_file_path)
640 643
641 644 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
642 645 if not force_recache and has_proper_diff_cache:
643 646 diffset = cached_diff['diff']
644 647 else:
645 648 diffset = self._get_range_diffset(
646 649 commits_source_repo, source_repo,
647 650 commit1, commit2, diff_limit, file_limit,
648 651 c.fulldiff, ign_whitespace_lcl, context_lcl
649 652 )
650 653
651 654 # save cached diff
652 655 if caching_enabled:
653 656 cache_diff(range_diff_cache_file_path, diffset, None)
654 657
655 658 c.changes[commit.raw_id] = diffset
656 659
657 660 # this is a hack to properly display links, when creating PR, the
658 661 # compare view and others uses different notation, and
659 662 # compare_commits.mako renders links based on the target_repo.
660 663 # We need to swap that here to generate it properly on the html side
661 664 c.target_repo = c.source_repo
662 665
663 666 c.commit_statuses = ChangesetStatus.STATUSES
664 667
665 668 c.show_version_changes = not pr_closed
666 669 if c.show_version_changes:
667 670 cur_obj = pull_request_at_ver
668 671 prev_obj = prev_pull_request_at_ver
669 672
670 673 old_commit_ids = prev_obj.revisions
671 674 new_commit_ids = cur_obj.revisions
672 675 commit_changes = PullRequestModel()._calculate_commit_id_changes(
673 676 old_commit_ids, new_commit_ids)
674 677 c.commit_changes_summary = commit_changes
675 678
676 679 # calculate the diff for commits between versions
677 680 c.commit_changes = []
678 mark = lambda cs, fw: list(
679 h.itertools.izip_longest([], cs, fillvalue=fw))
681
682 def mark(cs, fw):
683 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
684
680 685 for c_type, raw_id in mark(commit_changes.added, 'a') \
681 686 + mark(commit_changes.removed, 'r') \
682 687 + mark(commit_changes.common, 'c'):
683 688
684 689 if raw_id in commit_cache:
685 690 commit = commit_cache[raw_id]
686 691 else:
687 692 try:
688 693 commit = commits_source_repo.get_commit(raw_id)
689 694 except CommitDoesNotExistError:
690 695 # in case we fail extracting still use "dummy" commit
691 696 # for display in commit diff
692 697 commit = h.AttributeDict(
693 698 {'raw_id': raw_id,
694 699 'message': 'EMPTY or MISSING COMMIT'})
695 700 c.commit_changes.append([c_type, commit])
696 701
697 702 # current user review statuses for each version
698 703 c.review_versions = {}
699 704 if self._rhodecode_user.user_id in allowed_reviewers:
700 705 for co in general_comments:
701 706 if co.author.user_id == self._rhodecode_user.user_id:
702 707 status = co.status_change
703 708 if status:
704 709 _ver_pr = status[0].comment.pull_request_version_id
705 710 c.review_versions[_ver_pr] = status[0]
706 711
707 712 return self._get_template_context(c)
708 713
709 714 def get_commits(
710 715 self, commits_source_repo, pull_request_at_ver, source_commit,
711 716 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
712 717 maybe_unreachable=False):
713 718
714 719 commit_cache = collections.OrderedDict()
715 720 missing_requirements = False
716 721
717 722 try:
718 723 pre_load = ["author", "date", "message", "branch", "parents"]
719 724
720 725 pull_request_commits = pull_request_at_ver.revisions
721 726 log.debug('Loading %s commits from %s',
722 727 len(pull_request_commits), commits_source_repo)
723 728
724 729 for rev in pull_request_commits:
725 730 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
726 731 maybe_unreachable=maybe_unreachable)
727 732 commit_cache[comm.raw_id] = comm
728 733
729 734 # Order here matters, we first need to get target, and then
730 735 # the source
731 736 target_commit = commits_source_repo.get_commit(
732 737 commit_id=safe_str(target_ref_id))
733 738
734 739 source_commit = commits_source_repo.get_commit(
735 740 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
736 741 except CommitDoesNotExistError:
737 742 log.warning('Failed to get commit from `{}` repo'.format(
738 743 commits_source_repo), exc_info=True)
739 744 except RepositoryRequirementError:
740 745 log.warning('Failed to get all required data from repo', exc_info=True)
741 746 missing_requirements = True
742 ancestor_commit = None
747
748 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
749
743 750 try:
744 ancestor_id = source_scm.get_common_ancestor(
745 source_commit.raw_id, target_commit.raw_id, target_scm)
746 ancestor_commit = source_scm.get_commit(ancestor_id)
751 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
747 752 except Exception:
748 753 ancestor_commit = None
754
749 755 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
750 756
751 757 def assure_not_empty_repo(self):
752 758 _ = self.request.translate
753 759
754 760 try:
755 761 self.db_repo.scm_instance().get_commit()
756 762 except EmptyRepositoryError:
757 763 h.flash(h.literal(_('There are no commits yet')),
758 764 category='warning')
759 765 raise HTTPFound(
760 766 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
761 767
762 768 @LoginRequired()
763 769 @NotAnonymous()
764 770 @HasRepoPermissionAnyDecorator(
765 771 'repository.read', 'repository.write', 'repository.admin')
766 772 @view_config(
767 773 route_name='pullrequest_new', request_method='GET',
768 774 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
769 775 def pull_request_new(self):
770 776 _ = self.request.translate
771 777 c = self.load_default_context()
772 778
773 779 self.assure_not_empty_repo()
774 780 source_repo = self.db_repo
775 781
776 782 commit_id = self.request.GET.get('commit')
777 783 branch_ref = self.request.GET.get('branch')
778 784 bookmark_ref = self.request.GET.get('bookmark')
779 785
780 786 try:
781 787 source_repo_data = PullRequestModel().generate_repo_data(
782 788 source_repo, commit_id=commit_id,
783 789 branch=branch_ref, bookmark=bookmark_ref,
784 790 translator=self.request.translate)
785 791 except CommitDoesNotExistError as e:
786 792 log.exception(e)
787 793 h.flash(_('Commit does not exist'), 'error')
788 794 raise HTTPFound(
789 795 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
790 796
791 797 default_target_repo = source_repo
792 798
793 799 if source_repo.parent and c.has_origin_repo_read_perm:
794 800 parent_vcs_obj = source_repo.parent.scm_instance()
795 801 if parent_vcs_obj and not parent_vcs_obj.is_empty():
796 802 # change default if we have a parent repo
797 803 default_target_repo = source_repo.parent
798 804
799 805 target_repo_data = PullRequestModel().generate_repo_data(
800 806 default_target_repo, translator=self.request.translate)
801 807
802 808 selected_source_ref = source_repo_data['refs']['selected_ref']
803 809 title_source_ref = ''
804 810 if selected_source_ref:
805 811 title_source_ref = selected_source_ref.split(':', 2)[1]
806 812 c.default_title = PullRequestModel().generate_pullrequest_title(
807 813 source=source_repo.repo_name,
808 814 source_ref=title_source_ref,
809 815 target=default_target_repo.repo_name
810 816 )
811 817
812 818 c.default_repo_data = {
813 819 'source_repo_name': source_repo.repo_name,
814 820 'source_refs_json': json.dumps(source_repo_data),
815 821 'target_repo_name': default_target_repo.repo_name,
816 822 'target_refs_json': json.dumps(target_repo_data),
817 823 }
818 824 c.default_source_ref = selected_source_ref
819 825
820 826 return self._get_template_context(c)
821 827
822 828 @LoginRequired()
823 829 @NotAnonymous()
824 830 @HasRepoPermissionAnyDecorator(
825 831 'repository.read', 'repository.write', 'repository.admin')
826 832 @view_config(
827 833 route_name='pullrequest_repo_refs', request_method='GET',
828 834 renderer='json_ext', xhr=True)
829 835 def pull_request_repo_refs(self):
830 836 self.load_default_context()
831 837 target_repo_name = self.request.matchdict['target_repo_name']
832 838 repo = Repository.get_by_repo_name(target_repo_name)
833 839 if not repo:
834 840 raise HTTPNotFound()
835 841
836 842 target_perm = HasRepoPermissionAny(
837 843 'repository.read', 'repository.write', 'repository.admin')(
838 844 target_repo_name)
839 845 if not target_perm:
840 846 raise HTTPNotFound()
841 847
842 848 return PullRequestModel().generate_repo_data(
843 849 repo, translator=self.request.translate)
844 850
845 851 @LoginRequired()
846 852 @NotAnonymous()
847 853 @HasRepoPermissionAnyDecorator(
848 854 'repository.read', 'repository.write', 'repository.admin')
849 855 @view_config(
850 856 route_name='pullrequest_repo_targets', request_method='GET',
851 857 renderer='json_ext', xhr=True)
852 858 def pullrequest_repo_targets(self):
853 859 _ = self.request.translate
854 860 filter_query = self.request.GET.get('query')
855 861
856 862 # get the parents
857 863 parent_target_repos = []
858 864 if self.db_repo.parent:
859 865 parents_query = Repository.query() \
860 866 .order_by(func.length(Repository.repo_name)) \
861 867 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
862 868
863 869 if filter_query:
864 870 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
865 871 parents_query = parents_query.filter(
866 872 Repository.repo_name.ilike(ilike_expression))
867 873 parents = parents_query.limit(20).all()
868 874
869 875 for parent in parents:
870 876 parent_vcs_obj = parent.scm_instance()
871 877 if parent_vcs_obj and not parent_vcs_obj.is_empty():
872 878 parent_target_repos.append(parent)
873 879
874 880 # get other forks, and repo itself
875 881 query = Repository.query() \
876 882 .order_by(func.length(Repository.repo_name)) \
877 883 .filter(
878 884 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
879 885 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
880 886 ) \
881 887 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
882 888
883 889 if filter_query:
884 890 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
885 891 query = query.filter(Repository.repo_name.ilike(ilike_expression))
886 892
887 893 limit = max(20 - len(parent_target_repos), 5) # not less then 5
888 894 target_repos = query.limit(limit).all()
889 895
890 896 all_target_repos = target_repos + parent_target_repos
891 897
892 898 repos = []
893 899 # This checks permissions to the repositories
894 900 for obj in ScmModel().get_repos(all_target_repos):
895 901 repos.append({
896 902 'id': obj['name'],
897 903 'text': obj['name'],
898 904 'type': 'repo',
899 905 'repo_id': obj['dbrepo']['repo_id'],
900 906 'repo_type': obj['dbrepo']['repo_type'],
901 907 'private': obj['dbrepo']['private'],
902 908
903 909 })
904 910
905 911 data = {
906 912 'more': False,
907 913 'results': [{
908 914 'text': _('Repositories'),
909 915 'children': repos
910 916 }] if repos else []
911 917 }
912 918 return data
913 919
914 920 @LoginRequired()
915 921 @NotAnonymous()
916 922 @HasRepoPermissionAnyDecorator(
917 923 'repository.read', 'repository.write', 'repository.admin')
918 924 @CSRFRequired()
919 925 @view_config(
920 926 route_name='pullrequest_create', request_method='POST',
921 927 renderer=None)
922 928 def pull_request_create(self):
923 929 _ = self.request.translate
924 930 self.assure_not_empty_repo()
925 931 self.load_default_context()
926 932
927 933 controls = peppercorn.parse(self.request.POST.items())
928 934
929 935 try:
930 936 form = PullRequestForm(
931 937 self.request.translate, self.db_repo.repo_id)()
932 938 _form = form.to_python(controls)
933 939 except formencode.Invalid as errors:
934 940 if errors.error_dict.get('revisions'):
935 941 msg = 'Revisions: %s' % errors.error_dict['revisions']
936 942 elif errors.error_dict.get('pullrequest_title'):
937 943 msg = errors.error_dict.get('pullrequest_title')
938 944 else:
939 945 msg = _('Error creating pull request: {}').format(errors)
940 946 log.exception(msg)
941 947 h.flash(msg, 'error')
942 948
943 949 # would rather just go back to form ...
944 950 raise HTTPFound(
945 951 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
946 952
947 953 source_repo = _form['source_repo']
948 954 source_ref = _form['source_ref']
949 955 target_repo = _form['target_repo']
950 956 target_ref = _form['target_ref']
951 957 commit_ids = _form['revisions'][::-1]
958 common_ancestor_id = _form['common_ancestor']
952 959
953 960 # find the ancestor for this pr
954 961 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
955 962 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
956 963
957 964 if not (source_db_repo or target_db_repo):
958 965 h.flash(_('source_repo or target repo not found'), category='error')
959 966 raise HTTPFound(
960 967 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
961 968
962 969 # re-check permissions again here
963 970 # source_repo we must have read permissions
964 971
965 972 source_perm = HasRepoPermissionAny(
966 973 'repository.read', 'repository.write', 'repository.admin')(
967 974 source_db_repo.repo_name)
968 975 if not source_perm:
969 976 msg = _('Not Enough permissions to source repo `{}`.'.format(
970 977 source_db_repo.repo_name))
971 978 h.flash(msg, category='error')
972 979 # copy the args back to redirect
973 980 org_query = self.request.GET.mixed()
974 981 raise HTTPFound(
975 982 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
976 983 _query=org_query))
977 984
978 985 # target repo we must have read permissions, and also later on
979 986 # we want to check branch permissions here
980 987 target_perm = HasRepoPermissionAny(
981 988 'repository.read', 'repository.write', 'repository.admin')(
982 989 target_db_repo.repo_name)
983 990 if not target_perm:
984 991 msg = _('Not Enough permissions to target repo `{}`.'.format(
985 992 target_db_repo.repo_name))
986 993 h.flash(msg, category='error')
987 994 # copy the args back to redirect
988 995 org_query = self.request.GET.mixed()
989 996 raise HTTPFound(
990 997 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
991 998 _query=org_query))
992 999
993 1000 source_scm = source_db_repo.scm_instance()
994 1001 target_scm = target_db_repo.scm_instance()
995 1002
996 1003 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
997 1004 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
998 1005
999 1006 ancestor = source_scm.get_common_ancestor(
1000 1007 source_commit.raw_id, target_commit.raw_id, target_scm)
1001 1008
1002 1009 # recalculate target ref based on ancestor
1003 1010 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1004 1011 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1005 1012
1006 1013 get_default_reviewers_data, validate_default_reviewers = \
1007 1014 PullRequestModel().get_reviewer_functions()
1008 1015
1009 1016 # recalculate reviewers logic, to make sure we can validate this
1010 1017 reviewer_rules = get_default_reviewers_data(
1011 1018 self._rhodecode_db_user, source_db_repo,
1012 1019 source_commit, target_db_repo, target_commit)
1013 1020
1014 1021 given_reviewers = _form['review_members']
1015 1022 reviewers = validate_default_reviewers(
1016 1023 given_reviewers, reviewer_rules)
1017 1024
1018 1025 pullrequest_title = _form['pullrequest_title']
1019 1026 title_source_ref = source_ref.split(':', 2)[1]
1020 1027 if not pullrequest_title:
1021 1028 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1022 1029 source=source_repo,
1023 1030 source_ref=title_source_ref,
1024 1031 target=target_repo
1025 1032 )
1026 1033
1027 1034 description = _form['pullrequest_desc']
1028 1035 description_renderer = _form['description_renderer']
1029 1036
1030 1037 try:
1031 1038 pull_request = PullRequestModel().create(
1032 1039 created_by=self._rhodecode_user.user_id,
1033 1040 source_repo=source_repo,
1034 1041 source_ref=source_ref,
1035 1042 target_repo=target_repo,
1036 1043 target_ref=target_ref,
1037 1044 revisions=commit_ids,
1045 common_ancestor_id=common_ancestor_id,
1038 1046 reviewers=reviewers,
1039 1047 title=pullrequest_title,
1040 1048 description=description,
1041 1049 description_renderer=description_renderer,
1042 1050 reviewer_data=reviewer_rules,
1043 1051 auth_user=self._rhodecode_user
1044 1052 )
1045 1053 Session().commit()
1046 1054
1047 1055 h.flash(_('Successfully opened new pull request'),
1048 1056 category='success')
1049 1057 except Exception:
1050 1058 msg = _('Error occurred during creation of this pull request.')
1051 1059 log.exception(msg)
1052 1060 h.flash(msg, category='error')
1053 1061
1054 1062 # copy the args back to redirect
1055 1063 org_query = self.request.GET.mixed()
1056 1064 raise HTTPFound(
1057 1065 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1058 1066 _query=org_query))
1059 1067
1060 1068 raise HTTPFound(
1061 1069 h.route_path('pullrequest_show', repo_name=target_repo,
1062 1070 pull_request_id=pull_request.pull_request_id))
1063 1071
1064 1072 @LoginRequired()
1065 1073 @NotAnonymous()
1066 1074 @HasRepoPermissionAnyDecorator(
1067 1075 'repository.read', 'repository.write', 'repository.admin')
1068 1076 @CSRFRequired()
1069 1077 @view_config(
1070 1078 route_name='pullrequest_update', request_method='POST',
1071 1079 renderer='json_ext')
1072 1080 def pull_request_update(self):
1073 1081 pull_request = PullRequest.get_or_404(
1074 1082 self.request.matchdict['pull_request_id'])
1075 1083 _ = self.request.translate
1076 1084
1077 1085 self.load_default_context()
1078 1086 redirect_url = None
1079 1087
1080 1088 if pull_request.is_closed():
1081 1089 log.debug('update: forbidden because pull request is closed')
1082 1090 msg = _(u'Cannot update closed pull requests.')
1083 1091 h.flash(msg, category='error')
1084 1092 return {'response': True,
1085 1093 'redirect_url': redirect_url}
1086 1094
1087 1095 is_state_changing = pull_request.is_state_changing()
1088 1096
1089 1097 # only owner or admin can update it
1090 1098 allowed_to_update = PullRequestModel().check_user_update(
1091 1099 pull_request, self._rhodecode_user)
1092 1100 if allowed_to_update:
1093 1101 controls = peppercorn.parse(self.request.POST.items())
1094 1102 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1095 1103
1096 1104 if 'review_members' in controls:
1097 1105 self._update_reviewers(
1098 1106 pull_request, controls['review_members'],
1099 1107 pull_request.reviewer_data)
1100 1108 elif str2bool(self.request.POST.get('update_commits', 'false')):
1101 1109 if is_state_changing:
1102 1110 log.debug('commits update: forbidden because pull request is in state %s',
1103 1111 pull_request.pull_request_state)
1104 1112 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1105 1113 u'Current state is: `{}`').format(
1106 1114 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1107 1115 h.flash(msg, category='error')
1108 1116 return {'response': True,
1109 1117 'redirect_url': redirect_url}
1110 1118
1111 1119 self._update_commits(pull_request)
1112 1120 if force_refresh:
1113 1121 redirect_url = h.route_path(
1114 1122 'pullrequest_show', repo_name=self.db_repo_name,
1115 1123 pull_request_id=pull_request.pull_request_id,
1116 1124 _query={"force_refresh": 1})
1117 1125 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1118 1126 self._edit_pull_request(pull_request)
1119 1127 else:
1120 1128 raise HTTPBadRequest()
1121 1129
1122 1130 return {'response': True,
1123 1131 'redirect_url': redirect_url}
1124 1132 raise HTTPForbidden()
1125 1133
1126 1134 def _edit_pull_request(self, pull_request):
1127 1135 _ = self.request.translate
1128 1136
1129 1137 try:
1130 1138 PullRequestModel().edit(
1131 1139 pull_request,
1132 1140 self.request.POST.get('title'),
1133 1141 self.request.POST.get('description'),
1134 1142 self.request.POST.get('description_renderer'),
1135 1143 self._rhodecode_user)
1136 1144 except ValueError:
1137 1145 msg = _(u'Cannot update closed pull requests.')
1138 1146 h.flash(msg, category='error')
1139 1147 return
1140 1148 else:
1141 1149 Session().commit()
1142 1150
1143 1151 msg = _(u'Pull request title & description updated.')
1144 1152 h.flash(msg, category='success')
1145 1153 return
1146 1154
1147 1155 def _update_commits(self, pull_request):
1148 1156 _ = self.request.translate
1149 1157
1150 1158 with pull_request.set_state(PullRequest.STATE_UPDATING):
1151 1159 resp = PullRequestModel().update_commits(
1152 1160 pull_request, self._rhodecode_db_user)
1153 1161
1154 1162 if resp.executed:
1155 1163
1156 1164 if resp.target_changed and resp.source_changed:
1157 1165 changed = 'target and source repositories'
1158 1166 elif resp.target_changed and not resp.source_changed:
1159 1167 changed = 'target repository'
1160 1168 elif not resp.target_changed and resp.source_changed:
1161 1169 changed = 'source repository'
1162 1170 else:
1163 1171 changed = 'nothing'
1164 1172
1165 1173 msg = _(u'Pull request updated to "{source_commit_id}" with '
1166 1174 u'{count_added} added, {count_removed} removed commits. '
1167 1175 u'Source of changes: {change_source}')
1168 1176 msg = msg.format(
1169 1177 source_commit_id=pull_request.source_ref_parts.commit_id,
1170 1178 count_added=len(resp.changes.added),
1171 1179 count_removed=len(resp.changes.removed),
1172 1180 change_source=changed)
1173 1181 h.flash(msg, category='success')
1174 1182
1175 1183 channel = '/repo${}$/pr/{}'.format(
1176 1184 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1177 1185 message = msg + (
1178 1186 ' - <a onclick="window.location.reload()">'
1179 1187 '<strong>{}</strong></a>'.format(_('Reload page')))
1180 1188 channelstream.post_message(
1181 1189 channel, message, self._rhodecode_user.username,
1182 1190 registry=self.request.registry)
1183 1191 else:
1184 1192 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1185 1193 warning_reasons = [
1186 1194 UpdateFailureReason.NO_CHANGE,
1187 1195 UpdateFailureReason.WRONG_REF_TYPE,
1188 1196 ]
1189 1197 category = 'warning' if resp.reason in warning_reasons else 'error'
1190 1198 h.flash(msg, category=category)
1191 1199
1192 1200 @LoginRequired()
1193 1201 @NotAnonymous()
1194 1202 @HasRepoPermissionAnyDecorator(
1195 1203 'repository.read', 'repository.write', 'repository.admin')
1196 1204 @CSRFRequired()
1197 1205 @view_config(
1198 1206 route_name='pullrequest_merge', request_method='POST',
1199 1207 renderer='json_ext')
1200 1208 def pull_request_merge(self):
1201 1209 """
1202 1210 Merge will perform a server-side merge of the specified
1203 1211 pull request, if the pull request is approved and mergeable.
1204 1212 After successful merging, the pull request is automatically
1205 1213 closed, with a relevant comment.
1206 1214 """
1207 1215 pull_request = PullRequest.get_or_404(
1208 1216 self.request.matchdict['pull_request_id'])
1209 1217 _ = self.request.translate
1210 1218
1211 1219 if pull_request.is_state_changing():
1212 1220 log.debug('show: forbidden because pull request is in state %s',
1213 1221 pull_request.pull_request_state)
1214 1222 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1215 1223 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1216 1224 pull_request.pull_request_state)
1217 1225 h.flash(msg, category='error')
1218 1226 raise HTTPFound(
1219 1227 h.route_path('pullrequest_show',
1220 1228 repo_name=pull_request.target_repo.repo_name,
1221 1229 pull_request_id=pull_request.pull_request_id))
1222 1230
1223 1231 self.load_default_context()
1224 1232
1225 1233 with pull_request.set_state(PullRequest.STATE_UPDATING):
1226 1234 check = MergeCheck.validate(
1227 1235 pull_request, auth_user=self._rhodecode_user,
1228 1236 translator=self.request.translate)
1229 1237 merge_possible = not check.failed
1230 1238
1231 1239 for err_type, error_msg in check.errors:
1232 1240 h.flash(error_msg, category=err_type)
1233 1241
1234 1242 if merge_possible:
1235 1243 log.debug("Pre-conditions checked, trying to merge.")
1236 1244 extras = vcs_operation_context(
1237 1245 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1238 1246 username=self._rhodecode_db_user.username, action='push',
1239 1247 scm=pull_request.target_repo.repo_type)
1240 1248 with pull_request.set_state(PullRequest.STATE_UPDATING):
1241 1249 self._merge_pull_request(
1242 1250 pull_request, self._rhodecode_db_user, extras)
1243 1251 else:
1244 1252 log.debug("Pre-conditions failed, NOT merging.")
1245 1253
1246 1254 raise HTTPFound(
1247 1255 h.route_path('pullrequest_show',
1248 1256 repo_name=pull_request.target_repo.repo_name,
1249 1257 pull_request_id=pull_request.pull_request_id))
1250 1258
1251 1259 def _merge_pull_request(self, pull_request, user, extras):
1252 1260 _ = self.request.translate
1253 1261 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1254 1262
1255 1263 if merge_resp.executed:
1256 1264 log.debug("The merge was successful, closing the pull request.")
1257 1265 PullRequestModel().close_pull_request(
1258 1266 pull_request.pull_request_id, user)
1259 1267 Session().commit()
1260 1268 msg = _('Pull request was successfully merged and closed.')
1261 1269 h.flash(msg, category='success')
1262 1270 else:
1263 1271 log.debug(
1264 1272 "The merge was not successful. Merge response: %s", merge_resp)
1265 1273 msg = merge_resp.merge_status_message
1266 1274 h.flash(msg, category='error')
1267 1275
1268 1276 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1269 1277 _ = self.request.translate
1270 1278
1271 1279 get_default_reviewers_data, validate_default_reviewers = \
1272 1280 PullRequestModel().get_reviewer_functions()
1273 1281
1274 1282 try:
1275 1283 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1276 1284 except ValueError as e:
1277 1285 log.error('Reviewers Validation: {}'.format(e))
1278 1286 h.flash(e, category='error')
1279 1287 return
1280 1288
1281 1289 old_calculated_status = pull_request.calculated_review_status()
1282 1290 PullRequestModel().update_reviewers(
1283 1291 pull_request, reviewers, self._rhodecode_user)
1284 1292 h.flash(_('Pull request reviewers updated.'), category='success')
1285 1293 Session().commit()
1286 1294
1287 1295 # trigger status changed if change in reviewers changes the status
1288 1296 calculated_status = pull_request.calculated_review_status()
1289 1297 if old_calculated_status != calculated_status:
1290 1298 PullRequestModel().trigger_pull_request_hook(
1291 1299 pull_request, self._rhodecode_user, 'review_status_change',
1292 1300 data={'status': calculated_status})
1293 1301
1294 1302 @LoginRequired()
1295 1303 @NotAnonymous()
1296 1304 @HasRepoPermissionAnyDecorator(
1297 1305 'repository.read', 'repository.write', 'repository.admin')
1298 1306 @CSRFRequired()
1299 1307 @view_config(
1300 1308 route_name='pullrequest_delete', request_method='POST',
1301 1309 renderer='json_ext')
1302 1310 def pull_request_delete(self):
1303 1311 _ = self.request.translate
1304 1312
1305 1313 pull_request = PullRequest.get_or_404(
1306 1314 self.request.matchdict['pull_request_id'])
1307 1315 self.load_default_context()
1308 1316
1309 1317 pr_closed = pull_request.is_closed()
1310 1318 allowed_to_delete = PullRequestModel().check_user_delete(
1311 1319 pull_request, self._rhodecode_user) and not pr_closed
1312 1320
1313 1321 # only owner can delete it !
1314 1322 if allowed_to_delete:
1315 1323 PullRequestModel().delete(pull_request, self._rhodecode_user)
1316 1324 Session().commit()
1317 1325 h.flash(_('Successfully deleted pull request'),
1318 1326 category='success')
1319 1327 raise HTTPFound(h.route_path('pullrequest_show_all',
1320 1328 repo_name=self.db_repo_name))
1321 1329
1322 1330 log.warning('user %s tried to delete pull request without access',
1323 1331 self._rhodecode_user)
1324 1332 raise HTTPNotFound()
1325 1333
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_create', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_create(self):
        """
        Create a comment on a pull request, or close the pull request with a
        closing comment.

        POST parameters used: ``changeset_status``, ``text``, ``comment_type``,
        ``resolves_comment_id``, ``close_pull_request``, ``f_path``, ``line``.

        :return: JSON dict with the stored comment's fields plus
            ``rendered_text`` (HTML of the comment block) and ``target_id``.
        :raises HTTPForbidden: pull request is closed, user may not comment,
            or close was requested without update permission.
        """
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id

        # commenting on a closed pull request is never allowed
        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        allowed_to_comment = PullRequestModel().check_user_comment(
            pull_request, self._rhodecode_user)
        if not allowed_to_comment:
            log.debug(
                'comment: forbidden because pull request is from forbidden repo')
            raise HTTPForbidden()

        c = self.load_default_context()

        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
        close_pull_request = self.request.POST.get('close_pull_request')

        # the logic here should work like following, if we submit close
        # pr comment, use `close_pull_request_with_comment` function
        # else handle regular comment logic

        if close_pull_request:
            # only owner or admin or person with write permissions
            allowed_to_close = PullRequestModel().check_user_update(
                pull_request, self._rhodecode_user)
            if not allowed_to_close:
                log.debug('comment: forbidden because not allowed to close '
                          'pull request %s', pull_request_id)
                raise HTTPForbidden()

            # This also triggers `review_status_change`
            comment, status = PullRequestModel().close_pull_request_with_comment(
                pull_request, self._rhodecode_user, self.db_repo, message=text,
                auth_user=self._rhodecode_user)
            Session().flush()

            PullRequestModel().trigger_pull_request_hook(
                pull_request, self._rhodecode_user, 'comment',
                data={'comment': comment})

        else:
            # regular comment case, could be inline, or one with status.
            # for that one we check also permissions

            allowed_to_change_status = PullRequestModel().check_user_change_status(
                pull_request, self._rhodecode_user)

            if status and allowed_to_change_status:
                # synthesize a default message when a status change comes
                # without text
                message = (_('Status change %(transition_icon)s %(status)s')
                           % {'transition_icon': '>',
                              'status': ChangesetStatus.get_status_lbl(status)})
                text = text or message

            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_user.user_id,
                pull_request=pull_request,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status and allowed_to_change_status else None),
                status_change_type=(status
                                    if status and allowed_to_change_status else None),
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            if allowed_to_change_status:
                # calculate old status before we change it
                old_calculated_status = pull_request.calculated_review_status()

                # get status if set !
                if status:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_user.user_id,
                        comment,
                        pull_request=pull_request
                    )

                Session().flush()
                # this is somehow required to get access to some relationship
                # loaded on comment
                Session().refresh(comment)

                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'comment',
                    data={'comment': comment})

                # we now calculate the status of pull request, and based on that
                # calculation we set the commits status
                calculated_status = pull_request.calculated_review_status()
                if old_calculated_status != calculated_status:
                    PullRequestModel().trigger_pull_request_hook(
                        pull_request, self._rhodecode_user, 'review_status_change',
                        data={'status': calculated_status})

        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data
1459 1467
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_delete', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_delete(self):
        """
        Delete a pull request comment.

        Allowed for super admins, the comment's author, or an admin of the
        repository the comment belongs to.

        :return: True on success.
        :raises HTTPForbidden: comment is immutable, or the pull request
            (either the routed one or the comment's own) is closed.
        :raises HTTPNotFound: caller has no permission to delete.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])

        comment = ChangesetComment.get_or_404(
            self.request.matchdict['comment_id'])
        comment_id = comment.comment_id

        if comment.immutable:
            # don't allow deleting comments that are immutable
            raise HTTPForbidden()

        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        # NOTE(review): `get_or_404` above presumably raises on a missing
        # comment, which would make this branch unreachable — confirm the
        # helper's semantics before removing it.
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        # the comment may belong to a different pull request than the routed
        # one, hence the second closed-state check
        if comment.pull_request.is_closed():
            # don't allow deleting comments on closed pull request
            raise HTTPForbidden()

        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = comment.author.user_id == self._rhodecode_user.user_id
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            # review status can change when a status-bearing comment goes away
            old_calculated_status = comment.pull_request.calculated_review_status()
            CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
            Session().commit()
            calculated_status = comment.pull_request.calculated_review_status()
            if old_calculated_status != calculated_status:
                PullRequestModel().trigger_pull_request_hook(
                    comment.pull_request, self._rhodecode_user, 'review_status_change',
                    data={'status': calculated_status})
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()
@@ -1,61 +1,73 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24
25 25 from rhodecode.apps._base import RepoAppView
26 26 from rhodecode.apps.repository.utils import get_default_reviewers_data
27 27 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
28 28 from rhodecode.model.db import Repository
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
class RepoReviewRulesView(RepoAppView):
    """Views for per-repository review rules and default reviewer data."""

    def load_default_context(self):
        return self._get_local_tmpl_context()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    @view_config(
        route_name='repo_reviewers', request_method='GET',
        renderer='rhodecode:templates/admin/repos/repo_edit.mako')
    def repo_review_rules(self):
        """Render the 'reviewers' tab of the repository settings page."""
        c = self.load_default_context()
        c.active = 'reviewers'
        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_default_reviewers_data', request_method='GET',
        renderer='json_ext')
    def repo_default_reviewers_data(self):
        """
        Return default reviewer data as JSON for a prospective pull request
        between the given source/target refs.
        """
        self.load_default_context()
        req = self.request

        src_repo = self.db_repo
        # target defaults to the source repository itself
        tgt_repo = Repository.get_by_repo_name(
            req.GET.get('target_repo', src_repo.repo_name))

        # source_ref/target_ref are mandatory GET parameters
        src_commit = src_repo.get_commit(req.GET['source_ref'])
        tgt_commit = tgt_repo.get_commit(req.GET['target_ref'])

        review_data = get_default_reviewers_data(
            req.user.get_instance(), src_repo, src_commit, tgt_repo, tgt_commit)
        return review_data
@@ -1,1029 +1,1034 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
# NOTE(review): the outer `[` ... `]` make almost the whole pattern one big
# character class, so this matches far more than 12- or 40-char hex strings.
# Probably meant r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$' — confirm callers
# before tightening.
SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')

log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Open (or create) a git repository at ``repo_path``.

        :param config: optional Config; falls back to ``get_default_config()``
        :param create: create a new repository on disk
        :param src_url: optional URL to clone/fetch from
        :param do_workspace_checkout: also check out a working tree
        :param with_wire: wire-protocol options for the remote connection
        :param bare: create/open as a bare repository
        """
        # normalized absolute filesystem path of the repository
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False} # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches: commit_id -> index mapping, filled by _rebuild_cache()
        self._commit_ids = {}
72 72
    @LazyProperty
    def _remote(self):
        # lazily-created vcsserver connection bound to this repository path
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
    @LazyProperty
    def bare(self):
        # whether the repository is bare, as reported by the remote
        return self._remote.bare()
81 81
    @LazyProperty
    def head(self):
        # current HEAD, as reported by the remote
        return self._remote.head()
85 85
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # also refresh the commit_id -> index lookup cache
        self._rebuild_cache(commit_ids)
        return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed, as a list of arguments
        :param opts: env options to pass into Subprocess command
        :raises ValueError: if ``cmd`` is not a list
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        # callers can silence expected stderr noise via skip_stderr_log=True
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
116 116
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that git will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip a scheme prefix such as "git+" from e.g. "git+http://host"
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
137 137
    @staticmethod
    def is_valid_repository(path):
        # non-bare repositories keep their data inside a ".git" directory
        if os.path.isdir(os.path.join(path, '.git')):
            return True
        # check case of bare repository
        try:
            GitRepository(path)
            return True
        except VCSError:
            pass
        return False
149 149
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create or open the on-disk repository, optionally cloning from
        ``src_url``.

        :param create: create a brand new repository at ``self.path``
        :param src_url: optional URL to clone/fetch from (validated first)
        :param do_workspace_checkout: also check out a working tree when pulling
        :param bare: initialize as bare (fetch only, no checkout)
        :raises RepositoryError: invalid parameter combinations, path problems,
            or underlying OS errors
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # opening an existing repository: just validate the path
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
189 189
    def _get_all_commit_ids(self):
        # full list of commit ids from the remote, ascending order
        return self._remote.get_all_commit_ids()
192 192
    def _get_commit_ids(self, filters=None):
        """
        Return commit ids via ``git rev-list``, optionally narrowed by
        ``filters`` keys: ``since``, ``until``, ``branch_name``.
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors

        head = self._remote.head(show_exc=False)

        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # restrict to a single branch instead of all branches/tags
                rev_filter = []
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
230 230
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
        """
        Translate a commit reference (index, short/full sha, ref name, or a
        "tip"-style alias) into a full commit id.

        NOTE(review): ``translate_tag`` is accepted but not referenced in this
        body — presumably kept for API symmetry with other backends; confirm.

        :raises CommitDoesNotExistError: when the reference cannot be resolved
        """
        def is_null(value):
            return len(value) == commit_id_or_idx.count('0')

        # aliases for the most recent commit
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
                or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            # numeric index into the commit list
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                raise CommitDoesNotExistError(commit_missing_err)

        elif is_bstr:
            # Need to call remote to translate id for tagging scenario
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
            except (CommitDoesNotExistError,):
                raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
263 263
264 264 def get_hook_location(self):
265 265 """
266 266 returns absolute path to location where hooks are stored
267 267 """
268 268 loc = os.path.join(self.path, 'hooks')
269 269 if not self.bare:
270 270 loc = os.path.join(self.path, '.git', 'hooks')
271 271 return loc
272 272
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no commits available; fall back to filesystem timestamps
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
284 284
285 285 def _get_fs_mtime(self):
286 286 idx_loc = '' if self.bare else '.git'
287 287 # fallback to filesystem
288 288 in_path = os.path.join(self.path, idx_loc, "index")
289 289 he_path = os.path.join(self.path, idx_loc, "HEAD")
290 290 if os.path.exists(in_path):
291 291 return os.stat(in_path).st_mtime
292 292 else:
293 293 return os.stat(he_path).st_mtime
294 294
    @LazyProperty
    def description(self):
        # remote-provided description, with a default when unset
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
299 299
300 300 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
301 301 if self.is_empty():
302 302 return OrderedDict()
303 303
304 304 result = []
305 305 for ref, sha in self._refs.iteritems():
306 306 if ref.startswith(prefix):
307 307 ref_name = ref
308 308 if strip_prefix:
309 309 ref_name = ref[len(prefix):]
310 310 result.append((safe_unicode(ref_name), sha))
311 311
312 312 def get_name(entry):
313 313 return entry[0]
314 314
315 315 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
316 316
    def _get_branches(self):
        # branch name -> sha mapping derived from refs/heads/*
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
319 319
    @CachedProperty
    def branches(self):
        # cached branch name -> sha mapping
        return self._get_branches()
323 323
    @CachedProperty
    def branches_closed(self):
        # git has no notion of closed branches; always empty
        return {}
327 327
    @CachedProperty
    def bookmarks(self):
        # git has no bookmarks; always empty
        return {}
331 331
    @CachedProperty
    def branches_all(self):
        # open plus closed branches (closed is always empty for git)
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches
338 338
    @CachedProperty
    def tags(self):
        # cached tag name -> sha mapping
        return self._get_tags()
342 342
    def _get_tags(self):
        # reverse=True yields descending name order
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
345 345
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

        # only a lightweight ref is written; message/date are not applied yet
        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        # drop cached ref snapshots so the new tag becomes visible
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

        return commit
371 371
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        self._remote.tag_remove(name)
        # drop cached ref snapshots so the removal becomes visible
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')
389 389
    def _get_refs(self):
        # full ref name -> sha mapping straight from the remote
        return self._remote.get_refs()
392 392
    @CachedProperty
    def _refs(self):
        # cached refs snapshot; invalidated by tag/strip/set_refs/remove_ref
        return self._get_refs()
396 396
    @property
    def _ref_tree(self):
        # nested dict view of refs, e.g. 'refs/heads/master' -> sha becomes
        # {'refs': {'heads': {'master': sha}}}
        node = tree = {}
        for ref, sha in self._refs.iteritems():
            path = ref.split('/')
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            node = tree  # reset the walker to the root for the next ref
        return tree
407 407
    def get_remote_ref(self, ref_name):
        # resolve the sha of an origin remote-tracking ref; None when missing
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        except Exception:
            return
414 414
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_idx: alternative numeric index into ``commit_ids``
        :param pre_load: attributes to pre-load on the commit object
        :raises EmptyRepositoryError: when the repository has no commits
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            # resolve refs/short ids/aliases into a full commit id
            commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
454 454
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # resolve start/end to positions in the full commit list
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # end is inclusive, python slicing is exclusive
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
534 534
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :raises ValueError: when ``path1`` differs from ``path``
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        # restrict the diff to a single path, when requested
        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_ignorews=ignore_whitespace,
            context=context)
        return GitDiff(diff)
565 565
    def strip(self, commit_id, branch_name):
        """
        Reset ``branch_name`` to the first parent of ``commit_id``, removing
        it (and its descendants on that branch) from the branch history.

        :return: number of commits left in the repository
        :raises Exception: when ``commit_id`` is a merge commit
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # clear cached properties
        self._invalidate_prop_cache('commit_ids')
        self._invalidate_prop_cache('_refs')
        self._invalidate_prop_cache('branches')

        return len(self.commit_ids)
581 581
582 582 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
583 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
584 self, commit_id1, repo2, commit_id2)
585
583 586 if commit_id1 == commit_id2:
584 587 return commit_id1
585 588
586 589 if self != repo2:
587 590 commits = self._remote.get_missing_revs(
588 591 commit_id1, commit_id2, repo2.path)
589 592 if commits:
590 593 commit = repo2.get_commit(commits[-1])
591 594 if commit.parents:
592 595 ancestor_id = commit.parents[0].raw_id
593 596 else:
594 597 ancestor_id = None
595 598 else:
596 599 # no commits from other repo, ancestor_id is the commit_id2
597 600 ancestor_id = commit_id2
598 601 else:
599 602 output, __ = self.run_git_command(
600 603 ['merge-base', commit_id1, commit_id2])
601 604 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
602 605
606 log.debug('Found common ancestor with sha: %s', ancestor_id)
607
603 608 return ancestor_id
604 609
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return commits reachable from ``commit_id2`` but not from
        ``commit_id1``, oldest first.

        NOTE(review): the ``merge`` argument is accepted but never used in
        this body — presumably kept for interface compatibility; confirm
        against the base class.
        """
        repo1 = self
        ancestor_id = None  # NOTE(review): assigned but unused in this body

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            # cross-repo compare: ask the remote which revs repo2 is missing
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repo: plain `git log` over the commit range
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits
626 631
627 632 @LazyProperty
628 633 def in_memory_commit(self):
629 634 """
630 635 Returns ``GitInMemoryCommit`` object for this repository.
631 636 """
632 637 return GitInMemoryCommit(self)
633 638
634 639 def pull(self, url, commit_ids=None, update_after=False):
635 640 """
636 641 Pull changes from external location. Pull is different in GIT
637 642 that fetch since it's doing a checkout
638 643
639 644 :param commit_ids: Optional. Can be set to a list of commit ids
640 645 which shall be pulled from the other repository.
641 646 """
642 647 refs = None
643 648 if commit_ids is not None:
644 649 remote_refs = self._remote.get_remote_refs(url)
645 650 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
646 651 self._remote.pull(url, refs=refs, update_after=update_after)
647 652 self._remote.invalidate_vcs_cache()
648 653
649 654 def fetch(self, url, commit_ids=None):
650 655 """
651 656 Fetch all git objects from external location.
652 657 """
653 658 self._remote.sync_fetch(url, refs=commit_ids)
654 659 self._remote.invalidate_vcs_cache()
655 660
656 661 def push(self, url):
657 662 refs = None
658 663 self._remote.sync_push(url, refs=refs)
659 664
660 665 def set_refs(self, ref_name, commit_id):
661 666 self._remote.set_refs(ref_name, commit_id)
662 667 self._invalidate_prop_cache('_refs')
663 668
664 669 def remove_ref(self, ref_name):
665 670 self._remote.remove_ref(ref_name)
666 671 self._invalidate_prop_cache('_refs')
667 672
668 673 def run_gc(self, prune=True):
669 674 cmd = ['gc', '--aggressive']
670 675 if prune:
671 676 cmd += ['--prune=now']
672 677 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
673 678 return stderr
674 679
675 680 def _update_server_info(self):
676 681 """
677 682 runs gits update-server-info command in this repo instance
678 683 """
679 684 self._remote.update_server_info()
680 685
681 686 def _current_branch(self):
682 687 """
683 688 Return the name of the current branch.
684 689
685 690 It only works for non bare repositories (i.e. repositories with a
686 691 working copy)
687 692 """
688 693 if self.bare:
689 694 raise RepositoryError('Bare git repos do not have active branches')
690 695
691 696 if self.is_empty():
692 697 return None
693 698
694 699 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
695 700 return stdout.strip()
696 701
697 702 def _checkout(self, branch_name, create=False, force=False):
698 703 """
699 704 Checkout a branch in the working directory.
700 705
701 706 It tries to create the branch if create is True, failing if the branch
702 707 already exists.
703 708
704 709 It only works for non bare repositories (i.e. repositories with a
705 710 working copy)
706 711 """
707 712 if self.bare:
708 713 raise RepositoryError('Cannot checkout branches in a bare git repo')
709 714
710 715 cmd = ['checkout']
711 716 if force:
712 717 cmd.append('-f')
713 718 if create:
714 719 cmd.append('-b')
715 720 cmd.append(branch_name)
716 721 self.run_git_command(cmd, fail_on_stderr=False)
717 722
718 723 def _create_branch(self, branch_name, commit_id):
719 724 """
720 725 creates a branch in a GIT repo
721 726 """
722 727 self._remote.create_branch(branch_name, commit_id)
723 728
724 729 def _identify(self):
725 730 """
726 731 Return the current state of the working directory.
727 732 """
728 733 if self.bare:
729 734 raise RepositoryError('Bare git repos do not have active branches')
730 735
731 736 if self.is_empty():
732 737 return None
733 738
734 739 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
735 740 return stdout.strip()
736 741
737 742 def _local_clone(self, clone_path, branch_name, source_branch=None):
738 743 """
739 744 Create a local clone of the current repo.
740 745 """
741 746 # N.B.(skreft): the --branch option is required as otherwise the shallow
742 747 # clone will only fetch the active branch.
743 748 cmd = ['clone', '--branch', branch_name,
744 749 self.path, os.path.abspath(clone_path)]
745 750
746 751 self.run_git_command(cmd, fail_on_stderr=False)
747 752
748 753 # if we get the different source branch, make sure we also fetch it for
749 754 # merge conditions
750 755 if source_branch and source_branch != branch_name:
751 756 # check if the ref exists.
752 757 shadow_repo = GitRepository(os.path.abspath(clone_path))
753 758 if shadow_repo.get_remote_ref(source_branch):
754 759 cmd = ['fetch', self.path, source_branch]
755 760 self.run_git_command(cmd, fail_on_stderr=False)
756 761
757 762 def _local_fetch(self, repository_path, branch_name, use_origin=False):
758 763 """
759 764 Fetch a branch from a local repository.
760 765 """
761 766 repository_path = os.path.abspath(repository_path)
762 767 if repository_path == self.path:
763 768 raise ValueError('Cannot fetch from the same repository')
764 769
765 770 if use_origin:
766 771 branch_name = '+{branch}:refs/heads/{branch}'.format(
767 772 branch=branch_name)
768 773
769 774 cmd = ['fetch', '--no-tags', '--update-head-ok',
770 775 repository_path, branch_name]
771 776 self.run_git_command(cmd, fail_on_stderr=False)
772 777
773 778 def _local_reset(self, branch_name):
774 779 branch_name = '{}'.format(branch_name)
775 780 cmd = ['reset', '--hard', branch_name, '--']
776 781 self.run_git_command(cmd, fail_on_stderr=False)
777 782
778 783 def _last_fetch_heads(self):
779 784 """
780 785 Return the last fetched heads that need merging.
781 786
782 787 The algorithm is defined at
783 788 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
784 789 """
785 790 if not self.bare:
786 791 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
787 792 else:
788 793 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
789 794
790 795 heads = []
791 796 with open(fetch_heads_path) as f:
792 797 for line in f:
793 798 if ' not-for-merge ' in line:
794 799 continue
795 800 line = re.sub('\t.*', '', line, flags=re.DOTALL)
796 801 heads.append(line)
797 802
798 803 return heads
799 804
800 805 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
801 806 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
802 807
803 808 def _local_pull(self, repository_path, branch_name, ff_only=True):
804 809 """
805 810 Pull a branch from a local repository.
806 811 """
807 812 if self.bare:
808 813 raise RepositoryError('Cannot pull into a bare git repository')
809 814 # N.B.(skreft): The --ff-only option is to make sure this is a
810 815 # fast-forward (i.e., we are only pulling new changes and there are no
811 816 # conflicts with our current branch)
812 817 # Additionally, that option needs to go before --no-tags, otherwise git
813 818 # pull complains about it being an unknown flag.
814 819 cmd = ['pull']
815 820 if ff_only:
816 821 cmd.append('--ff-only')
817 822 cmd.extend(['--no-tags', repository_path, branch_name])
818 823 self.run_git_command(cmd, fail_on_stderr=False)
819 824
820 825 def _local_merge(self, merge_message, user_name, user_email, heads):
821 826 """
822 827 Merge the given head into the checked out branch.
823 828
824 829 It will force a merge commit.
825 830
826 831 Currently it raises an error if the repo is empty, as it is not possible
827 832 to create a merge commit in an empty repo.
828 833
829 834 :param merge_message: The message to use for the merge commit.
830 835 :param heads: the heads to merge.
831 836 """
832 837 if self.bare:
833 838 raise RepositoryError('Cannot merge into a bare git repository')
834 839
835 840 if not heads:
836 841 return
837 842
838 843 if self.is_empty():
839 844 # TODO(skreft): do something more robust in this case.
840 845 raise RepositoryError('Do not know how to merge into empty repositories yet')
841 846 unresolved = None
842 847
843 848 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
844 849 # commit message. We also specify the user who is doing the merge.
845 850 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
846 851 '-c', 'user.email=%s' % safe_str(user_email),
847 852 'merge', '--no-ff', '-m', safe_str(merge_message)]
848 853
849 854 merge_cmd = cmd + heads
850 855
851 856 try:
852 857 self.run_git_command(merge_cmd, fail_on_stderr=False)
853 858 except RepositoryError:
854 859 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
855 860 fail_on_stderr=False)[0].splitlines()
856 861 # NOTE(marcink): we add U notation for consistent with HG backend output
857 862 unresolved = ['U {}'.format(f) for f in files]
858 863
859 864 # Cleanup any merge leftovers
860 865 self._remote.invalidate_vcs_cache()
861 866 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
862 867
863 868 if unresolved:
864 869 raise UnresolvedFilesInRepo(unresolved)
865 870 else:
866 871 raise
867 872
868 873 def _local_push(
869 874 self, source_branch, repository_path, target_branch,
870 875 enable_hooks=False, rc_scm_data=None):
871 876 """
872 877 Push the source_branch to the given repository and target_branch.
873 878
874 879 Currently it if the target_branch is not master and the target repo is
875 880 empty, the push will work, but then GitRepository won't be able to find
876 881 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
877 882 pointing to master, which does not exist).
878 883
879 884 It does not run the hooks in the target repo.
880 885 """
881 886 # TODO(skreft): deal with the case in which the target repo is empty,
882 887 # and the target_branch is not master.
883 888 target_repo = GitRepository(repository_path)
884 889 if (not target_repo.bare and
885 890 target_repo._current_branch() == target_branch):
886 891 # Git prevents pushing to the checked out branch, so simulate it by
887 892 # pulling into the target repository.
888 893 target_repo._local_pull(self.path, source_branch)
889 894 else:
890 895 cmd = ['push', os.path.abspath(repository_path),
891 896 '%s:%s' % (source_branch, target_branch)]
892 897 gitenv = {}
893 898 if rc_scm_data:
894 899 gitenv.update({'RC_SCM_DATA': rc_scm_data})
895 900
896 901 if not enable_hooks:
897 902 gitenv['RC_SKIP_HOOKS'] = '1'
898 903 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
899 904
900 905 def _get_new_pr_branch(self, source_branch, target_branch):
901 906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
902 907 pr_branches = []
903 908 for branch in self.branches:
904 909 if branch.startswith(prefix):
905 910 pr_branches.append(int(branch[len(prefix):]))
906 911
907 912 if not pr_branches:
908 913 branch_id = 0
909 914 else:
910 915 branch_id = max(pr_branches) + 1
911 916
912 917 return '%s%d' % (prefix, branch_id)
913 918
914 919 def _maybe_prepare_merge_workspace(
915 920 self, repo_id, workspace_id, target_ref, source_ref):
916 921 shadow_repository_path = self._get_shadow_repository_path(
917 922 self.path, repo_id, workspace_id)
918 923 if not os.path.exists(shadow_repository_path):
919 924 self._local_clone(
920 925 shadow_repository_path, target_ref.name, source_ref.name)
921 926 log.debug('Prepared %s shadow repository in %s',
922 927 self.alias, shadow_repository_path)
923 928
924 929 return shadow_repository_path
925 930
926 931 def _merge_repo(self, repo_id, workspace_id, target_ref,
927 932 source_repo, source_ref, merge_message,
928 933 merger_name, merger_email, dry_run=False,
929 934 use_rebase=False, close_branch=False):
930 935
931 936 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
932 937 'rebase' if use_rebase else 'merge', dry_run)
933 938 if target_ref.commit_id != self.branches[target_ref.name]:
934 939 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
935 940 target_ref.commit_id, self.branches[target_ref.name])
936 941 return MergeResponse(
937 942 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
938 943 metadata={'target_ref': target_ref})
939 944
940 945 shadow_repository_path = self._maybe_prepare_merge_workspace(
941 946 repo_id, workspace_id, target_ref, source_ref)
942 947 shadow_repo = self.get_shadow_instance(shadow_repository_path)
943 948
944 949 # checkout source, if it's different. Otherwise we could not
945 950 # fetch proper commits for merge testing
946 951 if source_ref.name != target_ref.name:
947 952 if shadow_repo.get_remote_ref(source_ref.name):
948 953 shadow_repo._checkout(source_ref.name, force=True)
949 954
950 955 # checkout target, and fetch changes
951 956 shadow_repo._checkout(target_ref.name, force=True)
952 957
953 958 # fetch/reset pull the target, in case it is changed
954 959 # this handles even force changes
955 960 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
956 961 shadow_repo._local_reset(target_ref.name)
957 962
958 963 # Need to reload repo to invalidate the cache, or otherwise we cannot
959 964 # retrieve the last target commit.
960 965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
961 966 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
962 967 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
963 968 target_ref, target_ref.commit_id,
964 969 shadow_repo.branches[target_ref.name])
965 970 return MergeResponse(
966 971 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
967 972 metadata={'target_ref': target_ref})
968 973
969 974 # calculate new branch
970 975 pr_branch = shadow_repo._get_new_pr_branch(
971 976 source_ref.name, target_ref.name)
972 977 log.debug('using pull-request merge branch: `%s`', pr_branch)
973 978 # checkout to temp branch, and fetch changes
974 979 shadow_repo._checkout(pr_branch, create=True)
975 980 try:
976 981 shadow_repo._local_fetch(source_repo.path, source_ref.name)
977 982 except RepositoryError:
978 983 log.exception('Failure when doing local fetch on '
979 984 'shadow repo: %s', shadow_repo)
980 985 return MergeResponse(
981 986 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
982 987 metadata={'source_ref': source_ref})
983 988
984 989 merge_ref = None
985 990 merge_failure_reason = MergeFailureReason.NONE
986 991 metadata = {}
987 992 try:
988 993 shadow_repo._local_merge(merge_message, merger_name, merger_email,
989 994 [source_ref.commit_id])
990 995 merge_possible = True
991 996
992 997 # Need to invalidate the cache, or otherwise we
993 998 # cannot retrieve the merge commit.
994 999 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
995 1000 merge_commit_id = shadow_repo.branches[pr_branch]
996 1001
997 1002 # Set a reference pointing to the merge commit. This reference may
998 1003 # be used to easily identify the last successful merge commit in
999 1004 # the shadow repository.
1000 1005 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1001 1006 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1002 1007 except RepositoryError as e:
1003 1008 log.exception('Failure when doing local merge on git shadow repo')
1004 1009 if isinstance(e, UnresolvedFilesInRepo):
1005 1010 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1006 1011
1007 1012 merge_possible = False
1008 1013 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1009 1014
1010 1015 if merge_possible and not dry_run:
1011 1016 try:
1012 1017 shadow_repo._local_push(
1013 1018 pr_branch, self.path, target_ref.name, enable_hooks=True,
1014 1019 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1015 1020 merge_succeeded = True
1016 1021 except RepositoryError:
1017 1022 log.exception(
1018 1023 'Failure when doing local push from the shadow '
1019 1024 'repository to the target repository at %s.', self.path)
1020 1025 merge_succeeded = False
1021 1026 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1022 1027 metadata['target'] = 'git shadow repo'
1023 1028 metadata['merge_commit'] = pr_branch
1024 1029 else:
1025 1030 merge_succeeded = False
1026 1031
1027 1032 return MergeResponse(
1028 1033 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1029 1034 metadata=metadata)
@@ -1,979 +1,986 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be find at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, would try to create repository if
69 69 it does not exist rather than raising exception
70 70 :param src_url=None: would try to clone repository from given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present
78 78 # because sometimes we init the repos with config we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only not closed active branches by default
127 127
128 128 :param active: return also active branches
129 129 :param closed: return also closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exists
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
302
300 303 if commit_id1 == commit_id2:
301 304 return commit_id1
302 305
303 306 ancestors = self._remote.revs_from_revspec(
304 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
305 308 other_path=repo2.path)
306 return repo2[ancestors[0]].raw_id if ancestors else None
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
307 314
308 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
309 316 if commit_id1 == commit_id2:
310 317 commits = []
311 318 else:
312 319 if merge:
313 320 indexes = self._remote.revs_from_revspec(
314 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
315 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
316 323 else:
317 324 indexes = self._remote.revs_from_revspec(
318 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
319 326 commit_id1, other_path=repo2.path)
320 327
321 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
322 329 for idx in indexes]
323 330
324 331 return commits
325 332
326 333 @staticmethod
327 334 def check_url(url, config):
328 335 """
329 336 Function will check given url and try to verify if it's a valid
330 337 link. Sometimes it may happened that mercurial will issue basic
331 338 auth request that can cause whole API to hang when used from python
332 339 or other external calls.
333 340
334 341 On failures it'll raise urllib2.HTTPError, exception is also thrown
335 342 when the return code is non 200
336 343 """
337 344 # check first if it's not an local url
338 345 if os.path.isdir(url) or url.startswith('file:'):
339 346 return True
340 347
341 348 # Request the _remote to verify the url
342 349 return connection.Hg.check_url(url, config.serialize())
343 350
344 351 @staticmethod
345 352 def is_valid_repository(path):
346 353 return os.path.isdir(os.path.join(path, '.hg'))
347 354
348 355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
349 356 """
350 357 Function will check for mercurial repository in given path. If there
351 358 is no repository in that path it will raise an exception unless
352 359 `create` parameter is set to True - in that case repository would
353 360 be created.
354 361
355 362 If `src_url` is given, would try to clone repository from the
356 363 location at given clone_point. Additionally it'll make update to
357 364 working copy accordingly to `do_workspace_checkout` flag.
358 365 """
359 366 if create and os.path.exists(self.path):
360 367 raise RepositoryError(
361 368 "Cannot create repository at %s, location already exist"
362 369 % self.path)
363 370
364 371 if src_url:
365 372 url = str(self._get_url(src_url))
366 373 MercurialRepository.check_url(url, self.config)
367 374
368 375 self._remote.clone(url, self.path, do_workspace_checkout)
369 376
370 377 # Don't try to create if we've already cloned repo
371 378 create = False
372 379
373 380 if create:
374 381 os.makedirs(self.path, mode=0o755)
375 382 self._remote.localrepository(create)
376 383
377 384 @LazyProperty
378 385 def in_memory_commit(self):
379 386 return MercurialInMemoryCommit(self)
380 387
381 388 @LazyProperty
382 389 def description(self):
383 390 description = self._remote.get_config_value(
384 391 'web', 'description', untrusted=True)
385 392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
386 393
387 394 @LazyProperty
388 395 def contact(self):
389 396 contact = (
390 397 self._remote.get_config_value("web", "contact") or
391 398 self._remote.get_config_value("ui", "username"))
392 399 return safe_unicode(contact or self.DEFAULT_CONTACT)
393 400
394 401 @LazyProperty
395 402 def last_change(self):
396 403 """
397 404 Returns last change made on this repository as
398 405 `datetime.datetime` object.
399 406 """
400 407 try:
401 408 return self.get_commit().date
402 409 except RepositoryError:
403 410 tzoffset = makedate()[1]
404 411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
405 412
406 413 def _get_fs_mtime(self):
407 414 # fallback to filesystem
408 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
409 416 st_path = os.path.join(self.path, '.hg', "store")
410 417 if os.path.exists(cl_path):
411 418 return os.stat(cl_path).st_mtime
412 419 else:
413 420 return os.stat(st_path).st_mtime
414 421
415 422 def _get_url(self, url):
416 423 """
417 424 Returns normalized url. If schema is not given, would fall
418 425 to filesystem
419 426 (``file:///``) schema.
420 427 """
421 428 url = url.encode('utf8')
422 429 if url != 'default' and '://' not in url:
423 430 url = "file:" + urllib.pathname2url(url)
424 431 return url
425 432
426 433 def get_hook_location(self):
427 434 """
428 435 returns absolute path to location where hooks are stored
429 436 """
430 437 return os.path.join(self.path, '.hg', '.hgrc')
431 438
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic id resolvable by the
            remote ``lookup`` call; takes precedence over `commit_idx`.
        :param commit_idx: numeric index into ``commit_ids``; may be negative.
        :param pre_load: optional list of commit attributes to load eagerly.
        :param translate_tag: accepted for backend interface compatibility;
            not used in this method body.
        :param maybe_unreachable: accepted for backend interface
            compatibility; not used in this method body.
        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when no commit matches the request.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached -- fall through to the remote lookup below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # index not in local cache: let the remote lookup resolve it
                commit_id = commit_idx
        else:
            # neither id nor index given -- default to the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
474 481
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param pre_load: optional list of commit attributes to load eagerly.
        :param translate_tags: accepted for backend interface compatibility;
            not used in this method body.
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): branch_ancestors is never set to True anywhere in
        # this method, so the ancestors() revset branch below is currently
        # dead code -- kept for a possible future switch.
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end bound inclusive for the slice below
            end_pos += 1

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # a revset query yields numeric revision indexes, so use the
            # index-based generator to resolve them into commits
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
557 564
558 565 def pull(self, url, commit_ids=None):
559 566 """
560 567 Pull changes from external location.
561 568
562 569 :param commit_ids: Optional. Can be set to a list of commit ids
563 570 which shall be pulled from the other repository.
564 571 """
565 572 url = self._get_url(url)
566 573 self._remote.pull(url, commit_ids=commit_ids)
567 574 self._remote.invalidate_vcs_cache()
568 575
569 576 def fetch(self, url, commit_ids=None):
570 577 """
571 578 Backward compatibility with GIT fetch==pull
572 579 """
573 580 return self.pull(url, commit_ids=commit_ids)
574 581
575 582 def push(self, url):
576 583 url = self._get_url(url)
577 584 self._remote.sync_push(url)
578 585
579 586 def _local_clone(self, clone_path):
580 587 """
581 588 Create a local clone of the current repo.
582 589 """
583 590 self._remote.clone(self.path, clone_path, update_after_clone=True,
584 591 hooks=False)
585 592
586 593 def _update(self, revision, clean=False):
587 594 """
588 595 Update the working copy to the specified revision.
589 596 """
590 597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
591 598 self._remote.update(revision, clean=clean)
592 599
593 600 def _identify(self):
594 601 """
595 602 Return the current state of the working directory.
596 603 """
597 604 return self._remote.identify().strip().rstrip('+')
598 605
599 606 def _heads(self, branch=None):
600 607 """
601 608 Return the commit ids of the repository heads.
602 609 """
603 610 return self._remote.heads(branch=branch).strip().split(' ')
604 611
605 612 def _ancestor(self, revision1, revision2):
606 613 """
607 614 Return the common ancestor of the two revisions.
608 615 """
609 616 return self._remote.ancestor(revision1, revision2)
610 617
611 618 def _local_push(
612 619 self, revision, repository_path, push_branches=False,
613 620 enable_hooks=False):
614 621 """
615 622 Push the given revision to the specified repository.
616 623
617 624 :param push_branches: allow to create branches in the target repo.
618 625 """
619 626 self._remote.push(
620 627 [revision], repository_path, hooks=enable_hooks,
621 628 push_branches=push_branches)
622 629
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference merged into; the working copy is first
            updated (clean) to this commit.
        :param source_ref: reference whose changes get merged or rebased in.
        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :raises UnresolvedFilesInRepo: when the merge/rebase hits conflicts.
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (the target is an ancestor of the source, same branch: the
            # source head already contains the merged state)
            return source_ref.commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark lets us find the rebased head afterwards
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
695 702
696 703 def _local_close(self, target_ref, user_name, user_email,
697 704 source_ref, close_message=''):
698 705 """
699 706 Close the branch of the given source_revision
700 707
701 708 Returns the commit id of the close and a boolean indicating if the
702 709 commit needs to be pushed.
703 710 """
704 711 self._update(source_ref.commit_id)
705 712 message = close_message or "Closing branch: `{}`".format(source_ref.name)
706 713 try:
707 714 self._remote.commit(
708 715 message=safe_str(message),
709 716 username=safe_str('%s <%s>' % (user_name, user_email)),
710 717 close_branch=True)
711 718 self._remote.invalidate_vcs_cache()
712 719 return self._identify(), True
713 720 except RepositoryError:
714 721 # Cleanup any commit leftovers
715 722 self._remote.update(clean=True)
716 723 raise
717 724
718 725 def _is_the_same_branch(self, target_ref, source_ref):
719 726 return (
720 727 self._get_branch_name(target_ref) ==
721 728 self._get_branch_name(source_ref))
722 729
723 730 def _get_branch_name(self, ref):
724 731 if ref.type == 'branch':
725 732 return ref.name
726 733 return self._remote.ctx_branch(ref.commit_id)
727 734
728 735 def _maybe_prepare_merge_workspace(
729 736 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
730 737 shadow_repository_path = self._get_shadow_repository_path(
731 738 self.path, repo_id, workspace_id)
732 739 if not os.path.exists(shadow_repository_path):
733 740 self._local_clone(shadow_repository_path)
734 741 log.debug(
735 742 'Prepared shadow repository in %s', shadow_repository_path)
736 743
737 744 return shadow_repository_path
738 745
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge *source_ref* from *source_repo* into *target_ref* of this
        repository, using a shadow repository as the working area.

        Returns a ``MergeResponse`` carrying: whether a merge is possible,
        whether it succeeded, the resulting merge reference (a ``pr-merge``
        bookmark in the shadow repo), the failure reason and metadata.

        :param dry_run: when True, merge in the shadow repo only -- nothing
            is pushed back to this repository.
        :param use_rebase: rebase instead of merge (see ``_local_merge``).
        :param close_branch: also close the source branch; only honoured for
            branch refs whose name differs from the target's.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # refuse to merge into anything that is not a current head
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # multiple heads on the target branch make the merge ambiguous
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    # NOTE(review): the two separator strings differ by one
                    # space ('\n* ' vs '\n * ') -- looks unintentional; verify
                    # before normalizing, the UI may rely on the exact text.
                    metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
889 896
890 897 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
891 898 config = self.config.copy()
892 899 if not enable_hooks:
893 900 config.clear_section('hooks')
894 901 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
895 902
896 903 def _validate_pull_reference(self, reference):
897 904 if not (reference.name in self.bookmarks or
898 905 reference.name in self.branches or
899 906 self.get_commit(reference.commit_id)):
900 907 raise CommitDoesNotExistError(
901 908 'Unknown branch, bookmark or commit id')
902 909
903 910 def _local_pull(self, repository_path, reference):
904 911 """
905 912 Fetch a branch, bookmark or commit from a local repository.
906 913 """
907 914 repository_path = os.path.abspath(repository_path)
908 915 if repository_path == self.path:
909 916 raise ValueError('Cannot pull from the same repository')
910 917
911 918 reference_type_to_option_name = {
912 919 'book': 'bookmark',
913 920 'branch': 'branch',
914 921 }
915 922 option_name = reference_type_to_option_name.get(
916 923 reference.type, 'revision')
917 924
918 925 if option_name == 'revision':
919 926 ref = reference.commit_id
920 927 else:
921 928 ref = reference.name
922 929
923 930 options = {option_name: [ref]}
924 931 self._remote.pull_cmd(repository_path, hooks=False, **options)
925 932 self._remote.invalidate_vcs_cache()
926 933
927 934 def bookmark(self, bookmark, revision=None):
928 935 if isinstance(bookmark, unicode):
929 936 bookmark = safe_str(bookmark)
930 937 self._remote.bookmark(bookmark, revision=revision)
931 938 self._remote.invalidate_vcs_cache()
932 939
    def get_path_permissions(self, username):
        """
        Build a path permission checker for *username* from the optional
        ``.hg/hgacl`` file; return None when the file does not exist.

        :raises exceptions.RepositoryRequirementError: when the hgacl file
            exists but cannot be read or parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # return the glob pattern list for the first matching option,
            # checking user-specific then 'default' keys in both supported
            # section names
            svalue = None
            for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    # plain path entry -- also match everything below it
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # broad on purpose: any parser/IO failure becomes a
                # repository requirement error with context
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
973 980
974 981
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Collection generator whose items are numeric revision indexes (as
    returned by revset queries) rather than raw commit ids.
    """

    def _commit_factory(self, commit_id):
        # despite the parameter name, ``commit_id`` here is a numeric index
        return self.repo.get_commit(commit_idx=commit_id, pre_load=self.pre_load)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now