##// END OF EJS Templates
pull-requests: updated metadata information for failed merges with multiple heads.
marcink -
r3627:cff84552 default
parent child Browse files
Show More
@@ -1,169 +1,169 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import webob
21 import webob
22 from pyramid.threadlocal import get_current_request
22 from pyramid.threadlocal import get_current_request
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.lib import hooks_base
25 from rhodecode.lib import hooks_base
26 from rhodecode.lib import utils2
26 from rhodecode.lib import utils2
27
27
28
28
def _get_rc_scm_extras(username, repo_name, repo_alias, action):
    """
    Build an ``AttributeDict`` of SCM "extras" describing a VCS operation,
    suitable for passing to the hook functions in :mod:`hooks_base`.

    :param username: user performing the operation
    :param repo_name: name of the repository operated on
    :param repo_alias: the type of SCM repo (e.g. 'hg', 'git')
    :param action: operation name; locking is only checked for pull/push
    """
    # TODO: johbo: Replace by vcs_operation_context and remove fully
    from rhodecode.lib.base import vcs_operation_context

    # fall back to a synthetic WSGI environ when no request is active
    fallback_environ = webob.Request.blank('').environ
    request = get_current_request()
    try:
        environ = request.environ or fallback_environ
    except TypeError:
        # outside of a request context get_current_request() yields None
        environ = fallback_environ

    extras = vcs_operation_context(
        environ, repo_name, username, action, repo_alias,
        action in ('pull', 'push'))
    return utils2.AttributeDict(extras)
47
47
48
48
def trigger_post_push_hook(
        username, action, hook_type, repo_name, repo_alias, commit_ids):
    """
    Triggers push action hooks

    :param username: username who pushes
    :param action: push/push_local/push_remote
    :param hook_type: hook-type identifier forwarded to the hook handlers
    :param repo_name: name of repo
    :param repo_alias: the type of SCM repo
    :param commit_ids: list of commit ids that we pushed
    """
    extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
    # attach push payload as attributes on the extras AttributeDict
    extras.commit_ids = commit_ids
    extras.hook_type = hook_type
    hooks_base.post_push(extras)
64
64
65
65
def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers create pull request action hooks.

    :param username: username who creates the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was created
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only hg/git repositories participate in pull-request hooks
    if repo_alias in ('hg', 'git'):
        extras = _get_rc_scm_extras(
            username, repo_name, repo_alias, 'create_pull_request')
        events.trigger(events.PullRequestCreateEvent(pull_request))
        # merge-state is skipped for the log payload
        extras.update(pull_request.get_api_data(with_merge_state=False))
        hooks_base.log_create_pull_request(**extras)
85
85
86
86
def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request, data=None):
    """
    Triggers merge pull request action hooks.

    :param username: username who merged the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was merged
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only hg/git repositories participate in pull-request hooks
    if repo_alias in ('hg', 'git'):
        extras = _get_rc_scm_extras(
            username, repo_name, repo_alias, 'merge_pull_request')
        events.trigger(events.PullRequestMergeEvent(pull_request))
        extras.update(pull_request.get_api_data())
        hooks_base.log_merge_pull_request(**extras)
106
106
107
107
def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request, data=None):
    """
    Triggers close pull request action hooks.

    :param username: username who closed the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was closed
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only hg/git repositories participate in pull-request hooks
    if repo_alias in ('hg', 'git'):
        extras = _get_rc_scm_extras(
            username, repo_name, repo_alias, 'close_pull_request')
        events.trigger(events.PullRequestCloseEvent(pull_request))
        extras.update(pull_request.get_api_data())
        hooks_base.log_close_pull_request(**extras)
127
127
128
128
def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers review status change pull request action hooks.

    :param username: username who changed the review status
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that review status changed
    :param data: extra data for specific events e.g {'comment': comment_obj};
        the review status is read from ``data['status']`` when present
    """
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'review_pull_request')
    # ``data`` defaults to None; guard the lookup so a caller that passes no
    # payload does not crash with AttributeError on ``None.get``
    status = data.get('status') if data else None
    events.trigger(events.PullRequestReviewEvent(pull_request, status))
    extras.update(pull_request.get_api_data())
    hooks_base.log_review_pull_request(**extras)
149
149
150
150
def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers update pull request action hooks.

    :param username: username who updated the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was updated
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only hg/git repositories participate in pull-request hooks
    if repo_alias in ('hg', 'git'):
        extras = _get_rc_scm_extras(
            username, repo_name, repo_alias, 'update_pull_request')
        events.trigger(events.PullRequestUpdateEvent(pull_request))
        extras.update(pull_request.get_api_data())
        hooks_base.log_update_pull_request(**extras)
@@ -1,1846 +1,1849 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from pyramid import compat
36 from pyramid import compat
37
37
38 from rhodecode.translation import lazy_ugettext
38 from rhodecode.translation import lazy_ugettext
39 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.utils2 import safe_str, safe_unicode
40 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs import connection
41 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.utils import author_name, author_email
42 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 RepositoryError)
48 RepositoryError)
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 FILEMODE_DEFAULT = 0o100644
54 FILEMODE_DEFAULT = 0o100644
55 FILEMODE_EXECUTABLE = 0o100755
55 FILEMODE_EXECUTABLE = 0o100755
56
56
57 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
58
58
59
59
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    # (Mercurial-specific, see the HG_ prefix.)
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # A involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
111
111
112
112
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
141
141
142
142
class MergeResponse(object):
    """
    Value object describing the outcome of a server-side merge attempt.

    ``metadata`` supplies the variables that get interpolated into the
    ``MERGE_STATUS_MESSAGES`` templates via ``str.format``.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        """
        :param possible: whether the merge can be performed
        :param executed: whether the merge was actually executed
        :param merge_ref: reference of the resulting merge commit (if any)
        :param failure_reason: one of the ``MergeFailureReason`` constants
        :param metadata: dict of values interpolated into the status message
        """
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # NOTE: merge_ref and metadata deliberately do not take part in
        # equality; only the outcome flags are compared.
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    def __ne__(self, other):
        # Python 2 does not derive ``!=`` from ``__eq__``; define it
        # explicitly so inequality stays consistent with equality
        # (matches the convention used by BaseRepository in this module).
        return not self.__eq__(other)

    @property
    def label(self):
        """Symbolic name of ``failure_reason`` (reverse lookup, no leading _)."""
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # a missing metadata key must not break rendering; fall back to
            # the raw template
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serialize the response (including the rendered message) to a dict."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
228
228
229
229
230 class BaseRepository(object):
230 class BaseRepository(object):
231 """
231 """
232 Base Repository for final backends
232 Base Repository for final backends
233
233
234 .. attribute:: DEFAULT_BRANCH_NAME
234 .. attribute:: DEFAULT_BRANCH_NAME
235
235
236 name of default branch (i.e. "trunk" for svn, "master" for git etc.
236 name of default branch (i.e. "trunk" for svn, "master" for git etc.
237
237
238 .. attribute:: commit_ids
238 .. attribute:: commit_ids
239
239
240 list of all available commit ids, in ascending order
240 list of all available commit ids, in ascending order
241
241
242 .. attribute:: path
242 .. attribute:: path
243
243
244 absolute path to the repository
244 absolute path to the repository
245
245
246 .. attribute:: bookmarks
246 .. attribute:: bookmarks
247
247
248 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
248 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
249 there are no bookmarks or the backend implementation does not support
249 there are no bookmarks or the backend implementation does not support
250 bookmarks.
250 bookmarks.
251
251
252 .. attribute:: tags
252 .. attribute:: tags
253
253
254 Mapping from name to :term:`Commit ID` of the tag.
254 Mapping from name to :term:`Commit ID` of the tag.
255
255
256 """
256 """
257
257
258 DEFAULT_BRANCH_NAME = None
258 DEFAULT_BRANCH_NAME = None
259 DEFAULT_CONTACT = u"Unknown"
259 DEFAULT_CONTACT = u"Unknown"
260 DEFAULT_DESCRIPTION = u"unknown"
260 DEFAULT_DESCRIPTION = u"unknown"
261 EMPTY_COMMIT_ID = '0' * 40
261 EMPTY_COMMIT_ID = '0' * 40
262
262
263 path = None
263 path = None
264
264
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path``, or if a directory at
        ``repo_path`` exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        # abstract: concrete backends (git/hg/svn) provide the implementation
        raise NotImplementedError
280
280
281 def __repr__(self):
281 def __repr__(self):
282 return '<%s at %s>' % (self.__class__.__name__, self.path)
282 return '<%s at %s>' % (self.__class__.__name__, self.path)
283
283
    def __len__(self):
        # Number of commits, delegated to the backend-specific count().
        return self.count()
286
286
287 def __eq__(self, other):
287 def __eq__(self, other):
288 same_instance = isinstance(other, self.__class__)
288 same_instance = isinstance(other, self.__class__)
289 return same_instance and other.path == self.path
289 return same_instance and other.path == self.path
290
290
    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, so define it explicitly.
        return not self.__eq__(other)
293
293
294 def get_create_shadow_cache_pr_path(self, db_repo):
294 def get_create_shadow_cache_pr_path(self, db_repo):
295 path = db_repo.cached_diffs_dir
295 path = db_repo.cached_diffs_dir
296 if not os.path.exists(path):
296 if not os.path.exists(path):
297 os.makedirs(path, 0o755)
297 os.makedirs(path, 0o755)
298 return path
298 return path
299
299
300 @classmethod
300 @classmethod
301 def get_default_config(cls, default=None):
301 def get_default_config(cls, default=None):
302 config = Config()
302 config = Config()
303 if default and isinstance(default, list):
303 if default and isinstance(default, list):
304 for section, key, val in default:
304 for section, key, val in default:
305 config.set(section, key, val)
305 config.set(section, key, val)
306 return config
306 return config
307
307
    @LazyProperty
    def _remote(self):
        # Abstract: handle used for backend RPC calls (e.g. install_hooks,
        # get_hooks_info below); provided by concrete backends.
        raise NotImplementedError
311
311
    def _heads(self, branch=None):
        # Default: no head information. Presumably overridden by backends
        # where a branch can have multiple heads — confirm in subclasses.
        return []
    @LazyProperty
    def EMPTY_COMMIT(self):
        """An `EmptyCommit` bound to this backend's `EMPTY_COMMIT_ID`."""
        return EmptyCommit(self.EMPTY_COMMIT_ID)
315
318
316 @LazyProperty
319 @LazyProperty
317 def alias(self):
320 def alias(self):
318 for k, v in settings.BACKENDS.items():
321 for k, v in settings.BACKENDS.items():
319 if v.split('.')[-1] == str(self.__class__.__name__):
322 if v.split('.')[-1] == str(self.__class__.__name__):
320 return k
323 return k
321
324
    @LazyProperty
    def name(self):
        """Repository name: the last path segment, as unicode."""
        return safe_unicode(os.path.basename(self.path))
325
328
    @LazyProperty
    def description(self):
        # Abstract: human-readable repository description.
        raise NotImplementedError
329
332
330 def refs(self):
333 def refs(self):
331 """
334 """
332 returns a `dict` with branches, bookmarks, tags, and closed_branches
335 returns a `dict` with branches, bookmarks, tags, and closed_branches
333 for this repository
336 for this repository
334 """
337 """
335 return dict(
338 return dict(
336 branches=self.branches,
339 branches=self.branches,
337 branches_closed=self.branches_closed,
340 branches_closed=self.branches_closed,
338 tags=self.tags,
341 tags=self.tags,
339 bookmarks=self.bookmarks
342 bookmarks=self.bookmarks
340 )
343 )
341
344
    @LazyProperty
    def branches(self):
        """
        A `dict` mapping branch names to commit ids.
        """
        raise NotImplementedError
348
351
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` mapping closed branch names to commit ids.
        (Presumably empty for backends without branch closing — confirm.)
        """
        raise NotImplementedError
355
358
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` mapping bookmark names to commit ids.
        """
        raise NotImplementedError
362
365
    @LazyProperty
    def tags(self):
        """
        A `dict` mapping tag names to commit ids.
        """
        raise NotImplementedError
369
372
    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files,
        measured at the tip commit.
        """
        tip = self.get_commit()
        return tip.size
377
380
    def size_at_commit(self, commit_id):
        # Combined size in bytes of all files as of `commit_id`.
        commit = self.get_commit(commit_id)
        return commit.size
381
384
382 def is_empty(self):
385 def is_empty(self):
383 return not bool(self.commit_ids)
386 return not bool(self.commit_ids)
384
387
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Abstract; backend specific validation.
        """
        raise NotImplementedError
392
395
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend.
        """
        raise NotImplementedError
399
402
400 # ==========================================================================
403 # ==========================================================================
401 # COMMITS
404 # COMMITS
402 # ==========================================================================
405 # ==========================================================================
403
406
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param commit_id: Optional. Backend-specific commit id string.
        :param commit_idx: Optional. Numeric index into `commit_ids`.
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tag: Optional, backend specific — presumably
            resolves tag names to commits; confirm in subclasses.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
414
417
415 def __iter__(self):
418 def __iter__(self):
416 for commit_id in self.commit_ids:
419 for commit_id in self.commit_ids:
417 yield self.get_commit(commit_id=commit_id)
420 yield self.get_commit(commit_id=commit_id)
418
421
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: optional lower bound on commit date (assumed
            datetime — confirm in subclasses)
        :param end_date: optional upper bound on commit date
        :param branch_name: optional branch to restrict the walk to
        :param show_hidden: whether hidden/obsolete commits are included
            (backend specific)
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tags: backend specific tag translation flag —
            confirm semantics in subclasses
        """
        raise NotImplementedError
437
440
438 def __getitem__(self, key):
441 def __getitem__(self, key):
439 """
442 """
440 Allows index based access to the commit objects of this repository.
443 Allows index based access to the commit objects of this repository.
441 """
444 """
442 pre_load = ["author", "branch", "date", "message", "parents"]
445 pre_load = ["author", "branch", "date", "message", "parents"]
443 if isinstance(key, slice):
446 if isinstance(key, slice):
444 return self._get_range(key, pre_load)
447 return self._get_range(key, pre_load)
445 return self.get_commit(commit_idx=key, pre_load=pre_load)
448 return self.get_commit(commit_idx=key, pre_load=pre_load)
446
449
447 def _get_range(self, slice_obj, pre_load):
450 def _get_range(self, slice_obj, pre_load):
448 for commit_id in self.commit_ids.__getitem__(slice_obj):
451 for commit_id in self.commit_ids.__getitem__(slice_obj):
449 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
452 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
450
453
    def count(self):
        """Return the number of commits this repository contains."""
        return len(self.commit_ids)
453
456
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param opts: backend specific extra options — confirm in subclasses

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
467
470
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        raise NotImplementedError
480
483
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError
506
509
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository.

        :param commit_id: commit to remove
        :param branch: optional branch restriction (backend specific)
        """
        raise NotImplementedError
512
515
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError
524
527
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
542
545
    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be closed before merging it
        """
        if dry_run:
            # Dry runs may be issued without commit metadata; fill in
            # placeholder values so backends always get non-empty fields.
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # A real merge creates a commit: author and message are mandatory.
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # Backend failures must not propagate to callers; report them as
            # an unsuccessful merge with the exception text in the metadata.
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})
595
598
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge; see :meth:`merge` for the
        parameter contract. Subclasses must return a MergeResponse."""
        raise NotImplementedError
602
605
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace if it does not exist yet (backend
        specific; typically a shadow repository).

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: target of the merge.
        :param source_ref: source of the merge.
        """
        raise NotImplementedError
611
614
612 def _get_legacy_shadow_repository_path(self, workspace_id):
615 def _get_legacy_shadow_repository_path(self, workspace_id):
613 """
616 """
614 Legacy version that was used before. We still need it for
617 Legacy version that was used before. We still need it for
615 backward compat
618 backward compat
616 """
619 """
617 return os.path.join(
620 return os.path.join(
618 os.path.dirname(self.path),
621 os.path.dirname(self.path),
619 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
622 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
620
623
621 def _get_shadow_repository_path(self, repo_id, workspace_id):
624 def _get_shadow_repository_path(self, repo_id, workspace_id):
622 # The name of the shadow repository must start with '.', so it is
625 # The name of the shadow repository must start with '.', so it is
623 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
626 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
624 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
627 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
625 if os.path.exists(legacy_repository_path):
628 if os.path.exists(legacy_repository_path):
626 return legacy_repository_path
629 return legacy_repository_path
627 else:
630 else:
628 return os.path.join(
631 return os.path.join(
629 os.path.dirname(self.path),
632 os.path.dirname(self.path),
630 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
633 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
631
634
    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
        # Timestamp suffix keeps the rename target unique, so it never
        # collides with a shadow repo currently in use.
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            # Nothing to clean up; per contract this must not fail.
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # Best-effort fallback: log, then retry ignoring errors so a
            # stubborn leftover never breaks the caller.
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
658
661
659 # ========== #
662 # ========== #
660 # COMMIT API #
663 # COMMIT API #
661 # ========== #
664 # ========== #
662
665
663 @LazyProperty
666 @LazyProperty
664 def in_memory_commit(self):
667 def in_memory_commit(self):
665 """
668 """
666 Returns :class:`InMemoryCommit` object for this repository.
669 Returns :class:`InMemoryCommit` object for this repository.
667 """
670 """
668 raise NotImplementedError
671 raise NotImplementedError
669
672
670 # ======================== #
673 # ======================== #
671 # UTILITIES FOR SUBCLASSES #
674 # UTILITIES FOR SUBCLASSES #
672 # ======================== #
675 # ======================== #
673
676
674 def _validate_diff_commits(self, commit1, commit2):
677 def _validate_diff_commits(self, commit1, commit2):
675 """
678 """
676 Validates that the given commits are related to this repository.
679 Validates that the given commits are related to this repository.
677
680
678 Intended as a utility for sub classes to have a consistent validation
681 Intended as a utility for sub classes to have a consistent validation
679 of input parameters in methods like :meth:`get_diff`.
682 of input parameters in methods like :meth:`get_diff`.
680 """
683 """
681 self._validate_commit(commit1)
684 self._validate_commit(commit1)
682 self._validate_commit(commit2)
685 self._validate_commit(commit2)
683 if (isinstance(commit1, EmptyCommit) and
686 if (isinstance(commit1, EmptyCommit) and
684 isinstance(commit2, EmptyCommit)):
687 isinstance(commit2, EmptyCommit)):
685 raise ValueError("Cannot compare two empty commits")
688 raise ValueError("Cannot compare two empty commits")
686
689
    def _validate_commit(self, commit):
        """
        Ensure `commit` is a BaseCommit belonging to this repository;
        EmptyCommit instances are accepted regardless of origin.
        """
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))
696
699
    def _validate_commit_id(self, commit_id):
        # Commit ids must be strings (py2: str or unicode, via compat).
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value")
700
703
    def _validate_commit_idx(self, commit_idx):
        # Numeric index; `long` keeps Python 2 compatibility.
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")
704
707
705 def _validate_branch_name(self, branch_name):
708 def _validate_branch_name(self, branch_name):
706 if branch_name and branch_name not in self.branches_all:
709 if branch_name and branch_name not in self.branches_all:
707 msg = ("Branch %s not found in %s" % (branch_name, self))
710 msg = ("Branch %s not found in %s" % (branch_name, self))
708 raise BranchDoesNotExistError(msg)
711 raise BranchDoesNotExistError(msg)
709
712
710 #
713 #
711 # Supporting deprecated API parts
714 # Supporting deprecated API parts
712 # TODO: johbo: consider to move this into a mixin
715 # TODO: johbo: consider to move this into a mixin
713 #
716 #
714
717
715 @property
718 @property
716 def EMPTY_CHANGESET(self):
719 def EMPTY_CHANGESET(self):
717 warnings.warn(
720 warnings.warn(
718 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
721 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
719 return self.EMPTY_COMMIT_ID
722 return self.EMPTY_COMMIT_ID
720
723
    @property
    def revisions(self):
        # Deprecated alias for `commit_ids` (old changeset based API).
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        # Deprecated setter counterpart of the alias above.
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value
730
733
731 def get_changeset(self, revision=None, pre_load=None):
734 def get_changeset(self, revision=None, pre_load=None):
732 warnings.warn("Use get_commit instead", DeprecationWarning)
735 warnings.warn("Use get_commit instead", DeprecationWarning)
733 commit_id = None
736 commit_id = None
734 commit_idx = None
737 commit_idx = None
735 if isinstance(revision, compat.string_types):
738 if isinstance(revision, compat.string_types):
736 commit_id = revision
739 commit_id = revision
737 else:
740 else:
738 commit_idx = revision
741 commit_idx = revision
739 return self.get_commit(
742 return self.get_commit(
740 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
743 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
741
744
    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        # Deprecated shim over get_commits; `start`/`end` may be commit ids
        # or numeric indices (translated by _revision_to_commit).
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)
751
754
def _revision_to_commit(self, revision):
    """
    Translates a revision to a commit_id

    Helps to support the old changeset based API which allows to use
    commit ids and commit indices interchangeable.
    """
    # None passes straight through (open-ended range markers).
    if revision is None:
        return None
    # Strings are already commit ids; integers index into commit_ids.
    if isinstance(revision, compat.string_types):
        return revision
    return self.commit_ids[revision]
767
770
@property
def in_memory_changeset(self):
    """Deprecated alias for :attr:`in_memory_commit`."""
    warnings.warn("Use in_memory_commit instead", DeprecationWarning)
    return self.in_memory_commit
772
775
def get_path_permissions(self, username):
    """
    Returns a path permission checker or None if not supported

    :param username: session user name
    :return: an instance of BasePathPermissionChecker or None
    """
    # Base implementation: per-path permissions are not supported;
    # backends that support them override this method.
    return None
781
784
def install_hooks(self, force=False):
    """
    Delegate hook installation to the remote backend.

    :param force: when True, existing hooks are overwritten.
    """
    remote = self._remote
    return remote.install_hooks(force)
784
787
def get_hooks_info(self):
    """Return hook information reported by the remote backend."""
    remote = self._remote
    return remote.get_hooks_info()
787
790
788
791
class BaseCommit(object):
    """
    Each backend should implement it's commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id, may be ``raw_id`` or i.e. for mercurial's tip
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if apply) version of ``raw_id``; it would be simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """
846
849
def __str__(self):
    """Debug representation: class name plus ``idx:short_id``."""
    klass = self.__class__.__name__
    return '<%s at %s:%s>' % (klass, self.idx, self.short_id)

def __repr__(self):
    """Mirror ``__str__`` for interactive display."""
    return self.__str__()

def __unicode__(self):
    """Short user-facing form ``idx:short_id`` (py2 unicode hook)."""
    return u'%s:%s' % (self.idx, self.short_id)

def __eq__(self, other):
    """Commits are equal when the classes match and raw ids match."""
    if not isinstance(other, self.__class__):
        return False
    return self.raw_id == other.raw_id
860
863
def __json__(self):
    """
    Return a JSON-serializable dict describing this commit.

    Parent commits are reduced to ``{'raw_id': ...}``; an empty commit
    whose ``parents`` is not implemented serializes with no parents.
    """
    parent_ids = []
    try:
        for parent in self.parents:
            parent_ids.append({'raw_id': parent.raw_id})
    except NotImplementedError:
        # empty commit doesn't have parents implemented
        pass

    return {
        'short_id': self.short_id,
        'raw_id': self.raw_id,
        'revision': self.idx,
        'message': self.message,
        'date': self.date,
        'author': self.author,
        'parents': parent_ids,
        'branch': self.branch
    }
880
883
def __getstate__(self):
    """Pickle support: drop the unpicklable remote/repository handles."""
    state = self.__dict__.copy()
    state.pop('_remote', None)
    state.pop('repository', None)
    return state

def _get_refs(self):
    """Refs (branches/bookmarks/tags) that point at this commit."""
    refs = {
        'branches': [self.branch] if self.branch else [],
        'bookmarks': getattr(self, 'bookmarks', []),
        'tags': self.tags
    }
    return refs
893
896
@LazyProperty
def last(self):
    """
    ``True`` if this is last commit in repository, ``False``
    otherwise; trying to access this attribute while there is no
    commits would raise `EmptyRepositoryError`
    """
    if self.repository is None:
        raise CommitError("Cannot check if it's most recent commit")
    return self.raw_id == self.repository.commit_ids[-1]

@LazyProperty
def parents(self):
    """
    Returns list of parent commits.
    """
    raise NotImplementedError

@LazyProperty
def first_parent(self):
    """
    Returns the first parent commit, or an ``EmptyCommit`` when this
    commit has no parents.
    """
    return self.parents[0] if self.parents else EmptyCommit()

@property
def merge(self):
    """
    Returns boolean if commit is a merge.
    """
    return len(self.parents) > 1

@LazyProperty
def children(self):
    """
    Returns list of child commits.
    """
    raise NotImplementedError

@LazyProperty
def id(self):
    """
    Returns string identifying this commit.
    """
    raise NotImplementedError

@LazyProperty
def raw_id(self):
    """
    Returns raw string identifying this commit.
    """
    raise NotImplementedError

@LazyProperty
def short_id(self):
    """
    Returns shortened version of ``raw_id`` attribute, as string,
    identifying this commit, useful for presentation to users.
    """
    raise NotImplementedError

@LazyProperty
def idx(self):
    """
    Returns integer identifying this commit.
    """
    raise NotImplementedError

@LazyProperty
def committer(self):
    """
    Returns committer for this commit
    """
    raise NotImplementedError
968
971
# Derived author/committer fields: name/email are parsed out of the
# combined "Name <email>" strings via the author_name/author_email helpers.

@LazyProperty
def committer_name(self):
    """
    Returns committer name for this commit
    """

    return author_name(self.committer)

@LazyProperty
def committer_email(self):
    """
    Returns committer email address for this commit
    """

    return author_email(self.committer)

@LazyProperty
def author(self):
    """
    Returns author for this commit
    """

    raise NotImplementedError

@LazyProperty
def author_name(self):
    """
    Returns author name for this commit
    """

    return author_name(self.author)

@LazyProperty
def author_email(self):
    """
    Returns author email address for this commit
    """

    return author_email(self.author)
1008
1011
# -- abstract file accessors: every backend must override these --

def get_file_mode(self, path):
    """
    Returns stat mode of the file at `path`.
    """
    raise NotImplementedError

def is_link(self, path):
    """
    Returns ``True`` if given `path` is a symlink
    """
    raise NotImplementedError

def get_file_content(self, path):
    """
    Returns content of the file at the given `path`.
    """
    raise NotImplementedError

def get_file_size(self, path):
    """
    Returns size of the file at the given `path`.
    """
    raise NotImplementedError
1032
1035
def get_path_commit(self, path, pre_load=None):
    """
    Returns last commit of the file at the given `path`.

    :param pre_load: Optional. List of commit attributes to load.
    :raises RepositoryError: when no history exists for `path`.
    """
    history = self.get_path_history(path, limit=1, pre_load=pre_load)
    if not history:
        raise RepositoryError(
            'Failed to fetch history for path {}. '
            'Please check if such path exists in your repository'.format(
                path))
    return history[0]
1046
1049
def get_path_history(self, path, limit=None, pre_load=None):
    """
    Returns history of file as reversed list of :class:`BaseCommit`
    objects for which file at given `path` has been modified.

    :param limit: Optional. Allows to limit the size of the returned
       history. This is intended as a hint to the underlying backend, so
       that it can apply optimizations depending on the limit.
    :param pre_load: Optional. List of commit attributes to load.
    """
    raise NotImplementedError

def get_file_annotate(self, path, pre_load=None):
    """
    Returns a generator of four element tuples with
    lineno, sha, commit lazy loader and line

    :param pre_load: Optional. List of commit attributes to load.
    """
    raise NotImplementedError

def get_nodes(self, path):
    """
    Returns combined ``DirNode`` and ``FileNode`` objects list representing
    state of commit at the given ``path``.

    :raises ``CommitError``: if node at the given ``path`` is not
       instance of ``DirNode``
    """
    raise NotImplementedError

def get_node(self, path):
    """
    Returns ``Node`` object from the given ``path``.

    :raises ``NodeDoesNotExistError``: if there is no node at the given
       ``path``
    """
    raise NotImplementedError

def get_largefile_node(self, path):
    """
    Returns the path to largefile from Mercurial/Git-lfs storage.
    or None if it's not a largefile node
    """
    # Base implementation: largefiles are not supported unless a backend
    # overrides this method.
    return None
1093
1096
def archive_repo(self, file_path, kind='tgz', subrepos=None,
                 prefix=None, write_metadata=False, mtime=None):
    """
    Creates an archive containing the contents of the repository.

    :param file_path: path to the file which to create the archive.
    :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
    :param subrepos: accepted for API compatibility; not used by this
        implementation.
    :param prefix: name of root directory in archive.
        Default is repository name and commit's short_id joined with dash:
        ``"{repo_name}-{short_id}"``.
    :param write_metadata: write a metadata file into archive.
    :param mtime: custom modification time for archive creation, defaults
        to time.time() if not given.

    :raise VCSError: If prefix has a problem.
    """
    allowed_kinds = settings.ARCHIVE_SPECS.keys()
    if kind not in allowed_kinds:
        raise ImproperArchiveTypeError(
            'Archive kind (%s) not supported use one of %s' %
            (kind, allowed_kinds))

    # Normalize/derive the archive root directory name.
    prefix = self._validate_archive_prefix(prefix)

    # Default the archive timestamp to this commit's date.
    mtime = mtime or time.mktime(self.date.timetuple())

    # Collect (path, mode, is_link, content) for every file reachable
    # from the repository root at this commit.
    file_info = []
    cur_rev = self.repository.get_commit(commit_id=self.raw_id)
    for _r, _d, files in cur_rev.walk('/'):
        for f in files:
            f_path = os.path.join(prefix, f.path)
            file_info.append(
                (f_path, f.mode, f.is_link(), f.raw_bytes))

    if write_metadata:
        # Append a plain-text manifest describing the archived revision.
        metadata = [
            ('repo_name', self.repository.name),
            ('rev', self.raw_id),
            ('create_time', mtime),
            ('branch', self.branch),
            ('tags', ','.join(self.tags)),
        ]
        meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
        file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))

    # The actual archive writing is delegated to the vcsserver connection.
    connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1140
1143
def _validate_archive_prefix(self, prefix):
    """
    Validate (or derive) the archive root directory `prefix`.

    ``None`` is replaced by the default template; otherwise the prefix
    must be a non-empty (byte) string without a leading slash.
    """
    if prefix is None:
        return self._ARCHIVE_PREFIX_TEMPLATE.format(
            repo_name=safe_str(self.repository.name),
            short_id=self.short_id)
    if not isinstance(prefix, str):
        raise ValueError("prefix not a bytes object: %s" % repr(prefix))
    if prefix.startswith('/'):
        raise VCSError("Prefix cannot start with leading slash")
    if prefix.strip() == '':
        raise VCSError("Prefix cannot be empty")
    return prefix
1153
1156
@LazyProperty
def root(self):
    """
    Returns ``RootNode`` object for this commit.
    """
    return self.get_node('')

def next(self, branch=None):
    """
    Returns next commit from current, if branch is given it will return
    next commit belonging to this branch

    :param branch: show commits within the given named branch
    """
    # NOTE: xrange — this module still targets Python 2.
    indexes = xrange(self.idx + 1, self.repository.count())
    return self._find_next(indexes, branch)

def prev(self, branch=None):
    """
    Returns previous commit from current, if branch is given it will
    return previous commit belonging to this branch

    :param branch: show commit within the given named branch
    """
    indexes = xrange(self.idx - 1, -1, -1)
    return self._find_next(indexes, branch)

def _find_next(self, indexes, branch=None):
    # Scan commit indices in the given order and return the first commit,
    # optionally restricted to `branch`; raises when none matches.
    if branch and self.branch != branch:
        raise VCSError('Branch option used on commit not belonging '
                       'to that branch')

    for next_idx in indexes:
        commit = self.repository.get_commit(commit_idx=next_idx)
        if branch and branch != commit.branch:
            continue
        return commit
    raise CommitDoesNotExistError
1192
1195
def diff(self, ignore_whitespace=True, context=3):
    """
    Returns a `Diff` object representing the change made by this commit
    relative to its first parent.
    """
    return self.repository.get_diff(
        self.first_parent, self,
        ignore_whitespace=ignore_whitespace,
        context=context)
1203
1206
@LazyProperty
def added(self):
    """
    Returns list of added ``FileNode`` objects.
    """
    raise NotImplementedError

@LazyProperty
def changed(self):
    """
    Returns list of modified ``FileNode`` objects.
    """
    raise NotImplementedError

@LazyProperty
def removed(self):
    """
    Returns list of removed ``FileNode`` objects.
    """
    raise NotImplementedError

@LazyProperty
def size(self):
    """
    Returns total number of bytes from contents of all filenodes.
    """
    return sum((node.size for node in self.get_filenodes_generator()))
1231
1234
def walk(self, topurl=''):
    """
    Similar to os.walk method. Instead of filesystem it walks through
    commit starting at given ``topurl``. Returns generator of tuples
    (topnode, dirnodes, filenodes).
    """
    topnode = self.get_node(topurl)
    if not topnode.is_dir():
        return
    yield topnode, topnode.dirs, topnode.files
    for dirnode in topnode.dirs:
        # recurse through self so subclasses may customize walk()
        for item in self.walk(dirnode.path):
            yield item

def get_filenodes_generator(self):
    """
    Returns generator that yields *all* file nodes.
    """
    for _topnode, _dirs, files in self.walk():
        for filenode in files:
            yield filenode
1253
1256
#
# Utilities for sub classes to support consistent behavior
#

def no_node_at_path(self, path):
    # Factory for the canonical "missing path" error, so all backends
    # raise the same message shape.
    return NodeDoesNotExistError(
        u"There is no file nor directory at the given path: "
        u"`%s` at commit %s" % (safe_unicode(path), self.short_id))

def _fix_path(self, path):
    """
    Paths are stored without trailing slash so we need to get rid off it if
    needed.
    """
    return path.rstrip('/')
1269
1272
#
# Deprecated API based on changesets
#

@property
def revision(self):
    """Deprecated alias for :attr:`idx` (changeset-era API)."""
    warnings.warn("Use idx instead", DeprecationWarning)
    return self.idx

@revision.setter
def revision(self, value):
    # Deprecated setter; assigns straight through to ``idx``.
    warnings.warn("Use idx instead", DeprecationWarning)
    self.idx = value
1283
1286
def get_file_changeset(self, path):
    """Deprecated alias for :meth:`get_path_commit`."""
    warnings.warn("Use get_path_commit instead", DeprecationWarning)
    return self.get_path_commit(path)
1287
1290
1288
1291
class BaseChangesetClass(type):
    # Metaclass that makes ``isinstance(obj, BaseChangeset)`` accept any
    # BaseCommit instance, easing the changeset -> commit migration.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1293
1296
1294
1297
class BaseChangeset(BaseCommit):
    # Deprecated alias of BaseCommit; instantiating it emits a
    # DeprecationWarning but otherwise behaves like BaseCommit.

    __metaclass__ = BaseChangesetClass  # py2-style metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1303
1306
1304
1307
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """
1330
1333
def __init__(self, repository):
    # Start with an empty in-memory state bound to `repository`.
    self.repository = repository
    self.added = []    # FileNodes staged for addition
    self.changed = []  # FileNodes staged for modification
    self.removed = []  # (Removed)FileNodes staged for removal
    self.parents = []  # parent commits for the pending commit
1337
1340
def add(self, *filenodes):
    """
    Marks given ``FileNode`` objects as *to be committed*.

    :raises ``NodeAlreadyExistsError``: if node with same path exists at
        latest commit
    :raises ``NodeAlreadyAddedError``: if node with same path is already
        marked as *added*
    """
    # Validate every node before mutating self.added, so a failure
    # leaves the pending set untouched.
    pending_paths = set(n.path for n in self.added)
    for node in filenodes:
        if node.path in pending_paths:
            raise NodeAlreadyAddedError(
                "Such FileNode %s is already marked for addition"
                % node.path)
    self.added.extend(filenodes)
1355
1358
1356 def change(self, *filenodes):
1359 def change(self, *filenodes):
1357 """
1360 """
1358 Marks given ``FileNode`` objects to be *changed* in next commit.
1361 Marks given ``FileNode`` objects to be *changed* in next commit.
1359
1362
1360 :raises ``EmptyRepositoryError``: if there are no commits yet
1363 :raises ``EmptyRepositoryError``: if there are no commits yet
1361 :raises ``NodeAlreadyExistsError``: if node with same path is already
1364 :raises ``NodeAlreadyExistsError``: if node with same path is already
1362 marked to be *changed*
1365 marked to be *changed*
1363 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1366 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1364 marked to be *removed*
1367 marked to be *removed*
1365 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1368 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1366 commit
1369 commit
1367 :raises ``NodeNotChangedError``: if node hasn't really be changed
1370 :raises ``NodeNotChangedError``: if node hasn't really be changed
1368 """
1371 """
1369 for node in filenodes:
1372 for node in filenodes:
1370 if node.path in (n.path for n in self.removed):
1373 if node.path in (n.path for n in self.removed):
1371 raise NodeAlreadyRemovedError(
1374 raise NodeAlreadyRemovedError(
1372 "Node at %s is already marked as removed" % node.path)
1375 "Node at %s is already marked as removed" % node.path)
1373 try:
1376 try:
1374 self.repository.get_commit()
1377 self.repository.get_commit()
1375 except EmptyRepositoryError:
1378 except EmptyRepositoryError:
1376 raise EmptyRepositoryError(
1379 raise EmptyRepositoryError(
1377 "Nothing to change - try to *add* new nodes rather than "
1380 "Nothing to change - try to *add* new nodes rather than "
1378 "changing them")
1381 "changing them")
1379 for node in filenodes:
1382 for node in filenodes:
1380 if node.path in (n.path for n in self.changed):
1383 if node.path in (n.path for n in self.changed):
1381 raise NodeAlreadyChangedError(
1384 raise NodeAlreadyChangedError(
1382 "Node at '%s' is already marked as changed" % node.path)
1385 "Node at '%s' is already marked as changed" % node.path)
1383 self.changed.append(node)
1386 self.changed.append(node)
1384
1387
1385 def remove(self, *filenodes):
1388 def remove(self, *filenodes):
1386 """
1389 """
1387 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1390 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1388 *removed* in next commit.
1391 *removed* in next commit.
1389
1392
1390 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1393 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1391 be *removed*
1394 be *removed*
1392 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1395 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1393 be *changed*
1396 be *changed*
1394 """
1397 """
1395 for node in filenodes:
1398 for node in filenodes:
1396 if node.path in (n.path for n in self.removed):
1399 if node.path in (n.path for n in self.removed):
1397 raise NodeAlreadyRemovedError(
1400 raise NodeAlreadyRemovedError(
1398 "Node is already marked to for removal at %s" % node.path)
1401 "Node is already marked to for removal at %s" % node.path)
1399 if node.path in (n.path for n in self.changed):
1402 if node.path in (n.path for n in self.changed):
1400 raise NodeAlreadyChangedError(
1403 raise NodeAlreadyChangedError(
1401 "Node is already marked to be changed at %s" % node.path)
1404 "Node is already marked to be changed at %s" % node.path)
1402 # We only mark node as *removed* - real removal is done by
1405 # We only mark node as *removed* - real removal is done by
1403 # commit method
1406 # commit method
1404 self.removed.append(node)
1407 self.removed.append(node)
1405
1408
1406 def reset(self):
1409 def reset(self):
1407 """
1410 """
1408 Resets this instance to initial state (cleans ``added``, ``changed``
1411 Resets this instance to initial state (cleans ``added``, ``changed``
1409 and ``removed`` lists).
1412 and ``removed`` lists).
1410 """
1413 """
1411 self.added = []
1414 self.added = []
1412 self.changed = []
1415 self.changed = []
1413 self.removed = []
1416 self.removed = []
1414 self.parents = []
1417 self.parents = []
1415
1418
1416 def get_ipaths(self):
1419 def get_ipaths(self):
1417 """
1420 """
1418 Returns generator of paths from nodes marked as added, changed or
1421 Returns generator of paths from nodes marked as added, changed or
1419 removed.
1422 removed.
1420 """
1423 """
1421 for node in itertools.chain(self.added, self.changed, self.removed):
1424 for node in itertools.chain(self.added, self.changed, self.removed):
1422 yield node.path
1425 yield node.path
1423
1426
1424 def get_paths(self):
1427 def get_paths(self):
1425 """
1428 """
1426 Returns list of paths from nodes marked as added, changed or removed.
1429 Returns list of paths from nodes marked as added, changed or removed.
1427 """
1430 """
1428 return list(self.get_ipaths())
1431 return list(self.get_ipaths())
1429
1432
1430 def check_integrity(self, parents=None):
1433 def check_integrity(self, parents=None):
1431 """
1434 """
1432 Checks in-memory commit's integrity. Also, sets parents if not
1435 Checks in-memory commit's integrity. Also, sets parents if not
1433 already set.
1436 already set.
1434
1437
1435 :raises CommitError: if any error occurs (i.e.
1438 :raises CommitError: if any error occurs (i.e.
1436 ``NodeDoesNotExistError``).
1439 ``NodeDoesNotExistError``).
1437 """
1440 """
1438 if not self.parents:
1441 if not self.parents:
1439 parents = parents or []
1442 parents = parents or []
1440 if len(parents) == 0:
1443 if len(parents) == 0:
1441 try:
1444 try:
1442 parents = [self.repository.get_commit(), None]
1445 parents = [self.repository.get_commit(), None]
1443 except EmptyRepositoryError:
1446 except EmptyRepositoryError:
1444 parents = [None, None]
1447 parents = [None, None]
1445 elif len(parents) == 1:
1448 elif len(parents) == 1:
1446 parents += [None]
1449 parents += [None]
1447 self.parents = parents
1450 self.parents = parents
1448
1451
1449 # Local parents, only if not None
1452 # Local parents, only if not None
1450 parents = [p for p in self.parents if p]
1453 parents = [p for p in self.parents if p]
1451
1454
1452 # Check nodes marked as added
1455 # Check nodes marked as added
1453 for p in parents:
1456 for p in parents:
1454 for node in self.added:
1457 for node in self.added:
1455 try:
1458 try:
1456 p.get_node(node.path)
1459 p.get_node(node.path)
1457 except NodeDoesNotExistError:
1460 except NodeDoesNotExistError:
1458 pass
1461 pass
1459 else:
1462 else:
1460 raise NodeAlreadyExistsError(
1463 raise NodeAlreadyExistsError(
1461 "Node `%s` already exists at %s" % (node.path, p))
1464 "Node `%s` already exists at %s" % (node.path, p))
1462
1465
1463 # Check nodes marked as changed
1466 # Check nodes marked as changed
1464 missing = set(self.changed)
1467 missing = set(self.changed)
1465 not_changed = set(self.changed)
1468 not_changed = set(self.changed)
1466 if self.changed and not parents:
1469 if self.changed and not parents:
1467 raise NodeDoesNotExistError(str(self.changed[0].path))
1470 raise NodeDoesNotExistError(str(self.changed[0].path))
1468 for p in parents:
1471 for p in parents:
1469 for node in self.changed:
1472 for node in self.changed:
1470 try:
1473 try:
1471 old = p.get_node(node.path)
1474 old = p.get_node(node.path)
1472 missing.remove(node)
1475 missing.remove(node)
1473 # if content actually changed, remove node from not_changed
1476 # if content actually changed, remove node from not_changed
1474 if old.content != node.content:
1477 if old.content != node.content:
1475 not_changed.remove(node)
1478 not_changed.remove(node)
1476 except NodeDoesNotExistError:
1479 except NodeDoesNotExistError:
1477 pass
1480 pass
1478 if self.changed and missing:
1481 if self.changed and missing:
1479 raise NodeDoesNotExistError(
1482 raise NodeDoesNotExistError(
1480 "Node `%s` marked as modified but missing in parents: %s"
1483 "Node `%s` marked as modified but missing in parents: %s"
1481 % (node.path, parents))
1484 % (node.path, parents))
1482
1485
1483 if self.changed and not_changed:
1486 if self.changed and not_changed:
1484 raise NodeNotChangedError(
1487 raise NodeNotChangedError(
1485 "Node `%s` wasn't actually changed (parents: %s)"
1488 "Node `%s` wasn't actually changed (parents: %s)"
1486 % (not_changed.pop().path, parents))
1489 % (not_changed.pop().path, parents))
1487
1490
1488 # Check nodes marked as removed
1491 # Check nodes marked as removed
1489 if self.removed and not parents:
1492 if self.removed and not parents:
1490 raise NodeDoesNotExistError(
1493 raise NodeDoesNotExistError(
1491 "Cannot remove node at %s as there "
1494 "Cannot remove node at %s as there "
1492 "were no parents specified" % self.removed[0].path)
1495 "were no parents specified" % self.removed[0].path)
1493 really_removed = set()
1496 really_removed = set()
1494 for p in parents:
1497 for p in parents:
1495 for node in self.removed:
1498 for node in self.removed:
1496 try:
1499 try:
1497 p.get_node(node.path)
1500 p.get_node(node.path)
1498 really_removed.add(node)
1501 really_removed.add(node)
1499 except CommitError:
1502 except CommitError:
1500 pass
1503 pass
1501 not_removed = set(self.removed) - really_removed
1504 not_removed = set(self.removed) - really_removed
1502 if not_removed:
1505 if not_removed:
1503 # TODO: johbo: This code branch does not seem to be covered
1506 # TODO: johbo: This code branch does not seem to be covered
1504 raise NodeDoesNotExistError(
1507 raise NodeDoesNotExistError(
1505 "Cannot remove node at %s from "
1508 "Cannot remove node at %s from "
1506 "following parents: %s" % (not_removed, parents))
1509 "following parents: %s" % (not_removed, parents))
1507
1510
1508 def commit(
1511 def commit(
1509 self, message, author, parents=None, branch=None, date=None,
1512 self, message, author, parents=None, branch=None, date=None,
1510 **kwargs):
1513 **kwargs):
1511 """
1514 """
1512 Performs in-memory commit (doesn't check workdir in any way) and
1515 Performs in-memory commit (doesn't check workdir in any way) and
1513 returns newly created :class:`BaseCommit`. Updates repository's
1516 returns newly created :class:`BaseCommit`. Updates repository's
1514 attribute `commits`.
1517 attribute `commits`.
1515
1518
1516 .. note::
1519 .. note::
1517
1520
1518 While overriding this method each backend's should call
1521 While overriding this method each backend's should call
1519 ``self.check_integrity(parents)`` in the first place.
1522 ``self.check_integrity(parents)`` in the first place.
1520
1523
1521 :param message: message of the commit
1524 :param message: message of the commit
1522 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1525 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1523 :param parents: single parent or sequence of parents from which commit
1526 :param parents: single parent or sequence of parents from which commit
1524 would be derived
1527 would be derived
1525 :param date: ``datetime.datetime`` instance. Defaults to
1528 :param date: ``datetime.datetime`` instance. Defaults to
1526 ``datetime.datetime.now()``.
1529 ``datetime.datetime.now()``.
1527 :param branch: branch name, as string. If none given, default backend's
1530 :param branch: branch name, as string. If none given, default backend's
1528 branch would be used.
1531 branch would be used.
1529
1532
1530 :raises ``CommitError``: if any error occurs while committing
1533 :raises ``CommitError``: if any error occurs while committing
1531 """
1534 """
1532 raise NotImplementedError
1535 raise NotImplementedError
1533
1536
1534
1537
1535 class BaseInMemoryChangesetClass(type):
1538 class BaseInMemoryChangesetClass(type):
1536
1539
1537 def __instancecheck__(self, instance):
1540 def __instancecheck__(self, instance):
1538 return isinstance(instance, BaseInMemoryCommit)
1541 return isinstance(instance, BaseInMemoryCommit)
1539
1542
1540
1543
1541 class BaseInMemoryChangeset(BaseInMemoryCommit):
1544 class BaseInMemoryChangeset(BaseInMemoryCommit):
1542
1545
1543 __metaclass__ = BaseInMemoryChangesetClass
1546 __metaclass__ = BaseInMemoryChangesetClass
1544
1547
1545 def __new__(cls, *args, **kwargs):
1548 def __new__(cls, *args, **kwargs):
1546 warnings.warn(
1549 warnings.warn(
1547 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1550 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1548 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1551 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1549
1552
1550
1553
1551 class EmptyCommit(BaseCommit):
1554 class EmptyCommit(BaseCommit):
1552 """
1555 """
1553 An dummy empty commit. It's possible to pass hash when creating
1556 An dummy empty commit. It's possible to pass hash when creating
1554 an EmptyCommit
1557 an EmptyCommit
1555 """
1558 """
1556
1559
1557 def __init__(
1560 def __init__(
1558 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1561 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1559 message='', author='', date=None):
1562 message='', author='', date=None):
1560 self._empty_commit_id = commit_id
1563 self._empty_commit_id = commit_id
1561 # TODO: johbo: Solve idx parameter, default value does not make
1564 # TODO: johbo: Solve idx parameter, default value does not make
1562 # too much sense
1565 # too much sense
1563 self.idx = idx
1566 self.idx = idx
1564 self.message = message
1567 self.message = message
1565 self.author = author
1568 self.author = author
1566 self.date = date or datetime.datetime.fromtimestamp(0)
1569 self.date = date or datetime.datetime.fromtimestamp(0)
1567 self.repository = repo
1570 self.repository = repo
1568 self.alias = alias
1571 self.alias = alias
1569
1572
1570 @LazyProperty
1573 @LazyProperty
1571 def raw_id(self):
1574 def raw_id(self):
1572 """
1575 """
1573 Returns raw string identifying this commit, useful for web
1576 Returns raw string identifying this commit, useful for web
1574 representation.
1577 representation.
1575 """
1578 """
1576
1579
1577 return self._empty_commit_id
1580 return self._empty_commit_id
1578
1581
1579 @LazyProperty
1582 @LazyProperty
1580 def branch(self):
1583 def branch(self):
1581 if self.alias:
1584 if self.alias:
1582 from rhodecode.lib.vcs.backends import get_backend
1585 from rhodecode.lib.vcs.backends import get_backend
1583 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1586 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1584
1587
1585 @LazyProperty
1588 @LazyProperty
1586 def short_id(self):
1589 def short_id(self):
1587 return self.raw_id[:12]
1590 return self.raw_id[:12]
1588
1591
1589 @LazyProperty
1592 @LazyProperty
1590 def id(self):
1593 def id(self):
1591 return self.raw_id
1594 return self.raw_id
1592
1595
1593 def get_path_commit(self, path):
1596 def get_path_commit(self, path):
1594 return self
1597 return self
1595
1598
1596 def get_file_content(self, path):
1599 def get_file_content(self, path):
1597 return u''
1600 return u''
1598
1601
1599 def get_file_size(self, path):
1602 def get_file_size(self, path):
1600 return 0
1603 return 0
1601
1604
1602
1605
1603 class EmptyChangesetClass(type):
1606 class EmptyChangesetClass(type):
1604
1607
1605 def __instancecheck__(self, instance):
1608 def __instancecheck__(self, instance):
1606 return isinstance(instance, EmptyCommit)
1609 return isinstance(instance, EmptyCommit)
1607
1610
1608
1611
1609 class EmptyChangeset(EmptyCommit):
1612 class EmptyChangeset(EmptyCommit):
1610
1613
1611 __metaclass__ = EmptyChangesetClass
1614 __metaclass__ = EmptyChangesetClass
1612
1615
1613 def __new__(cls, *args, **kwargs):
1616 def __new__(cls, *args, **kwargs):
1614 warnings.warn(
1617 warnings.warn(
1615 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1618 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1616 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1619 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1617
1620
1618 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1621 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1619 alias=None, revision=-1, message='', author='', date=None):
1622 alias=None, revision=-1, message='', author='', date=None):
1620 if requested_revision is not None:
1623 if requested_revision is not None:
1621 warnings.warn(
1624 warnings.warn(
1622 "Parameter requested_revision not supported anymore",
1625 "Parameter requested_revision not supported anymore",
1623 DeprecationWarning)
1626 DeprecationWarning)
1624 super(EmptyChangeset, self).__init__(
1627 super(EmptyChangeset, self).__init__(
1625 commit_id=cs, repo=repo, alias=alias, idx=revision,
1628 commit_id=cs, repo=repo, alias=alias, idx=revision,
1626 message=message, author=author, date=date)
1629 message=message, author=author, date=date)
1627
1630
1628 @property
1631 @property
1629 def revision(self):
1632 def revision(self):
1630 warnings.warn("Use idx instead", DeprecationWarning)
1633 warnings.warn("Use idx instead", DeprecationWarning)
1631 return self.idx
1634 return self.idx
1632
1635
1633 @revision.setter
1636 @revision.setter
1634 def revision(self, value):
1637 def revision(self, value):
1635 warnings.warn("Use idx instead", DeprecationWarning)
1638 warnings.warn("Use idx instead", DeprecationWarning)
1636 self.idx = value
1639 self.idx = value
1637
1640
1638
1641
1639 class EmptyRepository(BaseRepository):
1642 class EmptyRepository(BaseRepository):
1640 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1643 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1641 pass
1644 pass
1642
1645
1643 def get_diff(self, *args, **kwargs):
1646 def get_diff(self, *args, **kwargs):
1644 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1647 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1645 return GitDiff('')
1648 return GitDiff('')
1646
1649
1647
1650
1648 class CollectionGenerator(object):
1651 class CollectionGenerator(object):
1649
1652
1650 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1653 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1651 self.repo = repo
1654 self.repo = repo
1652 self.commit_ids = commit_ids
1655 self.commit_ids = commit_ids
1653 # TODO: (oliver) this isn't currently hooked up
1656 # TODO: (oliver) this isn't currently hooked up
1654 self.collection_size = None
1657 self.collection_size = None
1655 self.pre_load = pre_load
1658 self.pre_load = pre_load
1656 self.translate_tag = translate_tag
1659 self.translate_tag = translate_tag
1657
1660
1658 def __len__(self):
1661 def __len__(self):
1659 if self.collection_size is not None:
1662 if self.collection_size is not None:
1660 return self.collection_size
1663 return self.collection_size
1661 return self.commit_ids.__len__()
1664 return self.commit_ids.__len__()
1662
1665
1663 def __iter__(self):
1666 def __iter__(self):
1664 for commit_id in self.commit_ids:
1667 for commit_id in self.commit_ids:
1665 # TODO: johbo: Mercurial passes in commit indices or commit ids
1668 # TODO: johbo: Mercurial passes in commit indices or commit ids
1666 yield self._commit_factory(commit_id)
1669 yield self._commit_factory(commit_id)
1667
1670
1668 def _commit_factory(self, commit_id):
1671 def _commit_factory(self, commit_id):
1669 """
1672 """
1670 Allows backends to override the way commits are generated.
1673 Allows backends to override the way commits are generated.
1671 """
1674 """
1672 return self.repo.get_commit(
1675 return self.repo.get_commit(
1673 commit_id=commit_id, pre_load=self.pre_load,
1676 commit_id=commit_id, pre_load=self.pre_load,
1674 translate_tag=self.translate_tag)
1677 translate_tag=self.translate_tag)
1675
1678
1676 def __getslice__(self, i, j):
1679 def __getslice__(self, i, j):
1677 """
1680 """
1678 Returns an iterator of sliced repository
1681 Returns an iterator of sliced repository
1679 """
1682 """
1680 commit_ids = self.commit_ids[i:j]
1683 commit_ids = self.commit_ids[i:j]
1681 return self.__class__(
1684 return self.__class__(
1682 self.repo, commit_ids, pre_load=self.pre_load,
1685 self.repo, commit_ids, pre_load=self.pre_load,
1683 translate_tag=self.translate_tag)
1686 translate_tag=self.translate_tag)
1684
1687
1685 def __repr__(self):
1688 def __repr__(self):
1686 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1689 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1687
1690
1688
1691
1689 class Config(object):
1692 class Config(object):
1690 """
1693 """
1691 Represents the configuration for a repository.
1694 Represents the configuration for a repository.
1692
1695
1693 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1696 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1694 standard library. It implements only the needed subset.
1697 standard library. It implements only the needed subset.
1695 """
1698 """
1696
1699
1697 def __init__(self):
1700 def __init__(self):
1698 self._values = {}
1701 self._values = {}
1699
1702
1700 def copy(self):
1703 def copy(self):
1701 clone = Config()
1704 clone = Config()
1702 for section, values in self._values.items():
1705 for section, values in self._values.items():
1703 clone._values[section] = values.copy()
1706 clone._values[section] = values.copy()
1704 return clone
1707 return clone
1705
1708
1706 def __repr__(self):
1709 def __repr__(self):
1707 return '<Config(%s sections) at %s>' % (
1710 return '<Config(%s sections) at %s>' % (
1708 len(self._values), hex(id(self)))
1711 len(self._values), hex(id(self)))
1709
1712
1710 def items(self, section):
1713 def items(self, section):
1711 return self._values.get(section, {}).iteritems()
1714 return self._values.get(section, {}).iteritems()
1712
1715
1713 def get(self, section, option):
1716 def get(self, section, option):
1714 return self._values.get(section, {}).get(option)
1717 return self._values.get(section, {}).get(option)
1715
1718
1716 def set(self, section, option, value):
1719 def set(self, section, option, value):
1717 section_values = self._values.setdefault(section, {})
1720 section_values = self._values.setdefault(section, {})
1718 section_values[option] = value
1721 section_values[option] = value
1719
1722
1720 def clear_section(self, section):
1723 def clear_section(self, section):
1721 self._values[section] = {}
1724 self._values[section] = {}
1722
1725
1723 def serialize(self):
1726 def serialize(self):
1724 """
1727 """
1725 Creates a list of three tuples (section, key, value) representing
1728 Creates a list of three tuples (section, key, value) representing
1726 this config object.
1729 this config object.
1727 """
1730 """
1728 items = []
1731 items = []
1729 for section in self._values:
1732 for section in self._values:
1730 for option, value in self._values[section].items():
1733 for option, value in self._values[section].items():
1731 items.append(
1734 items.append(
1732 (safe_str(section), safe_str(option), safe_str(value)))
1735 (safe_str(section), safe_str(option), safe_str(value)))
1733 return items
1736 return items
1734
1737
1735
1738
1736 class Diff(object):
1739 class Diff(object):
1737 """
1740 """
1738 Represents a diff result from a repository backend.
1741 Represents a diff result from a repository backend.
1739
1742
1740 Subclasses have to provide a backend specific value for
1743 Subclasses have to provide a backend specific value for
1741 :attr:`_header_re` and :attr:`_meta_re`.
1744 :attr:`_header_re` and :attr:`_meta_re`.
1742 """
1745 """
1743 _meta_re = None
1746 _meta_re = None
1744 _header_re = None
1747 _header_re = None
1745
1748
1746 def __init__(self, raw_diff):
1749 def __init__(self, raw_diff):
1747 self.raw = raw_diff
1750 self.raw = raw_diff
1748
1751
1749 def chunks(self):
1752 def chunks(self):
1750 """
1753 """
1751 split the diff in chunks of separate --git a/file b/file chunks
1754 split the diff in chunks of separate --git a/file b/file chunks
1752 to make diffs consistent we must prepend with \n, and make sure
1755 to make diffs consistent we must prepend with \n, and make sure
1753 we can detect last chunk as this was also has special rule
1756 we can detect last chunk as this was also has special rule
1754 """
1757 """
1755
1758
1756 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1759 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1757 header = diff_parts[0]
1760 header = diff_parts[0]
1758
1761
1759 if self._meta_re:
1762 if self._meta_re:
1760 match = self._meta_re.match(header)
1763 match = self._meta_re.match(header)
1761
1764
1762 chunks = diff_parts[1:]
1765 chunks = diff_parts[1:]
1763 total_chunks = len(chunks)
1766 total_chunks = len(chunks)
1764
1767
1765 return (
1768 return (
1766 DiffChunk(chunk, self, cur_chunk == total_chunks)
1769 DiffChunk(chunk, self, cur_chunk == total_chunks)
1767 for cur_chunk, chunk in enumerate(chunks, start=1))
1770 for cur_chunk, chunk in enumerate(chunks, start=1))
1768
1771
1769
1772
1770 class DiffChunk(object):
1773 class DiffChunk(object):
1771
1774
1772 def __init__(self, chunk, diff, last_chunk):
1775 def __init__(self, chunk, diff, last_chunk):
1773 self._diff = diff
1776 self._diff = diff
1774
1777
1775 # since we split by \ndiff --git that part is lost from original diff
1778 # since we split by \ndiff --git that part is lost from original diff
1776 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1779 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1777 if not last_chunk:
1780 if not last_chunk:
1778 chunk += '\n'
1781 chunk += '\n'
1779
1782
1780 match = self._diff._header_re.match(chunk)
1783 match = self._diff._header_re.match(chunk)
1781 self.header = match.groupdict()
1784 self.header = match.groupdict()
1782 self.diff = chunk[match.end():]
1785 self.diff = chunk[match.end():]
1783 self.raw = chunk
1786 self.raw = chunk
1784
1787
1785
1788
1786 class BasePathPermissionChecker(object):
1789 class BasePathPermissionChecker(object):
1787
1790
1788 @staticmethod
1791 @staticmethod
1789 def create_from_patterns(includes, excludes):
1792 def create_from_patterns(includes, excludes):
1790 if includes and '*' in includes and not excludes:
1793 if includes and '*' in includes and not excludes:
1791 return AllPathPermissionChecker()
1794 return AllPathPermissionChecker()
1792 elif excludes and '*' in excludes:
1795 elif excludes and '*' in excludes:
1793 return NonePathPermissionChecker()
1796 return NonePathPermissionChecker()
1794 else:
1797 else:
1795 return PatternPathPermissionChecker(includes, excludes)
1798 return PatternPathPermissionChecker(includes, excludes)
1796
1799
1797 @property
1800 @property
1798 def has_full_access(self):
1801 def has_full_access(self):
1799 raise NotImplemented()
1802 raise NotImplemented()
1800
1803
1801 def has_access(self, path):
1804 def has_access(self, path):
1802 raise NotImplemented()
1805 raise NotImplemented()
1803
1806
1804
1807
1805 class AllPathPermissionChecker(BasePathPermissionChecker):
1808 class AllPathPermissionChecker(BasePathPermissionChecker):
1806
1809
1807 @property
1810 @property
1808 def has_full_access(self):
1811 def has_full_access(self):
1809 return True
1812 return True
1810
1813
1811 def has_access(self, path):
1814 def has_access(self, path):
1812 return True
1815 return True
1813
1816
1814
1817
1815 class NonePathPermissionChecker(BasePathPermissionChecker):
1818 class NonePathPermissionChecker(BasePathPermissionChecker):
1816
1819
1817 @property
1820 @property
1818 def has_full_access(self):
1821 def has_full_access(self):
1819 return False
1822 return False
1820
1823
1821 def has_access(self, path):
1824 def has_access(self, path):
1822 return False
1825 return False
1823
1826
1824
1827
1825 class PatternPathPermissionChecker(BasePathPermissionChecker):
1828 class PatternPathPermissionChecker(BasePathPermissionChecker):
1826
1829
1827 def __init__(self, includes, excludes):
1830 def __init__(self, includes, excludes):
1828 self.includes = includes
1831 self.includes = includes
1829 self.excludes = excludes
1832 self.excludes = excludes
1830 self.includes_re = [] if not includes else [
1833 self.includes_re = [] if not includes else [
1831 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1834 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1832 self.excludes_re = [] if not excludes else [
1835 self.excludes_re = [] if not excludes else [
1833 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1836 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1834
1837
1835 @property
1838 @property
1836 def has_full_access(self):
1839 def has_full_access(self):
1837 return '*' in self.includes and not self.excludes
1840 return '*' in self.includes and not self.excludes
1838
1841
1839 def has_access(self, path):
1842 def has_access(self, path):
1840 for regex in self.excludes_re:
1843 for regex in self.excludes_re:
1841 if regex.match(path):
1844 if regex.match(path):
1842 return False
1845 return False
1843 for regex in self.includes_re:
1846 for regex in self.includes_re:
1844 if regex.match(path):
1847 if regex.match(path):
1845 return True
1848 return True
1846 return False
1849 return False
@@ -1,932 +1,937 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 do_workspace_checkout=False, with_wire=None, bare=False):
60 do_workspace_checkout=False, with_wire=None, bare=False):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param do_workspace_checkout=False: sets update of working copy after
70 :param do_workspace_checkout=False: sets update of working copy after
71 making a clone
71 making a clone
72 :param bare: not used, compatible with other VCS
72 :param bare: not used, compatible with other VCS
73 """
73 """
74
74
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
77 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
78 # special requirements
78 # special requirements
79 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
80 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
81 self.with_wire = with_wire
81 self.with_wire = with_wire
82
82
83 self._init_repo(create, src_url, do_workspace_checkout)
83 self._init_repo(create, src_url, do_workspace_checkout)
84
84
85 # caches
85 # caches
86 self._commit_ids = {}
86 self._commit_ids = {}
87
87
88 @LazyProperty
88 @LazyProperty
89 def _remote(self):
89 def _remote(self):
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91
91
92 @LazyProperty
92 @LazyProperty
93 def commit_ids(self):
93 def commit_ids(self):
94 """
94 """
95 Returns list of commit ids, in ascending order. Being lazy
95 Returns list of commit ids, in ascending order. Being lazy
96 attribute allows external tools to inject shas from cache.
96 attribute allows external tools to inject shas from cache.
97 """
97 """
98 commit_ids = self._get_all_commit_ids()
98 commit_ids = self._get_all_commit_ids()
99 self._rebuild_cache(commit_ids)
99 self._rebuild_cache(commit_ids)
100 return commit_ids
100 return commit_ids
101
101
102 def _rebuild_cache(self, commit_ids):
102 def _rebuild_cache(self, commit_ids):
103 self._commit_ids = dict((commit_id, index)
103 self._commit_ids = dict((commit_id, index)
104 for index, commit_id in enumerate(commit_ids))
104 for index, commit_id in enumerate(commit_ids))
105
105
106 @LazyProperty
106 @LazyProperty
107 def branches(self):
107 def branches(self):
108 return self._get_branches()
108 return self._get_branches()
109
109
110 @LazyProperty
110 @LazyProperty
111 def branches_closed(self):
111 def branches_closed(self):
112 return self._get_branches(active=False, closed=True)
112 return self._get_branches(active=False, closed=True)
113
113
114 @LazyProperty
114 @LazyProperty
115 def branches_all(self):
115 def branches_all(self):
116 all_branches = {}
116 all_branches = {}
117 all_branches.update(self.branches)
117 all_branches.update(self.branches)
118 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
119 return all_branches
119 return all_branches
120
120
121 def _get_branches(self, active=True, closed=False):
121 def _get_branches(self, active=True, closed=False):
122 """
122 """
123 Gets branches for this repository
123 Gets branches for this repository
124 Returns only not closed active branches by default
124 Returns only not closed active branches by default
125
125
126 :param active: return also active branches
126 :param active: return also active branches
127 :param closed: return also closed branches
127 :param closed: return also closed branches
128
128
129 """
129 """
130 if self.is_empty():
130 if self.is_empty():
131 return {}
131 return {}
132
132
133 def get_name(ctx):
133 def get_name(ctx):
134 return ctx[0]
134 return ctx[0]
135
135
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 self._remote.branches(active, closed).items()]
137 self._remote.branches(active, closed).items()]
138
138
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140
140
141 @LazyProperty
141 @LazyProperty
142 def tags(self):
142 def tags(self):
143 """
143 """
144 Gets tags for this repository
144 Gets tags for this repository
145 """
145 """
146 return self._get_tags()
146 return self._get_tags()
147
147
148 def _get_tags(self):
148 def _get_tags(self):
149 if self.is_empty():
149 if self.is_empty():
150 return {}
150 return {}
151
151
152 def get_name(ctx):
152 def get_name(ctx):
153 return ctx[0]
153 return ctx[0]
154
154
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 self._remote.tags().items()]
156 self._remote.tags().items()]
157
157
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159
159
160 def tag(self, name, user, commit_id=None, message=None, date=None,
160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 **kwargs):
161 **kwargs):
162 """
162 """
163 Creates and returns a tag for the given ``commit_id``.
163 Creates and returns a tag for the given ``commit_id``.
164
164
165 :param name: name for new tag
165 :param name: name for new tag
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param commit_id: commit id for which new tag would be created
167 :param commit_id: commit id for which new tag would be created
168 :param message: message of the tag's commit
168 :param message: message of the tag's commit
169 :param date: date of tag's commit
169 :param date: date of tag's commit
170
170
171 :raises TagAlreadyExistError: if tag with same name already exists
171 :raises TagAlreadyExistError: if tag with same name already exists
172 """
172 """
173 if name in self.tags:
173 if name in self.tags:
174 raise TagAlreadyExistError("Tag %s already exists" % name)
174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 commit = self.get_commit(commit_id=commit_id)
175 commit = self.get_commit(commit_id=commit_id)
176 local = kwargs.setdefault('local', False)
176 local = kwargs.setdefault('local', False)
177
177
178 if message is None:
178 if message is None:
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180
180
181 date, tz = date_to_timestamp_plus_offset(date)
181 date, tz = date_to_timestamp_plus_offset(date)
182
182
183 self._remote.tag(
183 self._remote.tag(
184 name, commit.raw_id, message, local, user, date, tz)
184 name, commit.raw_id, message, local, user, date, tz)
185 self._remote.invalidate_vcs_cache()
185 self._remote.invalidate_vcs_cache()
186
186
187 # Reinitialize tags
187 # Reinitialize tags
188 self.tags = self._get_tags()
188 self.tags = self._get_tags()
189 tag_id = self.tags[name]
189 tag_id = self.tags[name]
190
190
191 return self.get_commit(commit_id=tag_id)
191 return self.get_commit(commit_id=tag_id)
192
192
193 def remove_tag(self, name, user, message=None, date=None):
193 def remove_tag(self, name, user, message=None, date=None):
194 """
194 """
195 Removes tag with the given `name`.
195 Removes tag with the given `name`.
196
196
197 :param name: name of the tag to be removed
197 :param name: name of the tag to be removed
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param message: message of the tag's removal commit
199 :param message: message of the tag's removal commit
200 :param date: date of tag's removal commit
200 :param date: date of tag's removal commit
201
201
202 :raises TagDoesNotExistError: if tag with given name does not exists
202 :raises TagDoesNotExistError: if tag with given name does not exists
203 """
203 """
204 if name not in self.tags:
204 if name not in self.tags:
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 if message is None:
206 if message is None:
207 message = "Removed tag %s" % name
207 message = "Removed tag %s" % name
208 local = False
208 local = False
209
209
210 date, tz = date_to_timestamp_plus_offset(date)
210 date, tz = date_to_timestamp_plus_offset(date)
211
211
212 self._remote.tag(name, nullid, message, local, user, date, tz)
212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 self._remote.invalidate_vcs_cache()
213 self._remote.invalidate_vcs_cache()
214 self.tags = self._get_tags()
214 self.tags = self._get_tags()
215
215
216 @LazyProperty
216 @LazyProperty
217 def bookmarks(self):
217 def bookmarks(self):
218 """
218 """
219 Gets bookmarks for this repository
219 Gets bookmarks for this repository
220 """
220 """
221 return self._get_bookmarks()
221 return self._get_bookmarks()
222
222
223 def _get_bookmarks(self):
223 def _get_bookmarks(self):
224 if self.is_empty():
224 if self.is_empty():
225 return {}
225 return {}
226
226
227 def get_name(ctx):
227 def get_name(ctx):
228 return ctx[0]
228 return ctx[0]
229
229
230 _bookmarks = [
230 _bookmarks = [
231 (safe_unicode(n), hexlify(h)) for n, h in
231 (safe_unicode(n), hexlify(h)) for n, h in
232 self._remote.bookmarks().items()]
232 self._remote.bookmarks().items()]
233
233
234 return OrderedDict(sorted(_bookmarks, key=get_name))
234 return OrderedDict(sorted(_bookmarks, key=get_name))
235
235
236 def _get_all_commit_ids(self):
236 def _get_all_commit_ids(self):
237 return self._remote.get_all_commit_ids('visible')
237 return self._remote.get_all_commit_ids('visible')
238
238
239 def get_diff(
239 def get_diff(
240 self, commit1, commit2, path='', ignore_whitespace=False,
240 self, commit1, commit2, path='', ignore_whitespace=False,
241 context=3, path1=None):
241 context=3, path1=None):
242 """
242 """
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 `commit2` since `commit1`.
244 `commit2` since `commit1`.
245
245
246 :param commit1: Entry point from which diff is shown. Can be
246 :param commit1: Entry point from which diff is shown. Can be
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 the changes since empty state of the repository until `commit2`
248 the changes since empty state of the repository until `commit2`
249 :param commit2: Until which commit changes should be shown.
249 :param commit2: Until which commit changes should be shown.
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 changes. Defaults to ``False``.
251 changes. Defaults to ``False``.
252 :param context: How many lines before/after changed lines should be
252 :param context: How many lines before/after changed lines should be
253 shown. Defaults to ``3``.
253 shown. Defaults to ``3``.
254 """
254 """
255 self._validate_diff_commits(commit1, commit2)
255 self._validate_diff_commits(commit1, commit2)
256 if path1 is not None and path1 != path:
256 if path1 is not None and path1 != path:
257 raise ValueError("Diff of two different paths not supported.")
257 raise ValueError("Diff of two different paths not supported.")
258
258
259 if path:
259 if path:
260 file_filter = [self.path, path]
260 file_filter = [self.path, path]
261 else:
261 else:
262 file_filter = None
262 file_filter = None
263
263
264 diff = self._remote.diff(
264 diff = self._remote.diff(
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 opt_git=True, opt_ignorews=ignore_whitespace,
266 opt_git=True, opt_ignorews=ignore_whitespace,
267 context=context)
267 context=context)
268 return MercurialDiff(diff)
268 return MercurialDiff(diff)
269
269
270 def strip(self, commit_id, branch=None):
270 def strip(self, commit_id, branch=None):
271 self._remote.strip(commit_id, update=False, backup="none")
271 self._remote.strip(commit_id, update=False, backup="none")
272
272
273 self._remote.invalidate_vcs_cache()
273 self._remote.invalidate_vcs_cache()
274 self.commit_ids = self._get_all_commit_ids()
274 self.commit_ids = self._get_all_commit_ids()
275 self._rebuild_cache(self.commit_ids)
275 self._rebuild_cache(self.commit_ids)
276
276
277 def verify(self):
277 def verify(self):
278 verify = self._remote.verify()
278 verify = self._remote.verify()
279
279
280 self._remote.invalidate_vcs_cache()
280 self._remote.invalidate_vcs_cache()
281 return verify
281 return verify
282
282
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 if commit_id1 == commit_id2:
284 if commit_id1 == commit_id2:
285 return commit_id1
285 return commit_id1
286
286
287 ancestors = self._remote.revs_from_revspec(
287 ancestors = self._remote.revs_from_revspec(
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 other_path=repo2.path)
289 other_path=repo2.path)
290 return repo2[ancestors[0]].raw_id if ancestors else None
290 return repo2[ancestors[0]].raw_id if ancestors else None
291
291
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 if commit_id1 == commit_id2:
293 if commit_id1 == commit_id2:
294 commits = []
294 commits = []
295 else:
295 else:
296 if merge:
296 if merge:
297 indexes = self._remote.revs_from_revspec(
297 indexes = self._remote.revs_from_revspec(
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 else:
300 else:
301 indexes = self._remote.revs_from_revspec(
301 indexes = self._remote.revs_from_revspec(
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 commit_id1, other_path=repo2.path)
303 commit_id1, other_path=repo2.path)
304
304
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 for idx in indexes]
306 for idx in indexes]
307
307
308 return commits
308 return commits
309
309
310 @staticmethod
310 @staticmethod
311 def check_url(url, config):
311 def check_url(url, config):
312 """
312 """
313 Function will check given url and try to verify if it's a valid
313 Function will check given url and try to verify if it's a valid
314 link. Sometimes it may happened that mercurial will issue basic
314 link. Sometimes it may happened that mercurial will issue basic
315 auth request that can cause whole API to hang when used from python
315 auth request that can cause whole API to hang when used from python
316 or other external calls.
316 or other external calls.
317
317
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 when the return code is non 200
319 when the return code is non 200
320 """
320 """
321 # check first if it's not an local url
321 # check first if it's not an local url
322 if os.path.isdir(url) or url.startswith('file:'):
322 if os.path.isdir(url) or url.startswith('file:'):
323 return True
323 return True
324
324
325 # Request the _remote to verify the url
325 # Request the _remote to verify the url
326 return connection.Hg.check_url(url, config.serialize())
326 return connection.Hg.check_url(url, config.serialize())
327
327
328 @staticmethod
328 @staticmethod
329 def is_valid_repository(path):
329 def is_valid_repository(path):
330 return os.path.isdir(os.path.join(path, '.hg'))
330 return os.path.isdir(os.path.join(path, '.hg'))
331
331
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 """
333 """
334 Function will check for mercurial repository in given path. If there
334 Function will check for mercurial repository in given path. If there
335 is no repository in that path it will raise an exception unless
335 is no repository in that path it will raise an exception unless
336 `create` parameter is set to True - in that case repository would
336 `create` parameter is set to True - in that case repository would
337 be created.
337 be created.
338
338
339 If `src_url` is given, would try to clone repository from the
339 If `src_url` is given, would try to clone repository from the
340 location at given clone_point. Additionally it'll make update to
340 location at given clone_point. Additionally it'll make update to
341 working copy accordingly to `do_workspace_checkout` flag.
341 working copy accordingly to `do_workspace_checkout` flag.
342 """
342 """
343 if create and os.path.exists(self.path):
343 if create and os.path.exists(self.path):
344 raise RepositoryError(
344 raise RepositoryError(
345 "Cannot create repository at %s, location already exist"
345 "Cannot create repository at %s, location already exist"
346 % self.path)
346 % self.path)
347
347
348 if src_url:
348 if src_url:
349 url = str(self._get_url(src_url))
349 url = str(self._get_url(src_url))
350 MercurialRepository.check_url(url, self.config)
350 MercurialRepository.check_url(url, self.config)
351
351
352 self._remote.clone(url, self.path, do_workspace_checkout)
352 self._remote.clone(url, self.path, do_workspace_checkout)
353
353
354 # Don't try to create if we've already cloned repo
354 # Don't try to create if we've already cloned repo
355 create = False
355 create = False
356
356
357 if create:
357 if create:
358 os.makedirs(self.path, mode=0o755)
358 os.makedirs(self.path, mode=0o755)
359
359
360 self._remote.localrepository(create)
360 self._remote.localrepository(create)
361
361
362 @LazyProperty
362 @LazyProperty
363 def in_memory_commit(self):
363 def in_memory_commit(self):
364 return MercurialInMemoryCommit(self)
364 return MercurialInMemoryCommit(self)
365
365
366 @LazyProperty
366 @LazyProperty
367 def description(self):
367 def description(self):
368 description = self._remote.get_config_value(
368 description = self._remote.get_config_value(
369 'web', 'description', untrusted=True)
369 'web', 'description', untrusted=True)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371
371
372 @LazyProperty
372 @LazyProperty
373 def contact(self):
373 def contact(self):
374 contact = (
374 contact = (
375 self._remote.get_config_value("web", "contact") or
375 self._remote.get_config_value("web", "contact") or
376 self._remote.get_config_value("ui", "username"))
376 self._remote.get_config_value("ui", "username"))
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378
378
379 @LazyProperty
379 @LazyProperty
380 def last_change(self):
380 def last_change(self):
381 """
381 """
382 Returns last change made on this repository as
382 Returns last change made on this repository as
383 `datetime.datetime` object.
383 `datetime.datetime` object.
384 """
384 """
385 try:
385 try:
386 return self.get_commit().date
386 return self.get_commit().date
387 except RepositoryError:
387 except RepositoryError:
388 tzoffset = makedate()[1]
388 tzoffset = makedate()[1]
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390
390
391 def _get_fs_mtime(self):
391 def _get_fs_mtime(self):
392 # fallback to filesystem
392 # fallback to filesystem
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 st_path = os.path.join(self.path, '.hg', "store")
394 st_path = os.path.join(self.path, '.hg', "store")
395 if os.path.exists(cl_path):
395 if os.path.exists(cl_path):
396 return os.stat(cl_path).st_mtime
396 return os.stat(cl_path).st_mtime
397 else:
397 else:
398 return os.stat(st_path).st_mtime
398 return os.stat(st_path).st_mtime
399
399
400 def _get_url(self, url):
400 def _get_url(self, url):
401 """
401 """
402 Returns normalized url. If schema is not given, would fall
402 Returns normalized url. If schema is not given, would fall
403 to filesystem
403 to filesystem
404 (``file:///``) schema.
404 (``file:///``) schema.
405 """
405 """
406 url = url.encode('utf8')
406 url = url.encode('utf8')
407 if url != 'default' and '://' not in url:
407 if url != 'default' and '://' not in url:
408 url = "file:" + urllib.pathname2url(url)
408 url = "file:" + urllib.pathname2url(url)
409 return url
409 return url
410
410
411 def get_hook_location(self):
411 def get_hook_location(self):
412 """
412 """
413 returns absolute path to location where hooks are stored
413 returns absolute path to location where hooks are stored
414 """
414 """
415 return os.path.join(self.path, '.hg', '.hgrc')
415 return os.path.join(self.path, '.hg', '.hgrc')
416
416
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
418 """
418 """
419 Returns ``MercurialCommit`` object representing repository's
419 Returns ``MercurialCommit`` object representing repository's
420 commit at the given `commit_id` or `commit_idx`.
420 commit at the given `commit_id` or `commit_idx`.
421 """
421 """
422 if self.is_empty():
422 if self.is_empty():
423 raise EmptyRepositoryError("There are no commits yet")
423 raise EmptyRepositoryError("There are no commits yet")
424
424
425 if commit_id is not None:
425 if commit_id is not None:
426 self._validate_commit_id(commit_id)
426 self._validate_commit_id(commit_id)
427 try:
427 try:
428 idx = self._commit_ids[commit_id]
428 idx = self._commit_ids[commit_id]
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
430 except KeyError:
430 except KeyError:
431 pass
431 pass
432 elif commit_idx is not None:
432 elif commit_idx is not None:
433 self._validate_commit_idx(commit_idx)
433 self._validate_commit_idx(commit_idx)
434 try:
434 try:
435 id_ = self.commit_ids[commit_idx]
435 id_ = self.commit_ids[commit_idx]
436 if commit_idx < 0:
436 if commit_idx < 0:
437 commit_idx += len(self.commit_ids)
437 commit_idx += len(self.commit_ids)
438 return MercurialCommit(
438 return MercurialCommit(
439 self, id_, commit_idx, pre_load=pre_load)
439 self, id_, commit_idx, pre_load=pre_load)
440 except IndexError:
440 except IndexError:
441 commit_id = commit_idx
441 commit_id = commit_idx
442 else:
442 else:
443 commit_id = "tip"
443 commit_id = "tip"
444
444
445 if isinstance(commit_id, unicode):
445 if isinstance(commit_id, unicode):
446 commit_id = safe_str(commit_id)
446 commit_id = safe_str(commit_id)
447
447
448 try:
448 try:
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
450 except CommitDoesNotExistError:
450 except CommitDoesNotExistError:
451 msg = "Commit %s does not exist for %s" % (
451 msg = "Commit %s does not exist for %s" % (
452 commit_id, self)
452 commit_id, self)
453 raise CommitDoesNotExistError(msg)
453 raise CommitDoesNotExistError(msg)
454
454
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456
456
457 def get_commits(
457 def get_commits(
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
460 """
460 """
461 Returns generator of ``MercurialCommit`` objects from start to end
461 Returns generator of ``MercurialCommit`` objects from start to end
462 (both are inclusive)
462 (both are inclusive)
463
463
464 :param start_id: None, str(commit_id)
464 :param start_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
466 :param start_date: if specified, commits with commit date less than
466 :param start_date: if specified, commits with commit date less than
467 ``start_date`` would be filtered out from returned set
467 ``start_date`` would be filtered out from returned set
468 :param end_date: if specified, commits with commit date greater than
468 :param end_date: if specified, commits with commit date greater than
469 ``end_date`` would be filtered out from returned set
469 ``end_date`` would be filtered out from returned set
470 :param branch_name: if specified, commits not reachable from given
470 :param branch_name: if specified, commits not reachable from given
471 branch would be filtered out from returned set
471 branch would be filtered out from returned set
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 Mercurial evolve
473 Mercurial evolve
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 exist.
475 exist.
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 ``end`` could not be found.
477 ``end`` could not be found.
478 """
478 """
479 # actually we should check now if it's not an empty repo
479 # actually we should check now if it's not an empty repo
480 branch_ancestors = False
480 branch_ancestors = False
481 if self.is_empty():
481 if self.is_empty():
482 raise EmptyRepositoryError("There are no commits yet")
482 raise EmptyRepositoryError("There are no commits yet")
483 self._validate_branch_name(branch_name)
483 self._validate_branch_name(branch_name)
484
484
485 if start_id is not None:
485 if start_id is not None:
486 self._validate_commit_id(start_id)
486 self._validate_commit_id(start_id)
487 c_start = self.get_commit(commit_id=start_id)
487 c_start = self.get_commit(commit_id=start_id)
488 start_pos = self._commit_ids[c_start.raw_id]
488 start_pos = self._commit_ids[c_start.raw_id]
489 else:
489 else:
490 start_pos = None
490 start_pos = None
491
491
492 if end_id is not None:
492 if end_id is not None:
493 self._validate_commit_id(end_id)
493 self._validate_commit_id(end_id)
494 c_end = self.get_commit(commit_id=end_id)
494 c_end = self.get_commit(commit_id=end_id)
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 else:
496 else:
497 end_pos = None
497 end_pos = None
498
498
499 if None not in [start_id, end_id] and start_pos > end_pos:
499 if None not in [start_id, end_id] and start_pos > end_pos:
500 raise RepositoryError(
500 raise RepositoryError(
501 "Start commit '%s' cannot be after end commit '%s'" %
501 "Start commit '%s' cannot be after end commit '%s'" %
502 (start_id, end_id))
502 (start_id, end_id))
503
503
504 if end_pos is not None:
504 if end_pos is not None:
505 end_pos += 1
505 end_pos += 1
506
506
507 commit_filter = []
507 commit_filter = []
508
508
509 if branch_name and not branch_ancestors:
509 if branch_name and not branch_ancestors:
510 commit_filter.append('branch("%s")' % (branch_name,))
510 commit_filter.append('branch("%s")' % (branch_name,))
511 elif branch_name and branch_ancestors:
511 elif branch_name and branch_ancestors:
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513
513
514 if start_date and not end_date:
514 if start_date and not end_date:
515 commit_filter.append('date(">%s")' % (start_date,))
515 commit_filter.append('date(">%s")' % (start_date,))
516 if end_date and not start_date:
516 if end_date and not start_date:
517 commit_filter.append('date("<%s")' % (end_date,))
517 commit_filter.append('date("<%s")' % (end_date,))
518 if start_date and end_date:
518 if start_date and end_date:
519 commit_filter.append(
519 commit_filter.append(
520 'date(">%s") and date("<%s")' % (start_date, end_date))
520 'date(">%s") and date("<%s")' % (start_date, end_date))
521
521
522 if not show_hidden:
522 if not show_hidden:
523 commit_filter.append('not obsolete()')
523 commit_filter.append('not obsolete()')
524 commit_filter.append('not hidden()')
524 commit_filter.append('not hidden()')
525
525
526 # TODO: johbo: Figure out a simpler way for this solution
526 # TODO: johbo: Figure out a simpler way for this solution
527 collection_generator = CollectionGenerator
527 collection_generator = CollectionGenerator
528 if commit_filter:
528 if commit_filter:
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 revisions = self._remote.rev_range([commit_filter])
530 revisions = self._remote.rev_range([commit_filter])
531 collection_generator = MercurialIndexBasedCollectionGenerator
531 collection_generator = MercurialIndexBasedCollectionGenerator
532 else:
532 else:
533 revisions = self.commit_ids
533 revisions = self.commit_ids
534
534
535 if start_pos or end_pos:
535 if start_pos or end_pos:
536 revisions = revisions[start_pos:end_pos]
536 revisions = revisions[start_pos:end_pos]
537
537
538 return collection_generator(self, revisions, pre_load=pre_load)
538 return collection_generator(self, revisions, pre_load=pre_load)
539
539
540 def pull(self, url, commit_ids=None):
540 def pull(self, url, commit_ids=None):
541 """
541 """
542 Pull changes from external location.
542 Pull changes from external location.
543
543
544 :param commit_ids: Optional. Can be set to a list of commit ids
544 :param commit_ids: Optional. Can be set to a list of commit ids
545 which shall be pulled from the other repository.
545 which shall be pulled from the other repository.
546 """
546 """
547 url = self._get_url(url)
547 url = self._get_url(url)
548 self._remote.pull(url, commit_ids=commit_ids)
548 self._remote.pull(url, commit_ids=commit_ids)
549 self._remote.invalidate_vcs_cache()
549 self._remote.invalidate_vcs_cache()
550
550
551 def fetch(self, url, commit_ids=None):
551 def fetch(self, url, commit_ids=None):
552 """
552 """
553 Backward compatibility with GIT fetch==pull
553 Backward compatibility with GIT fetch==pull
554 """
554 """
555 return self.pull(url, commit_ids=commit_ids)
555 return self.pull(url, commit_ids=commit_ids)
556
556
557 def push(self, url):
557 def push(self, url):
558 url = self._get_url(url)
558 url = self._get_url(url)
559 self._remote.sync_push(url)
559 self._remote.sync_push(url)
560
560
561 def _local_clone(self, clone_path):
561 def _local_clone(self, clone_path):
562 """
562 """
563 Create a local clone of the current repo.
563 Create a local clone of the current repo.
564 """
564 """
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 hooks=False)
566 hooks=False)
567
567
568 def _update(self, revision, clean=False):
568 def _update(self, revision, clean=False):
569 """
569 """
570 Update the working copy to the specified revision.
570 Update the working copy to the specified revision.
571 """
571 """
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 self._remote.update(revision, clean=clean)
573 self._remote.update(revision, clean=clean)
574
574
575 def _identify(self):
575 def _identify(self):
576 """
576 """
577 Return the current state of the working directory.
577 Return the current state of the working directory.
578 """
578 """
579 return self._remote.identify().strip().rstrip('+')
579 return self._remote.identify().strip().rstrip('+')
580
580
581 def _heads(self, branch=None):
581 def _heads(self, branch=None):
582 """
582 """
583 Return the commit ids of the repository heads.
583 Return the commit ids of the repository heads.
584 """
584 """
585 return self._remote.heads(branch=branch).strip().split(' ')
585 return self._remote.heads(branch=branch).strip().split(' ')
586
586
587 def _ancestor(self, revision1, revision2):
587 def _ancestor(self, revision1, revision2):
588 """
588 """
589 Return the common ancestor of the two revisions.
589 Return the common ancestor of the two revisions.
590 """
590 """
591 return self._remote.ancestor(revision1, revision2)
591 return self._remote.ancestor(revision1, revision2)
592
592
593 def _local_push(
593 def _local_push(
594 self, revision, repository_path, push_branches=False,
594 self, revision, repository_path, push_branches=False,
595 enable_hooks=False):
595 enable_hooks=False):
596 """
596 """
597 Push the given revision to the specified repository.
597 Push the given revision to the specified repository.
598
598
599 :param push_branches: allow to create branches in the target repo.
599 :param push_branches: allow to create branches in the target repo.
600 """
600 """
601 self._remote.push(
601 self._remote.push(
602 [revision], repository_path, hooks=enable_hooks,
602 [revision], repository_path, hooks=enable_hooks,
603 push_branches=push_branches)
603 push_branches=push_branches)
604
604
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 source_ref, use_rebase=False, dry_run=False):
606 source_ref, use_rebase=False, dry_run=False):
607 """
607 """
608 Merge the given source_revision into the checked out revision.
608 Merge the given source_revision into the checked out revision.
609
609
610 Returns the commit id of the merge and a boolean indicating if the
610 Returns the commit id of the merge and a boolean indicating if the
611 commit needs to be pushed.
611 commit needs to be pushed.
612 """
612 """
613 self._update(target_ref.commit_id, clean=True)
613 self._update(target_ref.commit_id, clean=True)
614
614
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617
617
618 if ancestor == source_ref.commit_id:
618 if ancestor == source_ref.commit_id:
619 # Nothing to do, the changes were already integrated
619 # Nothing to do, the changes were already integrated
620 return target_ref.commit_id, False
620 return target_ref.commit_id, False
621
621
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 # In this case we should force a commit message
623 # In this case we should force a commit message
624 return source_ref.commit_id, True
624 return source_ref.commit_id, True
625
625
626 if use_rebase:
626 if use_rebase:
627 try:
627 try:
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 target_ref.commit_id)
629 target_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 self._remote.rebase(
631 self._remote.rebase(
632 source=source_ref.commit_id, dest=target_ref.commit_id)
632 source=source_ref.commit_id, dest=target_ref.commit_id)
633 self._remote.invalidate_vcs_cache()
633 self._remote.invalidate_vcs_cache()
634 self._update(bookmark_name, clean=True)
634 self._update(bookmark_name, clean=True)
635 return self._identify(), True
635 return self._identify(), True
636 except RepositoryError:
636 except RepositoryError:
637 # The rebase-abort may raise another exception which 'hides'
637 # The rebase-abort may raise another exception which 'hides'
638 # the original one, therefore we log it here.
638 # the original one, therefore we log it here.
639 log.exception('Error while rebasing shadow repo during merge.')
639 log.exception('Error while rebasing shadow repo during merge.')
640
640
641 # Cleanup any rebase leftovers
641 # Cleanup any rebase leftovers
642 self._remote.invalidate_vcs_cache()
642 self._remote.invalidate_vcs_cache()
643 self._remote.rebase(abort=True)
643 self._remote.rebase(abort=True)
644 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
645 self._remote.update(clean=True)
645 self._remote.update(clean=True)
646 raise
646 raise
647 else:
647 else:
648 try:
648 try:
649 self._remote.merge(source_ref.commit_id)
649 self._remote.merge(source_ref.commit_id)
650 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
651 self._remote.commit(
651 self._remote.commit(
652 message=safe_str(merge_message),
652 message=safe_str(merge_message),
653 username=safe_str('%s <%s>' % (user_name, user_email)))
653 username=safe_str('%s <%s>' % (user_name, user_email)))
654 self._remote.invalidate_vcs_cache()
654 self._remote.invalidate_vcs_cache()
655 return self._identify(), True
655 return self._identify(), True
656 except RepositoryError:
656 except RepositoryError:
657 # Cleanup any merge leftovers
657 # Cleanup any merge leftovers
658 self._remote.update(clean=True)
658 self._remote.update(clean=True)
659 raise
659 raise
660
660
661 def _local_close(self, target_ref, user_name, user_email,
661 def _local_close(self, target_ref, user_name, user_email,
662 source_ref, close_message=''):
662 source_ref, close_message=''):
663 """
663 """
664 Close the branch of the given source_revision
664 Close the branch of the given source_revision
665
665
666 Returns the commit id of the close and a boolean indicating if the
666 Returns the commit id of the close and a boolean indicating if the
667 commit needs to be pushed.
667 commit needs to be pushed.
668 """
668 """
669 self._update(source_ref.commit_id)
669 self._update(source_ref.commit_id)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 try:
671 try:
672 self._remote.commit(
672 self._remote.commit(
673 message=safe_str(message),
673 message=safe_str(message),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
675 close_branch=True)
675 close_branch=True)
676 self._remote.invalidate_vcs_cache()
676 self._remote.invalidate_vcs_cache()
677 return self._identify(), True
677 return self._identify(), True
678 except RepositoryError:
678 except RepositoryError:
679 # Cleanup any commit leftovers
679 # Cleanup any commit leftovers
680 self._remote.update(clean=True)
680 self._remote.update(clean=True)
681 raise
681 raise
682
682
683 def _is_the_same_branch(self, target_ref, source_ref):
683 def _is_the_same_branch(self, target_ref, source_ref):
684 return (
684 return (
685 self._get_branch_name(target_ref) ==
685 self._get_branch_name(target_ref) ==
686 self._get_branch_name(source_ref))
686 self._get_branch_name(source_ref))
687
687
688 def _get_branch_name(self, ref):
688 def _get_branch_name(self, ref):
689 if ref.type == 'branch':
689 if ref.type == 'branch':
690 return ref.name
690 return ref.name
691 return self._remote.ctx_branch(ref.commit_id)
691 return self._remote.ctx_branch(ref.commit_id)
692
692
693 def _maybe_prepare_merge_workspace(
693 def _maybe_prepare_merge_workspace(
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 shadow_repository_path = self._get_shadow_repository_path(
695 shadow_repository_path = self._get_shadow_repository_path(
696 repo_id, workspace_id)
696 repo_id, workspace_id)
697 if not os.path.exists(shadow_repository_path):
697 if not os.path.exists(shadow_repository_path):
698 self._local_clone(shadow_repository_path)
698 self._local_clone(shadow_repository_path)
699 log.debug(
699 log.debug(
700 'Prepared shadow repository in %s', shadow_repository_path)
700 'Prepared shadow repository in %s', shadow_repository_path)
701
701
702 return shadow_repository_path
702 return shadow_repository_path
703
703
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 source_repo, source_ref, merge_message,
705 source_repo, source_ref, merge_message,
706 merger_name, merger_email, dry_run=False,
706 merger_name, merger_email, dry_run=False,
707 use_rebase=False, close_branch=False):
707 use_rebase=False, close_branch=False):
708
708
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 'rebase' if use_rebase else 'merge', dry_run)
710 'rebase' if use_rebase else 'merge', dry_run)
711 if target_ref.commit_id not in self._heads():
711 if target_ref.commit_id not in self._heads():
712 return MergeResponse(
712 return MergeResponse(
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
714 metadata={'target_ref': target_ref})
714 metadata={'target_ref': target_ref})
715
715
716 try:
716 try:
717 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
717 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
718 heads = ','.join(self._heads(target_ref.name))
718 heads = '\n,'.join(self._heads(target_ref.name))
719 metadata = {
720 'target_ref': target_ref,
721 'source_ref': source_ref,
722 'heads': heads
723 }
719 return MergeResponse(
724 return MergeResponse(
720 False, False, None,
725 False, False, None,
721 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
726 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
722 metadata={'heads': heads})
727 metadata=metadata)
723 except CommitDoesNotExistError:
728 except CommitDoesNotExistError:
724 log.exception('Failure when looking up branch heads on hg target')
729 log.exception('Failure when looking up branch heads on hg target')
725 return MergeResponse(
730 return MergeResponse(
726 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
731 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
727 metadata={'target_ref': target_ref})
732 metadata={'target_ref': target_ref})
728
733
729 shadow_repository_path = self._maybe_prepare_merge_workspace(
734 shadow_repository_path = self._maybe_prepare_merge_workspace(
730 repo_id, workspace_id, target_ref, source_ref)
735 repo_id, workspace_id, target_ref, source_ref)
731 shadow_repo = self._get_shadow_instance(shadow_repository_path)
736 shadow_repo = self._get_shadow_instance(shadow_repository_path)
732
737
733 log.debug('Pulling in target reference %s', target_ref)
738 log.debug('Pulling in target reference %s', target_ref)
734 self._validate_pull_reference(target_ref)
739 self._validate_pull_reference(target_ref)
735 shadow_repo._local_pull(self.path, target_ref)
740 shadow_repo._local_pull(self.path, target_ref)
736
741
737 try:
742 try:
738 log.debug('Pulling in source reference %s', source_ref)
743 log.debug('Pulling in source reference %s', source_ref)
739 source_repo._validate_pull_reference(source_ref)
744 source_repo._validate_pull_reference(source_ref)
740 shadow_repo._local_pull(source_repo.path, source_ref)
745 shadow_repo._local_pull(source_repo.path, source_ref)
741 except CommitDoesNotExistError:
746 except CommitDoesNotExistError:
742 log.exception('Failure when doing local pull on hg shadow repo')
747 log.exception('Failure when doing local pull on hg shadow repo')
743 return MergeResponse(
748 return MergeResponse(
744 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
749 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
745 metadata={'source_ref': source_ref})
750 metadata={'source_ref': source_ref})
746
751
747 merge_ref = None
752 merge_ref = None
748 merge_commit_id = None
753 merge_commit_id = None
749 close_commit_id = None
754 close_commit_id = None
750 merge_failure_reason = MergeFailureReason.NONE
755 merge_failure_reason = MergeFailureReason.NONE
751 metadata = {}
756 metadata = {}
752
757
753 # enforce that close branch should be used only in case we source from
758 # enforce that close branch should be used only in case we source from
754 # an actual Branch
759 # an actual Branch
755 close_branch = close_branch and source_ref.type == 'branch'
760 close_branch = close_branch and source_ref.type == 'branch'
756
761
757 # don't allow to close branch if source and target are the same
762 # don't allow to close branch if source and target are the same
758 close_branch = close_branch and source_ref.name != target_ref.name
763 close_branch = close_branch and source_ref.name != target_ref.name
759
764
760 needs_push_on_close = False
765 needs_push_on_close = False
761 if close_branch and not use_rebase and not dry_run:
766 if close_branch and not use_rebase and not dry_run:
762 try:
767 try:
763 close_commit_id, needs_push_on_close = shadow_repo._local_close(
768 close_commit_id, needs_push_on_close = shadow_repo._local_close(
764 target_ref, merger_name, merger_email, source_ref)
769 target_ref, merger_name, merger_email, source_ref)
765 merge_possible = True
770 merge_possible = True
766 except RepositoryError:
771 except RepositoryError:
767 log.exception('Failure when doing close branch on '
772 log.exception('Failure when doing close branch on '
768 'shadow repo: %s', shadow_repo)
773 'shadow repo: %s', shadow_repo)
769 merge_possible = False
774 merge_possible = False
770 merge_failure_reason = MergeFailureReason.MERGE_FAILED
775 merge_failure_reason = MergeFailureReason.MERGE_FAILED
771 else:
776 else:
772 merge_possible = True
777 merge_possible = True
773
778
774 needs_push = False
779 needs_push = False
775 if merge_possible:
780 if merge_possible:
776 try:
781 try:
777 merge_commit_id, needs_push = shadow_repo._local_merge(
782 merge_commit_id, needs_push = shadow_repo._local_merge(
778 target_ref, merge_message, merger_name, merger_email,
783 target_ref, merge_message, merger_name, merger_email,
779 source_ref, use_rebase=use_rebase, dry_run=dry_run)
784 source_ref, use_rebase=use_rebase, dry_run=dry_run)
780 merge_possible = True
785 merge_possible = True
781
786
782 # read the state of the close action, if it
787 # read the state of the close action, if it
783 # maybe required a push
788 # maybe required a push
784 needs_push = needs_push or needs_push_on_close
789 needs_push = needs_push or needs_push_on_close
785
790
786 # Set a bookmark pointing to the merge commit. This bookmark
791 # Set a bookmark pointing to the merge commit. This bookmark
787 # may be used to easily identify the last successful merge
792 # may be used to easily identify the last successful merge
788 # commit in the shadow repository.
793 # commit in the shadow repository.
789 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
794 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
790 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
795 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
791 except SubrepoMergeError:
796 except SubrepoMergeError:
792 log.exception(
797 log.exception(
793 'Subrepo merge error during local merge on hg shadow repo.')
798 'Subrepo merge error during local merge on hg shadow repo.')
794 merge_possible = False
799 merge_possible = False
795 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
800 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
796 needs_push = False
801 needs_push = False
797 except RepositoryError:
802 except RepositoryError:
798 log.exception('Failure when doing local merge on hg shadow repo')
803 log.exception('Failure when doing local merge on hg shadow repo')
799 merge_possible = False
804 merge_possible = False
800 merge_failure_reason = MergeFailureReason.MERGE_FAILED
805 merge_failure_reason = MergeFailureReason.MERGE_FAILED
801 needs_push = False
806 needs_push = False
802
807
803 if merge_possible and not dry_run:
808 if merge_possible and not dry_run:
804 if needs_push:
809 if needs_push:
805 # In case the target is a bookmark, update it, so after pushing
810 # In case the target is a bookmark, update it, so after pushing
806 # the bookmarks is also updated in the target.
811 # the bookmarks is also updated in the target.
807 if target_ref.type == 'book':
812 if target_ref.type == 'book':
808 shadow_repo.bookmark(
813 shadow_repo.bookmark(
809 target_ref.name, revision=merge_commit_id)
814 target_ref.name, revision=merge_commit_id)
810 try:
815 try:
811 shadow_repo_with_hooks = self._get_shadow_instance(
816 shadow_repo_with_hooks = self._get_shadow_instance(
812 shadow_repository_path,
817 shadow_repository_path,
813 enable_hooks=True)
818 enable_hooks=True)
814 # This is the actual merge action, we push from shadow
819 # This is the actual merge action, we push from shadow
815 # into origin.
820 # into origin.
816 # Note: the push_branches option will push any new branch
821 # Note: the push_branches option will push any new branch
817 # defined in the source repository to the target. This may
822 # defined in the source repository to the target. This may
818 # be dangerous as branches are permanent in Mercurial.
823 # be dangerous as branches are permanent in Mercurial.
819 # This feature was requested in issue #441.
824 # This feature was requested in issue #441.
820 shadow_repo_with_hooks._local_push(
825 shadow_repo_with_hooks._local_push(
821 merge_commit_id, self.path, push_branches=True,
826 merge_commit_id, self.path, push_branches=True,
822 enable_hooks=True)
827 enable_hooks=True)
823
828
824 # maybe we also need to push the close_commit_id
829 # maybe we also need to push the close_commit_id
825 if close_commit_id:
830 if close_commit_id:
826 shadow_repo_with_hooks._local_push(
831 shadow_repo_with_hooks._local_push(
827 close_commit_id, self.path, push_branches=True,
832 close_commit_id, self.path, push_branches=True,
828 enable_hooks=True)
833 enable_hooks=True)
829 merge_succeeded = True
834 merge_succeeded = True
830 except RepositoryError:
835 except RepositoryError:
831 log.exception(
836 log.exception(
832 'Failure when doing local push from the shadow '
837 'Failure when doing local push from the shadow '
833 'repository to the target repository at %s.', self.path)
838 'repository to the target repository at %s.', self.path)
834 merge_succeeded = False
839 merge_succeeded = False
835 merge_failure_reason = MergeFailureReason.PUSH_FAILED
840 merge_failure_reason = MergeFailureReason.PUSH_FAILED
836 metadata['target'] = 'hg shadow repo'
841 metadata['target'] = 'hg shadow repo'
837 metadata['merge_commit'] = merge_commit_id
842 metadata['merge_commit'] = merge_commit_id
838 else:
843 else:
839 merge_succeeded = True
844 merge_succeeded = True
840 else:
845 else:
841 merge_succeeded = False
846 merge_succeeded = False
842
847
843 return MergeResponse(
848 return MergeResponse(
844 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
849 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
845 metadata=metadata)
850 metadata=metadata)
846
851
847 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
852 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
848 config = self.config.copy()
853 config = self.config.copy()
849 if not enable_hooks:
854 if not enable_hooks:
850 config.clear_section('hooks')
855 config.clear_section('hooks')
851 return MercurialRepository(shadow_repository_path, config)
856 return MercurialRepository(shadow_repository_path, config)
852
857
853 def _validate_pull_reference(self, reference):
858 def _validate_pull_reference(self, reference):
854 if not (reference.name in self.bookmarks or
859 if not (reference.name in self.bookmarks or
855 reference.name in self.branches or
860 reference.name in self.branches or
856 self.get_commit(reference.commit_id)):
861 self.get_commit(reference.commit_id)):
857 raise CommitDoesNotExistError(
862 raise CommitDoesNotExistError(
858 'Unknown branch, bookmark or commit id')
863 'Unknown branch, bookmark or commit id')
859
864
860 def _local_pull(self, repository_path, reference):
865 def _local_pull(self, repository_path, reference):
861 """
866 """
862 Fetch a branch, bookmark or commit from a local repository.
867 Fetch a branch, bookmark or commit from a local repository.
863 """
868 """
864 repository_path = os.path.abspath(repository_path)
869 repository_path = os.path.abspath(repository_path)
865 if repository_path == self.path:
870 if repository_path == self.path:
866 raise ValueError('Cannot pull from the same repository')
871 raise ValueError('Cannot pull from the same repository')
867
872
868 reference_type_to_option_name = {
873 reference_type_to_option_name = {
869 'book': 'bookmark',
874 'book': 'bookmark',
870 'branch': 'branch',
875 'branch': 'branch',
871 }
876 }
872 option_name = reference_type_to_option_name.get(
877 option_name = reference_type_to_option_name.get(
873 reference.type, 'revision')
878 reference.type, 'revision')
874
879
875 if option_name == 'revision':
880 if option_name == 'revision':
876 ref = reference.commit_id
881 ref = reference.commit_id
877 else:
882 else:
878 ref = reference.name
883 ref = reference.name
879
884
880 options = {option_name: [ref]}
885 options = {option_name: [ref]}
881 self._remote.pull_cmd(repository_path, hooks=False, **options)
886 self._remote.pull_cmd(repository_path, hooks=False, **options)
882 self._remote.invalidate_vcs_cache()
887 self._remote.invalidate_vcs_cache()
883
888
884 def bookmark(self, bookmark, revision=None):
889 def bookmark(self, bookmark, revision=None):
885 if isinstance(bookmark, unicode):
890 if isinstance(bookmark, unicode):
886 bookmark = safe_str(bookmark)
891 bookmark = safe_str(bookmark)
887 self._remote.bookmark(bookmark, revision=revision)
892 self._remote.bookmark(bookmark, revision=revision)
888 self._remote.invalidate_vcs_cache()
893 self._remote.invalidate_vcs_cache()
889
894
890 def get_path_permissions(self, username):
895 def get_path_permissions(self, username):
891 hgacl_file = os.path.join(self.path, '.hg/hgacl')
896 hgacl_file = os.path.join(self.path, '.hg/hgacl')
892
897
893 def read_patterns(suffix):
898 def read_patterns(suffix):
894 svalue = None
899 svalue = None
895 try:
900 try:
896 svalue = hgacl.get('narrowhgacl', username + suffix)
901 svalue = hgacl.get('narrowhgacl', username + suffix)
897 except configparser.NoOptionError:
902 except configparser.NoOptionError:
898 try:
903 try:
899 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
904 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
900 except configparser.NoOptionError:
905 except configparser.NoOptionError:
901 pass
906 pass
902 if not svalue:
907 if not svalue:
903 return None
908 return None
904 result = ['/']
909 result = ['/']
905 for pattern in svalue.split():
910 for pattern in svalue.split():
906 result.append(pattern)
911 result.append(pattern)
907 if '*' not in pattern and '?' not in pattern:
912 if '*' not in pattern and '?' not in pattern:
908 result.append(pattern + '/*')
913 result.append(pattern + '/*')
909 return result
914 return result
910
915
911 if os.path.exists(hgacl_file):
916 if os.path.exists(hgacl_file):
912 try:
917 try:
913 hgacl = configparser.RawConfigParser()
918 hgacl = configparser.RawConfigParser()
914 hgacl.read(hgacl_file)
919 hgacl.read(hgacl_file)
915
920
916 includes = read_patterns('.includes')
921 includes = read_patterns('.includes')
917 excludes = read_patterns('.excludes')
922 excludes = read_patterns('.excludes')
918 return BasePathPermissionChecker.create_from_patterns(
923 return BasePathPermissionChecker.create_from_patterns(
919 includes, excludes)
924 includes, excludes)
920 except BaseException as e:
925 except BaseException as e:
921 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
926 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
922 hgacl_file, self.name, e)
927 hgacl_file, self.name, e)
923 raise exceptions.RepositoryRequirementError(msg)
928 raise exceptions.RepositoryRequirementError(msg)
924 else:
929 else:
925 return None
930 return None
926
931
927
932
928 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
933 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
929
934
930 def _commit_factory(self, commit_id):
935 def _commit_factory(self, commit_id):
931 return self.repo.get_commit(
936 return self.repo.get_commit(
932 commit_idx=commit_id, pre_load=self.pre_load)
937 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,1731 +1,1739 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid import compat
33 from pyramid import compat
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.compat import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.markup_renderer import (
42 from rhodecode.lib.markup_renderer import (
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 from rhodecode.lib.vcs.backends.base import (
45 from rhodecode.lib.vcs.backends.base import (
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 from rhodecode.lib.vcs.exceptions import (
48 from rhodecode.lib.vcs.exceptions import (
49 CommitDoesNotExistError, EmptyRepositoryError)
49 CommitDoesNotExistError, EmptyRepositoryError)
50 from rhodecode.model import BaseModel
50 from rhodecode.model import BaseModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.comment import CommentsModel
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 from rhodecode.model.meta import Session
56 from rhodecode.model.meta import Session
57 from rhodecode.model.notification import NotificationModel, \
57 from rhodecode.model.notification import NotificationModel, \
58 EmailNotificationModel
58 EmailNotificationModel
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.settings import VcsSettingsModel
60 from rhodecode.model.settings import VcsSettingsModel
61
61
62
62
log = logging.getLogger(__name__)


# Response payload produced when the commits of a pull request are updated.
_UPDATE_RESPONSE_FIELDS = [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed',
]
UpdateResponse = collections.namedtuple(
    'UpdateResponse', _UPDATE_RESPONSE_FIELDS)
71
71
72
72
class PullRequestModel(BaseModel):
    """Business logic for creating, querying and updating pull requests."""

    cls = PullRequest

    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # Human-readable messages keyed by UpdateFailureReason, used when a pull
    # request update cannot be (or does not need to be) performed.
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }

    # Reference types recognised in PR refs; only a subset can be updated.
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98
98
99 def __get_pull_request(self, pull_request):
99 def __get_pull_request(self, pull_request):
100 return self._get_instance((
100 return self._get_instance((
101 PullRequest, PullRequestVersion), pull_request)
101 PullRequest, PullRequestVersion), pull_request)
102
102
103 def _check_perms(self, perms, pull_request, user, api=False):
103 def _check_perms(self, perms, pull_request, user, api=False):
104 if not api:
104 if not api:
105 return h.HasRepoPermissionAny(*perms)(
105 return h.HasRepoPermissionAny(*perms)(
106 user=user, repo_name=pull_request.target_repo.repo_name)
106 user=user, repo_name=pull_request.target_repo.repo_name)
107 else:
107 else:
108 return h.HasRepoPermissionAnyApi(*perms)(
108 return h.HasRepoPermissionAnyApi(*perms)(
109 user=user, repo_name=pull_request.target_repo.repo_name)
109 user=user, repo_name=pull_request.target_repo.repo_name)
110
110
111 def check_user_read(self, pull_request, user, api=False):
111 def check_user_read(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_merge(self, pull_request, user, api=False):
115 def check_user_merge(self, pull_request, user, api=False):
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 return self._check_perms(_perms, pull_request, user, api)
117 return self._check_perms(_perms, pull_request, user, api)
118
118
119 def check_user_update(self, pull_request, user, api=False):
119 def check_user_update(self, pull_request, user, api=False):
120 owner = user.user_id == pull_request.user_id
120 owner = user.user_id == pull_request.user_id
121 return self.check_user_merge(pull_request, user, api) or owner
121 return self.check_user_merge(pull_request, user, api) or owner
122
122
123 def check_user_delete(self, pull_request, user):
123 def check_user_delete(self, pull_request, user):
124 owner = user.user_id == pull_request.user_id
124 owner = user.user_id == pull_request.user_id
125 _perms = ('repository.admin',)
125 _perms = ('repository.admin',)
126 return self._check_perms(_perms, pull_request, user) or owner
126 return self._check_perms(_perms, pull_request, user) or owner
127
127
128 def check_user_change_status(self, pull_request, user, api=False):
128 def check_user_change_status(self, pull_request, user, api=False):
129 reviewer = user.user_id in [x.user_id for x in
129 reviewer = user.user_id in [x.user_id for x in
130 pull_request.reviewers]
130 pull_request.reviewers]
131 return self.check_user_update(pull_request, user, api) or reviewer
131 return self.check_user_update(pull_request, user, api) or reviewer
132
132
133 def check_user_comment(self, pull_request, user):
133 def check_user_comment(self, pull_request, user):
134 owner = user.user_id == pull_request.user_id
134 owner = user.user_id == pull_request.user_id
135 return self.check_user_read(pull_request, user) or owner
135 return self.check_user_read(pull_request, user) or owner
136
136
137 def get(self, pull_request):
137 def get(self, pull_request):
138 return self.__get_pull_request(pull_request)
138 return self.__get_pull_request(pull_request)
139
139
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 opened_by=None, order_by=None,
141 opened_by=None, order_by=None,
142 order_dir='desc', only_created=True):
142 order_dir='desc', only_created=True):
143 repo = None
143 repo = None
144 if repo_name:
144 if repo_name:
145 repo = self._get_repo(repo_name)
145 repo = self._get_repo(repo_name)
146
146
147 q = PullRequest.query()
147 q = PullRequest.query()
148
148
149 # source or target
149 # source or target
150 if repo and source:
150 if repo and source:
151 q = q.filter(PullRequest.source_repo == repo)
151 q = q.filter(PullRequest.source_repo == repo)
152 elif repo:
152 elif repo:
153 q = q.filter(PullRequest.target_repo == repo)
153 q = q.filter(PullRequest.target_repo == repo)
154
154
155 # closed,opened
155 # closed,opened
156 if statuses:
156 if statuses:
157 q = q.filter(PullRequest.status.in_(statuses))
157 q = q.filter(PullRequest.status.in_(statuses))
158
158
159 # opened by filter
159 # opened by filter
160 if opened_by:
160 if opened_by:
161 q = q.filter(PullRequest.user_id.in_(opened_by))
161 q = q.filter(PullRequest.user_id.in_(opened_by))
162
162
163 # only get those that are in "created" state
163 # only get those that are in "created" state
164 if only_created:
164 if only_created:
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166
166
167 if order_by:
167 if order_by:
168 order_map = {
168 order_map = {
169 'name_raw': PullRequest.pull_request_id,
169 'name_raw': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
171 'title': PullRequest.title,
171 'title': PullRequest.title,
172 'updated_on_raw': PullRequest.updated_on,
172 'updated_on_raw': PullRequest.updated_on,
173 'target_repo': PullRequest.target_repo_id
173 'target_repo': PullRequest.target_repo_id
174 }
174 }
175 if order_dir == 'asc':
175 if order_dir == 'asc':
176 q = q.order_by(order_map[order_by].asc())
176 q = q.order_by(order_map[order_by].asc())
177 else:
177 else:
178 q = q.order_by(order_map[order_by].desc())
178 q = q.order_by(order_map[order_by].desc())
179
179
180 return q
180 return q
181
181
182 def count_all(self, repo_name, source=False, statuses=None,
182 def count_all(self, repo_name, source=False, statuses=None,
183 opened_by=None):
183 opened_by=None):
184 """
184 """
185 Count the number of pull requests for a specific repository.
185 Count the number of pull requests for a specific repository.
186
186
187 :param repo_name: target or source repo
187 :param repo_name: target or source repo
188 :param source: boolean flag to specify if repo_name refers to source
188 :param source: boolean flag to specify if repo_name refers to source
189 :param statuses: list of pull request statuses
189 :param statuses: list of pull request statuses
190 :param opened_by: author user of the pull request
190 :param opened_by: author user of the pull request
191 :returns: int number of pull requests
191 :returns: int number of pull requests
192 """
192 """
193 q = self._prepare_get_all_query(
193 q = self._prepare_get_all_query(
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195
195
196 return q.count()
196 return q.count()
197
197
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 offset=0, length=None, order_by=None, order_dir='desc'):
199 offset=0, length=None, order_by=None, order_dir='desc'):
200 """
200 """
201 Get all pull requests for a specific repository.
201 Get all pull requests for a specific repository.
202
202
203 :param repo_name: target or source repo
203 :param repo_name: target or source repo
204 :param source: boolean flag to specify if repo_name refers to source
204 :param source: boolean flag to specify if repo_name refers to source
205 :param statuses: list of pull request statuses
205 :param statuses: list of pull request statuses
206 :param opened_by: author user of the pull request
206 :param opened_by: author user of the pull request
207 :param offset: pagination offset
207 :param offset: pagination offset
208 :param length: length of returned list
208 :param length: length of returned list
209 :param order_by: order of the returned list
209 :param order_by: order of the returned list
210 :param order_dir: 'asc' or 'desc' ordering direction
210 :param order_dir: 'asc' or 'desc' ordering direction
211 :returns: list of pull requests
211 :returns: list of pull requests
212 """
212 """
213 q = self._prepare_get_all_query(
213 q = self._prepare_get_all_query(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 order_by=order_by, order_dir=order_dir)
215 order_by=order_by, order_dir=order_dir)
216
216
217 if length:
217 if length:
218 pull_requests = q.limit(length).offset(offset).all()
218 pull_requests = q.limit(length).offset(offset).all()
219 else:
219 else:
220 pull_requests = q.all()
220 pull_requests = q.all()
221
221
222 return pull_requests
222 return pull_requests
223
223
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 opened_by=None):
225 opened_by=None):
226 """
226 """
227 Count the number of pull requests for a specific repository that are
227 Count the number of pull requests for a specific repository that are
228 awaiting review.
228 awaiting review.
229
229
230 :param repo_name: target or source repo
230 :param repo_name: target or source repo
231 :param source: boolean flag to specify if repo_name refers to source
231 :param source: boolean flag to specify if repo_name refers to source
232 :param statuses: list of pull request statuses
232 :param statuses: list of pull request statuses
233 :param opened_by: author user of the pull request
233 :param opened_by: author user of the pull request
234 :returns: int number of pull requests
234 :returns: int number of pull requests
235 """
235 """
236 pull_requests = self.get_awaiting_review(
236 pull_requests = self.get_awaiting_review(
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238
238
239 return len(pull_requests)
239 return len(pull_requests)
240
240
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 opened_by=None, offset=0, length=None,
242 opened_by=None, offset=0, length=None,
243 order_by=None, order_dir='desc'):
243 order_by=None, order_dir='desc'):
244 """
244 """
245 Get all pull requests for a specific repository that are awaiting
245 Get all pull requests for a specific repository that are awaiting
246 review.
246 review.
247
247
248 :param repo_name: target or source repo
248 :param repo_name: target or source repo
249 :param source: boolean flag to specify if repo_name refers to source
249 :param source: boolean flag to specify if repo_name refers to source
250 :param statuses: list of pull request statuses
250 :param statuses: list of pull request statuses
251 :param opened_by: author user of the pull request
251 :param opened_by: author user of the pull request
252 :param offset: pagination offset
252 :param offset: pagination offset
253 :param length: length of returned list
253 :param length: length of returned list
254 :param order_by: order of the returned list
254 :param order_by: order of the returned list
255 :param order_dir: 'asc' or 'desc' ordering direction
255 :param order_dir: 'asc' or 'desc' ordering direction
256 :returns: list of pull requests
256 :returns: list of pull requests
257 """
257 """
258 pull_requests = self.get_all(
258 pull_requests = self.get_all(
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 order_by=order_by, order_dir=order_dir)
260 order_by=order_by, order_dir=order_dir)
261
261
262 _filtered_pull_requests = []
262 _filtered_pull_requests = []
263 for pr in pull_requests:
263 for pr in pull_requests:
264 status = pr.calculated_review_status()
264 status = pr.calculated_review_status()
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 _filtered_pull_requests.append(pr)
267 _filtered_pull_requests.append(pr)
268 if length:
268 if length:
269 return _filtered_pull_requests[offset:offset+length]
269 return _filtered_pull_requests[offset:offset+length]
270 else:
270 else:
271 return _filtered_pull_requests
271 return _filtered_pull_requests
272
272
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 opened_by=None, user_id=None):
274 opened_by=None, user_id=None):
275 """
275 """
276 Count the number of pull requests for a specific repository that are
276 Count the number of pull requests for a specific repository that are
277 awaiting review from a specific user.
277 awaiting review from a specific user.
278
278
279 :param repo_name: target or source repo
279 :param repo_name: target or source repo
280 :param source: boolean flag to specify if repo_name refers to source
280 :param source: boolean flag to specify if repo_name refers to source
281 :param statuses: list of pull request statuses
281 :param statuses: list of pull request statuses
282 :param opened_by: author user of the pull request
282 :param opened_by: author user of the pull request
283 :param user_id: reviewer user of the pull request
283 :param user_id: reviewer user of the pull request
284 :returns: int number of pull requests
284 :returns: int number of pull requests
285 """
285 """
286 pull_requests = self.get_awaiting_my_review(
286 pull_requests = self.get_awaiting_my_review(
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 user_id=user_id)
288 user_id=user_id)
289
289
290 return len(pull_requests)
290 return len(pull_requests)
291
291
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 opened_by=None, user_id=None, offset=0,
293 opened_by=None, user_id=None, offset=0,
294 length=None, order_by=None, order_dir='desc'):
294 length=None, order_by=None, order_dir='desc'):
295 """
295 """
296 Get all pull requests for a specific repository that are awaiting
296 Get all pull requests for a specific repository that are awaiting
297 review from a specific user.
297 review from a specific user.
298
298
299 :param repo_name: target or source repo
299 :param repo_name: target or source repo
300 :param source: boolean flag to specify if repo_name refers to source
300 :param source: boolean flag to specify if repo_name refers to source
301 :param statuses: list of pull request statuses
301 :param statuses: list of pull request statuses
302 :param opened_by: author user of the pull request
302 :param opened_by: author user of the pull request
303 :param user_id: reviewer user of the pull request
303 :param user_id: reviewer user of the pull request
304 :param offset: pagination offset
304 :param offset: pagination offset
305 :param length: length of returned list
305 :param length: length of returned list
306 :param order_by: order of the returned list
306 :param order_by: order of the returned list
307 :param order_dir: 'asc' or 'desc' ordering direction
307 :param order_dir: 'asc' or 'desc' ordering direction
308 :returns: list of pull requests
308 :returns: list of pull requests
309 """
309 """
310 pull_requests = self.get_all(
310 pull_requests = self.get_all(
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 order_by=order_by, order_dir=order_dir)
312 order_by=order_by, order_dir=order_dir)
313
313
314 _my = PullRequestModel().get_not_reviewed(user_id)
314 _my = PullRequestModel().get_not_reviewed(user_id)
315 my_participation = []
315 my_participation = []
316 for pr in pull_requests:
316 for pr in pull_requests:
317 if pr in _my:
317 if pr in _my:
318 my_participation.append(pr)
318 my_participation.append(pr)
319 _filtered_pull_requests = my_participation
319 _filtered_pull_requests = my_participation
320 if length:
320 if length:
321 return _filtered_pull_requests[offset:offset+length]
321 return _filtered_pull_requests[offset:offset+length]
322 else:
322 else:
323 return _filtered_pull_requests
323 return _filtered_pull_requests
324
324
325 def get_not_reviewed(self, user_id):
325 def get_not_reviewed(self, user_id):
326 return [
326 return [
327 x.pull_request for x in PullRequestReviewers.query().filter(
327 x.pull_request for x in PullRequestReviewers.query().filter(
328 PullRequestReviewers.user_id == user_id).all()
328 PullRequestReviewers.user_id == user_id).all()
329 ]
329 ]
330
330
331 def _prepare_participating_query(self, user_id=None, statuses=None,
331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 order_by=None, order_dir='desc'):
332 order_by=None, order_dir='desc'):
333 q = PullRequest.query()
333 q = PullRequest.query()
334 if user_id:
334 if user_id:
335 reviewers_subquery = Session().query(
335 reviewers_subquery = Session().query(
336 PullRequestReviewers.pull_request_id).filter(
336 PullRequestReviewers.pull_request_id).filter(
337 PullRequestReviewers.user_id == user_id).subquery()
337 PullRequestReviewers.user_id == user_id).subquery()
338 user_filter = or_(
338 user_filter = or_(
339 PullRequest.user_id == user_id,
339 PullRequest.user_id == user_id,
340 PullRequest.pull_request_id.in_(reviewers_subquery)
340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 )
341 )
342 q = PullRequest.query().filter(user_filter)
342 q = PullRequest.query().filter(user_filter)
343
343
344 # closed,opened
344 # closed,opened
345 if statuses:
345 if statuses:
346 q = q.filter(PullRequest.status.in_(statuses))
346 q = q.filter(PullRequest.status.in_(statuses))
347
347
348 if order_by:
348 if order_by:
349 order_map = {
349 order_map = {
350 'name_raw': PullRequest.pull_request_id,
350 'name_raw': PullRequest.pull_request_id,
351 'title': PullRequest.title,
351 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
352 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
353 'target_repo': PullRequest.target_repo_id
354 }
354 }
355 if order_dir == 'asc':
355 if order_dir == 'asc':
356 q = q.order_by(order_map[order_by].asc())
356 q = q.order_by(order_map[order_by].asc())
357 else:
357 else:
358 q = q.order_by(order_map[order_by].desc())
358 q = q.order_by(order_map[order_by].desc())
359
359
360 return q
360 return q
361
361
362 def count_im_participating_in(self, user_id=None, statuses=None):
362 def count_im_participating_in(self, user_id=None, statuses=None):
363 q = self._prepare_participating_query(user_id, statuses=statuses)
363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 return q.count()
364 return q.count()
365
365
366 def get_im_participating_in(
366 def get_im_participating_in(
367 self, user_id=None, statuses=None, offset=0,
367 self, user_id=None, statuses=None, offset=0,
368 length=None, order_by=None, order_dir='desc'):
368 length=None, order_by=None, order_dir='desc'):
369 """
369 """
370 Get all Pull requests that i'm participating in, or i have opened
370 Get all Pull requests that i'm participating in, or i have opened
371 """
371 """
372
372
373 q = self._prepare_participating_query(
373 q = self._prepare_participating_query(
374 user_id, statuses=statuses, order_by=order_by,
374 user_id, statuses=statuses, order_by=order_by,
375 order_dir=order_dir)
375 order_dir=order_dir)
376
376
377 if length:
377 if length:
378 pull_requests = q.limit(length).offset(offset).all()
378 pull_requests = q.limit(length).offset(offset).all()
379 else:
379 else:
380 pull_requests = q.all()
380 pull_requests = q.all()
381
381
382 return pull_requests
382 return pull_requests
383
383
384 def get_versions(self, pull_request):
384 def get_versions(self, pull_request):
385 """
385 """
386 returns version of pull request sorted by ID descending
386 returns version of pull request sorted by ID descending
387 """
387 """
388 return PullRequestVersion.query()\
388 return PullRequestVersion.query()\
389 .filter(PullRequestVersion.pull_request == pull_request)\
389 .filter(PullRequestVersion.pull_request == pull_request)\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 .all()
391 .all()
392
392
393 def get_pr_version(self, pull_request_id, version=None):
393 def get_pr_version(self, pull_request_id, version=None):
394 at_version = None
394 at_version = None
395
395
396 if version and version == 'latest':
396 if version and version == 'latest':
397 pull_request_ver = PullRequest.get(pull_request_id)
397 pull_request_ver = PullRequest.get(pull_request_id)
398 pull_request_obj = pull_request_ver
398 pull_request_obj = pull_request_ver
399 _org_pull_request_obj = pull_request_obj
399 _org_pull_request_obj = pull_request_obj
400 at_version = 'latest'
400 at_version = 'latest'
401 elif version:
401 elif version:
402 pull_request_ver = PullRequestVersion.get_or_404(version)
402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 pull_request_obj = pull_request_ver
403 pull_request_obj = pull_request_ver
404 _org_pull_request_obj = pull_request_ver.pull_request
404 _org_pull_request_obj = pull_request_ver.pull_request
405 at_version = pull_request_ver.pull_request_version_id
405 at_version = pull_request_ver.pull_request_version_id
406 else:
406 else:
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 pull_request_id)
408 pull_request_id)
409
409
410 pull_request_display_obj = PullRequest.get_pr_display_object(
410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 pull_request_obj, _org_pull_request_obj)
411 pull_request_obj, _org_pull_request_obj)
412
412
413 return _org_pull_request_obj, pull_request_obj, \
413 return _org_pull_request_obj, pull_request_obj, \
414 pull_request_display_obj, at_version
414 pull_request_display_obj, at_version
415
415
416 def create(self, created_by, source_repo, source_ref, target_repo,
416 def create(self, created_by, source_repo, source_ref, target_repo,
417 target_ref, revisions, reviewers, title, description=None,
417 target_ref, revisions, reviewers, title, description=None,
418 description_renderer=None,
418 description_renderer=None,
419 reviewer_data=None, translator=None, auth_user=None):
419 reviewer_data=None, translator=None, auth_user=None):
420 translator = translator or get_current_request().translate
420 translator = translator or get_current_request().translate
421
421
422 created_by_user = self._get_user(created_by)
422 created_by_user = self._get_user(created_by)
423 auth_user = auth_user or created_by_user.AuthUser()
423 auth_user = auth_user or created_by_user.AuthUser()
424 source_repo = self._get_repo(source_repo)
424 source_repo = self._get_repo(source_repo)
425 target_repo = self._get_repo(target_repo)
425 target_repo = self._get_repo(target_repo)
426
426
427 pull_request = PullRequest()
427 pull_request = PullRequest()
428 pull_request.source_repo = source_repo
428 pull_request.source_repo = source_repo
429 pull_request.source_ref = source_ref
429 pull_request.source_ref = source_ref
430 pull_request.target_repo = target_repo
430 pull_request.target_repo = target_repo
431 pull_request.target_ref = target_ref
431 pull_request.target_ref = target_ref
432 pull_request.revisions = revisions
432 pull_request.revisions = revisions
433 pull_request.title = title
433 pull_request.title = title
434 pull_request.description = description
434 pull_request.description = description
435 pull_request.description_renderer = description_renderer
435 pull_request.description_renderer = description_renderer
436 pull_request.author = created_by_user
436 pull_request.author = created_by_user
437 pull_request.reviewer_data = reviewer_data
437 pull_request.reviewer_data = reviewer_data
438 pull_request.pull_request_state = pull_request.STATE_CREATING
438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 Session().add(pull_request)
439 Session().add(pull_request)
440 Session().flush()
440 Session().flush()
441
441
442 reviewer_ids = set()
442 reviewer_ids = set()
443 # members / reviewers
443 # members / reviewers
444 for reviewer_object in reviewers:
444 for reviewer_object in reviewers:
445 user_id, reasons, mandatory, rules = reviewer_object
445 user_id, reasons, mandatory, rules = reviewer_object
446 user = self._get_user(user_id)
446 user = self._get_user(user_id)
447
447
448 # skip duplicates
448 # skip duplicates
449 if user.user_id in reviewer_ids:
449 if user.user_id in reviewer_ids:
450 continue
450 continue
451
451
452 reviewer_ids.add(user.user_id)
452 reviewer_ids.add(user.user_id)
453
453
454 reviewer = PullRequestReviewers()
454 reviewer = PullRequestReviewers()
455 reviewer.user = user
455 reviewer.user = user
456 reviewer.pull_request = pull_request
456 reviewer.pull_request = pull_request
457 reviewer.reasons = reasons
457 reviewer.reasons = reasons
458 reviewer.mandatory = mandatory
458 reviewer.mandatory = mandatory
459
459
460 # NOTE(marcink): pick only first rule for now
460 # NOTE(marcink): pick only first rule for now
461 rule_id = list(rules)[0] if rules else None
461 rule_id = list(rules)[0] if rules else None
462 rule = RepoReviewRule.get(rule_id) if rule_id else None
462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 if rule:
463 if rule:
464 review_group = rule.user_group_vote_rule(user_id)
464 review_group = rule.user_group_vote_rule(user_id)
465 # we check if this particular reviewer is member of a voting group
465 # we check if this particular reviewer is member of a voting group
466 if review_group:
466 if review_group:
467 # NOTE(marcink):
467 # NOTE(marcink):
468 # can be that user is member of more but we pick the first same,
468 # can be that user is member of more but we pick the first same,
469 # same as default reviewers algo
469 # same as default reviewers algo
470 review_group = review_group[0]
470 review_group = review_group[0]
471
471
472 rule_data = {
472 rule_data = {
473 'rule_name':
473 'rule_name':
474 rule.review_rule_name,
474 rule.review_rule_name,
475 'rule_user_group_entry_id':
475 'rule_user_group_entry_id':
476 review_group.repo_review_rule_users_group_id,
476 review_group.repo_review_rule_users_group_id,
477 'rule_user_group_name':
477 'rule_user_group_name':
478 review_group.users_group.users_group_name,
478 review_group.users_group.users_group_name,
479 'rule_user_group_members':
479 'rule_user_group_members':
480 [x.user.username for x in review_group.users_group.members],
480 [x.user.username for x in review_group.users_group.members],
481 'rule_user_group_members_id':
481 'rule_user_group_members_id':
482 [x.user.user_id for x in review_group.users_group.members],
482 [x.user.user_id for x in review_group.users_group.members],
483 }
483 }
484 # e.g {'vote_rule': -1, 'mandatory': True}
484 # e.g {'vote_rule': -1, 'mandatory': True}
485 rule_data.update(review_group.rule_data())
485 rule_data.update(review_group.rule_data())
486
486
487 reviewer.rule_data = rule_data
487 reviewer.rule_data = rule_data
488
488
489 Session().add(reviewer)
489 Session().add(reviewer)
490 Session().flush()
490 Session().flush()
491
491
492 # Set approval status to "Under Review" for all commits which are
492 # Set approval status to "Under Review" for all commits which are
493 # part of this pull request.
493 # part of this pull request.
494 ChangesetStatusModel().set_status(
494 ChangesetStatusModel().set_status(
495 repo=target_repo,
495 repo=target_repo,
496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 user=created_by_user,
497 user=created_by_user,
498 pull_request=pull_request
498 pull_request=pull_request
499 )
499 )
500 # we commit early at this point. This has to do with a fact
500 # we commit early at this point. This has to do with a fact
501 # that before queries do some row-locking. And because of that
501 # that before queries do some row-locking. And because of that
502 # we need to commit and finish transaction before below validate call
502 # we need to commit and finish transaction before below validate call
503 # that for large repos could be long resulting in long row locks
503 # that for large repos could be long resulting in long row locks
504 Session().commit()
504 Session().commit()
505
505
506 # prepare workspace, and run initial merge simulation. Set state during that
506 # prepare workspace, and run initial merge simulation. Set state during that
507 # operation
507 # operation
508 pull_request = PullRequest.get(pull_request.pull_request_id)
508 pull_request = PullRequest.get(pull_request.pull_request_id)
509
509
510 # set as merging, for simulation, and if finished to created so we mark
510 # set as merging, for simulation, and if finished to created so we mark
511 # simulation is working fine
511 # simulation is working fine
512 with pull_request.set_state(PullRequest.STATE_MERGING,
512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 final_state=PullRequest.STATE_CREATED):
513 final_state=PullRequest.STATE_CREATED):
514 MergeCheck.validate(
514 MergeCheck.validate(
515 pull_request, auth_user=auth_user, translator=translator)
515 pull_request, auth_user=auth_user, translator=translator)
516
516
517 self.notify_reviewers(pull_request, reviewer_ids)
517 self.notify_reviewers(pull_request, reviewer_ids)
518 self.trigger_pull_request_hook(
518 self.trigger_pull_request_hook(
519 pull_request, created_by_user, 'create')
519 pull_request, created_by_user, 'create')
520
520
521 creation_data = pull_request.get_api_data(with_merge_state=False)
521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 self._log_audit_action(
522 self._log_audit_action(
523 'repo.pull_request.create', {'data': creation_data},
523 'repo.pull_request.create', {'data': creation_data},
524 auth_user, pull_request)
524 auth_user, pull_request)
525
525
526 return pull_request
526 return pull_request
527
527
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 pull_request = self.__get_pull_request(pull_request)
529 pull_request = self.__get_pull_request(pull_request)
530 target_scm = pull_request.target_repo.scm_instance()
530 target_scm = pull_request.target_repo.scm_instance()
531 if action == 'create':
531 if action == 'create':
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 elif action == 'merge':
533 elif action == 'merge':
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 elif action == 'close':
535 elif action == 'close':
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 elif action == 'review_status_change':
537 elif action == 'review_status_change':
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 elif action == 'update':
539 elif action == 'update':
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 elif action == 'comment':
541 elif action == 'comment':
542 # dummy hook ! for comment. We want this function to handle all cases
542 # dummy hook ! for comment. We want this function to handle all cases
543 def trigger_hook(*args, **kwargs):
543 def trigger_hook(*args, **kwargs):
544 pass
544 pass
545 comment = data['comment']
545 comment = data['comment']
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 else:
547 else:
548 return
548 return
549
549
550 trigger_hook(
550 trigger_hook(
551 username=user.username,
551 username=user.username,
552 repo_name=pull_request.target_repo.repo_name,
552 repo_name=pull_request.target_repo.repo_name,
553 repo_alias=target_scm.alias,
553 repo_alias=target_scm.alias,
554 pull_request=pull_request,
554 pull_request=pull_request,
555 data=data)
555 data=data)
556
556
557 def _get_commit_ids(self, pull_request):
557 def _get_commit_ids(self, pull_request):
558 """
558 """
559 Return the commit ids of the merged pull request.
559 Return the commit ids of the merged pull request.
560
560
561 This method is not dealing correctly yet with the lack of autoupdates
561 This method is not dealing correctly yet with the lack of autoupdates
562 nor with the implicit target updates.
562 nor with the implicit target updates.
563 For example: if a commit in the source repo is already in the target it
563 For example: if a commit in the source repo is already in the target it
564 will be reported anyways.
564 will be reported anyways.
565 """
565 """
566 merge_rev = pull_request.merge_rev
566 merge_rev = pull_request.merge_rev
567 if merge_rev is None:
567 if merge_rev is None:
568 raise ValueError('This pull request was not merged yet')
568 raise ValueError('This pull request was not merged yet')
569
569
570 commit_ids = list(pull_request.revisions)
570 commit_ids = list(pull_request.revisions)
571 if merge_rev not in commit_ids:
571 if merge_rev not in commit_ids:
572 commit_ids.append(merge_rev)
572 commit_ids.append(merge_rev)
573
573
574 return commit_ids
574 return commit_ids
575
575
576 def merge_repo(self, pull_request, user, extras):
576 def merge_repo(self, pull_request, user, extras):
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 extras['user_agent'] = 'internal-merge'
578 extras['user_agent'] = 'internal-merge'
579 merge_state = self._merge_pull_request(pull_request, user, extras)
579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 if merge_state.executed:
580 if merge_state.executed:
581 log.debug("Merge was successful, updating the pull request comments.")
581 log.debug("Merge was successful, updating the pull request comments.")
582 self._comment_and_close_pr(pull_request, user, merge_state)
582 self._comment_and_close_pr(pull_request, user, merge_state)
583
583
584 self._log_audit_action(
584 self._log_audit_action(
585 'repo.pull_request.merge',
585 'repo.pull_request.merge',
586 {'merge_state': merge_state.__dict__},
586 {'merge_state': merge_state.__dict__},
587 user, pull_request)
587 user, pull_request)
588
588
589 else:
589 else:
590 log.warn("Merge failed, not updating the pull request.")
590 log.warn("Merge failed, not updating the pull request.")
591 return merge_state
591 return merge_state
592
592
593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 target_vcs = pull_request.target_repo.scm_instance()
594 target_vcs = pull_request.target_repo.scm_instance()
595 source_vcs = pull_request.source_repo.scm_instance()
595 source_vcs = pull_request.source_repo.scm_instance()
596
596
597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 pr_id=pull_request.pull_request_id,
598 pr_id=pull_request.pull_request_id,
599 pr_title=pull_request.title,
599 pr_title=pull_request.title,
600 source_repo=source_vcs.name,
600 source_repo=source_vcs.name,
601 source_ref_name=pull_request.source_ref_parts.name,
601 source_ref_name=pull_request.source_ref_parts.name,
602 target_repo=target_vcs.name,
602 target_repo=target_vcs.name,
603 target_ref_name=pull_request.target_ref_parts.name,
603 target_ref_name=pull_request.target_ref_parts.name,
604 )
604 )
605
605
606 workspace_id = self._workspace_id(pull_request)
606 workspace_id = self._workspace_id(pull_request)
607 repo_id = pull_request.target_repo.repo_id
607 repo_id = pull_request.target_repo.repo_id
608 use_rebase = self._use_rebase_for_merging(pull_request)
608 use_rebase = self._use_rebase_for_merging(pull_request)
609 close_branch = self._close_branch_before_merging(pull_request)
609 close_branch = self._close_branch_before_merging(pull_request)
610
610
611 target_ref = self._refresh_reference(
611 target_ref = self._refresh_reference(
612 pull_request.target_ref_parts, target_vcs)
612 pull_request.target_ref_parts, target_vcs)
613
613
614 callback_daemon, extras = prepare_callback_daemon(
614 callback_daemon, extras = prepare_callback_daemon(
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 host=vcs_settings.HOOKS_HOST,
616 host=vcs_settings.HOOKS_HOST,
617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618
618
619 with callback_daemon:
619 with callback_daemon:
620 # TODO: johbo: Implement a clean way to run a config_override
620 # TODO: johbo: Implement a clean way to run a config_override
621 # for a single call.
621 # for a single call.
622 target_vcs.config.set(
622 target_vcs.config.set(
623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624
624
625 user_name = user.short_contact
625 user_name = user.short_contact
626 merge_state = target_vcs.merge(
626 merge_state = target_vcs.merge(
627 repo_id, workspace_id, target_ref, source_vcs,
627 repo_id, workspace_id, target_ref, source_vcs,
628 pull_request.source_ref_parts,
628 pull_request.source_ref_parts,
629 user_name=user_name, user_email=user.email,
629 user_name=user_name, user_email=user.email,
630 message=message, use_rebase=use_rebase,
630 message=message, use_rebase=use_rebase,
631 close_branch=close_branch)
631 close_branch=close_branch)
632 return merge_state
632 return merge_state
633
633
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 pull_request.updated_on = datetime.datetime.now()
636 pull_request.updated_on = datetime.datetime.now()
637 close_msg = close_msg or 'Pull request merged and closed'
637 close_msg = close_msg or 'Pull request merged and closed'
638
638
639 CommentsModel().create(
639 CommentsModel().create(
640 text=safe_unicode(close_msg),
640 text=safe_unicode(close_msg),
641 repo=pull_request.target_repo.repo_id,
641 repo=pull_request.target_repo.repo_id,
642 user=user.user_id,
642 user=user.user_id,
643 pull_request=pull_request.pull_request_id,
643 pull_request=pull_request.pull_request_id,
644 f_path=None,
644 f_path=None,
645 line_no=None,
645 line_no=None,
646 closing_pr=True
646 closing_pr=True
647 )
647 )
648
648
649 Session().add(pull_request)
649 Session().add(pull_request)
650 Session().flush()
650 Session().flush()
651 # TODO: paris: replace invalidation with less radical solution
651 # TODO: paris: replace invalidation with less radical solution
652 ScmModel().mark_for_invalidation(
652 ScmModel().mark_for_invalidation(
653 pull_request.target_repo.repo_name)
653 pull_request.target_repo.repo_name)
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655
655
656 def has_valid_update_type(self, pull_request):
656 def has_valid_update_type(self, pull_request):
657 source_ref_type = pull_request.source_ref_parts.type
657 source_ref_type = pull_request.source_ref_parts.type
658 return source_ref_type in self.REF_TYPES
658 return source_ref_type in self.REF_TYPES
659
659
660 def update_commits(self, pull_request):
660 def update_commits(self, pull_request):
661 """
661 """
662 Get the updated list of commits for the pull request
662 Get the updated list of commits for the pull request
663 and return the new pull request version and the list
663 and return the new pull request version and the list
664 of commits processed by this update action
664 of commits processed by this update action
665 """
665 """
666 pull_request = self.__get_pull_request(pull_request)
666 pull_request = self.__get_pull_request(pull_request)
667 source_ref_type = pull_request.source_ref_parts.type
667 source_ref_type = pull_request.source_ref_parts.type
668 source_ref_name = pull_request.source_ref_parts.name
668 source_ref_name = pull_request.source_ref_parts.name
669 source_ref_id = pull_request.source_ref_parts.commit_id
669 source_ref_id = pull_request.source_ref_parts.commit_id
670
670
671 target_ref_type = pull_request.target_ref_parts.type
671 target_ref_type = pull_request.target_ref_parts.type
672 target_ref_name = pull_request.target_ref_parts.name
672 target_ref_name = pull_request.target_ref_parts.name
673 target_ref_id = pull_request.target_ref_parts.commit_id
673 target_ref_id = pull_request.target_ref_parts.commit_id
674
674
675 if not self.has_valid_update_type(pull_request):
675 if not self.has_valid_update_type(pull_request):
676 log.debug("Skipping update of pull request %s due to ref type: %s",
676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 pull_request, source_ref_type)
677 pull_request, source_ref_type)
678 return UpdateResponse(
678 return UpdateResponse(
679 executed=False,
679 executed=False,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 old=pull_request, new=None, changes=None,
681 old=pull_request, new=None, changes=None,
682 source_changed=False, target_changed=False)
682 source_changed=False, target_changed=False)
683
683
684 # source repo
684 # source repo
685 source_repo = pull_request.source_repo.scm_instance()
685 source_repo = pull_request.source_repo.scm_instance()
686 try:
686 try:
687 source_commit = source_repo.get_commit(commit_id=source_ref_name)
687 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 except CommitDoesNotExistError:
688 except CommitDoesNotExistError:
689 return UpdateResponse(
689 return UpdateResponse(
690 executed=False,
690 executed=False,
691 reason=UpdateFailureReason.MISSING_SOURCE_REF,
691 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 old=pull_request, new=None, changes=None,
692 old=pull_request, new=None, changes=None,
693 source_changed=False, target_changed=False)
693 source_changed=False, target_changed=False)
694
694
695 source_changed = source_ref_id != source_commit.raw_id
695 source_changed = source_ref_id != source_commit.raw_id
696
696
697 # target repo
697 # target repo
698 target_repo = pull_request.target_repo.scm_instance()
698 target_repo = pull_request.target_repo.scm_instance()
699 try:
699 try:
700 target_commit = target_repo.get_commit(commit_id=target_ref_name)
700 target_commit = target_repo.get_commit(commit_id=target_ref_name)
701 except CommitDoesNotExistError:
701 except CommitDoesNotExistError:
702 return UpdateResponse(
702 return UpdateResponse(
703 executed=False,
703 executed=False,
704 reason=UpdateFailureReason.MISSING_TARGET_REF,
704 reason=UpdateFailureReason.MISSING_TARGET_REF,
705 old=pull_request, new=None, changes=None,
705 old=pull_request, new=None, changes=None,
706 source_changed=False, target_changed=False)
706 source_changed=False, target_changed=False)
707 target_changed = target_ref_id != target_commit.raw_id
707 target_changed = target_ref_id != target_commit.raw_id
708
708
709 if not (source_changed or target_changed):
709 if not (source_changed or target_changed):
710 log.debug("Nothing changed in pull request %s", pull_request)
710 log.debug("Nothing changed in pull request %s", pull_request)
711 return UpdateResponse(
711 return UpdateResponse(
712 executed=False,
712 executed=False,
713 reason=UpdateFailureReason.NO_CHANGE,
713 reason=UpdateFailureReason.NO_CHANGE,
714 old=pull_request, new=None, changes=None,
714 old=pull_request, new=None, changes=None,
715 source_changed=target_changed, target_changed=source_changed)
715 source_changed=target_changed, target_changed=source_changed)
716
716
717 change_in_found = 'target repo' if target_changed else 'source repo'
717 change_in_found = 'target repo' if target_changed else 'source repo'
718 log.debug('Updating pull request because of change in %s detected',
718 log.debug('Updating pull request because of change in %s detected',
719 change_in_found)
719 change_in_found)
720
720
721 # Finally there is a need for an update, in case of source change
721 # Finally there is a need for an update, in case of source change
722 # we create a new version, else just an update
722 # we create a new version, else just an update
723 if source_changed:
723 if source_changed:
724 pull_request_version = self._create_version_from_snapshot(pull_request)
724 pull_request_version = self._create_version_from_snapshot(pull_request)
725 self._link_comments_to_version(pull_request_version)
725 self._link_comments_to_version(pull_request_version)
726 else:
726 else:
727 try:
727 try:
728 ver = pull_request.versions[-1]
728 ver = pull_request.versions[-1]
729 except IndexError:
729 except IndexError:
730 ver = None
730 ver = None
731
731
732 pull_request.pull_request_version_id = \
732 pull_request.pull_request_version_id = \
733 ver.pull_request_version_id if ver else None
733 ver.pull_request_version_id if ver else None
734 pull_request_version = pull_request
734 pull_request_version = pull_request
735
735
736 try:
736 try:
737 if target_ref_type in self.REF_TYPES:
737 if target_ref_type in self.REF_TYPES:
738 target_commit = target_repo.get_commit(target_ref_name)
738 target_commit = target_repo.get_commit(target_ref_name)
739 else:
739 else:
740 target_commit = target_repo.get_commit(target_ref_id)
740 target_commit = target_repo.get_commit(target_ref_id)
741 except CommitDoesNotExistError:
741 except CommitDoesNotExistError:
742 return UpdateResponse(
742 return UpdateResponse(
743 executed=False,
743 executed=False,
744 reason=UpdateFailureReason.MISSING_TARGET_REF,
744 reason=UpdateFailureReason.MISSING_TARGET_REF,
745 old=pull_request, new=None, changes=None,
745 old=pull_request, new=None, changes=None,
746 source_changed=source_changed, target_changed=target_changed)
746 source_changed=source_changed, target_changed=target_changed)
747
747
748 # re-compute commit ids
748 # re-compute commit ids
749 old_commit_ids = pull_request.revisions
749 old_commit_ids = pull_request.revisions
750 pre_load = ["author", "branch", "date", "message"]
750 pre_load = ["author", "branch", "date", "message"]
751 commit_ranges = target_repo.compare(
751 commit_ranges = target_repo.compare(
752 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
752 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
753 pre_load=pre_load)
753 pre_load=pre_load)
754
754
755 ancestor = target_repo.get_common_ancestor(
755 ancestor = target_repo.get_common_ancestor(
756 target_commit.raw_id, source_commit.raw_id, source_repo)
756 target_commit.raw_id, source_commit.raw_id, source_repo)
757
757
758 pull_request.source_ref = '%s:%s:%s' % (
758 pull_request.source_ref = '%s:%s:%s' % (
759 source_ref_type, source_ref_name, source_commit.raw_id)
759 source_ref_type, source_ref_name, source_commit.raw_id)
760 pull_request.target_ref = '%s:%s:%s' % (
760 pull_request.target_ref = '%s:%s:%s' % (
761 target_ref_type, target_ref_name, ancestor)
761 target_ref_type, target_ref_name, ancestor)
762
762
763 pull_request.revisions = [
763 pull_request.revisions = [
764 commit.raw_id for commit in reversed(commit_ranges)]
764 commit.raw_id for commit in reversed(commit_ranges)]
765 pull_request.updated_on = datetime.datetime.now()
765 pull_request.updated_on = datetime.datetime.now()
766 Session().add(pull_request)
766 Session().add(pull_request)
767 new_commit_ids = pull_request.revisions
767 new_commit_ids = pull_request.revisions
768
768
769 old_diff_data, new_diff_data = self._generate_update_diffs(
769 old_diff_data, new_diff_data = self._generate_update_diffs(
770 pull_request, pull_request_version)
770 pull_request, pull_request_version)
771
771
772 # calculate commit and file changes
772 # calculate commit and file changes
773 changes = self._calculate_commit_id_changes(
773 changes = self._calculate_commit_id_changes(
774 old_commit_ids, new_commit_ids)
774 old_commit_ids, new_commit_ids)
775 file_changes = self._calculate_file_changes(
775 file_changes = self._calculate_file_changes(
776 old_diff_data, new_diff_data)
776 old_diff_data, new_diff_data)
777
777
778 # set comments as outdated if DIFFS changed
778 # set comments as outdated if DIFFS changed
779 CommentsModel().outdate_comments(
779 CommentsModel().outdate_comments(
780 pull_request, old_diff_data=old_diff_data,
780 pull_request, old_diff_data=old_diff_data,
781 new_diff_data=new_diff_data)
781 new_diff_data=new_diff_data)
782
782
783 commit_changes = (changes.added or changes.removed)
783 commit_changes = (changes.added or changes.removed)
784 file_node_changes = (
784 file_node_changes = (
785 file_changes.added or file_changes.modified or file_changes.removed)
785 file_changes.added or file_changes.modified or file_changes.removed)
786 pr_has_changes = commit_changes or file_node_changes
786 pr_has_changes = commit_changes or file_node_changes
787
787
788 # Add an automatic comment to the pull request, in case
788 # Add an automatic comment to the pull request, in case
789 # anything has changed
789 # anything has changed
790 if pr_has_changes:
790 if pr_has_changes:
791 update_comment = CommentsModel().create(
791 update_comment = CommentsModel().create(
792 text=self._render_update_message(changes, file_changes),
792 text=self._render_update_message(changes, file_changes),
793 repo=pull_request.target_repo,
793 repo=pull_request.target_repo,
794 user=pull_request.author,
794 user=pull_request.author,
795 pull_request=pull_request,
795 pull_request=pull_request,
796 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
796 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
797
797
798 # Update status to "Under Review" for added commits
798 # Update status to "Under Review" for added commits
799 for commit_id in changes.added:
799 for commit_id in changes.added:
800 ChangesetStatusModel().set_status(
800 ChangesetStatusModel().set_status(
801 repo=pull_request.source_repo,
801 repo=pull_request.source_repo,
802 status=ChangesetStatus.STATUS_UNDER_REVIEW,
802 status=ChangesetStatus.STATUS_UNDER_REVIEW,
803 comment=update_comment,
803 comment=update_comment,
804 user=pull_request.author,
804 user=pull_request.author,
805 pull_request=pull_request,
805 pull_request=pull_request,
806 revision=commit_id)
806 revision=commit_id)
807
807
808 log.debug(
808 log.debug(
809 'Updated pull request %s, added_ids: %s, common_ids: %s, '
809 'Updated pull request %s, added_ids: %s, common_ids: %s, '
810 'removed_ids: %s', pull_request.pull_request_id,
810 'removed_ids: %s', pull_request.pull_request_id,
811 changes.added, changes.common, changes.removed)
811 changes.added, changes.common, changes.removed)
812 log.debug(
812 log.debug(
813 'Updated pull request with the following file changes: %s',
813 'Updated pull request with the following file changes: %s',
814 file_changes)
814 file_changes)
815
815
816 log.info(
816 log.info(
817 "Updated pull request %s from commit %s to commit %s, "
817 "Updated pull request %s from commit %s to commit %s, "
818 "stored new version %s of this pull request.",
818 "stored new version %s of this pull request.",
819 pull_request.pull_request_id, source_ref_id,
819 pull_request.pull_request_id, source_ref_id,
820 pull_request.source_ref_parts.commit_id,
820 pull_request.source_ref_parts.commit_id,
821 pull_request_version.pull_request_version_id)
821 pull_request_version.pull_request_version_id)
822 Session().commit()
822 Session().commit()
823 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
823 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
824
824
825 return UpdateResponse(
825 return UpdateResponse(
826 executed=True, reason=UpdateFailureReason.NONE,
826 executed=True, reason=UpdateFailureReason.NONE,
827 old=pull_request, new=pull_request_version, changes=changes,
827 old=pull_request, new=pull_request_version, changes=changes,
828 source_changed=source_changed, target_changed=target_changed)
828 source_changed=source_changed, target_changed=target_changed)
829
829
830 def _create_version_from_snapshot(self, pull_request):
830 def _create_version_from_snapshot(self, pull_request):
831 version = PullRequestVersion()
831 version = PullRequestVersion()
832 version.title = pull_request.title
832 version.title = pull_request.title
833 version.description = pull_request.description
833 version.description = pull_request.description
834 version.status = pull_request.status
834 version.status = pull_request.status
835 version.pull_request_state = pull_request.pull_request_state
835 version.pull_request_state = pull_request.pull_request_state
836 version.created_on = datetime.datetime.now()
836 version.created_on = datetime.datetime.now()
837 version.updated_on = pull_request.updated_on
837 version.updated_on = pull_request.updated_on
838 version.user_id = pull_request.user_id
838 version.user_id = pull_request.user_id
839 version.source_repo = pull_request.source_repo
839 version.source_repo = pull_request.source_repo
840 version.source_ref = pull_request.source_ref
840 version.source_ref = pull_request.source_ref
841 version.target_repo = pull_request.target_repo
841 version.target_repo = pull_request.target_repo
842 version.target_ref = pull_request.target_ref
842 version.target_ref = pull_request.target_ref
843
843
844 version._last_merge_source_rev = pull_request._last_merge_source_rev
844 version._last_merge_source_rev = pull_request._last_merge_source_rev
845 version._last_merge_target_rev = pull_request._last_merge_target_rev
845 version._last_merge_target_rev = pull_request._last_merge_target_rev
846 version.last_merge_status = pull_request.last_merge_status
846 version.last_merge_status = pull_request.last_merge_status
847 version.shadow_merge_ref = pull_request.shadow_merge_ref
847 version.shadow_merge_ref = pull_request.shadow_merge_ref
848 version.merge_rev = pull_request.merge_rev
848 version.merge_rev = pull_request.merge_rev
849 version.reviewer_data = pull_request.reviewer_data
849 version.reviewer_data = pull_request.reviewer_data
850
850
851 version.revisions = pull_request.revisions
851 version.revisions = pull_request.revisions
852 version.pull_request = pull_request
852 version.pull_request = pull_request
853 Session().add(version)
853 Session().add(version)
854 Session().flush()
854 Session().flush()
855
855
856 return version
856 return version
857
857
858 def _generate_update_diffs(self, pull_request, pull_request_version):
858 def _generate_update_diffs(self, pull_request, pull_request_version):
859
859
860 diff_context = (
860 diff_context = (
861 self.DIFF_CONTEXT +
861 self.DIFF_CONTEXT +
862 CommentsModel.needed_extra_diff_context())
862 CommentsModel.needed_extra_diff_context())
863 hide_whitespace_changes = False
863 hide_whitespace_changes = False
864 source_repo = pull_request_version.source_repo
864 source_repo = pull_request_version.source_repo
865 source_ref_id = pull_request_version.source_ref_parts.commit_id
865 source_ref_id = pull_request_version.source_ref_parts.commit_id
866 target_ref_id = pull_request_version.target_ref_parts.commit_id
866 target_ref_id = pull_request_version.target_ref_parts.commit_id
867 old_diff = self._get_diff_from_pr_or_version(
867 old_diff = self._get_diff_from_pr_or_version(
868 source_repo, source_ref_id, target_ref_id,
868 source_repo, source_ref_id, target_ref_id,
869 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
869 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
870
870
871 source_repo = pull_request.source_repo
871 source_repo = pull_request.source_repo
872 source_ref_id = pull_request.source_ref_parts.commit_id
872 source_ref_id = pull_request.source_ref_parts.commit_id
873 target_ref_id = pull_request.target_ref_parts.commit_id
873 target_ref_id = pull_request.target_ref_parts.commit_id
874
874
875 new_diff = self._get_diff_from_pr_or_version(
875 new_diff = self._get_diff_from_pr_or_version(
876 source_repo, source_ref_id, target_ref_id,
876 source_repo, source_ref_id, target_ref_id,
877 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
877 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
878
878
879 old_diff_data = diffs.DiffProcessor(old_diff)
879 old_diff_data = diffs.DiffProcessor(old_diff)
880 old_diff_data.prepare()
880 old_diff_data.prepare()
881 new_diff_data = diffs.DiffProcessor(new_diff)
881 new_diff_data = diffs.DiffProcessor(new_diff)
882 new_diff_data.prepare()
882 new_diff_data.prepare()
883
883
884 return old_diff_data, new_diff_data
884 return old_diff_data, new_diff_data
885
885
886 def _link_comments_to_version(self, pull_request_version):
886 def _link_comments_to_version(self, pull_request_version):
887 """
887 """
888 Link all unlinked comments of this pull request to the given version.
888 Link all unlinked comments of this pull request to the given version.
889
889
890 :param pull_request_version: The `PullRequestVersion` to which
890 :param pull_request_version: The `PullRequestVersion` to which
891 the comments shall be linked.
891 the comments shall be linked.
892
892
893 """
893 """
894 pull_request = pull_request_version.pull_request
894 pull_request = pull_request_version.pull_request
895 comments = ChangesetComment.query()\
895 comments = ChangesetComment.query()\
896 .filter(
896 .filter(
897 # TODO: johbo: Should we query for the repo at all here?
897 # TODO: johbo: Should we query for the repo at all here?
898 # Pending decision on how comments of PRs are to be related
898 # Pending decision on how comments of PRs are to be related
899 # to either the source repo, the target repo or no repo at all.
899 # to either the source repo, the target repo or no repo at all.
900 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
900 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
901 ChangesetComment.pull_request == pull_request,
901 ChangesetComment.pull_request == pull_request,
902 ChangesetComment.pull_request_version == None)\
902 ChangesetComment.pull_request_version == None)\
903 .order_by(ChangesetComment.comment_id.asc())
903 .order_by(ChangesetComment.comment_id.asc())
904
904
905 # TODO: johbo: Find out why this breaks if it is done in a bulk
905 # TODO: johbo: Find out why this breaks if it is done in a bulk
906 # operation.
906 # operation.
907 for comment in comments:
907 for comment in comments:
908 comment.pull_request_version_id = (
908 comment.pull_request_version_id = (
909 pull_request_version.pull_request_version_id)
909 pull_request_version.pull_request_version_id)
910 Session().add(comment)
910 Session().add(comment)
911
911
912 def _calculate_commit_id_changes(self, old_ids, new_ids):
912 def _calculate_commit_id_changes(self, old_ids, new_ids):
913 added = [x for x in new_ids if x not in old_ids]
913 added = [x for x in new_ids if x not in old_ids]
914 common = [x for x in new_ids if x in old_ids]
914 common = [x for x in new_ids if x in old_ids]
915 removed = [x for x in old_ids if x not in new_ids]
915 removed = [x for x in old_ids if x not in new_ids]
916 total = new_ids
916 total = new_ids
917 return ChangeTuple(added, common, removed, total)
917 return ChangeTuple(added, common, removed, total)
918
918
919 def _calculate_file_changes(self, old_diff_data, new_diff_data):
919 def _calculate_file_changes(self, old_diff_data, new_diff_data):
920
920
921 old_files = OrderedDict()
921 old_files = OrderedDict()
922 for diff_data in old_diff_data.parsed_diff:
922 for diff_data in old_diff_data.parsed_diff:
923 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
923 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
924
924
925 added_files = []
925 added_files = []
926 modified_files = []
926 modified_files = []
927 removed_files = []
927 removed_files = []
928 for diff_data in new_diff_data.parsed_diff:
928 for diff_data in new_diff_data.parsed_diff:
929 new_filename = diff_data['filename']
929 new_filename = diff_data['filename']
930 new_hash = md5_safe(diff_data['raw_diff'])
930 new_hash = md5_safe(diff_data['raw_diff'])
931
931
932 old_hash = old_files.get(new_filename)
932 old_hash = old_files.get(new_filename)
933 if not old_hash:
933 if not old_hash:
934 # file is not present in old diff, means it's added
934 # file is not present in old diff, means it's added
935 added_files.append(new_filename)
935 added_files.append(new_filename)
936 else:
936 else:
937 if new_hash != old_hash:
937 if new_hash != old_hash:
938 modified_files.append(new_filename)
938 modified_files.append(new_filename)
939 # now remove a file from old, since we have seen it already
939 # now remove a file from old, since we have seen it already
940 del old_files[new_filename]
940 del old_files[new_filename]
941
941
942 # removed files is when there are present in old, but not in NEW,
942 # removed files is when there are present in old, but not in NEW,
943 # since we remove old files that are present in new diff, left-overs
943 # since we remove old files that are present in new diff, left-overs
944 # if any should be the removed files
944 # if any should be the removed files
945 removed_files.extend(old_files.keys())
945 removed_files.extend(old_files.keys())
946
946
947 return FileChangeTuple(added_files, modified_files, removed_files)
947 return FileChangeTuple(added_files, modified_files, removed_files)
948
948
949 def _render_update_message(self, changes, file_changes):
949 def _render_update_message(self, changes, file_changes):
950 """
950 """
951 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
951 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
952 so it's always looking the same disregarding on which default
952 so it's always looking the same disregarding on which default
953 renderer system is using.
953 renderer system is using.
954
954
955 :param changes: changes named tuple
955 :param changes: changes named tuple
956 :param file_changes: file changes named tuple
956 :param file_changes: file changes named tuple
957
957
958 """
958 """
959 new_status = ChangesetStatus.get_status_lbl(
959 new_status = ChangesetStatus.get_status_lbl(
960 ChangesetStatus.STATUS_UNDER_REVIEW)
960 ChangesetStatus.STATUS_UNDER_REVIEW)
961
961
962 changed_files = (
962 changed_files = (
963 file_changes.added + file_changes.modified + file_changes.removed)
963 file_changes.added + file_changes.modified + file_changes.removed)
964
964
965 params = {
965 params = {
966 'under_review_label': new_status,
966 'under_review_label': new_status,
967 'added_commits': changes.added,
967 'added_commits': changes.added,
968 'removed_commits': changes.removed,
968 'removed_commits': changes.removed,
969 'changed_files': changed_files,
969 'changed_files': changed_files,
970 'added_files': file_changes.added,
970 'added_files': file_changes.added,
971 'modified_files': file_changes.modified,
971 'modified_files': file_changes.modified,
972 'removed_files': file_changes.removed,
972 'removed_files': file_changes.removed,
973 }
973 }
974 renderer = RstTemplateRenderer()
974 renderer = RstTemplateRenderer()
975 return renderer.render('pull_request_update.mako', **params)
975 return renderer.render('pull_request_update.mako', **params)
976
976
977 def edit(self, pull_request, title, description, description_renderer, user):
977 def edit(self, pull_request, title, description, description_renderer, user):
978 pull_request = self.__get_pull_request(pull_request)
978 pull_request = self.__get_pull_request(pull_request)
979 old_data = pull_request.get_api_data(with_merge_state=False)
979 old_data = pull_request.get_api_data(with_merge_state=False)
980 if pull_request.is_closed():
980 if pull_request.is_closed():
981 raise ValueError('This pull request is closed')
981 raise ValueError('This pull request is closed')
982 if title:
982 if title:
983 pull_request.title = title
983 pull_request.title = title
984 pull_request.description = description
984 pull_request.description = description
985 pull_request.updated_on = datetime.datetime.now()
985 pull_request.updated_on = datetime.datetime.now()
986 pull_request.description_renderer = description_renderer
986 pull_request.description_renderer = description_renderer
987 Session().add(pull_request)
987 Session().add(pull_request)
988 self._log_audit_action(
988 self._log_audit_action(
989 'repo.pull_request.edit', {'old_data': old_data},
989 'repo.pull_request.edit', {'old_data': old_data},
990 user, pull_request)
990 user, pull_request)
991
991
992 def update_reviewers(self, pull_request, reviewer_data, user):
992 def update_reviewers(self, pull_request, reviewer_data, user):
993 """
993 """
994 Update the reviewers in the pull request
994 Update the reviewers in the pull request
995
995
996 :param pull_request: the pr to update
996 :param pull_request: the pr to update
997 :param reviewer_data: list of tuples
997 :param reviewer_data: list of tuples
998 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
998 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
999 """
999 """
1000 pull_request = self.__get_pull_request(pull_request)
1000 pull_request = self.__get_pull_request(pull_request)
1001 if pull_request.is_closed():
1001 if pull_request.is_closed():
1002 raise ValueError('This pull request is closed')
1002 raise ValueError('This pull request is closed')
1003
1003
1004 reviewers = {}
1004 reviewers = {}
1005 for user_id, reasons, mandatory, rules in reviewer_data:
1005 for user_id, reasons, mandatory, rules in reviewer_data:
1006 if isinstance(user_id, (int, compat.string_types)):
1006 if isinstance(user_id, (int, compat.string_types)):
1007 user_id = self._get_user(user_id).user_id
1007 user_id = self._get_user(user_id).user_id
1008 reviewers[user_id] = {
1008 reviewers[user_id] = {
1009 'reasons': reasons, 'mandatory': mandatory}
1009 'reasons': reasons, 'mandatory': mandatory}
1010
1010
1011 reviewers_ids = set(reviewers.keys())
1011 reviewers_ids = set(reviewers.keys())
1012 current_reviewers = PullRequestReviewers.query()\
1012 current_reviewers = PullRequestReviewers.query()\
1013 .filter(PullRequestReviewers.pull_request ==
1013 .filter(PullRequestReviewers.pull_request ==
1014 pull_request).all()
1014 pull_request).all()
1015 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1015 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1016
1016
1017 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1017 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1018 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1018 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1019
1019
1020 log.debug("Adding %s reviewers", ids_to_add)
1020 log.debug("Adding %s reviewers", ids_to_add)
1021 log.debug("Removing %s reviewers", ids_to_remove)
1021 log.debug("Removing %s reviewers", ids_to_remove)
1022 changed = False
1022 changed = False
1023 added_audit_reviewers = []
1023 added_audit_reviewers = []
1024 removed_audit_reviewers = []
1024 removed_audit_reviewers = []
1025
1025
1026 for uid in ids_to_add:
1026 for uid in ids_to_add:
1027 changed = True
1027 changed = True
1028 _usr = self._get_user(uid)
1028 _usr = self._get_user(uid)
1029 reviewer = PullRequestReviewers()
1029 reviewer = PullRequestReviewers()
1030 reviewer.user = _usr
1030 reviewer.user = _usr
1031 reviewer.pull_request = pull_request
1031 reviewer.pull_request = pull_request
1032 reviewer.reasons = reviewers[uid]['reasons']
1032 reviewer.reasons = reviewers[uid]['reasons']
1033 # NOTE(marcink): mandatory shouldn't be changed now
1033 # NOTE(marcink): mandatory shouldn't be changed now
1034 # reviewer.mandatory = reviewers[uid]['reasons']
1034 # reviewer.mandatory = reviewers[uid]['reasons']
1035 Session().add(reviewer)
1035 Session().add(reviewer)
1036 added_audit_reviewers.append(reviewer.get_dict())
1036 added_audit_reviewers.append(reviewer.get_dict())
1037
1037
1038 for uid in ids_to_remove:
1038 for uid in ids_to_remove:
1039 changed = True
1039 changed = True
1040 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1040 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1041 # that prevents and fixes cases that we added the same reviewer twice.
1041 # that prevents and fixes cases that we added the same reviewer twice.
1042 # this CAN happen due to the lack of DB checks
1042 # this CAN happen due to the lack of DB checks
1043 reviewers = PullRequestReviewers.query()\
1043 reviewers = PullRequestReviewers.query()\
1044 .filter(PullRequestReviewers.user_id == uid,
1044 .filter(PullRequestReviewers.user_id == uid,
1045 PullRequestReviewers.pull_request == pull_request)\
1045 PullRequestReviewers.pull_request == pull_request)\
1046 .all()
1046 .all()
1047
1047
1048 for obj in reviewers:
1048 for obj in reviewers:
1049 added_audit_reviewers.append(obj.get_dict())
1049 added_audit_reviewers.append(obj.get_dict())
1050 Session().delete(obj)
1050 Session().delete(obj)
1051
1051
1052 if changed:
1052 if changed:
1053 Session().expire_all()
1053 Session().expire_all()
1054 pull_request.updated_on = datetime.datetime.now()
1054 pull_request.updated_on = datetime.datetime.now()
1055 Session().add(pull_request)
1055 Session().add(pull_request)
1056
1056
1057 # finally store audit logs
1057 # finally store audit logs
1058 for user_data in added_audit_reviewers:
1058 for user_data in added_audit_reviewers:
1059 self._log_audit_action(
1059 self._log_audit_action(
1060 'repo.pull_request.reviewer.add', {'data': user_data},
1060 'repo.pull_request.reviewer.add', {'data': user_data},
1061 user, pull_request)
1061 user, pull_request)
1062 for user_data in removed_audit_reviewers:
1062 for user_data in removed_audit_reviewers:
1063 self._log_audit_action(
1063 self._log_audit_action(
1064 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1064 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1065 user, pull_request)
1065 user, pull_request)
1066
1066
1067 self.notify_reviewers(pull_request, ids_to_add)
1067 self.notify_reviewers(pull_request, ids_to_add)
1068 return ids_to_add, ids_to_remove
1068 return ids_to_add, ids_to_remove
1069
1069
1070 def get_url(self, pull_request, request=None, permalink=False):
1070 def get_url(self, pull_request, request=None, permalink=False):
1071 if not request:
1071 if not request:
1072 request = get_current_request()
1072 request = get_current_request()
1073
1073
1074 if permalink:
1074 if permalink:
1075 return request.route_url(
1075 return request.route_url(
1076 'pull_requests_global',
1076 'pull_requests_global',
1077 pull_request_id=pull_request.pull_request_id,)
1077 pull_request_id=pull_request.pull_request_id,)
1078 else:
1078 else:
1079 return request.route_url('pullrequest_show',
1079 return request.route_url('pullrequest_show',
1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1081 pull_request_id=pull_request.pull_request_id,)
1081 pull_request_id=pull_request.pull_request_id,)
1082
1082
1083 def get_shadow_clone_url(self, pull_request, request=None):
1083 def get_shadow_clone_url(self, pull_request, request=None):
1084 """
1084 """
1085 Returns qualified url pointing to the shadow repository. If this pull
1085 Returns qualified url pointing to the shadow repository. If this pull
1086 request is closed there is no shadow repository and ``None`` will be
1086 request is closed there is no shadow repository and ``None`` will be
1087 returned.
1087 returned.
1088 """
1088 """
1089 if pull_request.is_closed():
1089 if pull_request.is_closed():
1090 return None
1090 return None
1091 else:
1091 else:
1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094
1094
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create a notification (and the corresponding emails) informing the
        given users that they were added as reviewers of the pull request.

        :param pull_request: the pull request the notification is about
        :param reviewers_ids: user ids to notify; the method is a no-op
            when empty
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # link to the pull request itself
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: (commit id, commit message) pairs for every
        # revision included in this pull request
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template/notification context
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1153
1153
    def delete(self, pull_request, user):
        """
        Delete a pull request: clean up its merge workspace, write an audit
        entry with the previous state, then remove the DB record.

        :param pull_request: pull request instance or id
        :param user: user performing the deletion, for the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        # snapshot the state before deletion so the audit log keeps it
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1162
1162
    def close_pull_request(self, pull_request, user):
        """
        Close the given pull request: clean up the merge workspace, set the
        status to CLOSED, fire the 'close' hook and record an audit entry.

        :param pull_request: pull request instance or id
        :param user: user performing the close, for the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        # hook fires after the status flip, so listeners see the closed state
        self.trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175
1175
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request leaving a status-changing comment.

        The closing status is APPROVED only when the calculated review
        status is already approved; otherwise the PR is closed as REJECTED.

        :param pull_request: pull request to close
        :param user: user closing the pull request
        :param repo: repository the comment/status is attached to
        :param message: optional comment text; a default closing message
            is used when not given
        :param auth_user: authenticated user for the comment, if different
        :return: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the comment/status are visible to the event listeners
        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1232
1232
1233 def merge_status(self, pull_request, translator=None,
1233 def merge_status(self, pull_request, translator=None,
1234 force_shadow_repo_refresh=False):
1234 force_shadow_repo_refresh=False):
1235 _ = translator or get_current_request().translate
1235 _ = translator or get_current_request().translate
1236
1236
1237 if not self._is_merge_enabled(pull_request):
1237 if not self._is_merge_enabled(pull_request):
1238 return False, _('Server-side pull request merging is disabled.')
1238 return False, _('Server-side pull request merging is disabled.')
1239 if pull_request.is_closed():
1239 if pull_request.is_closed():
1240 return False, _('This pull request is closed.')
1240 return False, _('This pull request is closed.')
1241 merge_possible, msg = self._check_repo_requirements(
1241 merge_possible, msg = self._check_repo_requirements(
1242 target=pull_request.target_repo, source=pull_request.source_repo,
1242 target=pull_request.target_repo, source=pull_request.source_repo,
1243 translator=_)
1243 translator=_)
1244 if not merge_possible:
1244 if not merge_possible:
1245 return merge_possible, msg
1245 return merge_possible, msg
1246
1246
1247 try:
1247 try:
1248 resp = self._try_merge(
1248 resp = self._try_merge(
1249 pull_request,
1249 pull_request,
1250 force_shadow_repo_refresh=force_shadow_repo_refresh)
1250 force_shadow_repo_refresh=force_shadow_repo_refresh)
1251 log.debug("Merge response: %s", resp)
1251 log.debug("Merge response: %s", resp)
1252 status = resp.possible, resp.merge_status_message
1252 status = resp.possible, resp.merge_status_message
1253 except NotImplementedError:
1253 except NotImplementedError:
1254 status = False, _('Pull request merging is not supported.')
1254 status = False, _('Pull request merging is not supported.')
1255
1255
1256 return status
1256 return status
1257
1257
1258 def _check_repo_requirements(self, target, source, translator):
1258 def _check_repo_requirements(self, target, source, translator):
1259 """
1259 """
1260 Check if `target` and `source` have compatible requirements.
1260 Check if `target` and `source` have compatible requirements.
1261
1261
1262 Currently this is just checking for largefiles.
1262 Currently this is just checking for largefiles.
1263 """
1263 """
1264 _ = translator
1264 _ = translator
1265 target_has_largefiles = self._has_largefiles(target)
1265 target_has_largefiles = self._has_largefiles(target)
1266 source_has_largefiles = self._has_largefiles(source)
1266 source_has_largefiles = self._has_largefiles(source)
1267 merge_possible = True
1267 merge_possible = True
1268 message = u''
1268 message = u''
1269
1269
1270 if target_has_largefiles != source_has_largefiles:
1270 if target_has_largefiles != source_has_largefiles:
1271 merge_possible = False
1271 merge_possible = False
1272 if source_has_largefiles:
1272 if source_has_largefiles:
1273 message = _(
1273 message = _(
1274 'Target repository large files support is disabled.')
1274 'Target repository large files support is disabled.')
1275 else:
1275 else:
1276 message = _(
1276 message = _(
1277 'Source repository large files support is disabled.')
1277 'Source repository large files support is disabled.')
1278
1278
1279 return merge_possible, message
1279 return merge_possible, message
1280
1280
1281 def _has_largefiles(self, repo):
1281 def _has_largefiles(self, repo):
1282 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1282 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1283 'extensions', 'largefiles')
1283 'extensions', 'largefiles')
1284 return largefiles_ui and largefiles_ui[0].active
1284 return largefiles_ui and largefiles_ui[0].active
1285
1285
1286 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1286 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1287 """
1287 """
1288 Try to merge the pull request and return the merge status.
1288 Try to merge the pull request and return the merge status.
1289 """
1289 """
1290 log.debug(
1290 log.debug(
1291 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1291 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1292 pull_request.pull_request_id, force_shadow_repo_refresh)
1292 pull_request.pull_request_id, force_shadow_repo_refresh)
1293 target_vcs = pull_request.target_repo.scm_instance()
1293 target_vcs = pull_request.target_repo.scm_instance()
1294 # Refresh the target reference.
1294 # Refresh the target reference.
1295 try:
1295 try:
1296 target_ref = self._refresh_reference(
1296 target_ref = self._refresh_reference(
1297 pull_request.target_ref_parts, target_vcs)
1297 pull_request.target_ref_parts, target_vcs)
1298 except CommitDoesNotExistError:
1298 except CommitDoesNotExistError:
1299 merge_state = MergeResponse(
1299 merge_state = MergeResponse(
1300 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1300 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1301 metadata={'target_ref': pull_request.target_ref_parts})
1301 metadata={'target_ref': pull_request.target_ref_parts})
1302 return merge_state
1302 return merge_state
1303
1303
1304 target_locked = pull_request.target_repo.locked
1304 target_locked = pull_request.target_repo.locked
1305 if target_locked and target_locked[0]:
1305 if target_locked and target_locked[0]:
1306 locked_by = 'user:{}'.format(target_locked[0])
1306 locked_by = 'user:{}'.format(target_locked[0])
1307 log.debug("The target repository is locked by %s.", locked_by)
1307 log.debug("The target repository is locked by %s.", locked_by)
1308 merge_state = MergeResponse(
1308 merge_state = MergeResponse(
1309 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1309 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1310 metadata={'locked_by': locked_by})
1310 metadata={'locked_by': locked_by})
1311 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1311 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1312 pull_request, target_ref):
1312 pull_request, target_ref):
1313 log.debug("Refreshing the merge status of the repository.")
1313 log.debug("Refreshing the merge status of the repository.")
1314 merge_state = self._refresh_merge_state(
1314 merge_state = self._refresh_merge_state(
1315 pull_request, target_vcs, target_ref)
1315 pull_request, target_vcs, target_ref)
1316 else:
1316 else:
1317 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1317 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1318 metadata = {
1318 metadata = {
1319 'target_ref': pull_request.target_ref_parts,
1319 'target_ref': pull_request.target_ref_parts,
1320 'source_ref': pull_request.source_ref_parts
1320 'source_ref': pull_request.source_ref_parts,
1321 }
1321 }
1322 if not possible and target_ref.type == 'branch':
1323 # NOTE(marcink): case for mercurial multiple heads on branch
1324 heads = target_vcs._heads(target_ref.name)
1325 if len(heads) != 1:
1326 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1327 metadata.update({
1328 'heads': heads
1329 })
1322 merge_state = MergeResponse(
1330 merge_state = MergeResponse(
1323 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1331 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1324
1332
1325 return merge_state
1333 return merge_state
1326
1334
1327 def _refresh_reference(self, reference, vcs_repository):
1335 def _refresh_reference(self, reference, vcs_repository):
1328 if reference.type in self.UPDATABLE_REF_TYPES:
1336 if reference.type in self.UPDATABLE_REF_TYPES:
1329 name_or_id = reference.name
1337 name_or_id = reference.name
1330 else:
1338 else:
1331 name_or_id = reference.commit_id
1339 name_or_id = reference.commit_id
1332 refreshed_commit = vcs_repository.get_commit(name_or_id)
1340 refreshed_commit = vcs_repository.get_commit(name_or_id)
1333 refreshed_reference = Reference(
1341 refreshed_reference = Reference(
1334 reference.type, reference.name, refreshed_commit.raw_id)
1342 reference.type, reference.name, refreshed_commit.raw_id)
1335 return refreshed_reference
1343 return refreshed_reference
1336
1344
1337 def _needs_merge_state_refresh(self, pull_request, target_reference):
1345 def _needs_merge_state_refresh(self, pull_request, target_reference):
1338 return not(
1346 return not(
1339 pull_request.revisions and
1347 pull_request.revisions and
1340 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1348 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1341 target_reference.commit_id == pull_request._last_merge_target_rev)
1349 target_reference.commit_id == pull_request._last_merge_target_rev)
1342
1350
1343 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1351 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1344 workspace_id = self._workspace_id(pull_request)
1352 workspace_id = self._workspace_id(pull_request)
1345 source_vcs = pull_request.source_repo.scm_instance()
1353 source_vcs = pull_request.source_repo.scm_instance()
1346 repo_id = pull_request.target_repo.repo_id
1354 repo_id = pull_request.target_repo.repo_id
1347 use_rebase = self._use_rebase_for_merging(pull_request)
1355 use_rebase = self._use_rebase_for_merging(pull_request)
1348 close_branch = self._close_branch_before_merging(pull_request)
1356 close_branch = self._close_branch_before_merging(pull_request)
1349 merge_state = target_vcs.merge(
1357 merge_state = target_vcs.merge(
1350 repo_id, workspace_id,
1358 repo_id, workspace_id,
1351 target_reference, source_vcs, pull_request.source_ref_parts,
1359 target_reference, source_vcs, pull_request.source_ref_parts,
1352 dry_run=True, use_rebase=use_rebase,
1360 dry_run=True, use_rebase=use_rebase,
1353 close_branch=close_branch)
1361 close_branch=close_branch)
1354
1362
1355 # Do not store the response if there was an unknown error.
1363 # Do not store the response if there was an unknown error.
1356 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1364 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1357 pull_request._last_merge_source_rev = \
1365 pull_request._last_merge_source_rev = \
1358 pull_request.source_ref_parts.commit_id
1366 pull_request.source_ref_parts.commit_id
1359 pull_request._last_merge_target_rev = target_reference.commit_id
1367 pull_request._last_merge_target_rev = target_reference.commit_id
1360 pull_request.last_merge_status = merge_state.failure_reason
1368 pull_request.last_merge_status = merge_state.failure_reason
1361 pull_request.shadow_merge_ref = merge_state.merge_ref
1369 pull_request.shadow_merge_ref = merge_state.merge_ref
1362 Session().add(pull_request)
1370 Session().add(pull_request)
1363 Session().commit()
1371 Session().commit()
1364
1372
1365 return merge_state
1373 return merge_state
1366
1374
1367 def _workspace_id(self, pull_request):
1375 def _workspace_id(self, pull_request):
1368 workspace_id = 'pr-%s' % pull_request.pull_request_id
1376 workspace_id = 'pr-%s' % pull_request.pull_request_id
1369 return workspace_id
1377 return workspace_id
1370
1378
1371 def generate_repo_data(self, repo, commit_id=None, branch=None,
1379 def generate_repo_data(self, repo, commit_id=None, branch=None,
1372 bookmark=None, translator=None):
1380 bookmark=None, translator=None):
1373 from rhodecode.model.repo import RepoModel
1381 from rhodecode.model.repo import RepoModel
1374
1382
1375 all_refs, selected_ref = \
1383 all_refs, selected_ref = \
1376 self._get_repo_pullrequest_sources(
1384 self._get_repo_pullrequest_sources(
1377 repo.scm_instance(), commit_id=commit_id,
1385 repo.scm_instance(), commit_id=commit_id,
1378 branch=branch, bookmark=bookmark, translator=translator)
1386 branch=branch, bookmark=bookmark, translator=translator)
1379
1387
1380 refs_select2 = []
1388 refs_select2 = []
1381 for element in all_refs:
1389 for element in all_refs:
1382 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1390 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1383 refs_select2.append({'text': element[1], 'children': children})
1391 refs_select2.append({'text': element[1], 'children': children})
1384
1392
1385 return {
1393 return {
1386 'user': {
1394 'user': {
1387 'user_id': repo.user.user_id,
1395 'user_id': repo.user.user_id,
1388 'username': repo.user.username,
1396 'username': repo.user.username,
1389 'firstname': repo.user.first_name,
1397 'firstname': repo.user.first_name,
1390 'lastname': repo.user.last_name,
1398 'lastname': repo.user.last_name,
1391 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1399 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1392 },
1400 },
1393 'name': repo.repo_name,
1401 'name': repo.repo_name,
1394 'link': RepoModel().get_url(repo),
1402 'link': RepoModel().get_url(repo),
1395 'description': h.chop_at_smart(repo.description_safe, '\n'),
1403 'description': h.chop_at_smart(repo.description_safe, '\n'),
1396 'refs': {
1404 'refs': {
1397 'all_refs': all_refs,
1405 'all_refs': all_refs,
1398 'selected_ref': selected_ref,
1406 'selected_ref': selected_ref,
1399 'select2_refs': refs_select2
1407 'select2_refs': refs_select2
1400 }
1408 }
1401 }
1409 }
1402
1410
1403 def generate_pullrequest_title(self, source, source_ref, target):
1411 def generate_pullrequest_title(self, source, source_ref, target):
1404 return u'{source}#{at_ref} to {target}'.format(
1412 return u'{source}#{at_ref} to {target}'.format(
1405 source=source,
1413 source=source,
1406 at_ref=source_ref,
1414 at_ref=source_ref,
1407 target=target,
1415 target=target,
1408 )
1416 )
1409
1417
1410 def _cleanup_merge_workspace(self, pull_request):
1418 def _cleanup_merge_workspace(self, pull_request):
1411 # Merging related cleanup
1419 # Merging related cleanup
1412 repo_id = pull_request.target_repo.repo_id
1420 repo_id = pull_request.target_repo.repo_id
1413 target_scm = pull_request.target_repo.scm_instance()
1421 target_scm = pull_request.target_repo.scm_instance()
1414 workspace_id = self._workspace_id(pull_request)
1422 workspace_id = self._workspace_id(pull_request)
1415
1423
1416 try:
1424 try:
1417 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1425 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1418 except NotImplementedError:
1426 except NotImplementedError:
1419 pass
1427 pass
1420
1428
1421 def _get_repo_pullrequest_sources(
1429 def _get_repo_pullrequest_sources(
1422 self, repo, commit_id=None, branch=None, bookmark=None,
1430 self, repo, commit_id=None, branch=None, bookmark=None,
1423 translator=None):
1431 translator=None):
1424 """
1432 """
1425 Return a structure with repo's interesting commits, suitable for
1433 Return a structure with repo's interesting commits, suitable for
1426 the selectors in pullrequest controller
1434 the selectors in pullrequest controller
1427
1435
1428 :param commit_id: a commit that must be in the list somehow
1436 :param commit_id: a commit that must be in the list somehow
1429 and selected by default
1437 and selected by default
1430 :param branch: a branch that must be in the list and selected
1438 :param branch: a branch that must be in the list and selected
1431 by default - even if closed
1439 by default - even if closed
1432 :param bookmark: a bookmark that must be in the list and selected
1440 :param bookmark: a bookmark that must be in the list and selected
1433 """
1441 """
1434 _ = translator or get_current_request().translate
1442 _ = translator or get_current_request().translate
1435
1443
1436 commit_id = safe_str(commit_id) if commit_id else None
1444 commit_id = safe_str(commit_id) if commit_id else None
1437 branch = safe_unicode(branch) if branch else None
1445 branch = safe_unicode(branch) if branch else None
1438 bookmark = safe_unicode(bookmark) if bookmark else None
1446 bookmark = safe_unicode(bookmark) if bookmark else None
1439
1447
1440 selected = None
1448 selected = None
1441
1449
1442 # order matters: first source that has commit_id in it will be selected
1450 # order matters: first source that has commit_id in it will be selected
1443 sources = []
1451 sources = []
1444 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1452 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1445 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1453 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1446
1454
1447 if commit_id:
1455 if commit_id:
1448 ref_commit = (h.short_id(commit_id), commit_id)
1456 ref_commit = (h.short_id(commit_id), commit_id)
1449 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1457 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1450
1458
1451 sources.append(
1459 sources.append(
1452 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1460 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1453 )
1461 )
1454
1462
1455 groups = []
1463 groups = []
1456
1464
1457 for group_key, ref_list, group_name, match in sources:
1465 for group_key, ref_list, group_name, match in sources:
1458 group_refs = []
1466 group_refs = []
1459 for ref_name, ref_id in ref_list:
1467 for ref_name, ref_id in ref_list:
1460 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1468 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1461 group_refs.append((ref_key, ref_name))
1469 group_refs.append((ref_key, ref_name))
1462
1470
1463 if not selected:
1471 if not selected:
1464 if set([commit_id, match]) & set([ref_id, ref_name]):
1472 if set([commit_id, match]) & set([ref_id, ref_name]):
1465 selected = ref_key
1473 selected = ref_key
1466
1474
1467 if group_refs:
1475 if group_refs:
1468 groups.append((group_refs, group_name))
1476 groups.append((group_refs, group_name))
1469
1477
1470 if not selected:
1478 if not selected:
1471 ref = commit_id or branch or bookmark
1479 ref = commit_id or branch or bookmark
1472 if ref:
1480 if ref:
1473 raise CommitDoesNotExistError(
1481 raise CommitDoesNotExistError(
1474 u'No commit refs could be found matching: {}'.format(ref))
1482 u'No commit refs could be found matching: {}'.format(ref))
1475 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1483 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1476 selected = u'branch:{}:{}'.format(
1484 selected = u'branch:{}:{}'.format(
1477 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1485 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1478 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1486 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1479 )
1487 )
1480 elif repo.commit_ids:
1488 elif repo.commit_ids:
1481 # make the user select in this case
1489 # make the user select in this case
1482 selected = None
1490 selected = None
1483 else:
1491 else:
1484 raise EmptyRepositoryError()
1492 raise EmptyRepositoryError()
1485 return groups, selected
1493 return groups, selected
1486
1494
1487 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1495 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1488 hide_whitespace_changes, diff_context):
1496 hide_whitespace_changes, diff_context):
1489
1497
1490 return self._get_diff_from_pr_or_version(
1498 return self._get_diff_from_pr_or_version(
1491 source_repo, source_ref_id, target_ref_id,
1499 source_repo, source_ref_id, target_ref_id,
1492 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1500 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1493
1501
1494 def _get_diff_from_pr_or_version(
1502 def _get_diff_from_pr_or_version(
1495 self, source_repo, source_ref_id, target_ref_id,
1503 self, source_repo, source_ref_id, target_ref_id,
1496 hide_whitespace_changes, diff_context):
1504 hide_whitespace_changes, diff_context):
1497
1505
1498 target_commit = source_repo.get_commit(
1506 target_commit = source_repo.get_commit(
1499 commit_id=safe_str(target_ref_id))
1507 commit_id=safe_str(target_ref_id))
1500 source_commit = source_repo.get_commit(
1508 source_commit = source_repo.get_commit(
1501 commit_id=safe_str(source_ref_id))
1509 commit_id=safe_str(source_ref_id))
1502 if isinstance(source_repo, Repository):
1510 if isinstance(source_repo, Repository):
1503 vcs_repo = source_repo.scm_instance()
1511 vcs_repo = source_repo.scm_instance()
1504 else:
1512 else:
1505 vcs_repo = source_repo
1513 vcs_repo = source_repo
1506
1514
1507 # TODO: johbo: In the context of an update, we cannot reach
1515 # TODO: johbo: In the context of an update, we cannot reach
1508 # the old commit anymore with our normal mechanisms. It needs
1516 # the old commit anymore with our normal mechanisms. It needs
1509 # some sort of special support in the vcs layer to avoid this
1517 # some sort of special support in the vcs layer to avoid this
1510 # workaround.
1518 # workaround.
1511 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1519 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1512 vcs_repo.alias == 'git'):
1520 vcs_repo.alias == 'git'):
1513 source_commit.raw_id = safe_str(source_ref_id)
1521 source_commit.raw_id = safe_str(source_ref_id)
1514
1522
1515 log.debug('calculating diff between '
1523 log.debug('calculating diff between '
1516 'source_ref:%s and target_ref:%s for repo `%s`',
1524 'source_ref:%s and target_ref:%s for repo `%s`',
1517 target_ref_id, source_ref_id,
1525 target_ref_id, source_ref_id,
1518 safe_unicode(vcs_repo.path))
1526 safe_unicode(vcs_repo.path))
1519
1527
1520 vcs_diff = vcs_repo.get_diff(
1528 vcs_diff = vcs_repo.get_diff(
1521 commit1=target_commit, commit2=source_commit,
1529 commit1=target_commit, commit2=source_commit,
1522 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1530 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1523 return vcs_diff
1531 return vcs_diff
1524
1532
1525 def _is_merge_enabled(self, pull_request):
1533 def _is_merge_enabled(self, pull_request):
1526 return self._get_general_setting(
1534 return self._get_general_setting(
1527 pull_request, 'rhodecode_pr_merge_enabled')
1535 pull_request, 'rhodecode_pr_merge_enabled')
1528
1536
1529 def _use_rebase_for_merging(self, pull_request):
1537 def _use_rebase_for_merging(self, pull_request):
1530 repo_type = pull_request.target_repo.repo_type
1538 repo_type = pull_request.target_repo.repo_type
1531 if repo_type == 'hg':
1539 if repo_type == 'hg':
1532 return self._get_general_setting(
1540 return self._get_general_setting(
1533 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1541 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1534 elif repo_type == 'git':
1542 elif repo_type == 'git':
1535 return self._get_general_setting(
1543 return self._get_general_setting(
1536 pull_request, 'rhodecode_git_use_rebase_for_merging')
1544 pull_request, 'rhodecode_git_use_rebase_for_merging')
1537
1545
1538 return False
1546 return False
1539
1547
1540 def _close_branch_before_merging(self, pull_request):
1548 def _close_branch_before_merging(self, pull_request):
1541 repo_type = pull_request.target_repo.repo_type
1549 repo_type = pull_request.target_repo.repo_type
1542 if repo_type == 'hg':
1550 if repo_type == 'hg':
1543 return self._get_general_setting(
1551 return self._get_general_setting(
1544 pull_request, 'rhodecode_hg_close_branch_before_merging')
1552 pull_request, 'rhodecode_hg_close_branch_before_merging')
1545 elif repo_type == 'git':
1553 elif repo_type == 'git':
1546 return self._get_general_setting(
1554 return self._get_general_setting(
1547 pull_request, 'rhodecode_git_close_branch_before_merging')
1555 pull_request, 'rhodecode_git_close_branch_before_merging')
1548
1556
1549 return False
1557 return False
1550
1558
1551 def _get_general_setting(self, pull_request, settings_key, default=False):
1559 def _get_general_setting(self, pull_request, settings_key, default=False):
1552 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1560 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1553 settings = settings_model.get_general_settings()
1561 settings = settings_model.get_general_settings()
1554 return settings.get(settings_key, default)
1562 return settings.get(settings_key, default)
1555
1563
1556 def _log_audit_action(self, action, action_data, user, pull_request):
1564 def _log_audit_action(self, action, action_data, user, pull_request):
1557 audit_logger.store(
1565 audit_logger.store(
1558 action=action,
1566 action=action,
1559 action_data=action_data,
1567 action_data=action_data,
1560 user=user,
1568 user=user,
1561 repo=pull_request.target_repo)
1569 repo=pull_request.target_repo)
1562
1570
1563 def get_reviewer_functions(self):
1571 def get_reviewer_functions(self):
1564 """
1572 """
1565 Fetches functions for validation and fetching default reviewers.
1573 Fetches functions for validation and fetching default reviewers.
1566 If available we use the EE package, else we fallback to CE
1574 If available we use the EE package, else we fallback to CE
1567 package functions
1575 package functions
1568 """
1576 """
1569 try:
1577 try:
1570 from rc_reviewers.utils import get_default_reviewers_data
1578 from rc_reviewers.utils import get_default_reviewers_data
1571 from rc_reviewers.utils import validate_default_reviewers
1579 from rc_reviewers.utils import validate_default_reviewers
1572 except ImportError:
1580 except ImportError:
1573 from rhodecode.apps.repository.utils import get_default_reviewers_data
1581 from rhodecode.apps.repository.utils import get_default_reviewers_data
1574 from rhodecode.apps.repository.utils import validate_default_reviewers
1582 from rhodecode.apps.repository.utils import validate_default_reviewers
1575
1583
1576 return get_default_reviewers_data, validate_default_reviewers
1584 return get_default_reviewers_data, validate_default_reviewers
1577
1585
1578
1586
1579 class MergeCheck(object):
1587 class MergeCheck(object):
1580 """
1588 """
1581 Perform Merge Checks and returns a check object which stores information
1589 Perform Merge Checks and returns a check object which stores information
1582 about merge errors, and merge conditions
1590 about merge errors, and merge conditions
1583 """
1591 """
1584 TODO_CHECK = 'todo'
1592 TODO_CHECK = 'todo'
1585 PERM_CHECK = 'perm'
1593 PERM_CHECK = 'perm'
1586 REVIEW_CHECK = 'review'
1594 REVIEW_CHECK = 'review'
1587 MERGE_CHECK = 'merge'
1595 MERGE_CHECK = 'merge'
1588
1596
1589 def __init__(self):
1597 def __init__(self):
1590 self.review_status = None
1598 self.review_status = None
1591 self.merge_possible = None
1599 self.merge_possible = None
1592 self.merge_msg = ''
1600 self.merge_msg = ''
1593 self.failed = None
1601 self.failed = None
1594 self.errors = []
1602 self.errors = []
1595 self.error_details = OrderedDict()
1603 self.error_details = OrderedDict()
1596
1604
1597 def push_error(self, error_type, message, error_key, details):
1605 def push_error(self, error_type, message, error_key, details):
1598 self.failed = True
1606 self.failed = True
1599 self.errors.append([error_type, message])
1607 self.errors.append([error_type, message])
1600 self.error_details[error_key] = dict(
1608 self.error_details[error_key] = dict(
1601 details=details,
1609 details=details,
1602 error_type=error_type,
1610 error_type=error_type,
1603 message=message
1611 message=message
1604 )
1612 )
1605
1613
1606 @classmethod
1614 @classmethod
1607 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1615 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1608 force_shadow_repo_refresh=False):
1616 force_shadow_repo_refresh=False):
1609 _ = translator
1617 _ = translator
1610 merge_check = cls()
1618 merge_check = cls()
1611
1619
1612 # permissions to merge
1620 # permissions to merge
1613 user_allowed_to_merge = PullRequestModel().check_user_merge(
1621 user_allowed_to_merge = PullRequestModel().check_user_merge(
1614 pull_request, auth_user)
1622 pull_request, auth_user)
1615 if not user_allowed_to_merge:
1623 if not user_allowed_to_merge:
1616 log.debug("MergeCheck: cannot merge, approval is pending.")
1624 log.debug("MergeCheck: cannot merge, approval is pending.")
1617
1625
1618 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1626 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1619 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1627 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1620 if fail_early:
1628 if fail_early:
1621 return merge_check
1629 return merge_check
1622
1630
1623 # permission to merge into the target branch
1631 # permission to merge into the target branch
1624 target_commit_id = pull_request.target_ref_parts.commit_id
1632 target_commit_id = pull_request.target_ref_parts.commit_id
1625 if pull_request.target_ref_parts.type == 'branch':
1633 if pull_request.target_ref_parts.type == 'branch':
1626 branch_name = pull_request.target_ref_parts.name
1634 branch_name = pull_request.target_ref_parts.name
1627 else:
1635 else:
1628 # for mercurial we can always figure out the branch from the commit
1636 # for mercurial we can always figure out the branch from the commit
1629 # in case of bookmark
1637 # in case of bookmark
1630 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1638 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1631 branch_name = target_commit.branch
1639 branch_name = target_commit.branch
1632
1640
1633 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1641 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1634 pull_request.target_repo.repo_name, branch_name)
1642 pull_request.target_repo.repo_name, branch_name)
1635 if branch_perm and branch_perm == 'branch.none':
1643 if branch_perm and branch_perm == 'branch.none':
1636 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1644 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1637 branch_name, rule)
1645 branch_name, rule)
1638 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1646 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1639 if fail_early:
1647 if fail_early:
1640 return merge_check
1648 return merge_check
1641
1649
1642 # review status, must be always present
1650 # review status, must be always present
1643 review_status = pull_request.calculated_review_status()
1651 review_status = pull_request.calculated_review_status()
1644 merge_check.review_status = review_status
1652 merge_check.review_status = review_status
1645
1653
1646 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1654 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1647 if not status_approved:
1655 if not status_approved:
1648 log.debug("MergeCheck: cannot merge, approval is pending.")
1656 log.debug("MergeCheck: cannot merge, approval is pending.")
1649
1657
1650 msg = _('Pull request reviewer approval is pending.')
1658 msg = _('Pull request reviewer approval is pending.')
1651
1659
1652 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1660 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1653
1661
1654 if fail_early:
1662 if fail_early:
1655 return merge_check
1663 return merge_check
1656
1664
1657 # left over TODOs
1665 # left over TODOs
1658 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1666 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1659 if todos:
1667 if todos:
1660 log.debug("MergeCheck: cannot merge, {} "
1668 log.debug("MergeCheck: cannot merge, {} "
1661 "unresolved TODOs left.".format(len(todos)))
1669 "unresolved TODOs left.".format(len(todos)))
1662
1670
1663 if len(todos) == 1:
1671 if len(todos) == 1:
1664 msg = _('Cannot merge, {} TODO still not resolved.').format(
1672 msg = _('Cannot merge, {} TODO still not resolved.').format(
1665 len(todos))
1673 len(todos))
1666 else:
1674 else:
1667 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1675 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1668 len(todos))
1676 len(todos))
1669
1677
1670 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1678 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1671
1679
1672 if fail_early:
1680 if fail_early:
1673 return merge_check
1681 return merge_check
1674
1682
1675 # merge possible, here is the filesystem simulation + shadow repo
1683 # merge possible, here is the filesystem simulation + shadow repo
1676 merge_status, msg = PullRequestModel().merge_status(
1684 merge_status, msg = PullRequestModel().merge_status(
1677 pull_request, translator=translator,
1685 pull_request, translator=translator,
1678 force_shadow_repo_refresh=force_shadow_repo_refresh)
1686 force_shadow_repo_refresh=force_shadow_repo_refresh)
1679 merge_check.merge_possible = merge_status
1687 merge_check.merge_possible = merge_status
1680 merge_check.merge_msg = msg
1688 merge_check.merge_msg = msg
1681 if not merge_status:
1689 if not merge_status:
1682 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1690 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1683 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1691 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1684
1692
1685 if fail_early:
1693 if fail_early:
1686 return merge_check
1694 return merge_check
1687
1695
1688 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1696 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1689 return merge_check
1697 return merge_check
1690
1698
1691 @classmethod
1699 @classmethod
1692 def get_merge_conditions(cls, pull_request, translator):
1700 def get_merge_conditions(cls, pull_request, translator):
1693 _ = translator
1701 _ = translator
1694 merge_details = {}
1702 merge_details = {}
1695
1703
1696 model = PullRequestModel()
1704 model = PullRequestModel()
1697 use_rebase = model._use_rebase_for_merging(pull_request)
1705 use_rebase = model._use_rebase_for_merging(pull_request)
1698
1706
1699 if use_rebase:
1707 if use_rebase:
1700 merge_details['merge_strategy'] = dict(
1708 merge_details['merge_strategy'] = dict(
1701 details={},
1709 details={},
1702 message=_('Merge strategy: rebase')
1710 message=_('Merge strategy: rebase')
1703 )
1711 )
1704 else:
1712 else:
1705 merge_details['merge_strategy'] = dict(
1713 merge_details['merge_strategy'] = dict(
1706 details={},
1714 details={},
1707 message=_('Merge strategy: explicit merge commit')
1715 message=_('Merge strategy: explicit merge commit')
1708 )
1716 )
1709
1717
1710 close_branch = model._close_branch_before_merging(pull_request)
1718 close_branch = model._close_branch_before_merging(pull_request)
1711 if close_branch:
1719 if close_branch:
1712 repo_type = pull_request.target_repo.repo_type
1720 repo_type = pull_request.target_repo.repo_type
1713 close_msg = ''
1721 close_msg = ''
1714 if repo_type == 'hg':
1722 if repo_type == 'hg':
1715 close_msg = _('Source branch will be closed after merge.')
1723 close_msg = _('Source branch will be closed after merge.')
1716 elif repo_type == 'git':
1724 elif repo_type == 'git':
1717 close_msg = _('Source branch will be deleted after merge.')
1725 close_msg = _('Source branch will be deleted after merge.')
1718
1726
1719 merge_details['close_branch'] = dict(
1727 merge_details['close_branch'] = dict(
1720 details={},
1728 details={},
1721 message=close_msg
1729 message=close_msg
1722 )
1730 )
1723
1731
1724 return merge_details
1732 return merge_details
1725
1733
1726
1734
1727 ChangeTuple = collections.namedtuple(
1735 ChangeTuple = collections.namedtuple(
1728 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1736 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1729
1737
1730 FileChangeTuple = collections.namedtuple(
1738 FileChangeTuple = collections.namedtuple(
1731 'FileChangeTuple', ['added', 'modified', 'removed'])
1739 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,949 +1,949 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 merge_resp = MergeResponse(
53 merge_resp = MergeResponse(
54 False, False, None, MergeFailureReason.UNKNOWN,
54 False, False, None, MergeFailureReason.UNKNOWN,
55 metadata={'exception': 'MockError'})
55 metadata={'exception': 'MockError'})
56 self.merge_patcher = mock.patch.object(
56 self.merge_patcher = mock.patch.object(
57 BackendClass, 'merge', return_value=merge_resp)
57 BackendClass, 'merge', return_value=merge_resp)
58 self.workspace_remove_patcher = mock.patch.object(
58 self.workspace_remove_patcher = mock.patch.object(
59 BackendClass, 'cleanup_merge_workspace')
59 BackendClass, 'cleanup_merge_workspace')
60
60
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
63 self.comment_patcher = mock.patch(
63 self.comment_patcher = mock.patch(
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 self.comment_patcher.start()
65 self.comment_patcher.start()
66 self.notification_patcher = mock.patch(
66 self.notification_patcher = mock.patch(
67 'rhodecode.model.notification.NotificationModel.create')
67 'rhodecode.model.notification.NotificationModel.create')
68 self.notification_patcher.start()
68 self.notification_patcher.start()
69 self.helper_patcher = mock.patch(
69 self.helper_patcher = mock.patch(
70 'rhodecode.lib.helpers.route_path')
70 'rhodecode.lib.helpers.route_path')
71 self.helper_patcher.start()
71 self.helper_patcher.start()
72
72
73 self.hook_patcher = mock.patch.object(PullRequestModel,
73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 'trigger_pull_request_hook')
74 'trigger_pull_request_hook')
75 self.hook_mock = self.hook_patcher.start()
75 self.hook_mock = self.hook_patcher.start()
76
76
77 self.invalidation_patcher = mock.patch(
77 self.invalidation_patcher = mock.patch(
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 self.invalidation_mock = self.invalidation_patcher.start()
79 self.invalidation_mock = self.invalidation_patcher.start()
80
80
81 self.pull_request = pr_util.create_pull_request(
81 self.pull_request = pr_util.create_pull_request(
82 mergeable=True, name_suffix=u'Δ…Δ‡')
82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 self.repo_id = self.pull_request.target_repo.repo_id
86 self.repo_id = self.pull_request.target_repo.repo_id
87
87
88 @request.addfinalizer
88 @request.addfinalizer
89 def cleanup_pull_request():
89 def cleanup_pull_request():
90 calls = [mock.call(
90 calls = [mock.call(
91 self.pull_request, self.pull_request.author, 'create')]
91 self.pull_request, self.pull_request.author, 'create')]
92 self.hook_mock.assert_has_calls(calls)
92 self.hook_mock.assert_has_calls(calls)
93
93
94 self.workspace_remove_patcher.stop()
94 self.workspace_remove_patcher.stop()
95 self.merge_patcher.stop()
95 self.merge_patcher.stop()
96 self.comment_patcher.stop()
96 self.comment_patcher.stop()
97 self.notification_patcher.stop()
97 self.notification_patcher.stop()
98 self.helper_patcher.stop()
98 self.helper_patcher.stop()
99 self.hook_patcher.stop()
99 self.hook_patcher.stop()
100 self.invalidation_patcher.stop()
100 self.invalidation_patcher.stop()
101
101
102 return self.pull_request
102 return self.pull_request
103
103
104 def test_get_all(self, pull_request):
104 def test_get_all(self, pull_request):
105 prs = PullRequestModel().get_all(pull_request.target_repo)
105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 assert isinstance(prs, list)
106 assert isinstance(prs, list)
107 assert len(prs) == 1
107 assert len(prs) == 1
108
108
109 def test_count_all(self, pull_request):
109 def test_count_all(self, pull_request):
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 assert pr_count == 1
111 assert pr_count == 1
112
112
113 def test_get_awaiting_review(self, pull_request):
113 def test_get_awaiting_review(self, pull_request):
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 assert isinstance(prs, list)
115 assert isinstance(prs, list)
116 assert len(prs) == 1
116 assert len(prs) == 1
117
117
118 def test_count_awaiting_review(self, pull_request):
118 def test_count_awaiting_review(self, pull_request):
119 pr_count = PullRequestModel().count_awaiting_review(
119 pr_count = PullRequestModel().count_awaiting_review(
120 pull_request.target_repo)
120 pull_request.target_repo)
121 assert pr_count == 1
121 assert pr_count == 1
122
122
123 def test_get_awaiting_my_review(self, pull_request):
123 def test_get_awaiting_my_review(self, pull_request):
124 PullRequestModel().update_reviewers(
124 PullRequestModel().update_reviewers(
125 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, [])],
126 pull_request.author)
126 pull_request.author)
127 prs = PullRequestModel().get_awaiting_my_review(
127 prs = PullRequestModel().get_awaiting_my_review(
128 pull_request.target_repo, user_id=pull_request.author.user_id)
128 pull_request.target_repo, user_id=pull_request.author.user_id)
129 assert isinstance(prs, list)
129 assert isinstance(prs, list)
130 assert len(prs) == 1
130 assert len(prs) == 1
131
131
132 def test_count_awaiting_my_review(self, pull_request):
132 def test_count_awaiting_my_review(self, pull_request):
133 PullRequestModel().update_reviewers(
133 PullRequestModel().update_reviewers(
134 pull_request, [(pull_request.author, ['author'], False, [])],
134 pull_request, [(pull_request.author, ['author'], False, [])],
135 pull_request.author)
135 pull_request.author)
136 pr_count = PullRequestModel().count_awaiting_my_review(
136 pr_count = PullRequestModel().count_awaiting_my_review(
137 pull_request.target_repo, user_id=pull_request.author.user_id)
137 pull_request.target_repo, user_id=pull_request.author.user_id)
138 assert pr_count == 1
138 assert pr_count == 1
139
139
140 def test_delete_calls_cleanup_merge(self, pull_request):
140 def test_delete_calls_cleanup_merge(self, pull_request):
141 repo_id = pull_request.target_repo.repo_id
141 repo_id = pull_request.target_repo.repo_id
142 PullRequestModel().delete(pull_request, pull_request.author)
142 PullRequestModel().delete(pull_request, pull_request.author)
143
143
144 self.workspace_remove_mock.assert_called_once_with(
144 self.workspace_remove_mock.assert_called_once_with(
145 repo_id, self.workspace_id)
145 repo_id, self.workspace_id)
146
146
147 def test_close_calls_cleanup_and_hook(self, pull_request):
147 def test_close_calls_cleanup_and_hook(self, pull_request):
148 PullRequestModel().close_pull_request(
148 PullRequestModel().close_pull_request(
149 pull_request, pull_request.author)
149 pull_request, pull_request.author)
150 repo_id = pull_request.target_repo.repo_id
150 repo_id = pull_request.target_repo.repo_id
151
151
152 self.workspace_remove_mock.assert_called_once_with(
152 self.workspace_remove_mock.assert_called_once_with(
153 repo_id, self.workspace_id)
153 repo_id, self.workspace_id)
154 self.hook_mock.assert_called_with(
154 self.hook_mock.assert_called_with(
155 self.pull_request, self.pull_request.author, 'close')
155 self.pull_request, self.pull_request.author, 'close')
156
156
157 def test_merge_status(self, pull_request):
157 def test_merge_status(self, pull_request):
158 self.merge_mock.return_value = MergeResponse(
158 self.merge_mock.return_value = MergeResponse(
159 True, False, None, MergeFailureReason.NONE)
159 True, False, None, MergeFailureReason.NONE)
160
160
161 assert pull_request._last_merge_source_rev is None
161 assert pull_request._last_merge_source_rev is None
162 assert pull_request._last_merge_target_rev is None
162 assert pull_request._last_merge_target_rev is None
163 assert pull_request.last_merge_status is None
163 assert pull_request.last_merge_status is None
164
164
165 status, msg = PullRequestModel().merge_status(pull_request)
165 status, msg = PullRequestModel().merge_status(pull_request)
166 assert status is True
166 assert status is True
167 assert msg == 'This pull request can be automatically merged.'
167 assert msg == 'This pull request can be automatically merged.'
168 self.merge_mock.assert_called_with(
168 self.merge_mock.assert_called_with(
169 self.repo_id, self.workspace_id,
169 self.repo_id, self.workspace_id,
170 pull_request.target_ref_parts,
170 pull_request.target_ref_parts,
171 pull_request.source_repo.scm_instance(),
171 pull_request.source_repo.scm_instance(),
172 pull_request.source_ref_parts, dry_run=True,
172 pull_request.source_ref_parts, dry_run=True,
173 use_rebase=False, close_branch=False)
173 use_rebase=False, close_branch=False)
174
174
175 assert pull_request._last_merge_source_rev == self.source_commit
175 assert pull_request._last_merge_source_rev == self.source_commit
176 assert pull_request._last_merge_target_rev == self.target_commit
176 assert pull_request._last_merge_target_rev == self.target_commit
177 assert pull_request.last_merge_status is MergeFailureReason.NONE
177 assert pull_request.last_merge_status is MergeFailureReason.NONE
178
178
179 self.merge_mock.reset_mock()
179 self.merge_mock.reset_mock()
180 status, msg = PullRequestModel().merge_status(pull_request)
180 status, msg = PullRequestModel().merge_status(pull_request)
181 assert status is True
181 assert status is True
182 assert msg == 'This pull request can be automatically merged.'
182 assert msg == 'This pull request can be automatically merged.'
183 assert self.merge_mock.called is False
183 assert self.merge_mock.called is False
184
184
185 def test_merge_status_known_failure(self, pull_request):
185 def test_merge_status_known_failure(self, pull_request):
186 self.merge_mock.return_value = MergeResponse(
186 self.merge_mock.return_value = MergeResponse(
187 False, False, None, MergeFailureReason.MERGE_FAILED)
187 False, False, None, MergeFailureReason.MERGE_FAILED)
188
188
189 assert pull_request._last_merge_source_rev is None
189 assert pull_request._last_merge_source_rev is None
190 assert pull_request._last_merge_target_rev is None
190 assert pull_request._last_merge_target_rev is None
191 assert pull_request.last_merge_status is None
191 assert pull_request.last_merge_status is None
192
192
193 status, msg = PullRequestModel().merge_status(pull_request)
193 status, msg = PullRequestModel().merge_status(pull_request)
194 assert status is False
194 assert status is False
195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
196 self.merge_mock.assert_called_with(
196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
197 self.repo_id, self.workspace_id,
198 pull_request.target_ref_parts,
198 pull_request.target_ref_parts,
199 pull_request.source_repo.scm_instance(),
199 pull_request.source_repo.scm_instance(),
200 pull_request.source_ref_parts, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
201 use_rebase=False, close_branch=False)
201 use_rebase=False, close_branch=False)
202
202
203 assert pull_request._last_merge_source_rev == self.source_commit
203 assert pull_request._last_merge_source_rev == self.source_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
205 assert (
205 assert (
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207
207
208 self.merge_mock.reset_mock()
208 self.merge_mock.reset_mock()
209 status, msg = PullRequestModel().merge_status(pull_request)
209 status, msg = PullRequestModel().merge_status(pull_request)
210 assert status is False
210 assert status is False
211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
212 assert self.merge_mock.called is False
212 assert self.merge_mock.called is False
213
213
214 def test_merge_status_unknown_failure(self, pull_request):
214 def test_merge_status_unknown_failure(self, pull_request):
215 self.merge_mock.return_value = MergeResponse(
215 self.merge_mock.return_value = MergeResponse(
216 False, False, None, MergeFailureReason.UNKNOWN,
216 False, False, None, MergeFailureReason.UNKNOWN,
217 metadata={'exception': 'MockError'})
217 metadata={'exception': 'MockError'})
218
218
219 assert pull_request._last_merge_source_rev is None
219 assert pull_request._last_merge_source_rev is None
220 assert pull_request._last_merge_target_rev is None
220 assert pull_request._last_merge_target_rev is None
221 assert pull_request.last_merge_status is None
221 assert pull_request.last_merge_status is None
222
222
223 status, msg = PullRequestModel().merge_status(pull_request)
223 status, msg = PullRequestModel().merge_status(pull_request)
224 assert status is False
224 assert status is False
225 assert msg == (
225 assert msg == (
226 'This pull request cannot be merged because of an unhandled exception. '
226 'This pull request cannot be merged because of an unhandled exception. '
227 'MockError')
227 'MockError')
228 self.merge_mock.assert_called_with(
228 self.merge_mock.assert_called_with(
229 self.repo_id, self.workspace_id,
229 self.repo_id, self.workspace_id,
230 pull_request.target_ref_parts,
230 pull_request.target_ref_parts,
231 pull_request.source_repo.scm_instance(),
231 pull_request.source_repo.scm_instance(),
232 pull_request.source_ref_parts, dry_run=True,
232 pull_request.source_ref_parts, dry_run=True,
233 use_rebase=False, close_branch=False)
233 use_rebase=False, close_branch=False)
234
234
235 assert pull_request._last_merge_source_rev is None
235 assert pull_request._last_merge_source_rev is None
236 assert pull_request._last_merge_target_rev is None
236 assert pull_request._last_merge_target_rev is None
237 assert pull_request.last_merge_status is None
237 assert pull_request.last_merge_status is None
238
238
239 self.merge_mock.reset_mock()
239 self.merge_mock.reset_mock()
240 status, msg = PullRequestModel().merge_status(pull_request)
240 status, msg = PullRequestModel().merge_status(pull_request)
241 assert status is False
241 assert status is False
242 assert msg == (
242 assert msg == (
243 'This pull request cannot be merged because of an unhandled exception. '
243 'This pull request cannot be merged because of an unhandled exception. '
244 'MockError')
244 'MockError')
245 assert self.merge_mock.called is True
245 assert self.merge_mock.called is True
246
246
247 def test_merge_status_when_target_is_locked(self, pull_request):
247 def test_merge_status_when_target_is_locked(self, pull_request):
248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 status, msg = PullRequestModel().merge_status(pull_request)
249 status, msg = PullRequestModel().merge_status(pull_request)
250 assert status is False
250 assert status is False
251 assert msg == (
251 assert msg == (
252 'This pull request cannot be merged because the target repository '
252 'This pull request cannot be merged because the target repository '
253 'is locked by user:1.')
253 'is locked by user:1.')
254
254
255 def test_merge_status_requirements_check_target(self, pull_request):
255 def test_merge_status_requirements_check_target(self, pull_request):
256
256
257 def has_largefiles(self, repo):
257 def has_largefiles(self, repo):
258 return repo == pull_request.source_repo
258 return repo == pull_request.source_repo
259
259
260 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
260 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
261 with patcher:
261 with patcher:
262 status, msg = PullRequestModel().merge_status(pull_request)
262 status, msg = PullRequestModel().merge_status(pull_request)
263
263
264 assert status is False
264 assert status is False
265 assert msg == 'Target repository large files support is disabled.'
265 assert msg == 'Target repository large files support is disabled.'
266
266
267 def test_merge_status_requirements_check_source(self, pull_request):
267 def test_merge_status_requirements_check_source(self, pull_request):
268
268
269 def has_largefiles(self, repo):
269 def has_largefiles(self, repo):
270 return repo == pull_request.target_repo
270 return repo == pull_request.target_repo
271
271
272 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
272 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
273 with patcher:
273 with patcher:
274 status, msg = PullRequestModel().merge_status(pull_request)
274 status, msg = PullRequestModel().merge_status(pull_request)
275
275
276 assert status is False
276 assert status is False
277 assert msg == 'Source repository large files support is disabled.'
277 assert msg == 'Source repository large files support is disabled.'
278
278
279 def test_merge(self, pull_request, merge_extras):
279 def test_merge(self, pull_request, merge_extras):
280 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
280 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
281 merge_ref = Reference(
281 merge_ref = Reference(
282 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
282 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
283 self.merge_mock.return_value = MergeResponse(
283 self.merge_mock.return_value = MergeResponse(
284 True, True, merge_ref, MergeFailureReason.NONE)
284 True, True, merge_ref, MergeFailureReason.NONE)
285
285
286 merge_extras['repository'] = pull_request.target_repo.repo_name
286 merge_extras['repository'] = pull_request.target_repo.repo_name
287 PullRequestModel().merge_repo(
287 PullRequestModel().merge_repo(
288 pull_request, pull_request.author, extras=merge_extras)
288 pull_request, pull_request.author, extras=merge_extras)
289
289
290 message = (
290 message = (
291 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
291 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
292 u'\n\n {pr_title}'.format(
292 u'\n\n {pr_title}'.format(
293 pr_id=pull_request.pull_request_id,
293 pr_id=pull_request.pull_request_id,
294 source_repo=safe_unicode(
294 source_repo=safe_unicode(
295 pull_request.source_repo.scm_instance().name),
295 pull_request.source_repo.scm_instance().name),
296 source_ref_name=pull_request.source_ref_parts.name,
296 source_ref_name=pull_request.source_ref_parts.name,
297 pr_title=safe_unicode(pull_request.title)
297 pr_title=safe_unicode(pull_request.title)
298 )
298 )
299 )
299 )
300 self.merge_mock.assert_called_with(
300 self.merge_mock.assert_called_with(
301 self.repo_id, self.workspace_id,
301 self.repo_id, self.workspace_id,
302 pull_request.target_ref_parts,
302 pull_request.target_ref_parts,
303 pull_request.source_repo.scm_instance(),
303 pull_request.source_repo.scm_instance(),
304 pull_request.source_ref_parts,
304 pull_request.source_ref_parts,
305 user_name=user.short_contact, user_email=user.email, message=message,
305 user_name=user.short_contact, user_email=user.email, message=message,
306 use_rebase=False, close_branch=False
306 use_rebase=False, close_branch=False
307 )
307 )
308 self.invalidation_mock.assert_called_once_with(
308 self.invalidation_mock.assert_called_once_with(
309 pull_request.target_repo.repo_name)
309 pull_request.target_repo.repo_name)
310
310
311 self.hook_mock.assert_called_with(
311 self.hook_mock.assert_called_with(
312 self.pull_request, self.pull_request.author, 'merge')
312 self.pull_request, self.pull_request.author, 'merge')
313
313
314 pull_request = PullRequest.get(pull_request.pull_request_id)
314 pull_request = PullRequest.get(pull_request.pull_request_id)
315 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
315 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
316
316
317 def test_merge_with_status_lock(self, pull_request, merge_extras):
317 def test_merge_with_status_lock(self, pull_request, merge_extras):
318 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
318 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
319 merge_ref = Reference(
319 merge_ref = Reference(
320 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
320 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 self.merge_mock.return_value = MergeResponse(
321 self.merge_mock.return_value = MergeResponse(
322 True, True, merge_ref, MergeFailureReason.NONE)
322 True, True, merge_ref, MergeFailureReason.NONE)
323
323
324 merge_extras['repository'] = pull_request.target_repo.repo_name
324 merge_extras['repository'] = pull_request.target_repo.repo_name
325
325
326 with pull_request.set_state(PullRequest.STATE_UPDATING):
326 with pull_request.set_state(PullRequest.STATE_UPDATING):
327 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
327 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
328 PullRequestModel().merge_repo(
328 PullRequestModel().merge_repo(
329 pull_request, pull_request.author, extras=merge_extras)
329 pull_request, pull_request.author, extras=merge_extras)
330
330
331 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
331 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
332
332
333 message = (
333 message = (
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 u'\n\n {pr_title}'.format(
335 u'\n\n {pr_title}'.format(
336 pr_id=pull_request.pull_request_id,
336 pr_id=pull_request.pull_request_id,
337 source_repo=safe_unicode(
337 source_repo=safe_unicode(
338 pull_request.source_repo.scm_instance().name),
338 pull_request.source_repo.scm_instance().name),
339 source_ref_name=pull_request.source_ref_parts.name,
339 source_ref_name=pull_request.source_ref_parts.name,
340 pr_title=safe_unicode(pull_request.title)
340 pr_title=safe_unicode(pull_request.title)
341 )
341 )
342 )
342 )
343 self.merge_mock.assert_called_with(
343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
344 self.repo_id, self.workspace_id,
345 pull_request.target_ref_parts,
345 pull_request.target_ref_parts,
346 pull_request.source_repo.scm_instance(),
346 pull_request.source_repo.scm_instance(),
347 pull_request.source_ref_parts,
347 pull_request.source_ref_parts,
348 user_name=user.short_contact, user_email=user.email, message=message,
348 user_name=user.short_contact, user_email=user.email, message=message,
349 use_rebase=False, close_branch=False
349 use_rebase=False, close_branch=False
350 )
350 )
351 self.invalidation_mock.assert_called_once_with(
351 self.invalidation_mock.assert_called_once_with(
352 pull_request.target_repo.repo_name)
352 pull_request.target_repo.repo_name)
353
353
354 self.hook_mock.assert_called_with(
354 self.hook_mock.assert_called_with(
355 self.pull_request, self.pull_request.author, 'merge')
355 self.pull_request, self.pull_request.author, 'merge')
356
356
357 pull_request = PullRequest.get(pull_request.pull_request_id)
357 pull_request = PullRequest.get(pull_request.pull_request_id)
358 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
358 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
359
359
360 def test_merge_failed(self, pull_request, merge_extras):
360 def test_merge_failed(self, pull_request, merge_extras):
361 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
361 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
362 merge_ref = Reference(
362 merge_ref = Reference(
363 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
363 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
364 self.merge_mock.return_value = MergeResponse(
364 self.merge_mock.return_value = MergeResponse(
365 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
365 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
366
366
367 merge_extras['repository'] = pull_request.target_repo.repo_name
367 merge_extras['repository'] = pull_request.target_repo.repo_name
368 PullRequestModel().merge_repo(
368 PullRequestModel().merge_repo(
369 pull_request, pull_request.author, extras=merge_extras)
369 pull_request, pull_request.author, extras=merge_extras)
370
370
371 message = (
371 message = (
372 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
372 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
373 u'\n\n {pr_title}'.format(
373 u'\n\n {pr_title}'.format(
374 pr_id=pull_request.pull_request_id,
374 pr_id=pull_request.pull_request_id,
375 source_repo=safe_unicode(
375 source_repo=safe_unicode(
376 pull_request.source_repo.scm_instance().name),
376 pull_request.source_repo.scm_instance().name),
377 source_ref_name=pull_request.source_ref_parts.name,
377 source_ref_name=pull_request.source_ref_parts.name,
378 pr_title=safe_unicode(pull_request.title)
378 pr_title=safe_unicode(pull_request.title)
379 )
379 )
380 )
380 )
381 self.merge_mock.assert_called_with(
381 self.merge_mock.assert_called_with(
382 self.repo_id, self.workspace_id,
382 self.repo_id, self.workspace_id,
383 pull_request.target_ref_parts,
383 pull_request.target_ref_parts,
384 pull_request.source_repo.scm_instance(),
384 pull_request.source_repo.scm_instance(),
385 pull_request.source_ref_parts,
385 pull_request.source_ref_parts,
386 user_name=user.short_contact, user_email=user.email, message=message,
386 user_name=user.short_contact, user_email=user.email, message=message,
387 use_rebase=False, close_branch=False
387 use_rebase=False, close_branch=False
388 )
388 )
389
389
390 pull_request = PullRequest.get(pull_request.pull_request_id)
390 pull_request = PullRequest.get(pull_request.pull_request_id)
391 assert self.invalidation_mock.called is False
391 assert self.invalidation_mock.called is False
392 assert pull_request.merge_rev is None
392 assert pull_request.merge_rev is None
393
393
    def test_get_commit_ids(self, pull_request):
        """_get_commit_ids returns the PR revisions, plus the merge commit
        when it is not already part of them."""
        # The PR has not been merged yet, so expect an exception
        with pytest.raises(ValueError):
            PullRequestModel()._get_commit_ids(pull_request)

        # Merge revision is in the revisions list
        pull_request.merge_rev = pull_request.revisions[0]
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions

        # Merge revision is not in the revisions list
        pull_request.merge_rev = 'f000' * 10
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
408
408
409 def test_get_diff_from_pr_version(self, pull_request):
409 def test_get_diff_from_pr_version(self, pull_request):
410 source_repo = pull_request.source_repo
410 source_repo = pull_request.source_repo
411 source_ref_id = pull_request.source_ref_parts.commit_id
411 source_ref_id = pull_request.source_ref_parts.commit_id
412 target_ref_id = pull_request.target_ref_parts.commit_id
412 target_ref_id = pull_request.target_ref_parts.commit_id
413 diff = PullRequestModel()._get_diff_from_pr_or_version(
413 diff = PullRequestModel()._get_diff_from_pr_or_version(
414 source_repo, source_ref_id, target_ref_id,
414 source_repo, source_ref_id, target_ref_id,
415 hide_whitespace_changes=False, diff_context=6)
415 hide_whitespace_changes=False, diff_context=6)
416 assert 'file_1' in diff.raw
416 assert 'file_1' in diff.raw
417
417
418 def test_generate_title_returns_unicode(self):
418 def test_generate_title_returns_unicode(self):
419 title = PullRequestModel().generate_pullrequest_title(
419 title = PullRequestModel().generate_pullrequest_title(
420 source='source-dummy',
420 source='source-dummy',
421 source_ref='source-ref-dummy',
421 source_ref='source-ref-dummy',
422 target='target-dummy',
422 target='target-dummy',
423 )
423 )
424 assert type(title) == unicode
424 assert type(title) == unicode
425
425
426
426
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    """Integration tests: merging a PR runs (and respects) the push hooks."""

    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):
        """A successful merge fires the pre-push and push rcextension hooks."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        """A pre-push hook raising RepositoryError aborts the merge and no
        push hooks are recorded."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        """Merging into a repository locked by another user must not execute."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # lock is held by a different user id than the one merging
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        assert not merge_status.executed
483
483
484
484
@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    """An inline comment outside the updated diff is either kept visible or
    flagged outdated, depending on the use_outdated setting."""
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
        outdated_comment_mock.assert_called_with(pull_request)
499
499
500
500
@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts.'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),

])
def test_merge_response_message(mr_type, expected_msg):
    """Each MergeFailureReason renders its human-readable status message
    from the metadata supplied on the MergeResponse."""
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    # metadata carries every placeholder any of the messages may interpolate
    metadata = {
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'}

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg
539
539
540
540
@pytest.fixture
def merge_extras(user_regular):
    """
    Context for the vcs operation when running a merge.
    """
    # Returned directly as a dict literal; callers patch individual keys
    # (e.g. 'repository', 'locked_by') before use.
    return dict(
        ip='127.0.0.1',
        username=user_regular.username,
        user_id=user_regular.user_id,
        action='push',
        repository='fake_target_repo_name',
        scm='git',
        config='fake_config_ini_path',
        repo_store='',
        make_lock=None,
        locked_by=[None, None, None],
        server_url='http://test.example.com:5000',
        hooks=['push', 'pull'],
        is_shadow_repo=False,
    )
562
562
563
563
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """Tests for how inline comments are kept, shifted, or flagged as
    outdated when the pull request is updated with new commits."""

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        # Force the outdated-comments feature on for every test in this
        # class; the patch is removed once the class finishes.
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """A comment on a file untouched by the update stays visible."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """A change above the commented line keeps the comment visible but
        shifts its line number down accordingly."""
        original_content = ''.join(
            ['line {}\n'.format(x) for x in range(1, 11)])
        updated_content = 'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        # one line was inserted at the top, so the comment moved down by one
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A change below the commented line keeps the comment visible."""
        original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
        updated_content = original_content + 'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change within the comment's diff context flags it outdated,
        for comments on new ('n') as well as old ('o') lines."""
        base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = 'line 1 changed\n'
        update_lines[-1] = 'line 12 changed\n'

        def file_b(lines):
            # helper: build the test file node from a list of lines
            return FileNode('file_b', ''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file flags the comment."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)
670
670
671
671
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """Tests for the added/modified/removed file summary recorded when a
    pull request is updated with new commits."""

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """Adding a new file in the update is reported as 'added' only."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """A modification that is later reverted ends up reported as no
        change at all."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rollbacks change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """Every file touched by the update is reported as modified."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """Every file removed by the update is reported as removed."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
780
780
781
781
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating a PR's commits records a snapshot as a new version."""
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    model = PullRequestModel()

    model.update_commits(pull_request)

    # Expect that it has a version entry now
    assert len(model.get_versions(pull_request)) == 1
791
791
792
792
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """No version snapshot is written when the source did not change."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()

    model.update_commits(pull_request)

    # The source repository was never updated, so no version entry exists.
    assert not model.get_versions(pull_request)
800
800
801
801
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments get attached to the version created by an update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # Expect that the comment is linked to the pr version now
    assert comment.pull_request_version == model.get_versions(pull_request)[0]
812
812
813
813
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR posts a status-change comment summarizing the update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # Expect to find a new comment about the change
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c--92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    )
    # the update comment is the most recently modified one
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message
844
844
845
845
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A snapshot version must copy every attribute of the pull request."""
    pull_request = pr_util.create_pull_request()

    # Move every field away from its default so the copy is observable.
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember the automatically maintained timestamps.
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Snapshot the pull request into a new version.
    pr_version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Plain attributes are copied verbatim.
    params = pr_util.create_parameters
    assert pr_version.title == params['title']
    assert pr_version.description == params['description']
    assert pr_version.status == PullRequest.STATUS_CLOSED

    # Versions get a fresh created_on timestamp ...
    assert pr_version.created_on != created_on

    # ... but keep the updated_on of the snapshotted pull request.
    assert pr_version.updated_on == updated_on
    assert pr_version.user_id == pull_request.user_id
    assert pr_version.revisions == params['revisions']
    assert pr_version.source_repo == pr_util.source_repository
    assert pr_version.source_ref == params['source_ref']
    assert pr_version.target_repo == pr_util.target_repository
    assert pr_version.target_ref == params['target_ref']
    assert pr_version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert pr_version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert pr_version.last_merge_status == pull_request.last_merge_status
    assert pr_version.merge_rev == pull_request.merge_rev
    assert pr_version.pull_request == pull_request
883
883
884
884
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking comments to a version must leave already-linked ones alone."""
    first_version = pr_util.create_version_of_pull_request()
    linked_comment = pr_util.create_comment(linked_to=first_version)
    unlinked_comment = pr_util.create_comment()
    second_version = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(second_version)

    # Only the previously unlinked comment moves to the new version ...
    assert unlinked_comment.pull_request_version_id == second_version.pull_request_version_id
    # ... while the one linked to the first version keeps its association.
    assert linked_comment.pull_request_version_id == first_version.pull_request_version_id
    assert unlinked_comment.pull_request_version_id != linked_comment.pull_request_version_id
903
903
904
904
def test_calculate_commits():
    """_calculate_commit_id_changes classifies ids as added/common/removed."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)

    assert change.added == [4, 5]        # only in the new set
    assert change.common == [1, 3]       # present in both sets
    assert change.removed == [2]         # dropped from the old set
    assert change.total == [1, 3, 4, 5]  # everything currently referenced
913
913
914
914
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Check the visible/outdated inline comment counts of a pull request.

    Either check is skipped when its argument is left as ``None``.
    """
    comments_model = CommentsModel()
    if visible is not None:
        inline = comments_model.get_inline_comments(
            pull_request.target_repo.repo_id, pull_request=pull_request)
        assert comments_model.get_inline_comments_count(inline) == visible
    if outdated is not None:
        outdated_found = comments_model.get_outdated_comments(
            pull_request.target_repo.repo_id, pull_request)
        assert len(outdated_found) == outdated
926
926
927
927
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Check the file changes computed between a PR and its first version."""
    model = PullRequestModel()
    versions = model.get_versions(pull_request)
    # Always diff against the first version, i.e. the original pull request.
    first_version = versions[0]
    old_diff, new_diff = model._generate_update_diffs(
        pull_request, first_version)
    changes = model._calculate_file_changes(old_diff, new_diff)

    assert added == changes.added, \
        'expected added:%s vs value:%s' % (added, changes.added)
    assert modified == changes.modified, \
        'expected modified:%s vs value:%s' % (modified, changes.modified)
    assert removed == changes.removed, \
        'expected removed:%s vs value:%s' % (removed, changes.removed)
944
944
945
945
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher forcing ``CommentsModel.use_outdated_comments``."""
    patcher = mock.patch.object(
        CommentsModel, 'use_outdated_comments', return_value=use_outdated)
    return patcher
General Comments 0
You need to be logged in to leave comments. Login now