##// END OF EJS Templates
pull-requests: fix the way a pull request calculates common ancestors.
marcink -
r4346:4dcd6440 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,47 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 from sqlalchemy import *
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8 from sqlalchemy import BigInteger
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import init_model_encryption
12
13
14 log = logging.getLogger(__name__)
15
16
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata

    Adds a nullable ``common_ancestor_id`` (Unicode(255)) column to both
    the pull-request table and the pull-request-version table.
    """
    _reset_base(migrate_engine)
    # Imported lazily so the frozen 4.19.0.0 schema snapshot is used,
    # not the live model definitions.
    from rhodecode.lib.dbmigrate.schema import db_4_19_0_0 as db

    init_model_encryption(db)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    # The identical column is added to both tables, so iterate instead of
    # duplicating the stanza.  batch_alter_table performs the ALTER via
    # Alembic's batch mode (table-copy strategy where needed, e.g. SQLite).
    for table in (db.PullRequest.__table__, db.PullRequestVersion.__table__):
        with op.batch_alter_table(table.name) as batch_op:
            new_column = Column('common_ancestor_id', Unicode(255), nullable=True)
            batch_op.add_column(new_column)
39
40
def downgrade(migrate_engine):
    """Downgrade: intentionally a no-op beyond binding metadata to the engine.

    The added column is left in place; only a ``MetaData`` object bound to
    *migrate_engine* is created, matching the project's migration template.
    """
    metadata = MetaData()
    metadata.bind = migrate_engine
45
def fixups(models, _SESSION):
    """Post-migration data fixups; nothing is required for this migration."""
    pass
@@ -1,60 +1,60 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 from collections import OrderedDict
22 from collections import OrderedDict
23
23
24 import sys
24 import sys
25 import platform
25 import platform
26
26
27 VERSION = tuple(open(os.path.join(
27 VERSION = tuple(open(os.path.join(
28 os.path.dirname(__file__), 'VERSION')).read().split('.'))
28 os.path.dirname(__file__), 'VERSION')).read().split('.'))
29
29
30 BACKENDS = OrderedDict()
30 BACKENDS = OrderedDict()
31
31
32 BACKENDS['hg'] = 'Mercurial repository'
32 BACKENDS['hg'] = 'Mercurial repository'
33 BACKENDS['git'] = 'Git repository'
33 BACKENDS['git'] = 'Git repository'
34 BACKENDS['svn'] = 'Subversion repository'
34 BACKENDS['svn'] = 'Subversion repository'
35
35
36
36
37 CELERY_ENABLED = False
37 CELERY_ENABLED = False
38 CELERY_EAGER = False
38 CELERY_EAGER = False
39
39
40 # link to config for pyramid
40 # link to config for pyramid
41 CONFIG = {}
41 CONFIG = {}
42
42
43 # Populated with the settings dictionary from application init in
43 # Populated with the settings dictionary from application init in
44 # rhodecode.conf.environment.load_pyramid_environment
44 # rhodecode.conf.environment.load_pyramid_environment
45 PYRAMID_SETTINGS = {}
45 PYRAMID_SETTINGS = {}
46
46
47 # Linked module for extensions
47 # Linked module for extensions
48 EXTENSIONS = {}
48 EXTENSIONS = {}
49
49
50 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
50 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
51 __dbversion__ = 106 # defines current db version for migrations
51 __dbversion__ = 107 # defines current db version for migrations
52 __platform__ = platform.system()
52 __platform__ = platform.system()
53 __license__ = 'AGPLv3, and Commercial License'
53 __license__ = 'AGPLv3, and Commercial License'
54 __author__ = 'RhodeCode GmbH'
54 __author__ = 'RhodeCode GmbH'
55 __url__ = 'https://code.rhodecode.com'
55 __url__ = 'https://code.rhodecode.com'
56
56
57 is_windows = __platform__ in ['Windows']
57 is_windows = __platform__ in ['Windows']
58 is_unix = not is_windows
58 is_unix = not is_windows
59 is_test = False
59 is_test = False
60 disable_error_handler = False
60 disable_error_handler = False
@@ -1,1018 +1,1018 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 from rhodecode.api.utils import (
26 from rhodecode.api.utils import (
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
29 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 from rhodecode.lib.base import vcs_operation_context
31 from rhodecode.lib.base import vcs_operation_context
32 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool
33 from rhodecode.model.changeset_status import ChangesetStatusModel
33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.comment import CommentsModel
34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 from rhodecode.model.settings import SettingsModel
37 from rhodecode.model.settings import SettingsModel
38 from rhodecode.model.validation_schema import Invalid
38 from rhodecode.model.validation_schema import Invalid
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 ReviewerListSchema)
40 ReviewerListSchema)
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 @jsonrpc_method()
45 @jsonrpc_method()
46 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
46 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
47 merge_state=Optional(False)):
47 merge_state=Optional(False)):
48 """
48 """
49 Get a pull request based on the given ID.
49 Get a pull request based on the given ID.
50
50
51 :param apiuser: This is filled automatically from the |authtoken|.
51 :param apiuser: This is filled automatically from the |authtoken|.
52 :type apiuser: AuthUser
52 :type apiuser: AuthUser
53 :param repoid: Optional, repository name or repository ID from where
53 :param repoid: Optional, repository name or repository ID from where
54 the pull request was opened.
54 the pull request was opened.
55 :type repoid: str or int
55 :type repoid: str or int
56 :param pullrequestid: ID of the requested pull request.
56 :param pullrequestid: ID of the requested pull request.
57 :type pullrequestid: int
57 :type pullrequestid: int
58 :param merge_state: Optional calculate merge state for each repository.
58 :param merge_state: Optional calculate merge state for each repository.
59 This could result in longer time to fetch the data
59 This could result in longer time to fetch the data
60 :type merge_state: bool
60 :type merge_state: bool
61
61
62 Example output:
62 Example output:
63
63
64 .. code-block:: bash
64 .. code-block:: bash
65
65
66 "id": <id_given_in_input>,
66 "id": <id_given_in_input>,
67 "result":
67 "result":
68 {
68 {
69 "pull_request_id": "<pull_request_id>",
69 "pull_request_id": "<pull_request_id>",
70 "url": "<url>",
70 "url": "<url>",
71 "title": "<title>",
71 "title": "<title>",
72 "description": "<description>",
72 "description": "<description>",
73 "status" : "<status>",
73 "status" : "<status>",
74 "created_on": "<date_time_created>",
74 "created_on": "<date_time_created>",
75 "updated_on": "<date_time_updated>",
75 "updated_on": "<date_time_updated>",
76 "versions": "<number_or_versions_of_pr>",
76 "versions": "<number_or_versions_of_pr>",
77 "commit_ids": [
77 "commit_ids": [
78 ...
78 ...
79 "<commit_id>",
79 "<commit_id>",
80 "<commit_id>",
80 "<commit_id>",
81 ...
81 ...
82 ],
82 ],
83 "review_status": "<review_status>",
83 "review_status": "<review_status>",
84 "mergeable": {
84 "mergeable": {
85 "status": "<bool>",
85 "status": "<bool>",
86 "message": "<message>",
86 "message": "<message>",
87 },
87 },
88 "source": {
88 "source": {
89 "clone_url": "<clone_url>",
89 "clone_url": "<clone_url>",
90 "repository": "<repository_name>",
90 "repository": "<repository_name>",
91 "reference":
91 "reference":
92 {
92 {
93 "name": "<name>",
93 "name": "<name>",
94 "type": "<type>",
94 "type": "<type>",
95 "commit_id": "<commit_id>",
95 "commit_id": "<commit_id>",
96 }
96 }
97 },
97 },
98 "target": {
98 "target": {
99 "clone_url": "<clone_url>",
99 "clone_url": "<clone_url>",
100 "repository": "<repository_name>",
100 "repository": "<repository_name>",
101 "reference":
101 "reference":
102 {
102 {
103 "name": "<name>",
103 "name": "<name>",
104 "type": "<type>",
104 "type": "<type>",
105 "commit_id": "<commit_id>",
105 "commit_id": "<commit_id>",
106 }
106 }
107 },
107 },
108 "merge": {
108 "merge": {
109 "clone_url": "<clone_url>",
109 "clone_url": "<clone_url>",
110 "reference":
110 "reference":
111 {
111 {
112 "name": "<name>",
112 "name": "<name>",
113 "type": "<type>",
113 "type": "<type>",
114 "commit_id": "<commit_id>",
114 "commit_id": "<commit_id>",
115 }
115 }
116 },
116 },
117 "author": <user_obj>,
117 "author": <user_obj>,
118 "reviewers": [
118 "reviewers": [
119 ...
119 ...
120 {
120 {
121 "user": "<user_obj>",
121 "user": "<user_obj>",
122 "review_status": "<review_status>",
122 "review_status": "<review_status>",
123 }
123 }
124 ...
124 ...
125 ]
125 ]
126 },
126 },
127 "error": null
127 "error": null
128 """
128 """
129
129
130 pull_request = get_pull_request_or_error(pullrequestid)
130 pull_request = get_pull_request_or_error(pullrequestid)
131 if Optional.extract(repoid):
131 if Optional.extract(repoid):
132 repo = get_repo_or_error(repoid)
132 repo = get_repo_or_error(repoid)
133 else:
133 else:
134 repo = pull_request.target_repo
134 repo = pull_request.target_repo
135
135
136 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
136 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
137 raise JSONRPCError('repository `%s` or pull request `%s` '
137 raise JSONRPCError('repository `%s` or pull request `%s` '
138 'does not exist' % (repoid, pullrequestid))
138 'does not exist' % (repoid, pullrequestid))
139
139
140 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
140 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
141 # otherwise we can lock the repo on calculation of merge state while update/merge
141 # otherwise we can lock the repo on calculation of merge state while update/merge
142 # is happening.
142 # is happening.
143 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
143 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
144 merge_state = Optional.extract(merge_state, binary=True) and pr_created
144 merge_state = Optional.extract(merge_state, binary=True) and pr_created
145 data = pull_request.get_api_data(with_merge_state=merge_state)
145 data = pull_request.get_api_data(with_merge_state=merge_state)
146 return data
146 return data
147
147
148
148
149 @jsonrpc_method()
149 @jsonrpc_method()
150 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
150 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
151 merge_state=Optional(False)):
151 merge_state=Optional(False)):
152 """
152 """
153 Get all pull requests from the repository specified in `repoid`.
153 Get all pull requests from the repository specified in `repoid`.
154
154
155 :param apiuser: This is filled automatically from the |authtoken|.
155 :param apiuser: This is filled automatically from the |authtoken|.
156 :type apiuser: AuthUser
156 :type apiuser: AuthUser
157 :param repoid: Optional repository name or repository ID.
157 :param repoid: Optional repository name or repository ID.
158 :type repoid: str or int
158 :type repoid: str or int
159 :param status: Only return pull requests with the specified status.
159 :param status: Only return pull requests with the specified status.
160 Valid options are.
160 Valid options are.
161 * ``new`` (default)
161 * ``new`` (default)
162 * ``open``
162 * ``open``
163 * ``closed``
163 * ``closed``
164 :type status: str
164 :type status: str
165 :param merge_state: Optional calculate merge state for each repository.
165 :param merge_state: Optional calculate merge state for each repository.
166 This could result in longer time to fetch the data
166 This could result in longer time to fetch the data
167 :type merge_state: bool
167 :type merge_state: bool
168
168
169 Example output:
169 Example output:
170
170
171 .. code-block:: bash
171 .. code-block:: bash
172
172
173 "id": <id_given_in_input>,
173 "id": <id_given_in_input>,
174 "result":
174 "result":
175 [
175 [
176 ...
176 ...
177 {
177 {
178 "pull_request_id": "<pull_request_id>",
178 "pull_request_id": "<pull_request_id>",
179 "url": "<url>",
179 "url": "<url>",
180 "title" : "<title>",
180 "title" : "<title>",
181 "description": "<description>",
181 "description": "<description>",
182 "status": "<status>",
182 "status": "<status>",
183 "created_on": "<date_time_created>",
183 "created_on": "<date_time_created>",
184 "updated_on": "<date_time_updated>",
184 "updated_on": "<date_time_updated>",
185 "commit_ids": [
185 "commit_ids": [
186 ...
186 ...
187 "<commit_id>",
187 "<commit_id>",
188 "<commit_id>",
188 "<commit_id>",
189 ...
189 ...
190 ],
190 ],
191 "review_status": "<review_status>",
191 "review_status": "<review_status>",
192 "mergeable": {
192 "mergeable": {
193 "status": "<bool>",
193 "status": "<bool>",
194 "message: "<message>",
194 "message: "<message>",
195 },
195 },
196 "source": {
196 "source": {
197 "clone_url": "<clone_url>",
197 "clone_url": "<clone_url>",
198 "reference":
198 "reference":
199 {
199 {
200 "name": "<name>",
200 "name": "<name>",
201 "type": "<type>",
201 "type": "<type>",
202 "commit_id": "<commit_id>",
202 "commit_id": "<commit_id>",
203 }
203 }
204 },
204 },
205 "target": {
205 "target": {
206 "clone_url": "<clone_url>",
206 "clone_url": "<clone_url>",
207 "reference":
207 "reference":
208 {
208 {
209 "name": "<name>",
209 "name": "<name>",
210 "type": "<type>",
210 "type": "<type>",
211 "commit_id": "<commit_id>",
211 "commit_id": "<commit_id>",
212 }
212 }
213 },
213 },
214 "merge": {
214 "merge": {
215 "clone_url": "<clone_url>",
215 "clone_url": "<clone_url>",
216 "reference":
216 "reference":
217 {
217 {
218 "name": "<name>",
218 "name": "<name>",
219 "type": "<type>",
219 "type": "<type>",
220 "commit_id": "<commit_id>",
220 "commit_id": "<commit_id>",
221 }
221 }
222 },
222 },
223 "author": <user_obj>,
223 "author": <user_obj>,
224 "reviewers": [
224 "reviewers": [
225 ...
225 ...
226 {
226 {
227 "user": "<user_obj>",
227 "user": "<user_obj>",
228 "review_status": "<review_status>",
228 "review_status": "<review_status>",
229 }
229 }
230 ...
230 ...
231 ]
231 ]
232 }
232 }
233 ...
233 ...
234 ],
234 ],
235 "error": null
235 "error": null
236
236
237 """
237 """
238 repo = get_repo_or_error(repoid)
238 repo = get_repo_or_error(repoid)
239 if not has_superadmin_permission(apiuser):
239 if not has_superadmin_permission(apiuser):
240 _perms = (
240 _perms = (
241 'repository.admin', 'repository.write', 'repository.read',)
241 'repository.admin', 'repository.write', 'repository.read',)
242 validate_repo_permissions(apiuser, repoid, repo, _perms)
242 validate_repo_permissions(apiuser, repoid, repo, _perms)
243
243
244 status = Optional.extract(status)
244 status = Optional.extract(status)
245 merge_state = Optional.extract(merge_state, binary=True)
245 merge_state = Optional.extract(merge_state, binary=True)
246 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
246 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
247 order_by='id', order_dir='desc')
247 order_by='id', order_dir='desc')
248 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
248 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
249 return data
249 return data
250
250
251
251
252 @jsonrpc_method()
252 @jsonrpc_method()
253 def merge_pull_request(
253 def merge_pull_request(
254 request, apiuser, pullrequestid, repoid=Optional(None),
254 request, apiuser, pullrequestid, repoid=Optional(None),
255 userid=Optional(OAttr('apiuser'))):
255 userid=Optional(OAttr('apiuser'))):
256 """
256 """
257 Merge the pull request specified by `pullrequestid` into its target
257 Merge the pull request specified by `pullrequestid` into its target
258 repository.
258 repository.
259
259
260 :param apiuser: This is filled automatically from the |authtoken|.
260 :param apiuser: This is filled automatically from the |authtoken|.
261 :type apiuser: AuthUser
261 :type apiuser: AuthUser
262 :param repoid: Optional, repository name or repository ID of the
262 :param repoid: Optional, repository name or repository ID of the
263 target repository to which the |pr| is to be merged.
263 target repository to which the |pr| is to be merged.
264 :type repoid: str or int
264 :type repoid: str or int
265 :param pullrequestid: ID of the pull request which shall be merged.
265 :param pullrequestid: ID of the pull request which shall be merged.
266 :type pullrequestid: int
266 :type pullrequestid: int
267 :param userid: Merge the pull request as this user.
267 :param userid: Merge the pull request as this user.
268 :type userid: Optional(str or int)
268 :type userid: Optional(str or int)
269
269
270 Example output:
270 Example output:
271
271
272 .. code-block:: bash
272 .. code-block:: bash
273
273
274 "id": <id_given_in_input>,
274 "id": <id_given_in_input>,
275 "result": {
275 "result": {
276 "executed": "<bool>",
276 "executed": "<bool>",
277 "failure_reason": "<int>",
277 "failure_reason": "<int>",
278 "merge_status_message": "<str>",
278 "merge_status_message": "<str>",
279 "merge_commit_id": "<merge_commit_id>",
279 "merge_commit_id": "<merge_commit_id>",
280 "possible": "<bool>",
280 "possible": "<bool>",
281 "merge_ref": {
281 "merge_ref": {
282 "commit_id": "<commit_id>",
282 "commit_id": "<commit_id>",
283 "type": "<type>",
283 "type": "<type>",
284 "name": "<name>"
284 "name": "<name>"
285 }
285 }
286 },
286 },
287 "error": null
287 "error": null
288 """
288 """
289 pull_request = get_pull_request_or_error(pullrequestid)
289 pull_request = get_pull_request_or_error(pullrequestid)
290 if Optional.extract(repoid):
290 if Optional.extract(repoid):
291 repo = get_repo_or_error(repoid)
291 repo = get_repo_or_error(repoid)
292 else:
292 else:
293 repo = pull_request.target_repo
293 repo = pull_request.target_repo
294 auth_user = apiuser
294 auth_user = apiuser
295 if not isinstance(userid, Optional):
295 if not isinstance(userid, Optional):
296 if (has_superadmin_permission(apiuser) or
296 if (has_superadmin_permission(apiuser) or
297 HasRepoPermissionAnyApi('repository.admin')(
297 HasRepoPermissionAnyApi('repository.admin')(
298 user=apiuser, repo_name=repo.repo_name)):
298 user=apiuser, repo_name=repo.repo_name)):
299 apiuser = get_user_or_error(userid)
299 apiuser = get_user_or_error(userid)
300 auth_user = apiuser.AuthUser()
300 auth_user = apiuser.AuthUser()
301 else:
301 else:
302 raise JSONRPCError('userid is not the same as your user')
302 raise JSONRPCError('userid is not the same as your user')
303
303
304 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
304 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
305 raise JSONRPCError(
305 raise JSONRPCError(
306 'Operation forbidden because pull request is in state {}, '
306 'Operation forbidden because pull request is in state {}, '
307 'only state {} is allowed.'.format(
307 'only state {} is allowed.'.format(
308 pull_request.pull_request_state, PullRequest.STATE_CREATED))
308 pull_request.pull_request_state, PullRequest.STATE_CREATED))
309
309
310 with pull_request.set_state(PullRequest.STATE_UPDATING):
310 with pull_request.set_state(PullRequest.STATE_UPDATING):
311 check = MergeCheck.validate(pull_request, auth_user=auth_user,
311 check = MergeCheck.validate(pull_request, auth_user=auth_user,
312 translator=request.translate)
312 translator=request.translate)
313 merge_possible = not check.failed
313 merge_possible = not check.failed
314
314
315 if not merge_possible:
315 if not merge_possible:
316 error_messages = []
316 error_messages = []
317 for err_type, error_msg in check.errors:
317 for err_type, error_msg in check.errors:
318 error_msg = request.translate(error_msg)
318 error_msg = request.translate(error_msg)
319 error_messages.append(error_msg)
319 error_messages.append(error_msg)
320
320
321 reasons = ','.join(error_messages)
321 reasons = ','.join(error_messages)
322 raise JSONRPCError(
322 raise JSONRPCError(
323 'merge not possible for following reasons: {}'.format(reasons))
323 'merge not possible for following reasons: {}'.format(reasons))
324
324
325 target_repo = pull_request.target_repo
325 target_repo = pull_request.target_repo
326 extras = vcs_operation_context(
326 extras = vcs_operation_context(
327 request.environ, repo_name=target_repo.repo_name,
327 request.environ, repo_name=target_repo.repo_name,
328 username=auth_user.username, action='push',
328 username=auth_user.username, action='push',
329 scm=target_repo.repo_type)
329 scm=target_repo.repo_type)
330 with pull_request.set_state(PullRequest.STATE_UPDATING):
330 with pull_request.set_state(PullRequest.STATE_UPDATING):
331 merge_response = PullRequestModel().merge_repo(
331 merge_response = PullRequestModel().merge_repo(
332 pull_request, apiuser, extras=extras)
332 pull_request, apiuser, extras=extras)
333 if merge_response.executed:
333 if merge_response.executed:
334 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
334 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
335
335
336 Session().commit()
336 Session().commit()
337
337
338 # In previous versions the merge response directly contained the merge
338 # In previous versions the merge response directly contained the merge
339 # commit id. It is now contained in the merge reference object. To be
339 # commit id. It is now contained in the merge reference object. To be
340 # backwards compatible we have to extract it again.
340 # backwards compatible we have to extract it again.
341 merge_response = merge_response.asdict()
341 merge_response = merge_response.asdict()
342 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
342 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
343
343
344 return merge_response
344 return merge_response
345
345
346
346
347 @jsonrpc_method()
347 @jsonrpc_method()
348 def get_pull_request_comments(
348 def get_pull_request_comments(
349 request, apiuser, pullrequestid, repoid=Optional(None)):
349 request, apiuser, pullrequestid, repoid=Optional(None)):
350 """
350 """
351 Get all comments of pull request specified with the `pullrequestid`
351 Get all comments of pull request specified with the `pullrequestid`
352
352
353 :param apiuser: This is filled automatically from the |authtoken|.
353 :param apiuser: This is filled automatically from the |authtoken|.
354 :type apiuser: AuthUser
354 :type apiuser: AuthUser
355 :param repoid: Optional repository name or repository ID.
355 :param repoid: Optional repository name or repository ID.
356 :type repoid: str or int
356 :type repoid: str or int
357 :param pullrequestid: The pull request ID.
357 :param pullrequestid: The pull request ID.
358 :type pullrequestid: int
358 :type pullrequestid: int
359
359
360 Example output:
360 Example output:
361
361
362 .. code-block:: bash
362 .. code-block:: bash
363
363
364 id : <id_given_in_input>
364 id : <id_given_in_input>
365 result : [
365 result : [
366 {
366 {
367 "comment_author": {
367 "comment_author": {
368 "active": true,
368 "active": true,
369 "full_name_or_username": "Tom Gore",
369 "full_name_or_username": "Tom Gore",
370 "username": "admin"
370 "username": "admin"
371 },
371 },
372 "comment_created_on": "2017-01-02T18:43:45.533",
372 "comment_created_on": "2017-01-02T18:43:45.533",
373 "comment_f_path": null,
373 "comment_f_path": null,
374 "comment_id": 25,
374 "comment_id": 25,
375 "comment_lineno": null,
375 "comment_lineno": null,
376 "comment_status": {
376 "comment_status": {
377 "status": "under_review",
377 "status": "under_review",
378 "status_lbl": "Under Review"
378 "status_lbl": "Under Review"
379 },
379 },
380 "comment_text": "Example text",
380 "comment_text": "Example text",
381 "comment_type": null,
381 "comment_type": null,
382 "pull_request_version": null,
382 "pull_request_version": null,
383 "comment_commit_id": None,
383 "comment_commit_id": None,
384 "comment_pull_request_id": <pull_request_id>
384 "comment_pull_request_id": <pull_request_id>
385 }
385 }
386 ],
386 ],
387 error : null
387 error : null
388 """
388 """
389
389
390 pull_request = get_pull_request_or_error(pullrequestid)
390 pull_request = get_pull_request_or_error(pullrequestid)
391 if Optional.extract(repoid):
391 if Optional.extract(repoid):
392 repo = get_repo_or_error(repoid)
392 repo = get_repo_or_error(repoid)
393 else:
393 else:
394 repo = pull_request.target_repo
394 repo = pull_request.target_repo
395
395
396 if not PullRequestModel().check_user_read(
396 if not PullRequestModel().check_user_read(
397 pull_request, apiuser, api=True):
397 pull_request, apiuser, api=True):
398 raise JSONRPCError('repository `%s` or pull request `%s` '
398 raise JSONRPCError('repository `%s` or pull request `%s` '
399 'does not exist' % (repoid, pullrequestid))
399 'does not exist' % (repoid, pullrequestid))
400
400
401 (pull_request_latest,
401 (pull_request_latest,
402 pull_request_at_ver,
402 pull_request_at_ver,
403 pull_request_display_obj,
403 pull_request_display_obj,
404 at_version) = PullRequestModel().get_pr_version(
404 at_version) = PullRequestModel().get_pr_version(
405 pull_request.pull_request_id, version=None)
405 pull_request.pull_request_id, version=None)
406
406
407 versions = pull_request_display_obj.versions()
407 versions = pull_request_display_obj.versions()
408 ver_map = {
408 ver_map = {
409 ver.pull_request_version_id: cnt
409 ver.pull_request_version_id: cnt
410 for cnt, ver in enumerate(versions, 1)
410 for cnt, ver in enumerate(versions, 1)
411 }
411 }
412
412
413 # GENERAL COMMENTS with versions #
413 # GENERAL COMMENTS with versions #
414 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
414 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
415 q = q.order_by(ChangesetComment.comment_id.asc())
415 q = q.order_by(ChangesetComment.comment_id.asc())
416 general_comments = q.all()
416 general_comments = q.all()
417
417
418 # INLINE COMMENTS with versions #
418 # INLINE COMMENTS with versions #
419 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
419 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
420 q = q.order_by(ChangesetComment.comment_id.asc())
420 q = q.order_by(ChangesetComment.comment_id.asc())
421 inline_comments = q.all()
421 inline_comments = q.all()
422
422
423 data = []
423 data = []
424 for comment in inline_comments + general_comments:
424 for comment in inline_comments + general_comments:
425 full_data = comment.get_api_data()
425 full_data = comment.get_api_data()
426 pr_version_id = None
426 pr_version_id = None
427 if comment.pull_request_version_id:
427 if comment.pull_request_version_id:
428 pr_version_id = 'v{}'.format(
428 pr_version_id = 'v{}'.format(
429 ver_map[comment.pull_request_version_id])
429 ver_map[comment.pull_request_version_id])
430
430
431 # sanitize some entries
431 # sanitize some entries
432
432
433 full_data['pull_request_version'] = pr_version_id
433 full_data['pull_request_version'] = pr_version_id
434 full_data['comment_author'] = {
434 full_data['comment_author'] = {
435 'username': full_data['comment_author'].username,
435 'username': full_data['comment_author'].username,
436 'full_name_or_username': full_data['comment_author'].full_name_or_username,
436 'full_name_or_username': full_data['comment_author'].full_name_or_username,
437 'active': full_data['comment_author'].active,
437 'active': full_data['comment_author'].active,
438 }
438 }
439
439
440 if full_data['comment_status']:
440 if full_data['comment_status']:
441 full_data['comment_status'] = {
441 full_data['comment_status'] = {
442 'status': full_data['comment_status'][0].status,
442 'status': full_data['comment_status'][0].status,
443 'status_lbl': full_data['comment_status'][0].status_lbl,
443 'status_lbl': full_data['comment_status'][0].status_lbl,
444 }
444 }
445 else:
445 else:
446 full_data['comment_status'] = {}
446 full_data['comment_status'] = {}
447
447
448 data.append(full_data)
448 data.append(full_data)
449 return data
449 return data
450
450
451
451
@jsonrpc_method()
def comment_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        message=Optional(None), commit_id=Optional(None), status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Comment on the pull request specified with the `pullrequestid`,
    in the |repo| specified by the `repoid`, and optionally change the
    review status.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int
    :param commit_id: Specify the commit_id for which to set a comment. If
        given commit_id is different than latest in the PR status
        change won't be performed.
    :type commit_id: str
    :param message: The text content of the comment.
    :type message: str
    :param status: (**Optional**) Set the approval status of the pull
        request. One of: 'not_reviewed', 'approved', 'rejected',
        'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "pull_request_id": "<Integer>",
            "comment_id": "<Integer>",
            "status": {"given": <given_status>,
                       "was_changed": <bool status_was_actually_changed> },
        },
        error :  null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # commenting on behalf of another user requires super-admin or
    # repository.admin rights on the target repository
    auth_user = apiuser
    if not isinstance(userid, Optional):
        if (has_superadmin_permission(apiuser) or
                HasRepoPermissionAnyApi('repository.admin')(
                    user=apiuser, repo_name=repo.repo_name)):
            apiuser = get_user_or_error(userid)
            auth_user = apiuser.AuthUser()
        else:
            raise JSONRPCError('userid is not the same as your user')

    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` comment failed, pull request is closed' % (
                pullrequestid,))

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        raise JSONRPCError('repository `%s` does not exist' % (repoid,))
    message = Optional.extract(message)
    status = Optional.extract(status)
    commit_id = Optional.extract(commit_id)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    if not message and not status:
        raise JSONRPCError(
            'Both message and status parameters are missing. '
            'At least one is required.')

    if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
            status is not None):
        raise JSONRPCError('Unknown comment status: `%s`' % status)

    if commit_id and commit_id not in pull_request.revisions:
        raise JSONRPCError(
            'Invalid commit_id `%s` for this pull request.' % commit_id)

    allowed_to_change_status = PullRequestModel().check_user_change_status(
        pull_request, apiuser)

    # if commit_id is passed re-validated if user is allowed to change status
    # based on latest commit_id from the PR
    if commit_id:
        commit_idx = pull_request.revisions.index(commit_id)
        if commit_idx != 0:
            allowed_to_change_status = False

    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this pull request.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    text = message
    status_label = ChangesetStatus.get_status_lbl(status)
    if status and allowed_to_change_status:
        # auto-generate a comment body when only a status change was requested
        st_message = ('Status change %(transition_icon)s %(status)s'
                      % {'transition_icon': '>', 'status': status_label})
        text = message or st_message

    rc_config = SettingsModel().get_all_settings()
    renderer = rc_config.get('rhodecode_markup_renderer', 'rst')

    status_change = status and allowed_to_change_status
    comment = CommentsModel().create(
        text=text,
        repo=pull_request.target_repo.repo_id,
        user=apiuser.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        status_change=(status_label if status_change else None),
        status_change_type=(status if status_change else None),
        closing_pr=False,
        renderer=renderer,
        comment_type=comment_type,
        resolves_comment_id=resolves_comment_id,
        auth_user=auth_user,
        extra_recipients=extra_recipients,
        send_email=send_email
    )

    if allowed_to_change_status and status:
        # capture the review status BEFORE applying the new vote so we can
        # detect an overall transition afterwards
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            pull_request.target_repo.repo_id,
            status,
            apiuser.user_id,
            comment,
            pull_request=pull_request.pull_request_id
        )
        Session().flush()

    Session().commit()

    PullRequestModel().trigger_pull_request_hook(
        pull_request, apiuser, 'comment',
        data={'comment': comment})

    if allowed_to_change_status and status:
        # we now calculate the status of pull request, and based on that
        # calculation we set the commits status
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            PullRequestModel().trigger_pull_request_hook(
                pull_request, apiuser, 'review_status_change',
                data={'status': calculated_status})

    data = {
        'pull_request_id': pull_request.pull_request_id,
        'comment_id': comment.comment_id if comment else None,
        'status': {'given': status, 'was_changed': status_change},
    }
    return data
632
632
633
633
@jsonrpc_method()
def create_pull_request(
        request, apiuser, source_repo, target_repo, source_ref, target_ref,
        owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
        description_renderer=Optional(''), reviewers=Optional(None)):
    """
    Creates a new pull request.

    Accepts refs in the following formats:

    * branch:<branch_name>:<sha>
    * branch:<branch_name>
    * bookmark:<bookmark_name>:<sha> (Mercurial only)
    * bookmark:<bookmark_name> (Mercurial only)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param source_repo: Set the source repository name.
    :type source_repo: str
    :param target_repo: Set the target repository name.
    :type target_repo: str
    :param source_ref: Set the source ref name.
    :type source_ref: str
    :param target_ref: Set the target ref name.
    :type target_ref: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param title: Optionally Set the pull request title, it's generated otherwise
    :type title: str
    :param description: Set the pull request description.
    :type description: Optional(str)
    :type description_renderer: Optional(str)
    :param description_renderer: Set pull request renderer for the description.
        It should be 'rst', 'markdown' or 'plain'. If not give default
        system renderer will be used
    :param reviewers: Set the new pull request reviewers list.
        Reviewer defined by review rules will be added automatically to the
        defined list.
    :type reviewers: Optional(list)
        Accepts username strings or objects of the format:

        [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
    """

    source_db_repo = get_repo_or_error(source_repo)
    target_db_repo = get_repo_or_error(target_repo)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)

    owner = validate_set_owner_permissions(apiuser, owner)

    full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
    full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)

    source_commit = get_commit_or_error(full_source_ref, source_db_repo)
    target_commit = get_commit_or_error(full_target_ref, target_db_repo)

    reviewer_objects = Optional.extract(reviewers) or []

    # serialize and validate passed in given reviewers
    if reviewer_objects:
        schema = ReviewerListSchema()
        try:
            reviewer_objects = schema.deserialize(reviewer_objects)
        except Invalid as err:
            raise JSONRPCValidationError(colander_exc=err)

        # validate users
        for reviewer_object in reviewer_objects:
            user = get_user_or_error(reviewer_object['username'])
            reviewer_object['user_id'] = user.user_id

    get_default_reviewers_data, validate_default_reviewers = \
        PullRequestModel().get_reviewer_functions()

    # recalculate reviewers logic, to make sure we can validate this
    default_reviewers_data = get_default_reviewers_data(
        owner, source_db_repo,
        source_commit, target_db_repo, target_commit)

    # now MERGE our given with the calculated
    reviewer_objects = default_reviewers_data['reviewers'] + reviewer_objects

    try:
        reviewers = validate_default_reviewers(
            reviewer_objects, default_reviewers_data)
    except ValueError as e:
        raise JSONRPCError('Reviewers Validation: {}'.format(e))

    title = Optional.extract(title)
    if not title:
        title_source_ref = source_ref.split(':', 2)[1]
        title = PullRequestModel().generate_pullrequest_title(
            source=source_repo,
            source_ref=title_source_ref,
            target=target_repo
        )

    # the common ancestor and commit range were already computed as part of
    # the default-reviewers calculation; reuse them instead of re-comparing
    diff_info = default_reviewers_data['diff_info']
    common_ancestor_id = diff_info['ancestor']
    commits = diff_info['commits']

    if not common_ancestor_id:
        raise JSONRPCError('no common ancestor found')

    if not commits:
        raise JSONRPCError('no commits found')

    # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
    revisions = [commit.raw_id for commit in reversed(commits)]

    # recalculate target ref based on ancestor
    target_ref_type, target_ref_name, __ = full_target_ref.split(':')
    full_target_ref = ':'.join((target_ref_type, target_ref_name, common_ancestor_id))

    # fetch renderer, if set fallback to plain in case of PR
    rc_config = SettingsModel().get_all_settings()
    default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
    description = Optional.extract(description)
    description_renderer = Optional.extract(description_renderer) or default_system_renderer

    pull_request = PullRequestModel().create(
        created_by=owner.user_id,
        source_repo=source_repo,
        source_ref=full_source_ref,
        target_repo=target_repo,
        target_ref=full_target_ref,
        common_ancestor_id=common_ancestor_id,
        revisions=revisions,
        reviewers=reviewers,
        title=title,
        description=description,
        description_renderer=description_renderer,
        reviewer_data=default_reviewers_data,
        auth_user=apiuser
    )

    Session().commit()
    data = {
        'msg': 'Created new pull request `{}`'.format(title),
        'pull_request_id': pull_request.pull_request_id,
    }
    return data
778
778
779
779
780 @jsonrpc_method()
780 @jsonrpc_method()
781 def update_pull_request(
781 def update_pull_request(
782 request, apiuser, pullrequestid, repoid=Optional(None),
782 request, apiuser, pullrequestid, repoid=Optional(None),
783 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
783 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
784 reviewers=Optional(None), update_commits=Optional(None)):
784 reviewers=Optional(None), update_commits=Optional(None)):
785 """
785 """
786 Updates a pull request.
786 Updates a pull request.
787
787
788 :param apiuser: This is filled automatically from the |authtoken|.
788 :param apiuser: This is filled automatically from the |authtoken|.
789 :type apiuser: AuthUser
789 :type apiuser: AuthUser
790 :param repoid: Optional repository name or repository ID.
790 :param repoid: Optional repository name or repository ID.
791 :type repoid: str or int
791 :type repoid: str or int
792 :param pullrequestid: The pull request ID.
792 :param pullrequestid: The pull request ID.
793 :type pullrequestid: int
793 :type pullrequestid: int
794 :param title: Set the pull request title.
794 :param title: Set the pull request title.
795 :type title: str
795 :type title: str
796 :param description: Update pull request description.
796 :param description: Update pull request description.
797 :type description: Optional(str)
797 :type description: Optional(str)
798 :type description_renderer: Optional(str)
798 :type description_renderer: Optional(str)
799 :param description_renderer: Update pull request renderer for the description.
799 :param description_renderer: Update pull request renderer for the description.
800 It should be 'rst', 'markdown' or 'plain'
800 It should be 'rst', 'markdown' or 'plain'
801 :param reviewers: Update pull request reviewers list with new value.
801 :param reviewers: Update pull request reviewers list with new value.
802 :type reviewers: Optional(list)
802 :type reviewers: Optional(list)
803 Accepts username strings or objects of the format:
803 Accepts username strings or objects of the format:
804
804
805 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
805 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
806
806
807 :param update_commits: Trigger update of commits for this pull request
807 :param update_commits: Trigger update of commits for this pull request
808 :type: update_commits: Optional(bool)
808 :type: update_commits: Optional(bool)
809
809
810 Example output:
810 Example output:
811
811
812 .. code-block:: bash
812 .. code-block:: bash
813
813
814 id : <id_given_in_input>
814 id : <id_given_in_input>
815 result : {
815 result : {
816 "msg": "Updated pull request `63`",
816 "msg": "Updated pull request `63`",
817 "pull_request": <pull_request_object>,
817 "pull_request": <pull_request_object>,
818 "updated_reviewers": {
818 "updated_reviewers": {
819 "added": [
819 "added": [
820 "username"
820 "username"
821 ],
821 ],
822 "removed": []
822 "removed": []
823 },
823 },
824 "updated_commits": {
824 "updated_commits": {
825 "added": [
825 "added": [
826 "<sha1_hash>"
826 "<sha1_hash>"
827 ],
827 ],
828 "common": [
828 "common": [
829 "<sha1_hash>",
829 "<sha1_hash>",
830 "<sha1_hash>",
830 "<sha1_hash>",
831 ],
831 ],
832 "removed": []
832 "removed": []
833 }
833 }
834 }
834 }
835 error : null
835 error : null
836 """
836 """
837
837
838 pull_request = get_pull_request_or_error(pullrequestid)
838 pull_request = get_pull_request_or_error(pullrequestid)
839 if Optional.extract(repoid):
839 if Optional.extract(repoid):
840 repo = get_repo_or_error(repoid)
840 repo = get_repo_or_error(repoid)
841 else:
841 else:
842 repo = pull_request.target_repo
842 repo = pull_request.target_repo
843
843
844 if not PullRequestModel().check_user_update(
844 if not PullRequestModel().check_user_update(
845 pull_request, apiuser, api=True):
845 pull_request, apiuser, api=True):
846 raise JSONRPCError(
846 raise JSONRPCError(
847 'pull request `%s` update failed, no permission to update.' % (
847 'pull request `%s` update failed, no permission to update.' % (
848 pullrequestid,))
848 pullrequestid,))
849 if pull_request.is_closed():
849 if pull_request.is_closed():
850 raise JSONRPCError(
850 raise JSONRPCError(
851 'pull request `%s` update failed, pull request is closed' % (
851 'pull request `%s` update failed, pull request is closed' % (
852 pullrequestid,))
852 pullrequestid,))
853
853
854 reviewer_objects = Optional.extract(reviewers) or []
854 reviewer_objects = Optional.extract(reviewers) or []
855
855
856 if reviewer_objects:
856 if reviewer_objects:
857 schema = ReviewerListSchema()
857 schema = ReviewerListSchema()
858 try:
858 try:
859 reviewer_objects = schema.deserialize(reviewer_objects)
859 reviewer_objects = schema.deserialize(reviewer_objects)
860 except Invalid as err:
860 except Invalid as err:
861 raise JSONRPCValidationError(colander_exc=err)
861 raise JSONRPCValidationError(colander_exc=err)
862
862
863 # validate users
863 # validate users
864 for reviewer_object in reviewer_objects:
864 for reviewer_object in reviewer_objects:
865 user = get_user_or_error(reviewer_object['username'])
865 user = get_user_or_error(reviewer_object['username'])
866 reviewer_object['user_id'] = user.user_id
866 reviewer_object['user_id'] = user.user_id
867
867
868 get_default_reviewers_data, get_validated_reviewers = \
868 get_default_reviewers_data, get_validated_reviewers = \
869 PullRequestModel().get_reviewer_functions()
869 PullRequestModel().get_reviewer_functions()
870
870
871 # re-use stored rules
871 # re-use stored rules
872 reviewer_rules = pull_request.reviewer_data
872 reviewer_rules = pull_request.reviewer_data
873 try:
873 try:
874 reviewers = get_validated_reviewers(
874 reviewers = get_validated_reviewers(
875 reviewer_objects, reviewer_rules)
875 reviewer_objects, reviewer_rules)
876 except ValueError as e:
876 except ValueError as e:
877 raise JSONRPCError('Reviewers Validation: {}'.format(e))
877 raise JSONRPCError('Reviewers Validation: {}'.format(e))
878 else:
878 else:
879 reviewers = []
879 reviewers = []
880
880
881 title = Optional.extract(title)
881 title = Optional.extract(title)
882 description = Optional.extract(description)
882 description = Optional.extract(description)
883 description_renderer = Optional.extract(description_renderer)
883 description_renderer = Optional.extract(description_renderer)
884
884
885 if title or description:
885 if title or description:
886 PullRequestModel().edit(
886 PullRequestModel().edit(
887 pull_request,
887 pull_request,
888 title or pull_request.title,
888 title or pull_request.title,
889 description or pull_request.description,
889 description or pull_request.description,
890 description_renderer or pull_request.description_renderer,
890 description_renderer or pull_request.description_renderer,
891 apiuser)
891 apiuser)
892 Session().commit()
892 Session().commit()
893
893
894 commit_changes = {"added": [], "common": [], "removed": []}
894 commit_changes = {"added": [], "common": [], "removed": []}
895 if str2bool(Optional.extract(update_commits)):
895 if str2bool(Optional.extract(update_commits)):
896
896
897 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
897 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
898 raise JSONRPCError(
898 raise JSONRPCError(
899 'Operation forbidden because pull request is in state {}, '
899 'Operation forbidden because pull request is in state {}, '
900 'only state {} is allowed.'.format(
900 'only state {} is allowed.'.format(
901 pull_request.pull_request_state, PullRequest.STATE_CREATED))
901 pull_request.pull_request_state, PullRequest.STATE_CREATED))
902
902
903 with pull_request.set_state(PullRequest.STATE_UPDATING):
903 with pull_request.set_state(PullRequest.STATE_UPDATING):
904 if PullRequestModel().has_valid_update_type(pull_request):
904 if PullRequestModel().has_valid_update_type(pull_request):
905 db_user = apiuser.get_instance()
905 db_user = apiuser.get_instance()
906 update_response = PullRequestModel().update_commits(
906 update_response = PullRequestModel().update_commits(
907 pull_request, db_user)
907 pull_request, db_user)
908 commit_changes = update_response.changes or commit_changes
908 commit_changes = update_response.changes or commit_changes
909 Session().commit()
909 Session().commit()
910
910
911 reviewers_changes = {"added": [], "removed": []}
911 reviewers_changes = {"added": [], "removed": []}
912 if reviewers:
912 if reviewers:
913 old_calculated_status = pull_request.calculated_review_status()
913 old_calculated_status = pull_request.calculated_review_status()
914 added_reviewers, removed_reviewers = \
914 added_reviewers, removed_reviewers = \
915 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
915 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
916
916
917 reviewers_changes['added'] = sorted(
917 reviewers_changes['added'] = sorted(
918 [get_user_or_error(n).username for n in added_reviewers])
918 [get_user_or_error(n).username for n in added_reviewers])
919 reviewers_changes['removed'] = sorted(
919 reviewers_changes['removed'] = sorted(
920 [get_user_or_error(n).username for n in removed_reviewers])
920 [get_user_or_error(n).username for n in removed_reviewers])
921 Session().commit()
921 Session().commit()
922
922
923 # trigger status changed if change in reviewers changes the status
923 # trigger status changed if change in reviewers changes the status
924 calculated_status = pull_request.calculated_review_status()
924 calculated_status = pull_request.calculated_review_status()
925 if old_calculated_status != calculated_status:
925 if old_calculated_status != calculated_status:
926 PullRequestModel().trigger_pull_request_hook(
926 PullRequestModel().trigger_pull_request_hook(
927 pull_request, apiuser, 'review_status_change',
927 pull_request, apiuser, 'review_status_change',
928 data={'status': calculated_status})
928 data={'status': calculated_status})
929
929
930 data = {
930 data = {
931 'msg': 'Updated pull request `{}`'.format(
931 'msg': 'Updated pull request `{}`'.format(
932 pull_request.pull_request_id),
932 pull_request.pull_request_id),
933 'pull_request': pull_request.get_api_data(),
933 'pull_request': pull_request.get_api_data(),
934 'updated_commits': commit_changes,
934 'updated_commits': commit_changes,
935 'updated_reviewers': reviewers_changes
935 'updated_reviewers': reviewers_changes
936 }
936 }
937
937
938 return data
938 return data
939
939
940
940
@jsonrpc_method()
def close_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser')), message=Optional('')):
    """
    Close the pull request specified by `pullrequestid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Repository name or repository ID to which the pull
        request belongs.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request to be closed.
    :type pullrequestid: int
    :param userid: Close the pull request as this user.
    :type userid: Optional(str or int)
    :param message: Optional message to close the Pull Request with. If not
        specified it will be generated automatically.
    :type message: Optional(str)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "pull_request_id": "<int>",
            "close_status": "<str:status_lbl>,
            "closed": "<bool>"
        },
        "error": null

    """
    _ = request.translate

    pull_request = get_pull_request_or_error(pullrequestid)

    # Fall back to the pull request's own target repository when no
    # explicit repoid was passed in.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Acting on behalf of another user requires super-admin or
    # repository-admin rights.
    if not isinstance(userid, Optional):
        has_admin_rights = (
            has_superadmin_permission(apiuser) or
            HasRepoPermissionAnyApi('repository.admin')(
                user=apiuser, repo_name=repo.repo_name))
        if has_admin_rights:
            apiuser = get_user_or_error(userid)
        else:
            raise JSONRPCError('userid is not the same as your user')

    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` is already closed' % (pullrequestid,))

    # Only the owner, an admin, or a user with write permissions may close.
    allowed_to_close = PullRequestModel().check_user_update(
        pull_request, apiuser, api=True)
    if not allowed_to_close:
        raise JSONRPCError(
            'pull request `%s` close failed, no permission to close.' % (
                pullrequestid,))

    # Explicit close message; when empty it is generated automatically.
    message = Optional.extract(message)

    # Finally close the PR, attaching the proper message comment.
    comment, status = PullRequestModel().close_pull_request_with_comment(
        pull_request, apiuser, repo, message=message, auth_user=apiuser)
    status_lbl = ChangesetStatus.get_status_lbl(status)

    Session().commit()

    return {
        'pull_request_id': pull_request.pull_request_id,
        'close_status': status_lbl,
        'closed': True,
    }
@@ -1,666 +1,667 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import lxml.html
23 import lxml.html
24
24
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.tests import assert_session_flash
26 from rhodecode.tests import assert_session_flash
27 from rhodecode.tests.utils import AssertResponse, commit_change
27 from rhodecode.tests.utils import AssertResponse, commit_change
28
28
29
29
def route_path(name, params=None, **kwargs):
    """Build the URL for a named compare route.

    :param name: route name, one of ``repo_compare_select`` or
        ``repo_compare``.
    :param params: optional mapping appended as a URL-encoded query string.
    :param kwargs: values interpolated into the route's path pattern.
    :raises KeyError: if *name* is not a known route.
    :return: the formatted URL as a string.
    """
    # `urllib.urlencode` only exists on Python 2; import the function in a
    # version-agnostic way so the helper works on both interpreters.
    try:
        from urllib.parse import urlencode  # Python 3
    except ImportError:
        from urllib import urlencode  # Python 2

    base_url = {
        'repo_compare_select': '/{repo_name}/compare',
        'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urlencode(params))
    return base_url
41
41
42
42
43 @pytest.mark.usefixtures("autologin_user", "app")
43 @pytest.mark.usefixtures("autologin_user", "app")
44 class TestCompareView(object):
44 class TestCompareView(object):
45
45
46 def test_compare_index_is_reached_at_least_once(self, backend):
46 def test_compare_index_is_reached_at_least_once(self, backend):
47 repo = backend.repo
47 repo = backend.repo
48 self.app.get(
48 self.app.get(
49 route_path('repo_compare_select', repo_name=repo.repo_name))
49 route_path('repo_compare_select', repo_name=repo.repo_name))
50
50
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 def test_compare_remote_with_different_commit_indexes(self, backend):
52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 # Preparing the following repository structure:
53 # Preparing the following repository structure:
54 #
54 #
55 # Origin repository has two commits:
55 # Origin repository has two commits:
56 #
56 #
57 # 0 1
57 # 0 1
58 # A -- D
58 # A -- D
59 #
59 #
60 # The fork of it has a few more commits and "D" has a commit index
60 # The fork of it has a few more commits and "D" has a commit index
61 # which does not exist in origin.
61 # which does not exist in origin.
62 #
62 #
63 # 0 1 2 3 4
63 # 0 1 2 3 4
64 # A -- -- -- D -- E
64 # A -- -- -- D -- E
65 # \- B -- C
65 # \- B -- C
66 #
66 #
67
67
68 fork = backend.create_repo()
68 fork = backend.create_repo()
69
69
70 # prepare fork
70 # prepare fork
71 commit0 = commit_change(
71 commit0 = commit_change(
72 fork.repo_name, filename='file1', content='A',
72 fork.repo_name, filename='file1', content='A',
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74
74
75 commit1 = commit_change(
75 commit1 = commit_change(
76 fork.repo_name, filename='file1', content='B',
76 fork.repo_name, filename='file1', content='B',
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78
78
79 commit_change( # commit 2
79 commit_change( # commit 2
80 fork.repo_name, filename='file1', content='C',
80 fork.repo_name, filename='file1', content='C',
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82
82
83 commit3 = commit_change(
83 commit3 = commit_change(
84 fork.repo_name, filename='file1', content='D',
84 fork.repo_name, filename='file1', content='D',
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86
86
87 commit4 = commit_change(
87 commit4 = commit_change(
88 fork.repo_name, filename='file1', content='E',
88 fork.repo_name, filename='file1', content='E',
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90
90
91 # prepare origin repository, taking just the history up to D
91 # prepare origin repository, taking just the history up to D
92 origin = backend.create_repo()
92 origin = backend.create_repo()
93
93
94 origin_repo = origin.scm_instance(cache=False)
94 origin_repo = origin.scm_instance(cache=False)
95 origin_repo.config.clear_section('hooks')
95 origin_repo.config.clear_section('hooks')
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
98
98
99 # Verify test fixture setup
99 # Verify test fixture setup
100 # This does not work for git
100 # This does not work for git
101 if backend.alias != 'git':
101 if backend.alias != 'git':
102 assert 5 == len(fork.scm_instance().commit_ids)
102 assert 5 == len(fork.scm_instance().commit_ids)
103 assert 2 == len(origin_repo.commit_ids)
103 assert 2 == len(origin_repo.commit_ids)
104
104
105 # Comparing the revisions
105 # Comparing the revisions
106 response = self.app.get(
106 response = self.app.get(
107 route_path('repo_compare',
107 route_path('repo_compare',
108 repo_name=origin.repo_name,
108 repo_name=origin.repo_name,
109 source_ref_type="rev", source_ref=commit3.raw_id,
109 source_ref_type="rev", source_ref=commit3.raw_id,
110 target_ref_type="rev", target_ref=commit4.raw_id,
110 target_ref_type="rev", target_ref=commit4.raw_id,
111 params=dict(merge='1', target_repo=fork.repo_name)
111 params=dict(merge='1', target_repo=fork.repo_name)
112 ))
112 ))
113
113
114 compare_page = ComparePage(response)
114 compare_page = ComparePage(response)
115 compare_page.contains_commits([commit4])
115 compare_page.contains_commits([commit4])
116
116
117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
118 def test_compare_forks_on_branch_extra_commits(self, backend):
118 def test_compare_forks_on_branch_extra_commits(self, backend):
119 repo1 = backend.create_repo()
119 repo1 = backend.create_repo()
120
120
121 # commit something !
121 # commit something !
122 commit0 = commit_change(
122 commit0 = commit_change(
123 repo1.repo_name, filename='file1', content='line1\n',
123 repo1.repo_name, filename='file1', content='line1\n',
124 message='commit1', vcs_type=backend.alias, parent=None,
124 message='commit1', vcs_type=backend.alias, parent=None,
125 newfile=True)
125 newfile=True)
126
126
127 # fork this repo
127 # fork this repo
128 repo2 = backend.create_fork()
128 repo2 = backend.create_fork()
129
129
130 # add two extra commit into fork
130 # add two extra commit into fork
131 commit1 = commit_change(
131 commit1 = commit_change(
132 repo2.repo_name, filename='file1', content='line1\nline2\n',
132 repo2.repo_name, filename='file1', content='line1\nline2\n',
133 message='commit2', vcs_type=backend.alias, parent=commit0)
133 message='commit2', vcs_type=backend.alias, parent=commit0)
134
134
135 commit2 = commit_change(
135 commit2 = commit_change(
136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
137 message='commit3', vcs_type=backend.alias, parent=commit1)
137 message='commit3', vcs_type=backend.alias, parent=commit1)
138
138
139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
141
141
142 response = self.app.get(
142 response = self.app.get(
143 route_path('repo_compare',
143 route_path('repo_compare',
144 repo_name=repo1.repo_name,
144 repo_name=repo1.repo_name,
145 source_ref_type="branch", source_ref=commit_id2,
145 source_ref_type="branch", source_ref=commit_id2,
146 target_ref_type="branch", target_ref=commit_id1,
146 target_ref_type="branch", target_ref=commit_id1,
147 params=dict(merge='1', target_repo=repo2.repo_name)
147 params=dict(merge='1', target_repo=repo2.repo_name)
148 ))
148 ))
149
149
150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
152
152
153 compare_page = ComparePage(response)
153 compare_page = ComparePage(response)
154 compare_page.contains_change_summary(1, 2, 0)
154 compare_page.contains_change_summary(1, 2, 0)
155 compare_page.contains_commits([commit1, commit2])
155 compare_page.contains_commits([commit1, commit2])
156
156
157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
159
159
160 # Swap is removed when comparing branches since it's a PR feature and
160 # Swap is removed when comparing branches since it's a PR feature and
161 # it is then a preview mode
161 # it is then a preview mode
162 compare_page.swap_is_hidden()
162 compare_page.swap_is_hidden()
163 compare_page.target_source_are_disabled()
163 compare_page.target_source_are_disabled()
164
164
165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
167 repo1 = backend.create_repo()
167 repo1 = backend.create_repo()
168
168
169 # commit something !
169 # commit something !
170 commit0 = commit_change(
170 commit0 = commit_change(
171 repo1.repo_name, filename='file1', content='line1\n',
171 repo1.repo_name, filename='file1', content='line1\n',
172 message='commit1', vcs_type=backend.alias, parent=None,
172 message='commit1', vcs_type=backend.alias, parent=None,
173 newfile=True)
173 newfile=True)
174
174
175 # fork this repo
175 # fork this repo
176 repo2 = backend.create_fork()
176 repo2 = backend.create_fork()
177
177
178 # now commit something to origin repo
178 # now commit something to origin repo
179 commit_change(
179 commit_change(
180 repo1.repo_name, filename='file2', content='line1file2\n',
180 repo1.repo_name, filename='file2', content='line1file2\n',
181 message='commit2', vcs_type=backend.alias, parent=commit0,
181 message='commit2', vcs_type=backend.alias, parent=commit0,
182 newfile=True)
182 newfile=True)
183
183
184 # add two extra commit into fork
184 # add two extra commit into fork
185 commit1 = commit_change(
185 commit1 = commit_change(
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
187 message='commit2', vcs_type=backend.alias, parent=commit0)
187 message='commit2', vcs_type=backend.alias, parent=commit0)
188
188
189 commit2 = commit_change(
189 commit2 = commit_change(
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
191 message='commit3', vcs_type=backend.alias, parent=commit1)
191 message='commit3', vcs_type=backend.alias, parent=commit1)
192
192
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
195
195
196 response = self.app.get(
196 response = self.app.get(
197 route_path('repo_compare',
197 route_path('repo_compare',
198 repo_name=repo1.repo_name,
198 repo_name=repo1.repo_name,
199 source_ref_type="branch", source_ref=commit_id2,
199 source_ref_type="branch", source_ref=commit_id2,
200 target_ref_type="branch", target_ref=commit_id1,
200 target_ref_type="branch", target_ref=commit_id1,
201 params=dict(merge='1', target_repo=repo2.repo_name),
201 params=dict(merge='1', target_repo=repo2.repo_name),
202 ))
202 ))
203
203
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
206
206
207 compare_page = ComparePage(response)
207 compare_page = ComparePage(response)
208 compare_page.contains_change_summary(1, 2, 0)
208 compare_page.contains_change_summary(1, 2, 0)
209 compare_page.contains_commits([commit1, commit2])
209 compare_page.contains_commits([commit1, commit2])
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
212
212
213 # Swap is removed when comparing branches since it's a PR feature and
213 # Swap is removed when comparing branches since it's a PR feature and
214 # it is then a preview mode
214 # it is then a preview mode
215 compare_page.swap_is_hidden()
215 compare_page.swap_is_hidden()
216 compare_page.target_source_are_disabled()
216 compare_page.target_source_are_disabled()
217
217
218 @pytest.mark.xfail_backends("svn")
218 @pytest.mark.xfail_backends("svn")
219 # TODO(marcink): no svn support for compare two seperate repos
219 # TODO(marcink): no svn support for compare two seperate repos
220 def test_compare_of_unrelated_forks(self, backend):
220 def test_compare_of_unrelated_forks(self, backend):
221 orig = backend.create_repo(number_of_commits=1)
221 orig = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
223
223
224 response = self.app.get(
224 response = self.app.get(
225 route_path('repo_compare',
225 route_path('repo_compare',
226 repo_name=orig.repo_name,
226 repo_name=orig.repo_name,
227 source_ref_type="rev", source_ref="tip",
227 source_ref_type="rev", source_ref="tip",
228 target_ref_type="rev", target_ref="tip",
228 target_ref_type="rev", target_ref="tip",
229 params=dict(merge='1', target_repo=fork.repo_name),
229 params=dict(merge='1', target_repo=fork.repo_name),
230 ),
230 ),
231 status=302)
231 status=302)
232 response = response.follow()
232 response = response.follow()
233 response.mustcontain("Repositories unrelated.")
233 response.mustcontain("Repositories unrelated.")
234
234
235 @pytest.mark.xfail_backends("svn")
235 @pytest.mark.xfail_backends("svn")
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
237
237
238 # repo1:
238 # repo1:
239 # commit0:
239 # commit0:
240 # commit1:
240 # commit1:
241 # repo1-fork- in which we will cherry pick bottom commits
241 # repo1-fork- in which we will cherry pick bottom commits
242 # commit0:
242 # commit0:
243 # commit1:
243 # commit1:
244 # commit2: x
244 # commit2: x
245 # commit3: x
245 # commit3: x
246 # commit4: x
246 # commit4: x
247 # commit5:
247 # commit5:
248 # make repo1, and commit1+commit2
248 # make repo1, and commit1+commit2
249
249
250 repo1 = backend.create_repo()
250 repo1 = backend.create_repo()
251
251
252 # commit something !
252 # commit something !
253 commit0 = commit_change(
253 commit0 = commit_change(
254 repo1.repo_name, filename='file1', content='line1\n',
254 repo1.repo_name, filename='file1', content='line1\n',
255 message='commit1', vcs_type=backend.alias, parent=None,
255 message='commit1', vcs_type=backend.alias, parent=None,
256 newfile=True)
256 newfile=True)
257 commit1 = commit_change(
257 commit1 = commit_change(
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
259 message='commit2', vcs_type=backend.alias, parent=commit0)
259 message='commit2', vcs_type=backend.alias, parent=commit0)
260
260
261 # fork this repo
261 # fork this repo
262 repo2 = backend.create_fork()
262 repo2 = backend.create_fork()
263
263
264 # now make commit3-6
264 # now make commit3-6
265 commit2 = commit_change(
265 commit2 = commit_change(
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
267 message='commit3', vcs_type=backend.alias, parent=commit1)
267 message='commit3', vcs_type=backend.alias, parent=commit1)
268 commit3 = commit_change(
268 commit3 = commit_change(
269 repo1.repo_name, filename='file1',
269 repo1.repo_name, filename='file1',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
271 vcs_type=backend.alias, parent=commit2)
271 vcs_type=backend.alias, parent=commit2)
272 commit4 = commit_change(
272 commit4 = commit_change(
273 repo1.repo_name, filename='file1',
273 repo1.repo_name, filename='file1',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
275 vcs_type=backend.alias, parent=commit3)
275 vcs_type=backend.alias, parent=commit3)
276 commit_change( # commit 5
276 commit_change( # commit 5
277 repo1.repo_name, filename='file1',
277 repo1.repo_name, filename='file1',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
279 message='commit6', vcs_type=backend.alias, parent=commit4)
279 message='commit6', vcs_type=backend.alias, parent=commit4)
280
280
281 response = self.app.get(
281 response = self.app.get(
282 route_path('repo_compare',
282 route_path('repo_compare',
283 repo_name=repo2.repo_name,
283 repo_name=repo2.repo_name,
284 # parent of commit2, in target repo2
284 # parent of commit2, in target repo2
285 source_ref_type="rev", source_ref=commit1.raw_id,
285 source_ref_type="rev", source_ref=commit1.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
287 params=dict(merge='1', target_repo=repo1.repo_name),
287 params=dict(merge='1', target_repo=repo1.repo_name),
288 ))
288 ))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
291
291
292 # files
292 # files
293 compare_page = ComparePage(response)
293 compare_page = ComparePage(response)
294 compare_page.contains_change_summary(1, 3, 0)
294 compare_page.contains_change_summary(1, 3, 0)
295 compare_page.contains_commits([commit2, commit3, commit4])
295 compare_page.contains_commits([commit2, commit3, commit4])
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
298
298
299 @pytest.mark.xfail_backends("svn")
299 @pytest.mark.xfail_backends("svn")
300 def test_compare_cherry_pick_commits_from_top(self, backend):
300 def test_compare_cherry_pick_commits_from_top(self, backend):
301 # repo1:
301 # repo1:
302 # commit0:
302 # commit0:
303 # commit1:
303 # commit1:
304 # repo1-fork- in which we will cherry pick bottom commits
304 # repo1-fork- in which we will cherry pick bottom commits
305 # commit0:
305 # commit0:
306 # commit1:
306 # commit1:
307 # commit2:
307 # commit2:
308 # commit3: x
308 # commit3: x
309 # commit4: x
309 # commit4: x
310 # commit5: x
310 # commit5: x
311
311
312 # make repo1, and commit1+commit2
312 # make repo1, and commit1+commit2
313 repo1 = backend.create_repo()
313 repo1 = backend.create_repo()
314
314
315 # commit something !
315 # commit something !
316 commit0 = commit_change(
316 commit0 = commit_change(
317 repo1.repo_name, filename='file1', content='line1\n',
317 repo1.repo_name, filename='file1', content='line1\n',
318 message='commit1', vcs_type=backend.alias, parent=None,
318 message='commit1', vcs_type=backend.alias, parent=None,
319 newfile=True)
319 newfile=True)
320 commit1 = commit_change(
320 commit1 = commit_change(
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
322 message='commit2', vcs_type=backend.alias, parent=commit0)
322 message='commit2', vcs_type=backend.alias, parent=commit0)
323
323
324 # fork this repo
324 # fork this repo
325 backend.create_fork()
325 backend.create_fork()
326
326
327 # now make commit3-6
327 # now make commit3-6
328 commit2 = commit_change(
328 commit2 = commit_change(
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
330 message='commit3', vcs_type=backend.alias, parent=commit1)
330 message='commit3', vcs_type=backend.alias, parent=commit1)
331 commit3 = commit_change(
331 commit3 = commit_change(
332 repo1.repo_name, filename='file1',
332 repo1.repo_name, filename='file1',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
334 vcs_type=backend.alias, parent=commit2)
334 vcs_type=backend.alias, parent=commit2)
335 commit4 = commit_change(
335 commit4 = commit_change(
336 repo1.repo_name, filename='file1',
336 repo1.repo_name, filename='file1',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
338 vcs_type=backend.alias, parent=commit3)
338 vcs_type=backend.alias, parent=commit3)
339 commit5 = commit_change(
339 commit5 = commit_change(
340 repo1.repo_name, filename='file1',
340 repo1.repo_name, filename='file1',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
342 message='commit6', vcs_type=backend.alias, parent=commit4)
342 message='commit6', vcs_type=backend.alias, parent=commit4)
343
343
344 response = self.app.get(
344 response = self.app.get(
345 route_path('repo_compare',
345 route_path('repo_compare',
346 repo_name=repo1.repo_name,
346 repo_name=repo1.repo_name,
347 # parent of commit3, not in source repo2
347 # parent of commit3, not in source repo2
348 source_ref_type="rev", source_ref=commit2.raw_id,
348 source_ref_type="rev", source_ref=commit2.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
350 params=dict(merge='1'),))
350 params=dict(merge='1'),))
351
351
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
354
354
355 compare_page = ComparePage(response)
355 compare_page = ComparePage(response)
356 compare_page.contains_change_summary(1, 3, 0)
356 compare_page.contains_change_summary(1, 3, 0)
357 compare_page.contains_commits([commit3, commit4, commit5])
357 compare_page.contains_commits([commit3, commit4, commit5])
358
358
359 # files
359 # files
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
362
362
363 @pytest.mark.xfail_backends("svn")
363 @pytest.mark.xfail_backends("svn")
364 def test_compare_remote_branches(self, backend):
364 def test_compare_remote_branches(self, backend):
365 repo1 = backend.repo
365 repo1 = backend.repo
366 repo2 = backend.create_fork()
366 repo2 = backend.create_fork()
367
367
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
372
372
373 response = self.app.get(
373 response = self.app.get(
374 route_path('repo_compare',
374 route_path('repo_compare',
375 repo_name=repo1.repo_name,
375 repo_name=repo1.repo_name,
376 source_ref_type="rev", source_ref=commit_id1,
376 source_ref_type="rev", source_ref=commit_id1,
377 target_ref_type="rev", target_ref=commit_id2,
377 target_ref_type="rev", target_ref=commit_id2,
378 params=dict(merge='1', target_repo=repo2.repo_name),
378 params=dict(merge='1', target_repo=repo2.repo_name),
379 ))
379 ))
380
380
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383
383
384 compare_page = ComparePage(response)
384 compare_page = ComparePage(response)
385
385
386 # outgoing commits between those commits
386 # outgoing commits between those commits
387 compare_page.contains_commits(
387 compare_page.contains_commits(
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389
389
390 # files
390 # files
391 compare_page.contains_file_links_and_anchors([
391 compare_page.contains_file_links_and_anchors([
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
395 ])
395 ])
396
396
397 @pytest.mark.xfail_backends("svn")
397 @pytest.mark.xfail_backends("svn")
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 repo1 = backend.create_repo()
399 repo1 = backend.create_repo()
400 r1_name = repo1.repo_name
400 r1_name = repo1.repo_name
401
401
402 commit0 = commit_change(
402 commit0 = commit_change(
403 repo=r1_name, filename='file1',
403 repo=r1_name, filename='file1',
404 content='line1', message='commit1', vcs_type=backend.alias,
404 content='line1', message='commit1', vcs_type=backend.alias,
405 newfile=True)
405 newfile=True)
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407
407
408 # fork the repo1
408 # fork the repo1
409 repo2 = backend.create_fork()
409 repo2 = backend.create_fork()
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411
411
412 self.r2_id = repo2.repo_id
412 self.r2_id = repo2.repo_id
413 r2_name = repo2.repo_name
413 r2_name = repo2.repo_name
414
414
415 commit1 = commit_change(
415 commit1 = commit_change(
416 repo=r2_name, filename='file1-fork',
416 repo=r2_name, filename='file1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 newfile=True)
419 newfile=True)
420
420
421 commit2 = commit_change(
421 commit2 = commit_change(
422 repo=r2_name, filename='file2-fork',
422 repo=r2_name, filename='file2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
424 vcs_type=backend.alias, parent=commit1,
424 vcs_type=backend.alias, parent=commit1,
425 newfile=True)
425 newfile=True)
426
426
427 commit_change( # commit 3
427 commit_change( # commit 3
428 repo=r2_name, filename='file3-fork',
428 repo=r2_name, filename='file3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
430 vcs_type=backend.alias, parent=commit2, newfile=True)
430 vcs_type=backend.alias, parent=commit2, newfile=True)
431
431
432 # compare !
432 # compare !
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435
435
436 response = self.app.get(
436 response = self.app.get(
437 route_path('repo_compare',
437 route_path('repo_compare',
438 repo_name=r2_name,
438 repo_name=r2_name,
439 source_ref_type="branch", source_ref=commit_id1,
439 source_ref_type="branch", source_ref=commit_id1,
440 target_ref_type="branch", target_ref=commit_id2,
440 target_ref_type="branch", target_ref=commit_id2,
441 params=dict(merge='1', target_repo=r1_name),
441 params=dict(merge='1', target_repo=r1_name),
442 ))
442 ))
443
443
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
446 response.mustcontain('No files')
446 response.mustcontain('No files')
447 response.mustcontain('No commits in this compare')
447 response.mustcontain('No commits in this compare')
448
448
449 commit0 = commit_change(
449 commit0 = commit_change(
450 repo=r1_name, filename='file2',
450 repo=r1_name, filename='file2',
451 content='line1-added-after-fork', message='commit2-parent',
451 content='line1-added-after-fork', message='commit2-parent',
452 vcs_type=backend.alias, parent=None, newfile=True)
452 vcs_type=backend.alias, parent=None, newfile=True)
453
453
454 # compare !
454 # compare !
455 response = self.app.get(
455 response = self.app.get(
456 route_path('repo_compare',
456 route_path('repo_compare',
457 repo_name=r2_name,
457 repo_name=r2_name,
458 source_ref_type="branch", source_ref=commit_id1,
458 source_ref_type="branch", source_ref=commit_id1,
459 target_ref_type="branch", target_ref=commit_id2,
459 target_ref_type="branch", target_ref=commit_id2,
460 params=dict(merge='1', target_repo=r1_name),
460 params=dict(merge='1', target_repo=r1_name),
461 ))
461 ))
462
462
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
465
465
466 response.mustcontain("""commit2-parent""")
466 response.mustcontain("""commit2-parent""")
467 response.mustcontain("""line1-added-after-fork""")
467 response.mustcontain("""line1-added-after-fork""")
468 compare_page = ComparePage(response)
468 compare_page = ComparePage(response)
469 compare_page.contains_change_summary(1, 1, 0)
469 compare_page.contains_change_summary(1, 1, 0)
470
470
471 @pytest.mark.xfail_backends("svn")
471 @pytest.mark.xfail_backends("svn")
472 def test_compare_commits(self, backend, xhr_header):
472 def test_compare_commits(self, backend, xhr_header):
473 commit0 = backend.repo.get_commit(commit_idx=0)
473 commit0 = backend.repo.get_commit(commit_idx=0)
474 commit1 = backend.repo.get_commit(commit_idx=1)
474 commit1 = backend.repo.get_commit(commit_idx=1)
475
475
476 response = self.app.get(
476 response = self.app.get(
477 route_path('repo_compare',
477 route_path('repo_compare',
478 repo_name=backend.repo_name,
478 repo_name=backend.repo_name,
479 source_ref_type="rev", source_ref=commit0.raw_id,
479 source_ref_type="rev", source_ref=commit0.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
481 params=dict(merge='1')
481 params=dict(merge='1')
482 ),
482 ),
483 extra_environ=xhr_header, )
483 extra_environ=xhr_header, )
484
484
485 # outgoing commits between those commits
485 # outgoing commits between those commits
486 compare_page = ComparePage(response)
486 compare_page = ComparePage(response)
487 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
487 compare_page.contains_commits(commits=[commit1])
488
488
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
490 repo = backend.repo
490 repo = backend.repo
491 badrepo = 'badrepo'
491 badrepo = 'badrepo'
492
492
493 response = self.app.get(
493 response = self.app.get(
494 route_path('repo_compare',
494 route_path('repo_compare',
495 repo_name=badrepo,
495 repo_name=badrepo,
496 source_ref_type="rev", source_ref='tip',
496 source_ref_type="rev", source_ref='tip',
497 target_ref_type="rev", target_ref='tip',
497 target_ref_type="rev", target_ref='tip',
498 params=dict(merge='1', target_repo=repo.repo_name)
498 params=dict(merge='1', target_repo=repo.repo_name)
499 ),
499 ),
500 status=404)
500 status=404)
501
501
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
503 repo = backend.repo
503 repo = backend.repo
504 badrepo = 'badrepo'
504 badrepo = 'badrepo'
505
505
506 response = self.app.get(
506 response = self.app.get(
507 route_path('repo_compare',
507 route_path('repo_compare',
508 repo_name=repo.repo_name,
508 repo_name=repo.repo_name,
509 source_ref_type="rev", source_ref='tip',
509 source_ref_type="rev", source_ref='tip',
510 target_ref_type="rev", target_ref='tip',
510 target_ref_type="rev", target_ref='tip',
511 params=dict(merge='1', target_repo=badrepo),
511 params=dict(merge='1', target_repo=badrepo),
512 ),
512 ),
513 status=302)
513 status=302)
514 redirected = response.follow()
514 redirected = response.follow()
515 redirected.mustcontain(
515 redirected.mustcontain(
516 'Could not find the target repo: `{}`'.format(badrepo))
516 'Could not find the target repo: `{}`'.format(badrepo))
517
517
518 def test_compare_not_in_preview_mode(self, backend_stub):
518 def test_compare_not_in_preview_mode(self, backend_stub):
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
521
521
522 response = self.app.get(
522 response = self.app.get(
523 route_path('repo_compare',
523 route_path('repo_compare',
524 repo_name=backend_stub.repo_name,
524 repo_name=backend_stub.repo_name,
525 source_ref_type="rev", source_ref=commit0.raw_id,
525 source_ref_type="rev", source_ref=commit0.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
527 ))
527 ))
528
528
529 # outgoing commits between those commits
529 # outgoing commits between those commits
530 compare_page = ComparePage(response)
530 compare_page = ComparePage(response)
531 compare_page.swap_is_visible()
531 compare_page.swap_is_visible()
532 compare_page.target_source_are_enabled()
532 compare_page.target_source_are_enabled()
533
533
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
535 orig = backend_hg.create_repo(number_of_commits=1)
535 orig = backend_hg.create_repo(number_of_commits=1)
536 fork = backend_hg.create_fork()
536 fork = backend_hg.create_fork()
537
537
538 settings_util.create_repo_rhodecode_ui(
538 settings_util.create_repo_rhodecode_ui(
539 orig, 'extensions', value='', key='largefiles', active=False)
539 orig, 'extensions', value='', key='largefiles', active=False)
540 settings_util.create_repo_rhodecode_ui(
540 settings_util.create_repo_rhodecode_ui(
541 fork, 'extensions', value='', key='largefiles', active=True)
541 fork, 'extensions', value='', key='largefiles', active=True)
542
542
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
544 'MercurialRepository.compare')
544 'MercurialRepository.compare')
545 with mock.patch(compare_module) as compare_mock:
545 with mock.patch(compare_module) as compare_mock:
546 compare_mock.side_effect = RepositoryRequirementError()
546 compare_mock.side_effect = RepositoryRequirementError()
547
547
548 response = self.app.get(
548 response = self.app.get(
549 route_path('repo_compare',
549 route_path('repo_compare',
550 repo_name=orig.repo_name,
550 repo_name=orig.repo_name,
551 source_ref_type="rev", source_ref="tip",
551 source_ref_type="rev", source_ref="tip",
552 target_ref_type="rev", target_ref="tip",
552 target_ref_type="rev", target_ref="tip",
553 params=dict(merge='1', target_repo=fork.repo_name),
553 params=dict(merge='1', target_repo=fork.repo_name),
554 ),
554 ),
555 status=302)
555 status=302)
556
556
557 assert_session_flash(
557 assert_session_flash(
558 response,
558 response,
559 'Could not compare repos with different large file settings')
559 'Could not compare repos with different large file settings')
560
560
561
561
562 @pytest.mark.usefixtures("autologin_user")
562 @pytest.mark.usefixtures("autologin_user")
563 class TestCompareControllerSvn(object):
563 class TestCompareControllerSvn(object):
564
564
565 def test_supports_references_with_path(self, app, backend_svn):
565 def test_supports_references_with_path(self, app, backend_svn):
566 repo = backend_svn['svn-simple-layout']
566 repo = backend_svn['svn-simple-layout']
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
568 response = app.get(
568 response = app.get(
569 route_path('repo_compare',
569 route_path('repo_compare',
570 repo_name=repo.repo_name,
570 repo_name=repo.repo_name,
571 source_ref_type="tag",
571 source_ref_type="tag",
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
573 target_ref_type="tag",
573 target_ref_type="tag",
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
575 params=dict(merge='1'),
575 params=dict(merge='1'),
576 ),
576 ),
577 status=200)
577 status=200)
578
578
579 # Expecting no commits, since both paths are at the same revision
579 # Expecting no commits, since both paths are at the same revision
580 response.mustcontain('No commits in this compare')
580 response.mustcontain('No commits in this compare')
581
581
582 # Should find only one file changed when comparing those two tags
582 # Should find only one file changed when comparing those two tags
583 response.mustcontain('example.py')
583 response.mustcontain('example.py')
584 compare_page = ComparePage(response)
584 compare_page = ComparePage(response)
585 compare_page.contains_change_summary(1, 5, 1)
585 compare_page.contains_change_summary(1, 5, 1)
586
586
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
588 repo = backend_svn['svn-simple-layout']
588 repo = backend_svn['svn-simple-layout']
589 source_id = repo.get_commit(commit_idx=-6).raw_id
589 source_id = repo.get_commit(commit_idx=-6).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
591 response = app.get(
591 response = app.get(
592 route_path('repo_compare',
592 route_path('repo_compare',
593 repo_name=repo.repo_name,
593 repo_name=repo.repo_name,
594 source_ref_type="tag",
594 source_ref_type="tag",
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
596 target_ref_type="tag",
596 target_ref_type="tag",
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
598 params=dict(merge='1')
598 params=dict(merge='1')
599 ),
599 ),
600 status=200)
600 status=200)
601
601
602 # It should show commits
602 # It should show commits
603 assert 'No commits in this compare' not in response.body
603 assert 'No commits in this compare' not in response.body
604
604
605 # Should find only one file changed when comparing those two tags
605 # Should find only one file changed when comparing those two tags
606 response.mustcontain('example.py')
606 response.mustcontain('example.py')
607 compare_page = ComparePage(response)
607 compare_page = ComparePage(response)
608 compare_page.contains_change_summary(1, 5, 1)
608 compare_page.contains_change_summary(1, 5, 1)
609
609
610
610
611 class ComparePage(AssertResponse):
611 class ComparePage(AssertResponse):
612 """
612 """
613 Abstracts the page template from the tests
613 Abstracts the page template from the tests
614 """
614 """
615
615
616 def contains_file_links_and_anchors(self, files):
616 def contains_file_links_and_anchors(self, files):
617 doc = lxml.html.fromstring(self.response.body)
617 doc = lxml.html.fromstring(self.response.body)
618 for filename, file_id in files:
618 for filename, file_id in files:
619 self.contains_one_anchor(file_id)
619 self.contains_one_anchor(file_id)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
621 assert len(diffblock) == 2
621 assert len(diffblock) == 2
622 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
622 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
623
623
624 def contains_change_summary(self, files_changed, inserted, deleted):
624 def contains_change_summary(self, files_changed, inserted, deleted):
625 template = (
625 template = (
626 '{files_changed} file{plural} changed: '
626 '{files_changed} file{plural} changed: '
627 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
627 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
628 self.response.mustcontain(template.format(
628 self.response.mustcontain(template.format(
629 files_changed=files_changed,
629 files_changed=files_changed,
630 plural="s" if files_changed > 1 else "",
630 plural="s" if files_changed > 1 else "",
631 inserted=inserted,
631 inserted=inserted,
632 deleted=deleted))
632 deleted=deleted))
633
633
634 def contains_commits(self, commits, ancestors=None):
634 def contains_commits(self, commits, ancestors=None):
635 response = self.response
635 response = self.response
636
636
637 for commit in commits:
637 for commit in commits:
638 # Expecting to see the commit message in an element which
638 # Expecting to see the commit message in an element which
639 # has the ID "c-{commit.raw_id}"
639 # has the ID "c-{commit.raw_id}"
640 self.element_contains('#c-' + commit.raw_id, commit.message)
640 self.element_contains('#c-' + commit.raw_id, commit.message)
641 self.contains_one_link(
641 self.contains_one_link(
642 'r%s:%s' % (commit.idx, commit.short_id),
642 'r%s:%s' % (commit.idx, commit.short_id),
643 self._commit_url(commit))
643 self._commit_url(commit))
644
644 if ancestors:
645 if ancestors:
645 response.mustcontain('Ancestor')
646 response.mustcontain('Ancestor')
646 for ancestor in ancestors:
647 for ancestor in ancestors:
647 self.contains_one_link(
648 self.contains_one_link(
648 ancestor.short_id, self._commit_url(ancestor))
649 ancestor.short_id, self._commit_url(ancestor))
649
650
650 def _commit_url(self, commit):
651 def _commit_url(self, commit):
651 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
652 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
652
653
653 def swap_is_hidden(self):
654 def swap_is_hidden(self):
654 assert '<a id="btn-swap"' not in self.response.text
655 assert '<a id="btn-swap"' not in self.response.text
655
656
656 def swap_is_visible(self):
657 def swap_is_visible(self):
657 assert '<a id="btn-swap"' in self.response.text
658 assert '<a id="btn-swap"' in self.response.text
658
659
659 def target_source_are_disabled(self):
660 def target_source_are_disabled(self):
660 response = self.response
661 response = self.response
661 response.mustcontain("var enable_fields = false;")
662 response.mustcontain("var enable_fields = false;")
662 response.mustcontain('.select2("enable", enable_fields)')
663 response.mustcontain('.select2("enable", enable_fields)')
663
664
664 def target_source_are_enabled(self):
665 def target_source_are_enabled(self):
665 response = self.response
666 response = self.response
666 response.mustcontain("var enable_fields = true;")
667 response.mustcontain("var enable_fields = true;")
@@ -1,79 +1,87 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from rhodecode.lib import helpers as h
21 from rhodecode.lib import helpers as h
22 from rhodecode.lib.utils2 import safe_int
22 from rhodecode.lib.utils2 import safe_int
23 from rhodecode.model.pull_request import get_diff_info
24
25 REVIEWER_API_VERSION = 'V3'
23
26
24
27
25 def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
28 def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
26 """
29 """
27 Returns json struct of a reviewer for frontend
30 Returns json struct of a reviewer for frontend
28
31
29 :param user: the reviewer
32 :param user: the reviewer
30 :param reasons: list of strings of why they are reviewers
33 :param reasons: list of strings of why they are reviewers
31 :param mandatory: bool, to set user as mandatory
34 :param mandatory: bool, to set user as mandatory
32 """
35 """
33
36
34 return {
37 return {
35 'user_id': user.user_id,
38 'user_id': user.user_id,
36 'reasons': reasons or [],
39 'reasons': reasons or [],
37 'rules': rules or [],
40 'rules': rules or [],
38 'mandatory': mandatory,
41 'mandatory': mandatory,
39 'user_group': user_group,
42 'user_group': user_group,
40 'username': user.username,
43 'username': user.username,
41 'first_name': user.first_name,
44 'first_name': user.first_name,
42 'last_name': user.last_name,
45 'last_name': user.last_name,
43 'user_link': h.link_to_user(user),
46 'user_link': h.link_to_user(user),
44 'gravatar_link': h.gravatar_url(user.email, 14),
47 'gravatar_link': h.gravatar_url(user.email, 14),
45 }
48 }
46
49
47
50
48 def get_default_reviewers_data(
51 def get_default_reviewers_data(
49 current_user, source_repo, source_commit, target_repo, target_commit):
52 current_user, source_repo, source_commit, target_repo, target_commit):
53 """
54 Return json for default reviewers of a repository
55 """
50
56
51 """ Return json for default reviewers of a repository """
57 diff_info = get_diff_info(
58 source_repo, source_commit.raw_id, target_repo, target_commit.raw_id)
52
59
53 reasons = ['Default reviewer', 'Repository owner']
60 reasons = ['Default reviewer', 'Repository owner']
54 json_reviewers = [reviewer_as_json(
61 json_reviewers = [reviewer_as_json(
55 user=target_repo.user, reasons=reasons, mandatory=False, rules=None)]
62 user=target_repo.user, reasons=reasons, mandatory=False, rules=None)]
56
63
57 return {
64 return {
58 'api_ver': 'v1', # define version for later possible schema upgrade
65 'api_ver': REVIEWER_API_VERSION, # define version for later possible schema upgrade
66 'diff_info': diff_info,
59 'reviewers': json_reviewers,
67 'reviewers': json_reviewers,
60 'rules': {},
68 'rules': {},
61 'rules_data': {},
69 'rules_data': {},
62 }
70 }
63
71
64
72
65 def validate_default_reviewers(review_members, reviewer_rules):
73 def validate_default_reviewers(review_members, reviewer_rules):
66 """
74 """
67 Function to validate submitted reviewers against the saved rules
75 Function to validate submitted reviewers against the saved rules
68
76
69 """
77 """
70 reviewers = []
78 reviewers = []
71 reviewer_by_id = {}
79 reviewer_by_id = {}
72 for r in review_members:
80 for r in review_members:
73 reviewer_user_id = safe_int(r['user_id'])
81 reviewer_user_id = safe_int(r['user_id'])
74 entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['rules'])
82 entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['rules'])
75
83
76 reviewer_by_id[reviewer_user_id] = entry
84 reviewer_by_id[reviewer_user_id] = entry
77 reviewers.append(entry)
85 reviewers.append(entry)
78
86
79 return reviewers
87 return reviewers
@@ -1,1512 +1,1520 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 NotAnonymous, CSRFRequired)
40 NotAnonymous, CSRFRequired)
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
43 from rhodecode.lib.vcs.exceptions import (
44 RepositoryRequirementError, EmptyRepositoryError)
44 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
47 from rhodecode.model.db import (
48 ChangesetComment, ChangesetStatus, Repository)
48 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository)
49 from rhodecode.model.forms import PullRequestForm
49 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.meta import Session
50 from rhodecode.model.meta import Session
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58
58
59 def load_default_context(self):
59 def load_default_context(self):
60 c = self._get_local_tmpl_context(include_app_defaults=True)
60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 # backward compat., we use for OLD PRs a plain renderer
63 # backward compat., we use for OLD PRs a plain renderer
64 c.renderer = 'plain'
64 c.renderer = 'plain'
65 return c
65 return c
66
66
67 def _get_pull_requests_list(
67 def _get_pull_requests_list(
68 self, repo_name, source, filter_type, opened_by, statuses):
68 self, repo_name, source, filter_type, opened_by, statuses):
69
69
70 draw, start, limit = self._extract_chunk(self.request)
70 draw, start, limit = self._extract_chunk(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 _render = self.request.get_partial_renderer(
72 _render = self.request.get_partial_renderer(
73 'rhodecode:templates/data_table/_dt_elements.mako')
73 'rhodecode:templates/data_table/_dt_elements.mako')
74
74
75 # pagination
75 # pagination
76
76
77 if filter_type == 'awaiting_review':
77 if filter_type == 'awaiting_review':
78 pull_requests = PullRequestModel().get_awaiting_review(
78 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 statuses=statuses, offset=start, length=limit,
80 statuses=statuses, offset=start, length=limit,
81 order_by=order_by, order_dir=order_dir)
81 order_by=order_by, order_dir=order_dir)
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, search_q=search_q, source=source, statuses=statuses,
83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 opened_by=opened_by)
84 opened_by=opened_by)
85 elif filter_type == 'awaiting_my_review':
85 elif filter_type == 'awaiting_my_review':
86 pull_requests = PullRequestModel().get_awaiting_my_review(
86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 offset=start, length=limit, order_by=order_by,
89 offset=start, length=limit, order_by=order_by,
90 order_dir=order_dir)
90 order_dir=order_dir)
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 statuses=statuses, opened_by=opened_by)
93 statuses=statuses, opened_by=opened_by)
94 else:
94 else:
95 pull_requests = PullRequestModel().get_all(
95 pull_requests = PullRequestModel().get_all(
96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 statuses=statuses, offset=start, length=limit,
97 statuses=statuses, offset=start, length=limit,
98 order_by=order_by, order_dir=order_dir)
98 order_by=order_by, order_dir=order_dir)
99 pull_requests_total_count = PullRequestModel().count_all(
99 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, search_q=search_q, source=source, statuses=statuses,
100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 opened_by=opened_by)
101 opened_by=opened_by)
102
102
103 data = []
103 data = []
104 comments_model = CommentsModel()
104 comments_model = CommentsModel()
105 for pr in pull_requests:
105 for pr in pull_requests:
106 comments = comments_model.get_all_comments(
106 comments = comments_model.get_all_comments(
107 self.db_repo.repo_id, pull_request=pr)
107 self.db_repo.repo_id, pull_request=pr)
108
108
109 data.append({
109 data.append({
110 'name': _render('pullrequest_name',
110 'name': _render('pullrequest_name',
111 pr.pull_request_id, pr.pull_request_state,
111 pr.pull_request_id, pr.pull_request_state,
112 pr.work_in_progress, pr.target_repo.repo_name),
112 pr.work_in_progress, pr.target_repo.repo_name),
113 'name_raw': pr.pull_request_id,
113 'name_raw': pr.pull_request_id,
114 'status': _render('pullrequest_status',
114 'status': _render('pullrequest_status',
115 pr.calculated_review_status()),
115 pr.calculated_review_status()),
116 'title': _render('pullrequest_title', pr.title, pr.description),
116 'title': _render('pullrequest_title', pr.title, pr.description),
117 'description': h.escape(pr.description),
117 'description': h.escape(pr.description),
118 'updated_on': _render('pullrequest_updated_on',
118 'updated_on': _render('pullrequest_updated_on',
119 h.datetime_to_time(pr.updated_on)),
119 h.datetime_to_time(pr.updated_on)),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'created_on': _render('pullrequest_updated_on',
121 'created_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.created_on)),
122 h.datetime_to_time(pr.created_on)),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'state': pr.pull_request_state,
124 'state': pr.pull_request_state,
125 'author': _render('pullrequest_author',
125 'author': _render('pullrequest_author',
126 pr.author.full_contact, ),
126 pr.author.full_contact, ),
127 'author_raw': pr.author.full_name,
127 'author_raw': pr.author.full_name,
128 'comments': _render('pullrequest_comments', len(comments)),
128 'comments': _render('pullrequest_comments', len(comments)),
129 'comments_raw': len(comments),
129 'comments_raw': len(comments),
130 'closed': pr.is_closed(),
130 'closed': pr.is_closed(),
131 })
131 })
132
132
133 data = ({
133 data = ({
134 'draw': draw,
134 'draw': draw,
135 'data': data,
135 'data': data,
136 'recordsTotal': pull_requests_total_count,
136 'recordsTotal': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
138 })
138 })
139 return data
139 return data
140
140
141 @LoginRequired()
141 @LoginRequired()
142 @HasRepoPermissionAnyDecorator(
142 @HasRepoPermissionAnyDecorator(
143 'repository.read', 'repository.write', 'repository.admin')
143 'repository.read', 'repository.write', 'repository.admin')
144 @view_config(
144 @view_config(
145 route_name='pullrequest_show_all', request_method='GET',
145 route_name='pullrequest_show_all', request_method='GET',
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 def pull_request_list(self):
147 def pull_request_list(self):
148 c = self.load_default_context()
148 c = self.load_default_context()
149
149
150 req_get = self.request.GET
150 req_get = self.request.GET
151 c.source = str2bool(req_get.get('source'))
151 c.source = str2bool(req_get.get('source'))
152 c.closed = str2bool(req_get.get('closed'))
152 c.closed = str2bool(req_get.get('closed'))
153 c.my = str2bool(req_get.get('my'))
153 c.my = str2bool(req_get.get('my'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156
156
157 c.active = 'open'
157 c.active = 'open'
158 if c.my:
158 if c.my:
159 c.active = 'my'
159 c.active = 'my'
160 if c.closed:
160 if c.closed:
161 c.active = 'closed'
161 c.active = 'closed'
162 if c.awaiting_review and not c.source:
162 if c.awaiting_review and not c.source:
163 c.active = 'awaiting'
163 c.active = 'awaiting'
164 if c.source and not c.awaiting_review:
164 if c.source and not c.awaiting_review:
165 c.active = 'source'
165 c.active = 'source'
166 if c.awaiting_my_review:
166 if c.awaiting_my_review:
167 c.active = 'awaiting_my'
167 c.active = 'awaiting_my'
168
168
169 return self._get_template_context(c)
169 return self._get_template_context(c)
170
170
171 @LoginRequired()
171 @LoginRequired()
172 @HasRepoPermissionAnyDecorator(
172 @HasRepoPermissionAnyDecorator(
173 'repository.read', 'repository.write', 'repository.admin')
173 'repository.read', 'repository.write', 'repository.admin')
174 @view_config(
174 @view_config(
175 route_name='pullrequest_show_all_data', request_method='GET',
175 route_name='pullrequest_show_all_data', request_method='GET',
176 renderer='json_ext', xhr=True)
176 renderer='json_ext', xhr=True)
177 def pull_request_list_data(self):
177 def pull_request_list_data(self):
178 self.load_default_context()
178 self.load_default_context()
179
179
180 # additional filters
180 # additional filters
181 req_get = self.request.GET
181 req_get = self.request.GET
182 source = str2bool(req_get.get('source'))
182 source = str2bool(req_get.get('source'))
183 closed = str2bool(req_get.get('closed'))
183 closed = str2bool(req_get.get('closed'))
184 my = str2bool(req_get.get('my'))
184 my = str2bool(req_get.get('my'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187
187
188 filter_type = 'awaiting_review' if awaiting_review \
188 filter_type = 'awaiting_review' if awaiting_review \
189 else 'awaiting_my_review' if awaiting_my_review \
189 else 'awaiting_my_review' if awaiting_my_review \
190 else None
190 else None
191
191
192 opened_by = None
192 opened_by = None
193 if my:
193 if my:
194 opened_by = [self._rhodecode_user.user_id]
194 opened_by = [self._rhodecode_user.user_id]
195
195
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 if closed:
197 if closed:
198 statuses = [PullRequest.STATUS_CLOSED]
198 statuses = [PullRequest.STATUS_CLOSED]
199
199
200 data = self._get_pull_requests_list(
200 data = self._get_pull_requests_list(
201 repo_name=self.db_repo_name, source=source,
201 repo_name=self.db_repo_name, source=source,
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203
203
204 return data
204 return data
205
205
206 def _is_diff_cache_enabled(self, target_repo):
206 def _is_diff_cache_enabled(self, target_repo):
207 caching_enabled = self._get_general_setting(
207 caching_enabled = self._get_general_setting(
208 target_repo, 'rhodecode_diff_cache')
208 target_repo, 'rhodecode_diff_cache')
209 log.debug('Diff caching enabled: %s', caching_enabled)
209 log.debug('Diff caching enabled: %s', caching_enabled)
210 return caching_enabled
210 return caching_enabled
211
211
212 def _get_diffset(self, source_repo_name, source_repo,
212 def _get_diffset(self, source_repo_name, source_repo,
213 ancestor_commit,
213 source_ref_id, target_ref_id,
214 source_ref_id, target_ref_id,
214 target_commit, source_commit, diff_limit, file_limit,
215 target_commit, source_commit, diff_limit, file_limit,
215 fulldiff, hide_whitespace_changes, diff_context):
216 fulldiff, hide_whitespace_changes, diff_context):
216
217
218 target_ref_id = ancestor_commit.raw_id
217 vcs_diff = PullRequestModel().get_diff(
219 vcs_diff = PullRequestModel().get_diff(
218 source_repo, source_ref_id, target_ref_id,
220 source_repo, source_ref_id, target_ref_id,
219 hide_whitespace_changes, diff_context)
221 hide_whitespace_changes, diff_context)
220
222
221 diff_processor = diffs.DiffProcessor(
223 diff_processor = diffs.DiffProcessor(
222 vcs_diff, format='newdiff', diff_limit=diff_limit,
224 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 file_limit=file_limit, show_full_diff=fulldiff)
225 file_limit=file_limit, show_full_diff=fulldiff)
224
226
225 _parsed = diff_processor.prepare()
227 _parsed = diff_processor.prepare()
226
228
227 diffset = codeblocks.DiffSet(
229 diffset = codeblocks.DiffSet(
228 repo_name=self.db_repo_name,
230 repo_name=self.db_repo_name,
229 source_repo_name=source_repo_name,
231 source_repo_name=source_repo_name,
230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
232 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
233 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 )
234 )
233 diffset = self.path_filter.render_patchset_filtered(
235 diffset = self.path_filter.render_patchset_filtered(
234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
236 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235
237
236 return diffset
238 return diffset
237
239
238 def _get_range_diffset(self, source_scm, source_repo,
240 def _get_range_diffset(self, source_scm, source_repo,
239 commit1, commit2, diff_limit, file_limit,
241 commit1, commit2, diff_limit, file_limit,
240 fulldiff, hide_whitespace_changes, diff_context):
242 fulldiff, hide_whitespace_changes, diff_context):
241 vcs_diff = source_scm.get_diff(
243 vcs_diff = source_scm.get_diff(
242 commit1, commit2,
244 commit1, commit2,
243 ignore_whitespace=hide_whitespace_changes,
245 ignore_whitespace=hide_whitespace_changes,
244 context=diff_context)
246 context=diff_context)
245
247
246 diff_processor = diffs.DiffProcessor(
248 diff_processor = diffs.DiffProcessor(
247 vcs_diff, format='newdiff', diff_limit=diff_limit,
249 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 file_limit=file_limit, show_full_diff=fulldiff)
250 file_limit=file_limit, show_full_diff=fulldiff)
249
251
250 _parsed = diff_processor.prepare()
252 _parsed = diff_processor.prepare()
251
253
252 diffset = codeblocks.DiffSet(
254 diffset = codeblocks.DiffSet(
253 repo_name=source_repo.repo_name,
255 repo_name=source_repo.repo_name,
254 source_node_getter=codeblocks.diffset_node_getter(commit1),
256 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 target_node_getter=codeblocks.diffset_node_getter(commit2))
257 target_node_getter=codeblocks.diffset_node_getter(commit2))
256
258
257 diffset = self.path_filter.render_patchset_filtered(
259 diffset = self.path_filter.render_patchset_filtered(
258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
260 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259
261
260 return diffset
262 return diffset
261
263
262 @LoginRequired()
264 @LoginRequired()
263 @HasRepoPermissionAnyDecorator(
265 @HasRepoPermissionAnyDecorator(
264 'repository.read', 'repository.write', 'repository.admin')
266 'repository.read', 'repository.write', 'repository.admin')
265 @view_config(
267 @view_config(
266 route_name='pullrequest_show', request_method='GET',
268 route_name='pullrequest_show', request_method='GET',
267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
269 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 def pull_request_show(self):
270 def pull_request_show(self):
269 _ = self.request.translate
271 _ = self.request.translate
270 c = self.load_default_context()
272 c = self.load_default_context()
271
273
272 pull_request = PullRequest.get_or_404(
274 pull_request = PullRequest.get_or_404(
273 self.request.matchdict['pull_request_id'])
275 self.request.matchdict['pull_request_id'])
274 pull_request_id = pull_request.pull_request_id
276 pull_request_id = pull_request.pull_request_id
275
277
276 c.state_progressing = pull_request.is_state_changing()
278 c.state_progressing = pull_request.is_state_changing()
277
279
278 _new_state = {
280 _new_state = {
279 'created': PullRequest.STATE_CREATED,
281 'created': PullRequest.STATE_CREATED,
280 }.get(self.request.GET.get('force_state'))
282 }.get(self.request.GET.get('force_state'))
283
281 if c.is_super_admin and _new_state:
284 if c.is_super_admin and _new_state:
282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
285 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 h.flash(
286 h.flash(
284 _('Pull Request state was force changed to `{}`').format(_new_state),
287 _('Pull Request state was force changed to `{}`').format(_new_state),
285 category='success')
288 category='success')
286 Session().commit()
289 Session().commit()
287
290
288 raise HTTPFound(h.route_path(
291 raise HTTPFound(h.route_path(
289 'pullrequest_show', repo_name=self.db_repo_name,
292 'pullrequest_show', repo_name=self.db_repo_name,
290 pull_request_id=pull_request_id))
293 pull_request_id=pull_request_id))
291
294
292 version = self.request.GET.get('version')
295 version = self.request.GET.get('version')
293 from_version = self.request.GET.get('from_version') or version
296 from_version = self.request.GET.get('from_version') or version
294 merge_checks = self.request.GET.get('merge_checks')
297 merge_checks = self.request.GET.get('merge_checks')
295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
298 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296
299
297 # fetch global flags of ignore ws or context lines
300 # fetch global flags of ignore ws or context lines
298 diff_context = diffs.get_diff_context(self.request)
301 diff_context = diffs.get_diff_context(self.request)
299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
302 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300
303
301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
304 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302
305
303 (pull_request_latest,
306 (pull_request_latest,
304 pull_request_at_ver,
307 pull_request_at_ver,
305 pull_request_display_obj,
308 pull_request_display_obj,
306 at_version) = PullRequestModel().get_pr_version(
309 at_version) = PullRequestModel().get_pr_version(
307 pull_request_id, version=version)
310 pull_request_id, version=version)
308 pr_closed = pull_request_latest.is_closed()
311 pr_closed = pull_request_latest.is_closed()
309
312
310 if pr_closed and (version or from_version):
313 if pr_closed and (version or from_version):
311 # not allow to browse versions
314 # not allow to browse versions
312 raise HTTPFound(h.route_path(
315 raise HTTPFound(h.route_path(
313 'pullrequest_show', repo_name=self.db_repo_name,
316 'pullrequest_show', repo_name=self.db_repo_name,
314 pull_request_id=pull_request_id))
317 pull_request_id=pull_request_id))
315
318
316 versions = pull_request_display_obj.versions()
319 versions = pull_request_display_obj.versions()
317 # used to store per-commit range diffs
320 # used to store per-commit range diffs
318 c.changes = collections.OrderedDict()
321 c.changes = collections.OrderedDict()
319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
322 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320
323
321 c.at_version = at_version
324 c.at_version = at_version
322 c.at_version_num = (at_version
325 c.at_version_num = (at_version
323 if at_version and at_version != 'latest'
326 if at_version and at_version != 'latest'
324 else None)
327 else None)
325 c.at_version_pos = ChangesetComment.get_index_from_version(
328 c.at_version_pos = ChangesetComment.get_index_from_version(
326 c.at_version_num, versions)
329 c.at_version_num, versions)
327
330
328 (prev_pull_request_latest,
331 (prev_pull_request_latest,
329 prev_pull_request_at_ver,
332 prev_pull_request_at_ver,
330 prev_pull_request_display_obj,
333 prev_pull_request_display_obj,
331 prev_at_version) = PullRequestModel().get_pr_version(
334 prev_at_version) = PullRequestModel().get_pr_version(
332 pull_request_id, version=from_version)
335 pull_request_id, version=from_version)
333
336
334 c.from_version = prev_at_version
337 c.from_version = prev_at_version
335 c.from_version_num = (prev_at_version
338 c.from_version_num = (prev_at_version
336 if prev_at_version and prev_at_version != 'latest'
339 if prev_at_version and prev_at_version != 'latest'
337 else None)
340 else None)
338 c.from_version_pos = ChangesetComment.get_index_from_version(
341 c.from_version_pos = ChangesetComment.get_index_from_version(
339 c.from_version_num, versions)
342 c.from_version_num, versions)
340
343
341 # define if we're in COMPARE mode or VIEW at version mode
344 # define if we're in COMPARE mode or VIEW at version mode
342 compare = at_version != prev_at_version
345 compare = at_version != prev_at_version
343
346
344 # pull_requests repo_name we opened it against
347 # pull_requests repo_name we opened it against
345 # ie. target_repo must match
348 # ie. target_repo must match
346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
349 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 raise HTTPNotFound()
350 raise HTTPNotFound()
348
351
349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
352 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 pull_request_at_ver)
353 pull_request_at_ver)
351
354
352 c.pull_request = pull_request_display_obj
355 c.pull_request = pull_request_display_obj
353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
356 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 c.pull_request_latest = pull_request_latest
357 c.pull_request_latest = pull_request_latest
355
358
356 if compare or (at_version and not at_version == 'latest'):
359 if compare or (at_version and not at_version == 'latest'):
357 c.allowed_to_change_status = False
360 c.allowed_to_change_status = False
358 c.allowed_to_update = False
361 c.allowed_to_update = False
359 c.allowed_to_merge = False
362 c.allowed_to_merge = False
360 c.allowed_to_delete = False
363 c.allowed_to_delete = False
361 c.allowed_to_comment = False
364 c.allowed_to_comment = False
362 c.allowed_to_close = False
365 c.allowed_to_close = False
363 else:
366 else:
364 can_change_status = PullRequestModel().check_user_change_status(
367 can_change_status = PullRequestModel().check_user_change_status(
365 pull_request_at_ver, self._rhodecode_user)
368 pull_request_at_ver, self._rhodecode_user)
366 c.allowed_to_change_status = can_change_status and not pr_closed
369 c.allowed_to_change_status = can_change_status and not pr_closed
367
370
368 c.allowed_to_update = PullRequestModel().check_user_update(
371 c.allowed_to_update = PullRequestModel().check_user_update(
369 pull_request_latest, self._rhodecode_user) and not pr_closed
372 pull_request_latest, self._rhodecode_user) and not pr_closed
370 c.allowed_to_merge = PullRequestModel().check_user_merge(
373 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 pull_request_latest, self._rhodecode_user) and not pr_closed
374 pull_request_latest, self._rhodecode_user) and not pr_closed
372 c.allowed_to_delete = PullRequestModel().check_user_delete(
375 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 pull_request_latest, self._rhodecode_user) and not pr_closed
376 pull_request_latest, self._rhodecode_user) and not pr_closed
374 c.allowed_to_comment = not pr_closed
377 c.allowed_to_comment = not pr_closed
375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
378 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376
379
377 c.forbid_adding_reviewers = False
380 c.forbid_adding_reviewers = False
378 c.forbid_author_to_review = False
381 c.forbid_author_to_review = False
379 c.forbid_commit_author_to_review = False
382 c.forbid_commit_author_to_review = False
380
383
381 if pull_request_latest.reviewer_data and \
384 if pull_request_latest.reviewer_data and \
382 'rules' in pull_request_latest.reviewer_data:
385 'rules' in pull_request_latest.reviewer_data:
383 rules = pull_request_latest.reviewer_data['rules'] or {}
386 rules = pull_request_latest.reviewer_data['rules'] or {}
384 try:
387 try:
385 c.forbid_adding_reviewers = rules.get(
388 c.forbid_adding_reviewers = rules.get(
386 'forbid_adding_reviewers')
389 'forbid_adding_reviewers')
387 c.forbid_author_to_review = rules.get(
390 c.forbid_author_to_review = rules.get(
388 'forbid_author_to_review')
391 'forbid_author_to_review')
389 c.forbid_commit_author_to_review = rules.get(
392 c.forbid_commit_author_to_review = rules.get(
390 'forbid_commit_author_to_review')
393 'forbid_commit_author_to_review')
391 except Exception:
394 except Exception:
392 pass
395 pass
393
396
394 # check merge capabilities
397 # check merge capabilities
395 _merge_check = MergeCheck.validate(
398 _merge_check = MergeCheck.validate(
396 pull_request_latest, auth_user=self._rhodecode_user,
399 pull_request_latest, auth_user=self._rhodecode_user,
397 translator=self.request.translate,
400 translator=self.request.translate,
398 force_shadow_repo_refresh=force_refresh)
401 force_shadow_repo_refresh=force_refresh)
399
402
400 c.pr_merge_errors = _merge_check.error_details
403 c.pr_merge_errors = _merge_check.error_details
401 c.pr_merge_possible = not _merge_check.failed
404 c.pr_merge_possible = not _merge_check.failed
402 c.pr_merge_message = _merge_check.merge_msg
405 c.pr_merge_message = _merge_check.merge_msg
403 c.pr_merge_source_commit = _merge_check.source_commit
406 c.pr_merge_source_commit = _merge_check.source_commit
404 c.pr_merge_target_commit = _merge_check.target_commit
407 c.pr_merge_target_commit = _merge_check.target_commit
405
408
406 c.pr_merge_info = MergeCheck.get_merge_conditions(
409 c.pr_merge_info = MergeCheck.get_merge_conditions(
407 pull_request_latest, translator=self.request.translate)
410 pull_request_latest, translator=self.request.translate)
408
411
409 c.pull_request_review_status = _merge_check.review_status
412 c.pull_request_review_status = _merge_check.review_status
410 if merge_checks:
413 if merge_checks:
411 self.request.override_renderer = \
414 self.request.override_renderer = \
412 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
415 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
413 return self._get_template_context(c)
416 return self._get_template_context(c)
414
417
415 comments_model = CommentsModel()
418 comments_model = CommentsModel()
416
419
417 # reviewers and statuses
420 # reviewers and statuses
418 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
421 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
419 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
422 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
420
423
421 # GENERAL COMMENTS with versions #
424 # GENERAL COMMENTS with versions #
422 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
425 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
423 q = q.order_by(ChangesetComment.comment_id.asc())
426 q = q.order_by(ChangesetComment.comment_id.asc())
424 general_comments = q
427 general_comments = q
425
428
426 # pick comments we want to render at current version
429 # pick comments we want to render at current version
427 c.comment_versions = comments_model.aggregate_comments(
430 c.comment_versions = comments_model.aggregate_comments(
428 general_comments, versions, c.at_version_num)
431 general_comments, versions, c.at_version_num)
429 c.comments = c.comment_versions[c.at_version_num]['until']
432 c.comments = c.comment_versions[c.at_version_num]['until']
430
433
431 # INLINE COMMENTS with versions #
434 # INLINE COMMENTS with versions #
432 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
435 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
433 q = q.order_by(ChangesetComment.comment_id.asc())
436 q = q.order_by(ChangesetComment.comment_id.asc())
434 inline_comments = q
437 inline_comments = q
435
438
436 c.inline_versions = comments_model.aggregate_comments(
439 c.inline_versions = comments_model.aggregate_comments(
437 inline_comments, versions, c.at_version_num, inline=True)
440 inline_comments, versions, c.at_version_num, inline=True)
438
441
439 # TODOs
442 # TODOs
440 c.unresolved_comments = CommentsModel() \
443 c.unresolved_comments = CommentsModel() \
441 .get_pull_request_unresolved_todos(pull_request)
444 .get_pull_request_unresolved_todos(pull_request)
442 c.resolved_comments = CommentsModel() \
445 c.resolved_comments = CommentsModel() \
443 .get_pull_request_resolved_todos(pull_request)
446 .get_pull_request_resolved_todos(pull_request)
444
447
445 # inject latest version
448 # inject latest version
446 latest_ver = PullRequest.get_pr_display_object(
449 latest_ver = PullRequest.get_pr_display_object(
447 pull_request_latest, pull_request_latest)
450 pull_request_latest, pull_request_latest)
448
451
449 c.versions = versions + [latest_ver]
452 c.versions = versions + [latest_ver]
450
453
451 # if we use version, then do not show later comments
454 # if we use version, then do not show later comments
452 # than current version
455 # than current version
453 display_inline_comments = collections.defaultdict(
456 display_inline_comments = collections.defaultdict(
454 lambda: collections.defaultdict(list))
457 lambda: collections.defaultdict(list))
455 for co in inline_comments:
458 for co in inline_comments:
456 if c.at_version_num:
459 if c.at_version_num:
457 # pick comments that are at least UPTO given version, so we
460 # pick comments that are at least UPTO given version, so we
458 # don't render comments for higher version
461 # don't render comments for higher version
459 should_render = co.pull_request_version_id and \
462 should_render = co.pull_request_version_id and \
460 co.pull_request_version_id <= c.at_version_num
463 co.pull_request_version_id <= c.at_version_num
461 else:
464 else:
462 # showing all, for 'latest'
465 # showing all, for 'latest'
463 should_render = True
466 should_render = True
464
467
465 if should_render:
468 if should_render:
466 display_inline_comments[co.f_path][co.line_no].append(co)
469 display_inline_comments[co.f_path][co.line_no].append(co)
467
470
468 # load diff data into template context, if we use compare mode then
471 # load diff data into template context, if we use compare mode then
469 # diff is calculated based on changes between versions of PR
472 # diff is calculated based on changes between versions of PR
470
473
471 source_repo = pull_request_at_ver.source_repo
474 source_repo = pull_request_at_ver.source_repo
472 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
475 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
473
476
474 target_repo = pull_request_at_ver.target_repo
477 target_repo = pull_request_at_ver.target_repo
475 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
478 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
476
479
477 if compare:
480 if compare:
478 # in compare switch the diff base to latest commit from prev version
481 # in compare switch the diff base to latest commit from prev version
479 target_ref_id = prev_pull_request_display_obj.revisions[0]
482 target_ref_id = prev_pull_request_display_obj.revisions[0]
480
483
481 # despite opening commits for bookmarks/branches/tags, we always
484 # despite opening commits for bookmarks/branches/tags, we always
482 # convert this to rev to prevent changes after bookmark or branch change
485 # convert this to rev to prevent changes after bookmark or branch change
483 c.source_ref_type = 'rev'
486 c.source_ref_type = 'rev'
484 c.source_ref = source_ref_id
487 c.source_ref = source_ref_id
485
488
486 c.target_ref_type = 'rev'
489 c.target_ref_type = 'rev'
487 c.target_ref = target_ref_id
490 c.target_ref = target_ref_id
488
491
489 c.source_repo = source_repo
492 c.source_repo = source_repo
490 c.target_repo = target_repo
493 c.target_repo = target_repo
491
494
492 c.commit_ranges = []
495 c.commit_ranges = []
493 source_commit = EmptyCommit()
496 source_commit = EmptyCommit()
494 target_commit = EmptyCommit()
497 target_commit = EmptyCommit()
495 c.missing_requirements = False
498 c.missing_requirements = False
496
499
497 source_scm = source_repo.scm_instance()
500 source_scm = source_repo.scm_instance()
498 target_scm = target_repo.scm_instance()
501 target_scm = target_repo.scm_instance()
499
502
500 shadow_scm = None
503 shadow_scm = None
501 try:
504 try:
502 shadow_scm = pull_request_latest.get_shadow_repo()
505 shadow_scm = pull_request_latest.get_shadow_repo()
503 except Exception:
506 except Exception:
504 log.debug('Failed to get shadow repo', exc_info=True)
507 log.debug('Failed to get shadow repo', exc_info=True)
505 # try first the existing source_repo, and then shadow
508 # try first the existing source_repo, and then shadow
506 # repo if we can obtain one
509 # repo if we can obtain one
507 commits_source_repo = source_scm
510 commits_source_repo = source_scm
508 if shadow_scm:
511 if shadow_scm:
509 commits_source_repo = shadow_scm
512 commits_source_repo = shadow_scm
510
513
511 c.commits_source_repo = commits_source_repo
514 c.commits_source_repo = commits_source_repo
512 c.ancestor = None # set it to None, to hide it from PR view
515 c.ancestor = None # set it to None, to hide it from PR view
513
516
514 # empty version means latest, so we keep this to prevent
517 # empty version means latest, so we keep this to prevent
515 # double caching
518 # double caching
516 version_normalized = version or 'latest'
519 version_normalized = version or 'latest'
517 from_version_normalized = from_version or 'latest'
520 from_version_normalized = from_version or 'latest'
518
521
519 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
522 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
520 cache_file_path = diff_cache_exist(
523 cache_file_path = diff_cache_exist(
521 cache_path, 'pull_request', pull_request_id, version_normalized,
524 cache_path, 'pull_request', pull_request_id, version_normalized,
522 from_version_normalized, source_ref_id, target_ref_id,
525 from_version_normalized, source_ref_id, target_ref_id,
523 hide_whitespace_changes, diff_context, c.fulldiff)
526 hide_whitespace_changes, diff_context, c.fulldiff)
524
527
525 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
528 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
526 force_recache = self.get_recache_flag()
529 force_recache = self.get_recache_flag()
527
530
528 cached_diff = None
531 cached_diff = None
529 if caching_enabled:
532 if caching_enabled:
530 cached_diff = load_cached_diff(cache_file_path)
533 cached_diff = load_cached_diff(cache_file_path)
531
534
532 has_proper_commit_cache = (
535 has_proper_commit_cache = (
533 cached_diff and cached_diff.get('commits')
536 cached_diff and cached_diff.get('commits')
534 and len(cached_diff.get('commits', [])) == 5
537 and len(cached_diff.get('commits', [])) == 5
535 and cached_diff.get('commits')[0]
538 and cached_diff.get('commits')[0]
536 and cached_diff.get('commits')[3])
539 and cached_diff.get('commits')[3])
537
540
538 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
541 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
539 diff_commit_cache = \
542 diff_commit_cache = \
540 (ancestor_commit, commit_cache, missing_requirements,
543 (ancestor_commit, commit_cache, missing_requirements,
541 source_commit, target_commit) = cached_diff['commits']
544 source_commit, target_commit) = cached_diff['commits']
542 else:
545 else:
543 # NOTE(marcink): we reach potentially unreachable errors when a PR has
546 # NOTE(marcink): we reach potentially unreachable errors when a PR has
544 # merge errors resulting in potentially hidden commits in the shadow repo.
547 # merge errors resulting in potentially hidden commits in the shadow repo.
545 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
548 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
546 and _merge_check.merge_response
549 and _merge_check.merge_response
547 maybe_unreachable = maybe_unreachable \
550 maybe_unreachable = maybe_unreachable \
548 and _merge_check.merge_response.metadata.get('unresolved_files')
551 and _merge_check.merge_response.metadata.get('unresolved_files')
549 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
552 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
550 diff_commit_cache = \
553 diff_commit_cache = \
551 (ancestor_commit, commit_cache, missing_requirements,
554 (ancestor_commit, commit_cache, missing_requirements,
552 source_commit, target_commit) = self.get_commits(
555 source_commit, target_commit) = self.get_commits(
553 commits_source_repo,
556 commits_source_repo,
554 pull_request_at_ver,
557 pull_request_at_ver,
555 source_commit,
558 source_commit,
556 source_ref_id,
559 source_ref_id,
557 source_scm,
560 source_scm,
558 target_commit,
561 target_commit,
559 target_ref_id,
562 target_ref_id,
560 target_scm, maybe_unreachable=maybe_unreachable)
563 target_scm,
564 maybe_unreachable=maybe_unreachable)
561
565
562 # register our commit range
566 # register our commit range
563 for comm in commit_cache.values():
567 for comm in commit_cache.values():
564 c.commit_ranges.append(comm)
568 c.commit_ranges.append(comm)
565
569
566 c.missing_requirements = missing_requirements
570 c.missing_requirements = missing_requirements
567 c.ancestor_commit = ancestor_commit
571 c.ancestor_commit = ancestor_commit
568 c.statuses = source_repo.statuses(
572 c.statuses = source_repo.statuses(
569 [x.raw_id for x in c.commit_ranges])
573 [x.raw_id for x in c.commit_ranges])
570
574
571 # auto collapse if we have more than limit
575 # auto collapse if we have more than limit
572 collapse_limit = diffs.DiffProcessor._collapse_commits_over
576 collapse_limit = diffs.DiffProcessor._collapse_commits_over
573 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
577 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
574 c.compare_mode = compare
578 c.compare_mode = compare
575
579
576 # diff_limit is the old behavior, will cut off the whole diff
580 # diff_limit is the old behavior, will cut off the whole diff
577 # if the limit is applied otherwise will just hide the
581 # if the limit is applied otherwise will just hide the
578 # big files from the front-end
582 # big files from the front-end
579 diff_limit = c.visual.cut_off_limit_diff
583 diff_limit = c.visual.cut_off_limit_diff
580 file_limit = c.visual.cut_off_limit_file
584 file_limit = c.visual.cut_off_limit_file
581
585
582 c.missing_commits = False
586 c.missing_commits = False
583 if (c.missing_requirements
587 if (c.missing_requirements
584 or isinstance(source_commit, EmptyCommit)
588 or isinstance(source_commit, EmptyCommit)
585 or source_commit == target_commit):
589 or source_commit == target_commit):
586
590
587 c.missing_commits = True
591 c.missing_commits = True
588 else:
592 else:
589 c.inline_comments = display_inline_comments
593 c.inline_comments = display_inline_comments
590
594
591 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
595 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
592 if not force_recache and has_proper_diff_cache:
596 if not force_recache and has_proper_diff_cache:
593 c.diffset = cached_diff['diff']
597 c.diffset = cached_diff['diff']
594 (ancestor_commit, commit_cache, missing_requirements,
595 source_commit, target_commit) = cached_diff['commits']
596 else:
598 else:
597 c.diffset = self._get_diffset(
599 c.diffset = self._get_diffset(
598 c.source_repo.repo_name, commits_source_repo,
600 c.source_repo.repo_name, commits_source_repo,
601 c.ancestor_commit,
599 source_ref_id, target_ref_id,
602 source_ref_id, target_ref_id,
600 target_commit, source_commit,
603 target_commit, source_commit,
601 diff_limit, file_limit, c.fulldiff,
604 diff_limit, file_limit, c.fulldiff,
602 hide_whitespace_changes, diff_context)
605 hide_whitespace_changes, diff_context)
603
606
604 # save cached diff
607 # save cached diff
605 if caching_enabled:
608 if caching_enabled:
606 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
609 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
607
610
608 c.limited_diff = c.diffset.limited_diff
611 c.limited_diff = c.diffset.limited_diff
609
612
610 # calculate removed files that are bound to comments
613 # calculate removed files that are bound to comments
611 comment_deleted_files = [
614 comment_deleted_files = [
612 fname for fname in display_inline_comments
615 fname for fname in display_inline_comments
613 if fname not in c.diffset.file_stats]
616 if fname not in c.diffset.file_stats]
614
617
615 c.deleted_files_comments = collections.defaultdict(dict)
618 c.deleted_files_comments = collections.defaultdict(dict)
616 for fname, per_line_comments in display_inline_comments.items():
619 for fname, per_line_comments in display_inline_comments.items():
617 if fname in comment_deleted_files:
620 if fname in comment_deleted_files:
618 c.deleted_files_comments[fname]['stats'] = 0
621 c.deleted_files_comments[fname]['stats'] = 0
619 c.deleted_files_comments[fname]['comments'] = list()
622 c.deleted_files_comments[fname]['comments'] = list()
620 for lno, comments in per_line_comments.items():
623 for lno, comments in per_line_comments.items():
621 c.deleted_files_comments[fname]['comments'].extend(comments)
624 c.deleted_files_comments[fname]['comments'].extend(comments)
622
625
623 # maybe calculate the range diff
626 # maybe calculate the range diff
624 if c.range_diff_on:
627 if c.range_diff_on:
625 # TODO(marcink): set whitespace/context
628 # TODO(marcink): set whitespace/context
626 context_lcl = 3
629 context_lcl = 3
627 ign_whitespace_lcl = False
630 ign_whitespace_lcl = False
628
631
629 for commit in c.commit_ranges:
632 for commit in c.commit_ranges:
630 commit2 = commit
633 commit2 = commit
631 commit1 = commit.first_parent
634 commit1 = commit.first_parent
632
635
633 range_diff_cache_file_path = diff_cache_exist(
636 range_diff_cache_file_path = diff_cache_exist(
634 cache_path, 'diff', commit.raw_id,
637 cache_path, 'diff', commit.raw_id,
635 ign_whitespace_lcl, context_lcl, c.fulldiff)
638 ign_whitespace_lcl, context_lcl, c.fulldiff)
636
639
637 cached_diff = None
640 cached_diff = None
638 if caching_enabled:
641 if caching_enabled:
639 cached_diff = load_cached_diff(range_diff_cache_file_path)
642 cached_diff = load_cached_diff(range_diff_cache_file_path)
640
643
641 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
644 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
642 if not force_recache and has_proper_diff_cache:
645 if not force_recache and has_proper_diff_cache:
643 diffset = cached_diff['diff']
646 diffset = cached_diff['diff']
644 else:
647 else:
645 diffset = self._get_range_diffset(
648 diffset = self._get_range_diffset(
646 commits_source_repo, source_repo,
649 commits_source_repo, source_repo,
647 commit1, commit2, diff_limit, file_limit,
650 commit1, commit2, diff_limit, file_limit,
648 c.fulldiff, ign_whitespace_lcl, context_lcl
651 c.fulldiff, ign_whitespace_lcl, context_lcl
649 )
652 )
650
653
651 # save cached diff
654 # save cached diff
652 if caching_enabled:
655 if caching_enabled:
653 cache_diff(range_diff_cache_file_path, diffset, None)
656 cache_diff(range_diff_cache_file_path, diffset, None)
654
657
655 c.changes[commit.raw_id] = diffset
658 c.changes[commit.raw_id] = diffset
656
659
657 # this is a hack to properly display links, when creating PR, the
660 # this is a hack to properly display links, when creating PR, the
658 # compare view and others uses different notation, and
661 # compare view and others uses different notation, and
659 # compare_commits.mako renders links based on the target_repo.
662 # compare_commits.mako renders links based on the target_repo.
660 # We need to swap that here to generate it properly on the html side
663 # We need to swap that here to generate it properly on the html side
661 c.target_repo = c.source_repo
664 c.target_repo = c.source_repo
662
665
663 c.commit_statuses = ChangesetStatus.STATUSES
666 c.commit_statuses = ChangesetStatus.STATUSES
664
667
665 c.show_version_changes = not pr_closed
668 c.show_version_changes = not pr_closed
666 if c.show_version_changes:
669 if c.show_version_changes:
667 cur_obj = pull_request_at_ver
670 cur_obj = pull_request_at_ver
668 prev_obj = prev_pull_request_at_ver
671 prev_obj = prev_pull_request_at_ver
669
672
670 old_commit_ids = prev_obj.revisions
673 old_commit_ids = prev_obj.revisions
671 new_commit_ids = cur_obj.revisions
674 new_commit_ids = cur_obj.revisions
672 commit_changes = PullRequestModel()._calculate_commit_id_changes(
675 commit_changes = PullRequestModel()._calculate_commit_id_changes(
673 old_commit_ids, new_commit_ids)
676 old_commit_ids, new_commit_ids)
674 c.commit_changes_summary = commit_changes
677 c.commit_changes_summary = commit_changes
675
678
676 # calculate the diff for commits between versions
679 # calculate the diff for commits between versions
677 c.commit_changes = []
680 c.commit_changes = []
678 mark = lambda cs, fw: list(
681
679 h.itertools.izip_longest([], cs, fillvalue=fw))
682 def mark(cs, fw):
683 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
684
680 for c_type, raw_id in mark(commit_changes.added, 'a') \
685 for c_type, raw_id in mark(commit_changes.added, 'a') \
681 + mark(commit_changes.removed, 'r') \
686 + mark(commit_changes.removed, 'r') \
682 + mark(commit_changes.common, 'c'):
687 + mark(commit_changes.common, 'c'):
683
688
684 if raw_id in commit_cache:
689 if raw_id in commit_cache:
685 commit = commit_cache[raw_id]
690 commit = commit_cache[raw_id]
686 else:
691 else:
687 try:
692 try:
688 commit = commits_source_repo.get_commit(raw_id)
693 commit = commits_source_repo.get_commit(raw_id)
689 except CommitDoesNotExistError:
694 except CommitDoesNotExistError:
690 # in case we fail extracting still use "dummy" commit
695 # in case we fail extracting still use "dummy" commit
691 # for display in commit diff
696 # for display in commit diff
692 commit = h.AttributeDict(
697 commit = h.AttributeDict(
693 {'raw_id': raw_id,
698 {'raw_id': raw_id,
694 'message': 'EMPTY or MISSING COMMIT'})
699 'message': 'EMPTY or MISSING COMMIT'})
695 c.commit_changes.append([c_type, commit])
700 c.commit_changes.append([c_type, commit])
696
701
697 # current user review statuses for each version
702 # current user review statuses for each version
698 c.review_versions = {}
703 c.review_versions = {}
699 if self._rhodecode_user.user_id in allowed_reviewers:
704 if self._rhodecode_user.user_id in allowed_reviewers:
700 for co in general_comments:
705 for co in general_comments:
701 if co.author.user_id == self._rhodecode_user.user_id:
706 if co.author.user_id == self._rhodecode_user.user_id:
702 status = co.status_change
707 status = co.status_change
703 if status:
708 if status:
704 _ver_pr = status[0].comment.pull_request_version_id
709 _ver_pr = status[0].comment.pull_request_version_id
705 c.review_versions[_ver_pr] = status[0]
710 c.review_versions[_ver_pr] = status[0]
706
711
707 return self._get_template_context(c)
712 return self._get_template_context(c)
708
713
709 def get_commits(
714 def get_commits(
710 self, commits_source_repo, pull_request_at_ver, source_commit,
715 self, commits_source_repo, pull_request_at_ver, source_commit,
711 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
716 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
712 maybe_unreachable=False):
717 maybe_unreachable=False):
713
718
714 commit_cache = collections.OrderedDict()
719 commit_cache = collections.OrderedDict()
715 missing_requirements = False
720 missing_requirements = False
716
721
717 try:
722 try:
718 pre_load = ["author", "date", "message", "branch", "parents"]
723 pre_load = ["author", "date", "message", "branch", "parents"]
719
724
720 pull_request_commits = pull_request_at_ver.revisions
725 pull_request_commits = pull_request_at_ver.revisions
721 log.debug('Loading %s commits from %s',
726 log.debug('Loading %s commits from %s',
722 len(pull_request_commits), commits_source_repo)
727 len(pull_request_commits), commits_source_repo)
723
728
724 for rev in pull_request_commits:
729 for rev in pull_request_commits:
725 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
730 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
726 maybe_unreachable=maybe_unreachable)
731 maybe_unreachable=maybe_unreachable)
727 commit_cache[comm.raw_id] = comm
732 commit_cache[comm.raw_id] = comm
728
733
729 # Order here matters, we first need to get target, and then
734 # Order here matters, we first need to get target, and then
730 # the source
735 # the source
731 target_commit = commits_source_repo.get_commit(
736 target_commit = commits_source_repo.get_commit(
732 commit_id=safe_str(target_ref_id))
737 commit_id=safe_str(target_ref_id))
733
738
734 source_commit = commits_source_repo.get_commit(
739 source_commit = commits_source_repo.get_commit(
735 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
740 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
736 except CommitDoesNotExistError:
741 except CommitDoesNotExistError:
737 log.warning('Failed to get commit from `{}` repo'.format(
742 log.warning('Failed to get commit from `{}` repo'.format(
738 commits_source_repo), exc_info=True)
743 commits_source_repo), exc_info=True)
739 except RepositoryRequirementError:
744 except RepositoryRequirementError:
740 log.warning('Failed to get all required data from repo', exc_info=True)
745 log.warning('Failed to get all required data from repo', exc_info=True)
741 missing_requirements = True
746 missing_requirements = True
742 ancestor_commit = None
747
748 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
749
743 try:
750 try:
744 ancestor_id = source_scm.get_common_ancestor(
751 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
745 source_commit.raw_id, target_commit.raw_id, target_scm)
746 ancestor_commit = source_scm.get_commit(ancestor_id)
747 except Exception:
752 except Exception:
748 ancestor_commit = None
753 ancestor_commit = None
754
749 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
755 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
750
756
751 def assure_not_empty_repo(self):
757 def assure_not_empty_repo(self):
752 _ = self.request.translate
758 _ = self.request.translate
753
759
754 try:
760 try:
755 self.db_repo.scm_instance().get_commit()
761 self.db_repo.scm_instance().get_commit()
756 except EmptyRepositoryError:
762 except EmptyRepositoryError:
757 h.flash(h.literal(_('There are no commits yet')),
763 h.flash(h.literal(_('There are no commits yet')),
758 category='warning')
764 category='warning')
759 raise HTTPFound(
765 raise HTTPFound(
760 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
766 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
761
767
762 @LoginRequired()
768 @LoginRequired()
763 @NotAnonymous()
769 @NotAnonymous()
764 @HasRepoPermissionAnyDecorator(
770 @HasRepoPermissionAnyDecorator(
765 'repository.read', 'repository.write', 'repository.admin')
771 'repository.read', 'repository.write', 'repository.admin')
766 @view_config(
772 @view_config(
767 route_name='pullrequest_new', request_method='GET',
773 route_name='pullrequest_new', request_method='GET',
768 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
774 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
769 def pull_request_new(self):
775 def pull_request_new(self):
770 _ = self.request.translate
776 _ = self.request.translate
771 c = self.load_default_context()
777 c = self.load_default_context()
772
778
773 self.assure_not_empty_repo()
779 self.assure_not_empty_repo()
774 source_repo = self.db_repo
780 source_repo = self.db_repo
775
781
776 commit_id = self.request.GET.get('commit')
782 commit_id = self.request.GET.get('commit')
777 branch_ref = self.request.GET.get('branch')
783 branch_ref = self.request.GET.get('branch')
778 bookmark_ref = self.request.GET.get('bookmark')
784 bookmark_ref = self.request.GET.get('bookmark')
779
785
780 try:
786 try:
781 source_repo_data = PullRequestModel().generate_repo_data(
787 source_repo_data = PullRequestModel().generate_repo_data(
782 source_repo, commit_id=commit_id,
788 source_repo, commit_id=commit_id,
783 branch=branch_ref, bookmark=bookmark_ref,
789 branch=branch_ref, bookmark=bookmark_ref,
784 translator=self.request.translate)
790 translator=self.request.translate)
785 except CommitDoesNotExistError as e:
791 except CommitDoesNotExistError as e:
786 log.exception(e)
792 log.exception(e)
787 h.flash(_('Commit does not exist'), 'error')
793 h.flash(_('Commit does not exist'), 'error')
788 raise HTTPFound(
794 raise HTTPFound(
789 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
795 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
790
796
791 default_target_repo = source_repo
797 default_target_repo = source_repo
792
798
793 if source_repo.parent and c.has_origin_repo_read_perm:
799 if source_repo.parent and c.has_origin_repo_read_perm:
794 parent_vcs_obj = source_repo.parent.scm_instance()
800 parent_vcs_obj = source_repo.parent.scm_instance()
795 if parent_vcs_obj and not parent_vcs_obj.is_empty():
801 if parent_vcs_obj and not parent_vcs_obj.is_empty():
796 # change default if we have a parent repo
802 # change default if we have a parent repo
797 default_target_repo = source_repo.parent
803 default_target_repo = source_repo.parent
798
804
799 target_repo_data = PullRequestModel().generate_repo_data(
805 target_repo_data = PullRequestModel().generate_repo_data(
800 default_target_repo, translator=self.request.translate)
806 default_target_repo, translator=self.request.translate)
801
807
802 selected_source_ref = source_repo_data['refs']['selected_ref']
808 selected_source_ref = source_repo_data['refs']['selected_ref']
803 title_source_ref = ''
809 title_source_ref = ''
804 if selected_source_ref:
810 if selected_source_ref:
805 title_source_ref = selected_source_ref.split(':', 2)[1]
811 title_source_ref = selected_source_ref.split(':', 2)[1]
806 c.default_title = PullRequestModel().generate_pullrequest_title(
812 c.default_title = PullRequestModel().generate_pullrequest_title(
807 source=source_repo.repo_name,
813 source=source_repo.repo_name,
808 source_ref=title_source_ref,
814 source_ref=title_source_ref,
809 target=default_target_repo.repo_name
815 target=default_target_repo.repo_name
810 )
816 )
811
817
812 c.default_repo_data = {
818 c.default_repo_data = {
813 'source_repo_name': source_repo.repo_name,
819 'source_repo_name': source_repo.repo_name,
814 'source_refs_json': json.dumps(source_repo_data),
820 'source_refs_json': json.dumps(source_repo_data),
815 'target_repo_name': default_target_repo.repo_name,
821 'target_repo_name': default_target_repo.repo_name,
816 'target_refs_json': json.dumps(target_repo_data),
822 'target_refs_json': json.dumps(target_repo_data),
817 }
823 }
818 c.default_source_ref = selected_source_ref
824 c.default_source_ref = selected_source_ref
819
825
820 return self._get_template_context(c)
826 return self._get_template_context(c)
821
827
822 @LoginRequired()
828 @LoginRequired()
823 @NotAnonymous()
829 @NotAnonymous()
824 @HasRepoPermissionAnyDecorator(
830 @HasRepoPermissionAnyDecorator(
825 'repository.read', 'repository.write', 'repository.admin')
831 'repository.read', 'repository.write', 'repository.admin')
826 @view_config(
832 @view_config(
827 route_name='pullrequest_repo_refs', request_method='GET',
833 route_name='pullrequest_repo_refs', request_method='GET',
828 renderer='json_ext', xhr=True)
834 renderer='json_ext', xhr=True)
829 def pull_request_repo_refs(self):
835 def pull_request_repo_refs(self):
830 self.load_default_context()
836 self.load_default_context()
831 target_repo_name = self.request.matchdict['target_repo_name']
837 target_repo_name = self.request.matchdict['target_repo_name']
832 repo = Repository.get_by_repo_name(target_repo_name)
838 repo = Repository.get_by_repo_name(target_repo_name)
833 if not repo:
839 if not repo:
834 raise HTTPNotFound()
840 raise HTTPNotFound()
835
841
836 target_perm = HasRepoPermissionAny(
842 target_perm = HasRepoPermissionAny(
837 'repository.read', 'repository.write', 'repository.admin')(
843 'repository.read', 'repository.write', 'repository.admin')(
838 target_repo_name)
844 target_repo_name)
839 if not target_perm:
845 if not target_perm:
840 raise HTTPNotFound()
846 raise HTTPNotFound()
841
847
842 return PullRequestModel().generate_repo_data(
848 return PullRequestModel().generate_repo_data(
843 repo, translator=self.request.translate)
849 repo, translator=self.request.translate)
844
850
845 @LoginRequired()
851 @LoginRequired()
846 @NotAnonymous()
852 @NotAnonymous()
847 @HasRepoPermissionAnyDecorator(
853 @HasRepoPermissionAnyDecorator(
848 'repository.read', 'repository.write', 'repository.admin')
854 'repository.read', 'repository.write', 'repository.admin')
849 @view_config(
855 @view_config(
850 route_name='pullrequest_repo_targets', request_method='GET',
856 route_name='pullrequest_repo_targets', request_method='GET',
851 renderer='json_ext', xhr=True)
857 renderer='json_ext', xhr=True)
852 def pullrequest_repo_targets(self):
858 def pullrequest_repo_targets(self):
853 _ = self.request.translate
859 _ = self.request.translate
854 filter_query = self.request.GET.get('query')
860 filter_query = self.request.GET.get('query')
855
861
856 # get the parents
862 # get the parents
857 parent_target_repos = []
863 parent_target_repos = []
858 if self.db_repo.parent:
864 if self.db_repo.parent:
859 parents_query = Repository.query() \
865 parents_query = Repository.query() \
860 .order_by(func.length(Repository.repo_name)) \
866 .order_by(func.length(Repository.repo_name)) \
861 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
867 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
862
868
863 if filter_query:
869 if filter_query:
864 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
870 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
865 parents_query = parents_query.filter(
871 parents_query = parents_query.filter(
866 Repository.repo_name.ilike(ilike_expression))
872 Repository.repo_name.ilike(ilike_expression))
867 parents = parents_query.limit(20).all()
873 parents = parents_query.limit(20).all()
868
874
869 for parent in parents:
875 for parent in parents:
870 parent_vcs_obj = parent.scm_instance()
876 parent_vcs_obj = parent.scm_instance()
871 if parent_vcs_obj and not parent_vcs_obj.is_empty():
877 if parent_vcs_obj and not parent_vcs_obj.is_empty():
872 parent_target_repos.append(parent)
878 parent_target_repos.append(parent)
873
879
874 # get other forks, and repo itself
880 # get other forks, and repo itself
875 query = Repository.query() \
881 query = Repository.query() \
876 .order_by(func.length(Repository.repo_name)) \
882 .order_by(func.length(Repository.repo_name)) \
877 .filter(
883 .filter(
878 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
884 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
879 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
885 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
880 ) \
886 ) \
881 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
887 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
882
888
883 if filter_query:
889 if filter_query:
884 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
890 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
885 query = query.filter(Repository.repo_name.ilike(ilike_expression))
891 query = query.filter(Repository.repo_name.ilike(ilike_expression))
886
892
887 limit = max(20 - len(parent_target_repos), 5) # not less then 5
893 limit = max(20 - len(parent_target_repos), 5) # not less then 5
888 target_repos = query.limit(limit).all()
894 target_repos = query.limit(limit).all()
889
895
890 all_target_repos = target_repos + parent_target_repos
896 all_target_repos = target_repos + parent_target_repos
891
897
892 repos = []
898 repos = []
893 # This checks permissions to the repositories
899 # This checks permissions to the repositories
894 for obj in ScmModel().get_repos(all_target_repos):
900 for obj in ScmModel().get_repos(all_target_repos):
895 repos.append({
901 repos.append({
896 'id': obj['name'],
902 'id': obj['name'],
897 'text': obj['name'],
903 'text': obj['name'],
898 'type': 'repo',
904 'type': 'repo',
899 'repo_id': obj['dbrepo']['repo_id'],
905 'repo_id': obj['dbrepo']['repo_id'],
900 'repo_type': obj['dbrepo']['repo_type'],
906 'repo_type': obj['dbrepo']['repo_type'],
901 'private': obj['dbrepo']['private'],
907 'private': obj['dbrepo']['private'],
902
908
903 })
909 })
904
910
905 data = {
911 data = {
906 'more': False,
912 'more': False,
907 'results': [{
913 'results': [{
908 'text': _('Repositories'),
914 'text': _('Repositories'),
909 'children': repos
915 'children': repos
910 }] if repos else []
916 }] if repos else []
911 }
917 }
912 return data
918 return data
913
919
914 @LoginRequired()
920 @LoginRequired()
915 @NotAnonymous()
921 @NotAnonymous()
916 @HasRepoPermissionAnyDecorator(
922 @HasRepoPermissionAnyDecorator(
917 'repository.read', 'repository.write', 'repository.admin')
923 'repository.read', 'repository.write', 'repository.admin')
918 @CSRFRequired()
924 @CSRFRequired()
919 @view_config(
925 @view_config(
920 route_name='pullrequest_create', request_method='POST',
926 route_name='pullrequest_create', request_method='POST',
921 renderer=None)
927 renderer=None)
922 def pull_request_create(self):
928 def pull_request_create(self):
923 _ = self.request.translate
929 _ = self.request.translate
924 self.assure_not_empty_repo()
930 self.assure_not_empty_repo()
925 self.load_default_context()
931 self.load_default_context()
926
932
927 controls = peppercorn.parse(self.request.POST.items())
933 controls = peppercorn.parse(self.request.POST.items())
928
934
929 try:
935 try:
930 form = PullRequestForm(
936 form = PullRequestForm(
931 self.request.translate, self.db_repo.repo_id)()
937 self.request.translate, self.db_repo.repo_id)()
932 _form = form.to_python(controls)
938 _form = form.to_python(controls)
933 except formencode.Invalid as errors:
939 except formencode.Invalid as errors:
934 if errors.error_dict.get('revisions'):
940 if errors.error_dict.get('revisions'):
935 msg = 'Revisions: %s' % errors.error_dict['revisions']
941 msg = 'Revisions: %s' % errors.error_dict['revisions']
936 elif errors.error_dict.get('pullrequest_title'):
942 elif errors.error_dict.get('pullrequest_title'):
937 msg = errors.error_dict.get('pullrequest_title')
943 msg = errors.error_dict.get('pullrequest_title')
938 else:
944 else:
939 msg = _('Error creating pull request: {}').format(errors)
945 msg = _('Error creating pull request: {}').format(errors)
940 log.exception(msg)
946 log.exception(msg)
941 h.flash(msg, 'error')
947 h.flash(msg, 'error')
942
948
943 # would rather just go back to form ...
949 # would rather just go back to form ...
944 raise HTTPFound(
950 raise HTTPFound(
945 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
951 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
946
952
947 source_repo = _form['source_repo']
953 source_repo = _form['source_repo']
948 source_ref = _form['source_ref']
954 source_ref = _form['source_ref']
949 target_repo = _form['target_repo']
955 target_repo = _form['target_repo']
950 target_ref = _form['target_ref']
956 target_ref = _form['target_ref']
951 commit_ids = _form['revisions'][::-1]
957 commit_ids = _form['revisions'][::-1]
958 common_ancestor_id = _form['common_ancestor']
952
959
953 # find the ancestor for this pr
960 # find the ancestor for this pr
954 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
961 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
955 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
962 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
956
963
957 if not (source_db_repo or target_db_repo):
964 if not (source_db_repo or target_db_repo):
958 h.flash(_('source_repo or target repo not found'), category='error')
965 h.flash(_('source_repo or target repo not found'), category='error')
959 raise HTTPFound(
966 raise HTTPFound(
960 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
967 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
961
968
962 # re-check permissions again here
969 # re-check permissions again here
963 # source_repo we must have read permissions
970 # source_repo we must have read permissions
964
971
965 source_perm = HasRepoPermissionAny(
972 source_perm = HasRepoPermissionAny(
966 'repository.read', 'repository.write', 'repository.admin')(
973 'repository.read', 'repository.write', 'repository.admin')(
967 source_db_repo.repo_name)
974 source_db_repo.repo_name)
968 if not source_perm:
975 if not source_perm:
969 msg = _('Not Enough permissions to source repo `{}`.'.format(
976 msg = _('Not Enough permissions to source repo `{}`.'.format(
970 source_db_repo.repo_name))
977 source_db_repo.repo_name))
971 h.flash(msg, category='error')
978 h.flash(msg, category='error')
972 # copy the args back to redirect
979 # copy the args back to redirect
973 org_query = self.request.GET.mixed()
980 org_query = self.request.GET.mixed()
974 raise HTTPFound(
981 raise HTTPFound(
975 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
982 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
976 _query=org_query))
983 _query=org_query))
977
984
978 # target repo we must have read permissions, and also later on
985 # target repo we must have read permissions, and also later on
979 # we want to check branch permissions here
986 # we want to check branch permissions here
980 target_perm = HasRepoPermissionAny(
987 target_perm = HasRepoPermissionAny(
981 'repository.read', 'repository.write', 'repository.admin')(
988 'repository.read', 'repository.write', 'repository.admin')(
982 target_db_repo.repo_name)
989 target_db_repo.repo_name)
983 if not target_perm:
990 if not target_perm:
984 msg = _('Not Enough permissions to target repo `{}`.'.format(
991 msg = _('Not Enough permissions to target repo `{}`.'.format(
985 target_db_repo.repo_name))
992 target_db_repo.repo_name))
986 h.flash(msg, category='error')
993 h.flash(msg, category='error')
987 # copy the args back to redirect
994 # copy the args back to redirect
988 org_query = self.request.GET.mixed()
995 org_query = self.request.GET.mixed()
989 raise HTTPFound(
996 raise HTTPFound(
990 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
997 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
991 _query=org_query))
998 _query=org_query))
992
999
993 source_scm = source_db_repo.scm_instance()
1000 source_scm = source_db_repo.scm_instance()
994 target_scm = target_db_repo.scm_instance()
1001 target_scm = target_db_repo.scm_instance()
995
1002
996 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
1003 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
997 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
1004 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
998
1005
999 ancestor = source_scm.get_common_ancestor(
1006 ancestor = source_scm.get_common_ancestor(
1000 source_commit.raw_id, target_commit.raw_id, target_scm)
1007 source_commit.raw_id, target_commit.raw_id, target_scm)
1001
1008
1002 # recalculate target ref based on ancestor
1009 # recalculate target ref based on ancestor
1003 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1010 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1004 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1011 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1005
1012
1006 get_default_reviewers_data, validate_default_reviewers = \
1013 get_default_reviewers_data, validate_default_reviewers = \
1007 PullRequestModel().get_reviewer_functions()
1014 PullRequestModel().get_reviewer_functions()
1008
1015
1009 # recalculate reviewers logic, to make sure we can validate this
1016 # recalculate reviewers logic, to make sure we can validate this
1010 reviewer_rules = get_default_reviewers_data(
1017 reviewer_rules = get_default_reviewers_data(
1011 self._rhodecode_db_user, source_db_repo,
1018 self._rhodecode_db_user, source_db_repo,
1012 source_commit, target_db_repo, target_commit)
1019 source_commit, target_db_repo, target_commit)
1013
1020
1014 given_reviewers = _form['review_members']
1021 given_reviewers = _form['review_members']
1015 reviewers = validate_default_reviewers(
1022 reviewers = validate_default_reviewers(
1016 given_reviewers, reviewer_rules)
1023 given_reviewers, reviewer_rules)
1017
1024
1018 pullrequest_title = _form['pullrequest_title']
1025 pullrequest_title = _form['pullrequest_title']
1019 title_source_ref = source_ref.split(':', 2)[1]
1026 title_source_ref = source_ref.split(':', 2)[1]
1020 if not pullrequest_title:
1027 if not pullrequest_title:
1021 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1028 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1022 source=source_repo,
1029 source=source_repo,
1023 source_ref=title_source_ref,
1030 source_ref=title_source_ref,
1024 target=target_repo
1031 target=target_repo
1025 )
1032 )
1026
1033
1027 description = _form['pullrequest_desc']
1034 description = _form['pullrequest_desc']
1028 description_renderer = _form['description_renderer']
1035 description_renderer = _form['description_renderer']
1029
1036
1030 try:
1037 try:
1031 pull_request = PullRequestModel().create(
1038 pull_request = PullRequestModel().create(
1032 created_by=self._rhodecode_user.user_id,
1039 created_by=self._rhodecode_user.user_id,
1033 source_repo=source_repo,
1040 source_repo=source_repo,
1034 source_ref=source_ref,
1041 source_ref=source_ref,
1035 target_repo=target_repo,
1042 target_repo=target_repo,
1036 target_ref=target_ref,
1043 target_ref=target_ref,
1037 revisions=commit_ids,
1044 revisions=commit_ids,
1045 common_ancestor_id=common_ancestor_id,
1038 reviewers=reviewers,
1046 reviewers=reviewers,
1039 title=pullrequest_title,
1047 title=pullrequest_title,
1040 description=description,
1048 description=description,
1041 description_renderer=description_renderer,
1049 description_renderer=description_renderer,
1042 reviewer_data=reviewer_rules,
1050 reviewer_data=reviewer_rules,
1043 auth_user=self._rhodecode_user
1051 auth_user=self._rhodecode_user
1044 )
1052 )
1045 Session().commit()
1053 Session().commit()
1046
1054
1047 h.flash(_('Successfully opened new pull request'),
1055 h.flash(_('Successfully opened new pull request'),
1048 category='success')
1056 category='success')
1049 except Exception:
1057 except Exception:
1050 msg = _('Error occurred during creation of this pull request.')
1058 msg = _('Error occurred during creation of this pull request.')
1051 log.exception(msg)
1059 log.exception(msg)
1052 h.flash(msg, category='error')
1060 h.flash(msg, category='error')
1053
1061
1054 # copy the args back to redirect
1062 # copy the args back to redirect
1055 org_query = self.request.GET.mixed()
1063 org_query = self.request.GET.mixed()
1056 raise HTTPFound(
1064 raise HTTPFound(
1057 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1065 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1058 _query=org_query))
1066 _query=org_query))
1059
1067
1060 raise HTTPFound(
1068 raise HTTPFound(
1061 h.route_path('pullrequest_show', repo_name=target_repo,
1069 h.route_path('pullrequest_show', repo_name=target_repo,
1062 pull_request_id=pull_request.pull_request_id))
1070 pull_request_id=pull_request.pull_request_id))
1063
1071
1064 @LoginRequired()
1072 @LoginRequired()
1065 @NotAnonymous()
1073 @NotAnonymous()
1066 @HasRepoPermissionAnyDecorator(
1074 @HasRepoPermissionAnyDecorator(
1067 'repository.read', 'repository.write', 'repository.admin')
1075 'repository.read', 'repository.write', 'repository.admin')
1068 @CSRFRequired()
1076 @CSRFRequired()
1069 @view_config(
1077 @view_config(
1070 route_name='pullrequest_update', request_method='POST',
1078 route_name='pullrequest_update', request_method='POST',
1071 renderer='json_ext')
1079 renderer='json_ext')
1072 def pull_request_update(self):
1080 def pull_request_update(self):
1073 pull_request = PullRequest.get_or_404(
1081 pull_request = PullRequest.get_or_404(
1074 self.request.matchdict['pull_request_id'])
1082 self.request.matchdict['pull_request_id'])
1075 _ = self.request.translate
1083 _ = self.request.translate
1076
1084
1077 self.load_default_context()
1085 self.load_default_context()
1078 redirect_url = None
1086 redirect_url = None
1079
1087
1080 if pull_request.is_closed():
1088 if pull_request.is_closed():
1081 log.debug('update: forbidden because pull request is closed')
1089 log.debug('update: forbidden because pull request is closed')
1082 msg = _(u'Cannot update closed pull requests.')
1090 msg = _(u'Cannot update closed pull requests.')
1083 h.flash(msg, category='error')
1091 h.flash(msg, category='error')
1084 return {'response': True,
1092 return {'response': True,
1085 'redirect_url': redirect_url}
1093 'redirect_url': redirect_url}
1086
1094
1087 is_state_changing = pull_request.is_state_changing()
1095 is_state_changing = pull_request.is_state_changing()
1088
1096
1089 # only owner or admin can update it
1097 # only owner or admin can update it
1090 allowed_to_update = PullRequestModel().check_user_update(
1098 allowed_to_update = PullRequestModel().check_user_update(
1091 pull_request, self._rhodecode_user)
1099 pull_request, self._rhodecode_user)
1092 if allowed_to_update:
1100 if allowed_to_update:
1093 controls = peppercorn.parse(self.request.POST.items())
1101 controls = peppercorn.parse(self.request.POST.items())
1094 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1102 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1095
1103
1096 if 'review_members' in controls:
1104 if 'review_members' in controls:
1097 self._update_reviewers(
1105 self._update_reviewers(
1098 pull_request, controls['review_members'],
1106 pull_request, controls['review_members'],
1099 pull_request.reviewer_data)
1107 pull_request.reviewer_data)
1100 elif str2bool(self.request.POST.get('update_commits', 'false')):
1108 elif str2bool(self.request.POST.get('update_commits', 'false')):
1101 if is_state_changing:
1109 if is_state_changing:
1102 log.debug('commits update: forbidden because pull request is in state %s',
1110 log.debug('commits update: forbidden because pull request is in state %s',
1103 pull_request.pull_request_state)
1111 pull_request.pull_request_state)
1104 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1112 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1105 u'Current state is: `{}`').format(
1113 u'Current state is: `{}`').format(
1106 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1114 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1107 h.flash(msg, category='error')
1115 h.flash(msg, category='error')
1108 return {'response': True,
1116 return {'response': True,
1109 'redirect_url': redirect_url}
1117 'redirect_url': redirect_url}
1110
1118
1111 self._update_commits(pull_request)
1119 self._update_commits(pull_request)
1112 if force_refresh:
1120 if force_refresh:
1113 redirect_url = h.route_path(
1121 redirect_url = h.route_path(
1114 'pullrequest_show', repo_name=self.db_repo_name,
1122 'pullrequest_show', repo_name=self.db_repo_name,
1115 pull_request_id=pull_request.pull_request_id,
1123 pull_request_id=pull_request.pull_request_id,
1116 _query={"force_refresh": 1})
1124 _query={"force_refresh": 1})
1117 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1125 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1118 self._edit_pull_request(pull_request)
1126 self._edit_pull_request(pull_request)
1119 else:
1127 else:
1120 raise HTTPBadRequest()
1128 raise HTTPBadRequest()
1121
1129
1122 return {'response': True,
1130 return {'response': True,
1123 'redirect_url': redirect_url}
1131 'redirect_url': redirect_url}
1124 raise HTTPForbidden()
1132 raise HTTPForbidden()
1125
1133
1126 def _edit_pull_request(self, pull_request):
1134 def _edit_pull_request(self, pull_request):
1127 _ = self.request.translate
1135 _ = self.request.translate
1128
1136
1129 try:
1137 try:
1130 PullRequestModel().edit(
1138 PullRequestModel().edit(
1131 pull_request,
1139 pull_request,
1132 self.request.POST.get('title'),
1140 self.request.POST.get('title'),
1133 self.request.POST.get('description'),
1141 self.request.POST.get('description'),
1134 self.request.POST.get('description_renderer'),
1142 self.request.POST.get('description_renderer'),
1135 self._rhodecode_user)
1143 self._rhodecode_user)
1136 except ValueError:
1144 except ValueError:
1137 msg = _(u'Cannot update closed pull requests.')
1145 msg = _(u'Cannot update closed pull requests.')
1138 h.flash(msg, category='error')
1146 h.flash(msg, category='error')
1139 return
1147 return
1140 else:
1148 else:
1141 Session().commit()
1149 Session().commit()
1142
1150
1143 msg = _(u'Pull request title & description updated.')
1151 msg = _(u'Pull request title & description updated.')
1144 h.flash(msg, category='success')
1152 h.flash(msg, category='success')
1145 return
1153 return
1146
1154
1147 def _update_commits(self, pull_request):
1155 def _update_commits(self, pull_request):
1148 _ = self.request.translate
1156 _ = self.request.translate
1149
1157
1150 with pull_request.set_state(PullRequest.STATE_UPDATING):
1158 with pull_request.set_state(PullRequest.STATE_UPDATING):
1151 resp = PullRequestModel().update_commits(
1159 resp = PullRequestModel().update_commits(
1152 pull_request, self._rhodecode_db_user)
1160 pull_request, self._rhodecode_db_user)
1153
1161
1154 if resp.executed:
1162 if resp.executed:
1155
1163
1156 if resp.target_changed and resp.source_changed:
1164 if resp.target_changed and resp.source_changed:
1157 changed = 'target and source repositories'
1165 changed = 'target and source repositories'
1158 elif resp.target_changed and not resp.source_changed:
1166 elif resp.target_changed and not resp.source_changed:
1159 changed = 'target repository'
1167 changed = 'target repository'
1160 elif not resp.target_changed and resp.source_changed:
1168 elif not resp.target_changed and resp.source_changed:
1161 changed = 'source repository'
1169 changed = 'source repository'
1162 else:
1170 else:
1163 changed = 'nothing'
1171 changed = 'nothing'
1164
1172
1165 msg = _(u'Pull request updated to "{source_commit_id}" with '
1173 msg = _(u'Pull request updated to "{source_commit_id}" with '
1166 u'{count_added} added, {count_removed} removed commits. '
1174 u'{count_added} added, {count_removed} removed commits. '
1167 u'Source of changes: {change_source}')
1175 u'Source of changes: {change_source}')
1168 msg = msg.format(
1176 msg = msg.format(
1169 source_commit_id=pull_request.source_ref_parts.commit_id,
1177 source_commit_id=pull_request.source_ref_parts.commit_id,
1170 count_added=len(resp.changes.added),
1178 count_added=len(resp.changes.added),
1171 count_removed=len(resp.changes.removed),
1179 count_removed=len(resp.changes.removed),
1172 change_source=changed)
1180 change_source=changed)
1173 h.flash(msg, category='success')
1181 h.flash(msg, category='success')
1174
1182
1175 channel = '/repo${}$/pr/{}'.format(
1183 channel = '/repo${}$/pr/{}'.format(
1176 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1184 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1177 message = msg + (
1185 message = msg + (
1178 ' - <a onclick="window.location.reload()">'
1186 ' - <a onclick="window.location.reload()">'
1179 '<strong>{}</strong></a>'.format(_('Reload page')))
1187 '<strong>{}</strong></a>'.format(_('Reload page')))
1180 channelstream.post_message(
1188 channelstream.post_message(
1181 channel, message, self._rhodecode_user.username,
1189 channel, message, self._rhodecode_user.username,
1182 registry=self.request.registry)
1190 registry=self.request.registry)
1183 else:
1191 else:
1184 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1192 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1185 warning_reasons = [
1193 warning_reasons = [
1186 UpdateFailureReason.NO_CHANGE,
1194 UpdateFailureReason.NO_CHANGE,
1187 UpdateFailureReason.WRONG_REF_TYPE,
1195 UpdateFailureReason.WRONG_REF_TYPE,
1188 ]
1196 ]
1189 category = 'warning' if resp.reason in warning_reasons else 'error'
1197 category = 'warning' if resp.reason in warning_reasons else 'error'
1190 h.flash(msg, category=category)
1198 h.flash(msg, category=category)
1191
1199
1192 @LoginRequired()
1200 @LoginRequired()
1193 @NotAnonymous()
1201 @NotAnonymous()
1194 @HasRepoPermissionAnyDecorator(
1202 @HasRepoPermissionAnyDecorator(
1195 'repository.read', 'repository.write', 'repository.admin')
1203 'repository.read', 'repository.write', 'repository.admin')
1196 @CSRFRequired()
1204 @CSRFRequired()
1197 @view_config(
1205 @view_config(
1198 route_name='pullrequest_merge', request_method='POST',
1206 route_name='pullrequest_merge', request_method='POST',
1199 renderer='json_ext')
1207 renderer='json_ext')
1200 def pull_request_merge(self):
1208 def pull_request_merge(self):
1201 """
1209 """
1202 Merge will perform a server-side merge of the specified
1210 Merge will perform a server-side merge of the specified
1203 pull request, if the pull request is approved and mergeable.
1211 pull request, if the pull request is approved and mergeable.
1204 After successful merging, the pull request is automatically
1212 After successful merging, the pull request is automatically
1205 closed, with a relevant comment.
1213 closed, with a relevant comment.
1206 """
1214 """
1207 pull_request = PullRequest.get_or_404(
1215 pull_request = PullRequest.get_or_404(
1208 self.request.matchdict['pull_request_id'])
1216 self.request.matchdict['pull_request_id'])
1209 _ = self.request.translate
1217 _ = self.request.translate
1210
1218
1211 if pull_request.is_state_changing():
1219 if pull_request.is_state_changing():
1212 log.debug('show: forbidden because pull request is in state %s',
1220 log.debug('show: forbidden because pull request is in state %s',
1213 pull_request.pull_request_state)
1221 pull_request.pull_request_state)
1214 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1222 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1215 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1223 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1216 pull_request.pull_request_state)
1224 pull_request.pull_request_state)
1217 h.flash(msg, category='error')
1225 h.flash(msg, category='error')
1218 raise HTTPFound(
1226 raise HTTPFound(
1219 h.route_path('pullrequest_show',
1227 h.route_path('pullrequest_show',
1220 repo_name=pull_request.target_repo.repo_name,
1228 repo_name=pull_request.target_repo.repo_name,
1221 pull_request_id=pull_request.pull_request_id))
1229 pull_request_id=pull_request.pull_request_id))
1222
1230
1223 self.load_default_context()
1231 self.load_default_context()
1224
1232
1225 with pull_request.set_state(PullRequest.STATE_UPDATING):
1233 with pull_request.set_state(PullRequest.STATE_UPDATING):
1226 check = MergeCheck.validate(
1234 check = MergeCheck.validate(
1227 pull_request, auth_user=self._rhodecode_user,
1235 pull_request, auth_user=self._rhodecode_user,
1228 translator=self.request.translate)
1236 translator=self.request.translate)
1229 merge_possible = not check.failed
1237 merge_possible = not check.failed
1230
1238
1231 for err_type, error_msg in check.errors:
1239 for err_type, error_msg in check.errors:
1232 h.flash(error_msg, category=err_type)
1240 h.flash(error_msg, category=err_type)
1233
1241
1234 if merge_possible:
1242 if merge_possible:
1235 log.debug("Pre-conditions checked, trying to merge.")
1243 log.debug("Pre-conditions checked, trying to merge.")
1236 extras = vcs_operation_context(
1244 extras = vcs_operation_context(
1237 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1245 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1238 username=self._rhodecode_db_user.username, action='push',
1246 username=self._rhodecode_db_user.username, action='push',
1239 scm=pull_request.target_repo.repo_type)
1247 scm=pull_request.target_repo.repo_type)
1240 with pull_request.set_state(PullRequest.STATE_UPDATING):
1248 with pull_request.set_state(PullRequest.STATE_UPDATING):
1241 self._merge_pull_request(
1249 self._merge_pull_request(
1242 pull_request, self._rhodecode_db_user, extras)
1250 pull_request, self._rhodecode_db_user, extras)
1243 else:
1251 else:
1244 log.debug("Pre-conditions failed, NOT merging.")
1252 log.debug("Pre-conditions failed, NOT merging.")
1245
1253
1246 raise HTTPFound(
1254 raise HTTPFound(
1247 h.route_path('pullrequest_show',
1255 h.route_path('pullrequest_show',
1248 repo_name=pull_request.target_repo.repo_name,
1256 repo_name=pull_request.target_repo.repo_name,
1249 pull_request_id=pull_request.pull_request_id))
1257 pull_request_id=pull_request.pull_request_id))
1250
1258
1251 def _merge_pull_request(self, pull_request, user, extras):
1259 def _merge_pull_request(self, pull_request, user, extras):
1252 _ = self.request.translate
1260 _ = self.request.translate
1253 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1261 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1254
1262
1255 if merge_resp.executed:
1263 if merge_resp.executed:
1256 log.debug("The merge was successful, closing the pull request.")
1264 log.debug("The merge was successful, closing the pull request.")
1257 PullRequestModel().close_pull_request(
1265 PullRequestModel().close_pull_request(
1258 pull_request.pull_request_id, user)
1266 pull_request.pull_request_id, user)
1259 Session().commit()
1267 Session().commit()
1260 msg = _('Pull request was successfully merged and closed.')
1268 msg = _('Pull request was successfully merged and closed.')
1261 h.flash(msg, category='success')
1269 h.flash(msg, category='success')
1262 else:
1270 else:
1263 log.debug(
1271 log.debug(
1264 "The merge was not successful. Merge response: %s", merge_resp)
1272 "The merge was not successful. Merge response: %s", merge_resp)
1265 msg = merge_resp.merge_status_message
1273 msg = merge_resp.merge_status_message
1266 h.flash(msg, category='error')
1274 h.flash(msg, category='error')
1267
1275
1268 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1276 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1269 _ = self.request.translate
1277 _ = self.request.translate
1270
1278
1271 get_default_reviewers_data, validate_default_reviewers = \
1279 get_default_reviewers_data, validate_default_reviewers = \
1272 PullRequestModel().get_reviewer_functions()
1280 PullRequestModel().get_reviewer_functions()
1273
1281
1274 try:
1282 try:
1275 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1283 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1276 except ValueError as e:
1284 except ValueError as e:
1277 log.error('Reviewers Validation: {}'.format(e))
1285 log.error('Reviewers Validation: {}'.format(e))
1278 h.flash(e, category='error')
1286 h.flash(e, category='error')
1279 return
1287 return
1280
1288
1281 old_calculated_status = pull_request.calculated_review_status()
1289 old_calculated_status = pull_request.calculated_review_status()
1282 PullRequestModel().update_reviewers(
1290 PullRequestModel().update_reviewers(
1283 pull_request, reviewers, self._rhodecode_user)
1291 pull_request, reviewers, self._rhodecode_user)
1284 h.flash(_('Pull request reviewers updated.'), category='success')
1292 h.flash(_('Pull request reviewers updated.'), category='success')
1285 Session().commit()
1293 Session().commit()
1286
1294
1287 # trigger status changed if change in reviewers changes the status
1295 # trigger status changed if change in reviewers changes the status
1288 calculated_status = pull_request.calculated_review_status()
1296 calculated_status = pull_request.calculated_review_status()
1289 if old_calculated_status != calculated_status:
1297 if old_calculated_status != calculated_status:
1290 PullRequestModel().trigger_pull_request_hook(
1298 PullRequestModel().trigger_pull_request_hook(
1291 pull_request, self._rhodecode_user, 'review_status_change',
1299 pull_request, self._rhodecode_user, 'review_status_change',
1292 data={'status': calculated_status})
1300 data={'status': calculated_status})
1293
1301
1294 @LoginRequired()
1302 @LoginRequired()
1295 @NotAnonymous()
1303 @NotAnonymous()
1296 @HasRepoPermissionAnyDecorator(
1304 @HasRepoPermissionAnyDecorator(
1297 'repository.read', 'repository.write', 'repository.admin')
1305 'repository.read', 'repository.write', 'repository.admin')
1298 @CSRFRequired()
1306 @CSRFRequired()
1299 @view_config(
1307 @view_config(
1300 route_name='pullrequest_delete', request_method='POST',
1308 route_name='pullrequest_delete', request_method='POST',
1301 renderer='json_ext')
1309 renderer='json_ext')
1302 def pull_request_delete(self):
1310 def pull_request_delete(self):
1303 _ = self.request.translate
1311 _ = self.request.translate
1304
1312
1305 pull_request = PullRequest.get_or_404(
1313 pull_request = PullRequest.get_or_404(
1306 self.request.matchdict['pull_request_id'])
1314 self.request.matchdict['pull_request_id'])
1307 self.load_default_context()
1315 self.load_default_context()
1308
1316
1309 pr_closed = pull_request.is_closed()
1317 pr_closed = pull_request.is_closed()
1310 allowed_to_delete = PullRequestModel().check_user_delete(
1318 allowed_to_delete = PullRequestModel().check_user_delete(
1311 pull_request, self._rhodecode_user) and not pr_closed
1319 pull_request, self._rhodecode_user) and not pr_closed
1312
1320
1313 # only owner can delete it !
1321 # only owner can delete it !
1314 if allowed_to_delete:
1322 if allowed_to_delete:
1315 PullRequestModel().delete(pull_request, self._rhodecode_user)
1323 PullRequestModel().delete(pull_request, self._rhodecode_user)
1316 Session().commit()
1324 Session().commit()
1317 h.flash(_('Successfully deleted pull request'),
1325 h.flash(_('Successfully deleted pull request'),
1318 category='success')
1326 category='success')
1319 raise HTTPFound(h.route_path('pullrequest_show_all',
1327 raise HTTPFound(h.route_path('pullrequest_show_all',
1320 repo_name=self.db_repo_name))
1328 repo_name=self.db_repo_name))
1321
1329
1322 log.warning('user %s tried to delete pull request without access',
1330 log.warning('user %s tried to delete pull request without access',
1323 self._rhodecode_user)
1331 self._rhodecode_user)
1324 raise HTTPNotFound()
1332 raise HTTPNotFound()
1325
1333
1326 @LoginRequired()
1334 @LoginRequired()
1327 @NotAnonymous()
1335 @NotAnonymous()
1328 @HasRepoPermissionAnyDecorator(
1336 @HasRepoPermissionAnyDecorator(
1329 'repository.read', 'repository.write', 'repository.admin')
1337 'repository.read', 'repository.write', 'repository.admin')
1330 @CSRFRequired()
1338 @CSRFRequired()
1331 @view_config(
1339 @view_config(
1332 route_name='pullrequest_comment_create', request_method='POST',
1340 route_name='pullrequest_comment_create', request_method='POST',
1333 renderer='json_ext')
1341 renderer='json_ext')
1334 def pull_request_comment_create(self):
1342 def pull_request_comment_create(self):
1335 _ = self.request.translate
1343 _ = self.request.translate
1336
1344
1337 pull_request = PullRequest.get_or_404(
1345 pull_request = PullRequest.get_or_404(
1338 self.request.matchdict['pull_request_id'])
1346 self.request.matchdict['pull_request_id'])
1339 pull_request_id = pull_request.pull_request_id
1347 pull_request_id = pull_request.pull_request_id
1340
1348
1341 if pull_request.is_closed():
1349 if pull_request.is_closed():
1342 log.debug('comment: forbidden because pull request is closed')
1350 log.debug('comment: forbidden because pull request is closed')
1343 raise HTTPForbidden()
1351 raise HTTPForbidden()
1344
1352
1345 allowed_to_comment = PullRequestModel().check_user_comment(
1353 allowed_to_comment = PullRequestModel().check_user_comment(
1346 pull_request, self._rhodecode_user)
1354 pull_request, self._rhodecode_user)
1347 if not allowed_to_comment:
1355 if not allowed_to_comment:
1348 log.debug(
1356 log.debug(
1349 'comment: forbidden because pull request is from forbidden repo')
1357 'comment: forbidden because pull request is from forbidden repo')
1350 raise HTTPForbidden()
1358 raise HTTPForbidden()
1351
1359
1352 c = self.load_default_context()
1360 c = self.load_default_context()
1353
1361
1354 status = self.request.POST.get('changeset_status', None)
1362 status = self.request.POST.get('changeset_status', None)
1355 text = self.request.POST.get('text')
1363 text = self.request.POST.get('text')
1356 comment_type = self.request.POST.get('comment_type')
1364 comment_type = self.request.POST.get('comment_type')
1357 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1365 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1358 close_pull_request = self.request.POST.get('close_pull_request')
1366 close_pull_request = self.request.POST.get('close_pull_request')
1359
1367
1360 # the logic here should work like following, if we submit close
1368 # the logic here should work like following, if we submit close
1361 # pr comment, use `close_pull_request_with_comment` function
1369 # pr comment, use `close_pull_request_with_comment` function
1362 # else handle regular comment logic
1370 # else handle regular comment logic
1363
1371
1364 if close_pull_request:
1372 if close_pull_request:
1365 # only owner or admin or person with write permissions
1373 # only owner or admin or person with write permissions
1366 allowed_to_close = PullRequestModel().check_user_update(
1374 allowed_to_close = PullRequestModel().check_user_update(
1367 pull_request, self._rhodecode_user)
1375 pull_request, self._rhodecode_user)
1368 if not allowed_to_close:
1376 if not allowed_to_close:
1369 log.debug('comment: forbidden because not allowed to close '
1377 log.debug('comment: forbidden because not allowed to close '
1370 'pull request %s', pull_request_id)
1378 'pull request %s', pull_request_id)
1371 raise HTTPForbidden()
1379 raise HTTPForbidden()
1372
1380
1373 # This also triggers `review_status_change`
1381 # This also triggers `review_status_change`
1374 comment, status = PullRequestModel().close_pull_request_with_comment(
1382 comment, status = PullRequestModel().close_pull_request_with_comment(
1375 pull_request, self._rhodecode_user, self.db_repo, message=text,
1383 pull_request, self._rhodecode_user, self.db_repo, message=text,
1376 auth_user=self._rhodecode_user)
1384 auth_user=self._rhodecode_user)
1377 Session().flush()
1385 Session().flush()
1378
1386
1379 PullRequestModel().trigger_pull_request_hook(
1387 PullRequestModel().trigger_pull_request_hook(
1380 pull_request, self._rhodecode_user, 'comment',
1388 pull_request, self._rhodecode_user, 'comment',
1381 data={'comment': comment})
1389 data={'comment': comment})
1382
1390
1383 else:
1391 else:
1384 # regular comment case, could be inline, or one with status.
1392 # regular comment case, could be inline, or one with status.
1385 # for that one we check also permissions
1393 # for that one we check also permissions
1386
1394
1387 allowed_to_change_status = PullRequestModel().check_user_change_status(
1395 allowed_to_change_status = PullRequestModel().check_user_change_status(
1388 pull_request, self._rhodecode_user)
1396 pull_request, self._rhodecode_user)
1389
1397
1390 if status and allowed_to_change_status:
1398 if status and allowed_to_change_status:
1391 message = (_('Status change %(transition_icon)s %(status)s')
1399 message = (_('Status change %(transition_icon)s %(status)s')
1392 % {'transition_icon': '>',
1400 % {'transition_icon': '>',
1393 'status': ChangesetStatus.get_status_lbl(status)})
1401 'status': ChangesetStatus.get_status_lbl(status)})
1394 text = text or message
1402 text = text or message
1395
1403
1396 comment = CommentsModel().create(
1404 comment = CommentsModel().create(
1397 text=text,
1405 text=text,
1398 repo=self.db_repo.repo_id,
1406 repo=self.db_repo.repo_id,
1399 user=self._rhodecode_user.user_id,
1407 user=self._rhodecode_user.user_id,
1400 pull_request=pull_request,
1408 pull_request=pull_request,
1401 f_path=self.request.POST.get('f_path'),
1409 f_path=self.request.POST.get('f_path'),
1402 line_no=self.request.POST.get('line'),
1410 line_no=self.request.POST.get('line'),
1403 status_change=(ChangesetStatus.get_status_lbl(status)
1411 status_change=(ChangesetStatus.get_status_lbl(status)
1404 if status and allowed_to_change_status else None),
1412 if status and allowed_to_change_status else None),
1405 status_change_type=(status
1413 status_change_type=(status
1406 if status and allowed_to_change_status else None),
1414 if status and allowed_to_change_status else None),
1407 comment_type=comment_type,
1415 comment_type=comment_type,
1408 resolves_comment_id=resolves_comment_id,
1416 resolves_comment_id=resolves_comment_id,
1409 auth_user=self._rhodecode_user
1417 auth_user=self._rhodecode_user
1410 )
1418 )
1411
1419
1412 if allowed_to_change_status:
1420 if allowed_to_change_status:
1413 # calculate old status before we change it
1421 # calculate old status before we change it
1414 old_calculated_status = pull_request.calculated_review_status()
1422 old_calculated_status = pull_request.calculated_review_status()
1415
1423
1416 # get status if set !
1424 # get status if set !
1417 if status:
1425 if status:
1418 ChangesetStatusModel().set_status(
1426 ChangesetStatusModel().set_status(
1419 self.db_repo.repo_id,
1427 self.db_repo.repo_id,
1420 status,
1428 status,
1421 self._rhodecode_user.user_id,
1429 self._rhodecode_user.user_id,
1422 comment,
1430 comment,
1423 pull_request=pull_request
1431 pull_request=pull_request
1424 )
1432 )
1425
1433
1426 Session().flush()
1434 Session().flush()
1427 # this is somehow required to get access to some relationship
1435 # this is somehow required to get access to some relationship
1428 # loaded on comment
1436 # loaded on comment
1429 Session().refresh(comment)
1437 Session().refresh(comment)
1430
1438
1431 PullRequestModel().trigger_pull_request_hook(
1439 PullRequestModel().trigger_pull_request_hook(
1432 pull_request, self._rhodecode_user, 'comment',
1440 pull_request, self._rhodecode_user, 'comment',
1433 data={'comment': comment})
1441 data={'comment': comment})
1434
1442
1435 # we now calculate the status of pull request, and based on that
1443 # we now calculate the status of pull request, and based on that
1436 # calculation we set the commits status
1444 # calculation we set the commits status
1437 calculated_status = pull_request.calculated_review_status()
1445 calculated_status = pull_request.calculated_review_status()
1438 if old_calculated_status != calculated_status:
1446 if old_calculated_status != calculated_status:
1439 PullRequestModel().trigger_pull_request_hook(
1447 PullRequestModel().trigger_pull_request_hook(
1440 pull_request, self._rhodecode_user, 'review_status_change',
1448 pull_request, self._rhodecode_user, 'review_status_change',
1441 data={'status': calculated_status})
1449 data={'status': calculated_status})
1442
1450
1443 Session().commit()
1451 Session().commit()
1444
1452
1445 data = {
1453 data = {
1446 'target_id': h.safeid(h.safe_unicode(
1454 'target_id': h.safeid(h.safe_unicode(
1447 self.request.POST.get('f_path'))),
1455 self.request.POST.get('f_path'))),
1448 }
1456 }
1449 if comment:
1457 if comment:
1450 c.co = comment
1458 c.co = comment
1451 rendered_comment = render(
1459 rendered_comment = render(
1452 'rhodecode:templates/changeset/changeset_comment_block.mako',
1460 'rhodecode:templates/changeset/changeset_comment_block.mako',
1453 self._get_template_context(c), self.request)
1461 self._get_template_context(c), self.request)
1454
1462
1455 data.update(comment.get_dict())
1463 data.update(comment.get_dict())
1456 data.update({'rendered_text': rendered_comment})
1464 data.update({'rendered_text': rendered_comment})
1457
1465
1458 return data
1466 return data
1459
1467
1460 @LoginRequired()
1468 @LoginRequired()
1461 @NotAnonymous()
1469 @NotAnonymous()
1462 @HasRepoPermissionAnyDecorator(
1470 @HasRepoPermissionAnyDecorator(
1463 'repository.read', 'repository.write', 'repository.admin')
1471 'repository.read', 'repository.write', 'repository.admin')
1464 @CSRFRequired()
1472 @CSRFRequired()
1465 @view_config(
1473 @view_config(
1466 route_name='pullrequest_comment_delete', request_method='POST',
1474 route_name='pullrequest_comment_delete', request_method='POST',
1467 renderer='json_ext')
1475 renderer='json_ext')
1468 def pull_request_comment_delete(self):
1476 def pull_request_comment_delete(self):
1469 pull_request = PullRequest.get_or_404(
1477 pull_request = PullRequest.get_or_404(
1470 self.request.matchdict['pull_request_id'])
1478 self.request.matchdict['pull_request_id'])
1471
1479
1472 comment = ChangesetComment.get_or_404(
1480 comment = ChangesetComment.get_or_404(
1473 self.request.matchdict['comment_id'])
1481 self.request.matchdict['comment_id'])
1474 comment_id = comment.comment_id
1482 comment_id = comment.comment_id
1475
1483
1476 if comment.immutable:
1484 if comment.immutable:
1477 # don't allow deleting comments that are immutable
1485 # don't allow deleting comments that are immutable
1478 raise HTTPForbidden()
1486 raise HTTPForbidden()
1479
1487
1480 if pull_request.is_closed():
1488 if pull_request.is_closed():
1481 log.debug('comment: forbidden because pull request is closed')
1489 log.debug('comment: forbidden because pull request is closed')
1482 raise HTTPForbidden()
1490 raise HTTPForbidden()
1483
1491
1484 if not comment:
1492 if not comment:
1485 log.debug('Comment with id:%s not found, skipping', comment_id)
1493 log.debug('Comment with id:%s not found, skipping', comment_id)
1486 # comment already deleted in another call probably
1494 # comment already deleted in another call probably
1487 return True
1495 return True
1488
1496
1489 if comment.pull_request.is_closed():
1497 if comment.pull_request.is_closed():
1490 # don't allow deleting comments on closed pull request
1498 # don't allow deleting comments on closed pull request
1491 raise HTTPForbidden()
1499 raise HTTPForbidden()
1492
1500
1493 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1501 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1494 super_admin = h.HasPermissionAny('hg.admin')()
1502 super_admin = h.HasPermissionAny('hg.admin')()
1495 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1503 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1496 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1504 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1497 comment_repo_admin = is_repo_admin and is_repo_comment
1505 comment_repo_admin = is_repo_admin and is_repo_comment
1498
1506
1499 if super_admin or comment_owner or comment_repo_admin:
1507 if super_admin or comment_owner or comment_repo_admin:
1500 old_calculated_status = comment.pull_request.calculated_review_status()
1508 old_calculated_status = comment.pull_request.calculated_review_status()
1501 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1509 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1502 Session().commit()
1510 Session().commit()
1503 calculated_status = comment.pull_request.calculated_review_status()
1511 calculated_status = comment.pull_request.calculated_review_status()
1504 if old_calculated_status != calculated_status:
1512 if old_calculated_status != calculated_status:
1505 PullRequestModel().trigger_pull_request_hook(
1513 PullRequestModel().trigger_pull_request_hook(
1506 comment.pull_request, self._rhodecode_user, 'review_status_change',
1514 comment.pull_request, self._rhodecode_user, 'review_status_change',
1507 data={'status': calculated_status})
1515 data={'status': calculated_status})
1508 return True
1516 return True
1509 else:
1517 else:
1510 log.warning('No permissions for user %s to delete comment_id: %s',
1518 log.warning('No permissions for user %s to delete comment_id: %s',
1511 self._rhodecode_db_user, comment_id)
1519 self._rhodecode_db_user, comment_id)
1512 raise HTTPNotFound()
1520 raise HTTPNotFound()
@@ -1,61 +1,73 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 from pyramid.view import view_config
23 from pyramid.view import view_config
24
24
25 from rhodecode.apps._base import RepoAppView
25 from rhodecode.apps._base import RepoAppView
26 from rhodecode.apps.repository.utils import get_default_reviewers_data
26 from rhodecode.apps.repository.utils import get_default_reviewers_data
27 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
27 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
28 from rhodecode.model.db import Repository
28 from rhodecode.model.db import Repository
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 class RepoReviewRulesView(RepoAppView):
33 class RepoReviewRulesView(RepoAppView):
34 def load_default_context(self):
34 def load_default_context(self):
35 c = self._get_local_tmpl_context()
35 c = self._get_local_tmpl_context()
36 return c
36 return c
37
37
38 @LoginRequired()
38 @LoginRequired()
39 @HasRepoPermissionAnyDecorator('repository.admin')
39 @HasRepoPermissionAnyDecorator('repository.admin')
40 @view_config(
40 @view_config(
41 route_name='repo_reviewers', request_method='GET',
41 route_name='repo_reviewers', request_method='GET',
42 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
42 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
43 def repo_review_rules(self):
43 def repo_review_rules(self):
44 c = self.load_default_context()
44 c = self.load_default_context()
45 c.active = 'reviewers'
45 c.active = 'reviewers'
46
46
47 return self._get_template_context(c)
47 return self._get_template_context(c)
48
48
49 @LoginRequired()
49 @LoginRequired()
50 @HasRepoPermissionAnyDecorator(
50 @HasRepoPermissionAnyDecorator(
51 'repository.read', 'repository.write', 'repository.admin')
51 'repository.read', 'repository.write', 'repository.admin')
52 @view_config(
52 @view_config(
53 route_name='repo_default_reviewers_data', request_method='GET',
53 route_name='repo_default_reviewers_data', request_method='GET',
54 renderer='json_ext')
54 renderer='json_ext')
55 def repo_default_reviewers_data(self):
55 def repo_default_reviewers_data(self):
56 self.load_default_context()
56 self.load_default_context()
57 target_repo_name = self.request.GET.get('target_repo', self.db_repo.repo_name)
57
58 request = self.request
59 source_repo = self.db_repo
60 source_repo_name = source_repo.repo_name
61 target_repo_name = request.GET.get('target_repo', source_repo_name)
58 target_repo = Repository.get_by_repo_name(target_repo_name)
62 target_repo = Repository.get_by_repo_name(target_repo_name)
63
64 source_ref = request.GET['source_ref']
65 target_ref = request.GET['target_ref']
66 source_commit = source_repo.get_commit(source_ref)
67 target_commit = target_repo.get_commit(target_ref)
68
69 current_user = request.user.get_instance()
59 review_data = get_default_reviewers_data(
70 review_data = get_default_reviewers_data(
60 self.db_repo.user, None, None, target_repo, None)
71 current_user, source_repo, source_commit, target_repo, target_commit)
72
61 return review_data
73 return review_data
@@ -1,1029 +1,1034 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def _remote(self):
74 def _remote(self):
75 repo_id = self.path
75 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
78 @LazyProperty
78 @LazyProperty
79 def bare(self):
79 def bare(self):
80 return self._remote.bare()
80 return self._remote.bare()
81
81
82 @LazyProperty
82 @LazyProperty
83 def head(self):
83 def head(self):
84 return self._remote.head()
84 return self._remote.head()
85
85
86 @CachedProperty
86 @CachedProperty
87 def commit_ids(self):
87 def commit_ids(self):
88 """
88 """
89 Returns list of commit ids, in ascending order. Being lazy
89 Returns list of commit ids, in ascending order. Being lazy
90 attribute allows external tools to inject commit ids from cache.
90 attribute allows external tools to inject commit ids from cache.
91 """
91 """
92 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
94 return commit_ids
94 return commit_ids
95
95
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs given ``cmd`` as git command and returns tuple
102 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check given url and try to verify if it's a valid
120 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
121 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
122 auth request that can cause whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
126 when the return code is non 200
127 """
127 """
128 # check first if it's not an url
128 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 bare=False):
151 bare=False):
152 if create and os.path.exists(self.path):
152 if create and os.path.exists(self.path):
153 raise RepositoryError(
153 raise RepositoryError(
154 "Cannot create repository at %s, location already exist"
154 "Cannot create repository at %s, location already exist"
155 % self.path)
155 % self.path)
156
156
157 if bare and do_workspace_checkout:
157 if bare and do_workspace_checkout:
158 raise RepositoryError("Cannot update a bare repository")
158 raise RepositoryError("Cannot update a bare repository")
159 try:
159 try:
160
160
161 if src_url:
161 if src_url:
162 # check URL before any actions
162 # check URL before any actions
163 GitRepository.check_url(src_url, self.config)
163 GitRepository.check_url(src_url, self.config)
164
164
165 if create:
165 if create:
166 os.makedirs(self.path, mode=0o755)
166 os.makedirs(self.path, mode=0o755)
167
167
168 if bare:
168 if bare:
169 self._remote.init_bare()
169 self._remote.init_bare()
170 else:
170 else:
171 self._remote.init()
171 self._remote.init()
172
172
173 if src_url and bare:
173 if src_url and bare:
174 # bare repository only allows a fetch and checkout is not allowed
174 # bare repository only allows a fetch and checkout is not allowed
175 self.fetch(src_url, commit_ids=None)
175 self.fetch(src_url, commit_ids=None)
176 elif src_url:
176 elif src_url:
177 self.pull(src_url, commit_ids=None,
177 self.pull(src_url, commit_ids=None,
178 update_after=do_workspace_checkout)
178 update_after=do_workspace_checkout)
179
179
180 else:
180 else:
181 if not self._remote.assert_correct_path():
181 if not self._remote.assert_correct_path():
182 raise RepositoryError(
182 raise RepositoryError(
183 'Path "%s" does not contain a Git repository' %
183 'Path "%s" does not contain a Git repository' %
184 (self.path,))
184 (self.path,))
185
185
186 # TODO: johbo: check if we have to translate the OSError here
186 # TODO: johbo: check if we have to translate the OSError here
187 except OSError as err:
187 except OSError as err:
188 raise RepositoryError(err)
188 raise RepositoryError(err)
189
189
def _get_all_commit_ids(self):
    """Return every commit id known to the remote backend."""
    return self._remote.get_all_commit_ids()
192
192
def _get_commit_ids(self, filters=None):
    """
    Return commit ids (oldest first) via ``git rev-list``, optionally
    narrowed by ``since``/``until``/``branch_name`` filters.
    """
    # Checking HEAD first is cheaper than letting the subprocess fail on
    # an empty repository.
    head = self._remote.head(show_exc=False)
    if not head:
        return []

    rev_scope = ['--branches', '--tags']
    extra_args = []

    if filters:
        if filters.get('since'):
            extra_args.append('--since=%s' % (filters['since']))
        if filters.get('until'):
            extra_args.append('--until=%s' % (filters['until']))
        if filters.get('branch_name'):
            # an explicit branch replaces the default --branches/--tags scope
            rev_scope = []
            extra_args.append(filters['branch_name'])
        rev_scope.extend(extra_args)

    try:
        output, __ = self.run_git_command(
            ['rev-list', '--reverse', '--date-order'] + rev_scope)
    except RepositoryError:
        # Can be raised for empty repositories
        return []
    return output.splitlines()
230
230
def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
    """
    Resolve a commit reference (sha, numeric index, tag/branch name, or a
    'tip'-style alias) into a full 40-character commit id.

    :raises CommitDoesNotExistError: when the reference cannot be resolved
    """
    def is_null(value):
        # true when `value` is made up entirely of zeros (the git null sha)
        return len(value) == commit_id_or_idx.count('0')

    # aliases for the most recent commit
    if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
        return self.commit_ids[-1]

    commit_missing_err = "Commit {} does not exist for `{}`".format(
        *map(safe_str, [commit_id_or_idx, self.name]))

    is_bstr = isinstance(commit_id_or_idx, (str, unicode))
    looks_numeric = (
        (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
        or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx))

    if looks_numeric:
        # treat the value as a positional index into commit_ids
        try:
            commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
        except Exception:
            raise CommitDoesNotExistError(commit_missing_err)

    elif is_bstr:
        # Need to call remote to translate id for tagging scenario
        try:
            remote_data = self._remote.get_object(commit_id_or_idx,
                                                  maybe_unreachable=maybe_unreachable)
            commit_id_or_idx = remote_data["commit_id"]
        except (CommitDoesNotExistError,):
            raise CommitDoesNotExistError(commit_missing_err)

    # Ensure we return full id
    if not SHA_PATTERN.match(str(commit_id_or_idx)):
        raise CommitDoesNotExistError(
            "Given commit id %s not recognized" % commit_id_or_idx)
    return commit_id_or_idx
263
263
def get_hook_location(self):
    """
    returns absolute path to location where hooks are stored
    """
    if self.bare:
        return os.path.join(self.path, 'hooks')
    # non-bare repos keep hooks under the .git control directory
    return os.path.join(self.path, '.git', 'hooks')
272
272
@LazyProperty
def last_change(self):
    """
    Returns last change made on this repository as
    `datetime.datetime` object.
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        # no usable commit -> fall back to filesystem modification time
        return utcdate_fromtimestamp(self._get_fs_mtime(), makedate()[1])
284
284
def _get_fs_mtime(self):
    """Best-effort repo mtime read from the index file (or HEAD if absent)."""
    control_dir = '' if self.bare else '.git'
    index_path = os.path.join(self.path, control_dir, "index")
    head_path = os.path.join(self.path, control_dir, "HEAD")
    if os.path.exists(index_path):
        return os.stat(index_path).st_mtime
    return os.stat(head_path).st_mtime
294
294
@LazyProperty
def description(self):
    """Repository description, falling back to the class default."""
    remote_description = self._remote.get_description()
    return safe_unicode(remote_description or self.DEFAULT_DESCRIPTION)
299
299
def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
    """
    Return an ``OrderedDict`` of (ref name -> sha) for all refs whose name
    starts with ``prefix``, sorted by name.
    """
    if self.is_empty():
        return OrderedDict()

    entries = []
    for ref, sha in self._refs.iteritems():
        if not ref.startswith(prefix):
            continue
        display_name = ref[len(prefix):] if strip_prefix else ref
        entries.append((safe_unicode(display_name), sha))

    return OrderedDict(
        sorted(entries, key=lambda entry: entry[0], reverse=reverse))
316
316
def _get_branches(self):
    # branch heads live under refs/heads/
    return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
319
319
@CachedProperty
def branches(self):
    """Mapping of branch name -> head commit id."""
    return self._get_branches()
323
323
@CachedProperty
def branches_closed(self):
    # git has no closed-branch concept; empty dict keeps the interface
    # uniform with the Mercurial backend
    return {}
327
327
@CachedProperty
def bookmarks(self):
    # bookmarks are a Mercurial feature; git exposes none
    return {}
331
331
@CachedProperty
def branches_all(self):
    """All branches, open and closed, merged into a single mapping."""
    combined = {}
    combined.update(self.branches)
    combined.update(self.branches_closed)
    return combined
338
338
@CachedProperty
def tags(self):
    """Mapping of tag name -> commit id."""
    return self._get_tags()
342
342
def _get_tags(self):
    # tags are listed in reverse name order
    return self._get_refs_entries(
        prefix='refs/tags/', strip_prefix=True, reverse=True)
345
345
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    # TODO: fix this method to apply annotated tags correct with message
    """
    Creates and returns a tag for the given ``commit_id``.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)

    commit = self.get_commit(commit_id=commit_id)
    message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

    self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

    # drop cached ref data so the new tag is visible immediately
    self._invalidate_prop_cache('tags')
    self._invalidate_prop_cache('_refs')

    return commit
371
371
def remove_tag(self, name, user, message=None, date=None):
    """
    Removes tag with the given ``name``.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)

    self._remote.tag_remove(name)
    # drop cached ref data so the removal is visible immediately
    for cached_prop in ('tags', '_refs'):
        self._invalidate_prop_cache(cached_prop)
389
389
def _get_refs(self):
    """Full ref-name -> sha mapping straight from the backend."""
    return self._remote.get_refs()
392
392
@CachedProperty
def _refs(self):
    # cached snapshot; invalidated by set_refs/remove_ref/tag operations
    return self._get_refs()
396
396
@property
def _ref_tree(self):
    """Nested dict view of refs, e.g. {'refs': {'heads': {'master': sha}}}."""
    tree = {}
    for ref, sha in self._refs.iteritems():
        node = tree
        parts = ref.split('/')
        # descend/create intermediate levels, store the sha at the leaf
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = sha
    return tree
407
407
def get_remote_ref(self, ref_name):
    """Return the sha of ``refs/remotes/origin/<ref_name>``, or None."""
    ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
    try:
        return self._refs[ref_key]
    except Exception:
        return None
414
414
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
               translate_tag=True, maybe_unreachable=False):
    """
    Returns `GitCommit` object representing commit from git repository
    at the given `commit_id` or head (most recent commit) if None given.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    if commit_id is not None:
        self._validate_commit_id(commit_id)
        try:
            # cached idx lets us skip a remote round-trip
            cached_idx = self._commit_ids[commit_id]
            return GitCommit(self, commit_id, cached_idx, pre_load=pre_load)
        except KeyError:
            pass

    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        try:
            _commit_id = self.commit_ids[commit_idx]
            if commit_idx < 0:
                # normalize negative indexes to their positive position
                commit_idx = self.commit_ids.index(_commit_id)
            return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
        except IndexError:
            # fall through and let the lookup below raise a proper error
            commit_id = commit_idx
    else:
        commit_id = "tip"

    if translate_tag:
        commit_id = self._lookup_commit(
            commit_id, maybe_unreachable=maybe_unreachable)

    try:
        cached_idx = self._commit_ids[commit_id]
    except KeyError:
        cached_idx = -1

    return GitCommit(self, commit_id, cached_idx, pre_load=pre_load)
454
454
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
    """
    Returns generator of `GitCommit` objects from start to end (both
    are inclusive), in ascending date order.

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
        ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
        ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
        branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
        Mercurial evolve
    :raise BranchDoesNotExistError: If given `branch_name` does not
        exist.
    :raise CommitDoesNotExistError: If commits for given `start` or
        `end` could not be found.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    self._validate_branch_name(branch_name)

    if start_id is not None:
        self._validate_commit_id(start_id)
    if end_id is not None:
        self._validate_commit_id(end_id)

    start_raw_id = self._lookup_commit(start_id)
    start_pos = self._commit_ids[start_raw_id] if start_id else None
    end_raw_id = self._lookup_commit(end_id)
    end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

    if None not in [start_id, end_id] and start_pos > end_pos:
        raise RepositoryError(
            "Start commit '%s' cannot be after end commit '%s'" %
            (start_id, end_id))

    if end_pos is not None:
        end_pos += 1  # make the end bound inclusive for slicing

    # NOTE: previously a throwaway `filter_` list was built only to be
    # tested for truthiness; an equivalent direct condition is used instead.
    if branch_name or start_date or end_date:
        # delegate filtering to `git rev-list` via the backend
        revfilters = {
            'branch_name': branch_name,
            'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
            'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
            'start': start_pos,
            'end': end_pos,
        }
        commit_ids = self._get_commit_ids(filters=revfilters)
    else:
        commit_ids = self.commit_ids

    if start_pos or end_pos:
        commit_ids = commit_ids[start_pos: end_pos]

    return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                               translate_tag=translate_tags)
534
534
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Returns (git like) *diff*, as plain text. Shows changes introduced by
    ``commit2`` since ``commit1``.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` - in this case, patch showing all
        the changes since empty state of the repository until ``commit2``
    :param commit2: Until which commits changes should be shown.
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    # empty path means "whole repository" -> no file filter
    file_filter = path or None

    raw_diff = self._remote.diff(
        commit1.raw_id, commit2.raw_id, file_filter=file_filter,
        opt_ignorews=ignore_whitespace,
        context=context)
    return GitDiff(raw_diff)
565
565
def strip(self, commit_id, branch_name):
    """
    Reset ``branch_name`` to the parent of ``commit_id``, effectively
    removing that commit from the branch; return the new commit count.
    """
    commit = self.get_commit(commit_id=commit_id)
    if commit.merge:
        raise Exception('Cannot reset to merge commit')

    # parent is going to be the new head now
    new_head = commit.parents[0]
    self._remote.set_refs('refs/heads/%s' % branch_name, new_head.raw_id)

    # clear cached properties
    for cached_prop in ('commit_ids', '_refs', 'branches'):
        self._invalidate_prop_cache(cached_prop)

    return len(self.commit_ids)
581
581
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the common-ancestor commit id of ``commit_id1`` (in this repo)
    and ``commit_id2`` (in ``repo2``), or None when no ancestor exists.
    """
    log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
              self, commit_id1, repo2, commit_id2)

    if commit_id1 == commit_id2:
        return commit_id1

    if self != repo2:
        # cross-repo case: the parent of the oldest commit missing from
        # this repo is the shared ancestor
        missing = self._remote.get_missing_revs(
            commit_id1, commit_id2, repo2.path)
        if missing:
            oldest_missing = repo2.get_commit(missing[-1])
            if oldest_missing.parents:
                ancestor_id = oldest_missing.parents[0].raw_id
            else:
                ancestor_id = None
        else:
            # no commits from other repo, ancestor_id is the commit_id2
            ancestor_id = commit_id2
    else:
        # same repository: git can answer directly
        output, __ = self.run_git_command(
            ['merge-base', commit_id1, commit_id2])
        ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

    log.debug('Found common ancestor with sha: %s', ancestor_id)

    return ancestor_id
604
609
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Return the list of commits that ``commit_id2`` adds on top of
    ``commit_id1``, oldest first.

    :param commit_id1: base commit id (in this repository)
    :param commit_id2: head commit id (lives in ``repo2``)
    :param repo2: repository containing the head; may equal ``self``
    :param merge: accepted for interface compatibility with other
        backends; unused by the git implementation
    :param pre_load: optional list of commit attributes to pre-load
    """
    # NOTE: removed a dead `ancestor_id = None` local that was never used.
    repo1 = self

    if commit_id1 == commit_id2:
        # identical revisions -> nothing in between
        return []

    if repo1 != repo2:
        # cross-repository compare: ask the backend which revisions of
        # repo2 are missing from repo1
        missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                    repo2.path)
        return [
            repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in reversed(missing_ids)]

    # same repository: plain `git log` over the revision range
    output, __ = repo1.run_git_command(
        ['log', '--reverse', '--pretty=format: %H', '-s',
         '%s..%s' % (commit_id1, commit_id2)])
    return [
        repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
        for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
626
631
@LazyProperty
def in_memory_commit(self):
    """
    Returns ``GitInMemoryCommit`` object for this repository.
    """
    return GitInMemoryCommit(self)
633
638
def pull(self, url, commit_ids=None, update_after=False):
    """
    Pull changes from external location. Pull is different in GIT
    that fetch since it's doing a checkout

    :param commit_ids: Optional. Can be set to a list of commit ids
        which shall be pulled from the other repository.
    """
    refs = None
    if commit_ids is not None:
        # resolve the requested commit ids back to remote ref names
        remote_refs = self._remote.get_remote_refs(url)
        refs = [ref_name for ref_name in remote_refs
                if remote_refs[ref_name] in commit_ids]
    self._remote.pull(url, refs=refs, update_after=update_after)
    self._remote.invalidate_vcs_cache()
648
653
def fetch(self, url, commit_ids=None):
    """
    Fetch all git objects from external location.
    """
    self._remote.sync_fetch(url, refs=commit_ids)
    # fetched objects change repo state -> drop the backend cache
    self._remote.invalidate_vcs_cache()
655
660
def push(self, url):
    """Push all refs to the external location at ``url``."""
    self._remote.sync_push(url, refs=None)
659
664
def set_refs(self, ref_name, commit_id):
    """Point ``ref_name`` at ``commit_id`` and drop the cached ref map."""
    self._remote.set_refs(ref_name, commit_id)
    self._invalidate_prop_cache('_refs')
663
668
def remove_ref(self, ref_name):
    """Delete ``ref_name`` and drop the cached ref map."""
    self._remote.remove_ref(ref_name)
    self._invalidate_prop_cache('_refs')
667
672
def run_gc(self, prune=True):
    """Run ``git gc --aggressive`` (optionally pruning now); return stderr."""
    args = ['gc', '--aggressive']
    if prune:
        args.append('--prune=now')
    _stdout, stderr = self.run_git_command(args, fail_on_stderr=False)
    return stderr
674
679
def _update_server_info(self):
    """
    runs gits update-server-info command in this repo instance
    """
    self._remote.update_server_info()
680
685
def _current_branch(self):
    """
    Return the name of the current branch.

    It only works for non bare repositories (i.e. repositories with a
    working copy)
    """
    if self.bare:
        raise RepositoryError('Bare git repos do not have active branches')

    if self.is_empty():
        return None

    output, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
    return output.strip()
696
701
697 def _checkout(self, branch_name, create=False, force=False):
702 def _checkout(self, branch_name, create=False, force=False):
698 """
703 """
699 Checkout a branch in the working directory.
704 Checkout a branch in the working directory.
700
705
701 It tries to create the branch if create is True, failing if the branch
706 It tries to create the branch if create is True, failing if the branch
702 already exists.
707 already exists.
703
708
704 It only works for non bare repositories (i.e. repositories with a
709 It only works for non bare repositories (i.e. repositories with a
705 working copy)
710 working copy)
706 """
711 """
707 if self.bare:
712 if self.bare:
708 raise RepositoryError('Cannot checkout branches in a bare git repo')
713 raise RepositoryError('Cannot checkout branches in a bare git repo')
709
714
710 cmd = ['checkout']
715 cmd = ['checkout']
711 if force:
716 if force:
712 cmd.append('-f')
717 cmd.append('-f')
713 if create:
718 if create:
714 cmd.append('-b')
719 cmd.append('-b')
715 cmd.append(branch_name)
720 cmd.append(branch_name)
716 self.run_git_command(cmd, fail_on_stderr=False)
721 self.run_git_command(cmd, fail_on_stderr=False)
717
722
718 def _create_branch(self, branch_name, commit_id):
723 def _create_branch(self, branch_name, commit_id):
719 """
724 """
720 creates a branch in a GIT repo
725 creates a branch in a GIT repo
721 """
726 """
722 self._remote.create_branch(branch_name, commit_id)
727 self._remote.create_branch(branch_name, commit_id)
723
728
724 def _identify(self):
729 def _identify(self):
725 """
730 """
726 Return the current state of the working directory.
731 Return the current state of the working directory.
727 """
732 """
728 if self.bare:
733 if self.bare:
729 raise RepositoryError('Bare git repos do not have active branches')
734 raise RepositoryError('Bare git repos do not have active branches')
730
735
731 if self.is_empty():
736 if self.is_empty():
732 return None
737 return None
733
738
734 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
739 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
735 return stdout.strip()
740 return stdout.strip()
736
741
737 def _local_clone(self, clone_path, branch_name, source_branch=None):
742 def _local_clone(self, clone_path, branch_name, source_branch=None):
738 """
743 """
739 Create a local clone of the current repo.
744 Create a local clone of the current repo.
740 """
745 """
741 # N.B.(skreft): the --branch option is required as otherwise the shallow
746 # N.B.(skreft): the --branch option is required as otherwise the shallow
742 # clone will only fetch the active branch.
747 # clone will only fetch the active branch.
743 cmd = ['clone', '--branch', branch_name,
748 cmd = ['clone', '--branch', branch_name,
744 self.path, os.path.abspath(clone_path)]
749 self.path, os.path.abspath(clone_path)]
745
750
746 self.run_git_command(cmd, fail_on_stderr=False)
751 self.run_git_command(cmd, fail_on_stderr=False)
747
752
748 # if we get the different source branch, make sure we also fetch it for
753 # if we get the different source branch, make sure we also fetch it for
749 # merge conditions
754 # merge conditions
750 if source_branch and source_branch != branch_name:
755 if source_branch and source_branch != branch_name:
751 # check if the ref exists.
756 # check if the ref exists.
752 shadow_repo = GitRepository(os.path.abspath(clone_path))
757 shadow_repo = GitRepository(os.path.abspath(clone_path))
753 if shadow_repo.get_remote_ref(source_branch):
758 if shadow_repo.get_remote_ref(source_branch):
754 cmd = ['fetch', self.path, source_branch]
759 cmd = ['fetch', self.path, source_branch]
755 self.run_git_command(cmd, fail_on_stderr=False)
760 self.run_git_command(cmd, fail_on_stderr=False)
756
761
757 def _local_fetch(self, repository_path, branch_name, use_origin=False):
762 def _local_fetch(self, repository_path, branch_name, use_origin=False):
758 """
763 """
759 Fetch a branch from a local repository.
764 Fetch a branch from a local repository.
760 """
765 """
761 repository_path = os.path.abspath(repository_path)
766 repository_path = os.path.abspath(repository_path)
762 if repository_path == self.path:
767 if repository_path == self.path:
763 raise ValueError('Cannot fetch from the same repository')
768 raise ValueError('Cannot fetch from the same repository')
764
769
765 if use_origin:
770 if use_origin:
766 branch_name = '+{branch}:refs/heads/{branch}'.format(
771 branch_name = '+{branch}:refs/heads/{branch}'.format(
767 branch=branch_name)
772 branch=branch_name)
768
773
769 cmd = ['fetch', '--no-tags', '--update-head-ok',
774 cmd = ['fetch', '--no-tags', '--update-head-ok',
770 repository_path, branch_name]
775 repository_path, branch_name]
771 self.run_git_command(cmd, fail_on_stderr=False)
776 self.run_git_command(cmd, fail_on_stderr=False)
772
777
773 def _local_reset(self, branch_name):
778 def _local_reset(self, branch_name):
774 branch_name = '{}'.format(branch_name)
779 branch_name = '{}'.format(branch_name)
775 cmd = ['reset', '--hard', branch_name, '--']
780 cmd = ['reset', '--hard', branch_name, '--']
776 self.run_git_command(cmd, fail_on_stderr=False)
781 self.run_git_command(cmd, fail_on_stderr=False)
777
782
778 def _last_fetch_heads(self):
783 def _last_fetch_heads(self):
779 """
784 """
780 Return the last fetched heads that need merging.
785 Return the last fetched heads that need merging.
781
786
782 The algorithm is defined at
787 The algorithm is defined at
783 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
788 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
784 """
789 """
785 if not self.bare:
790 if not self.bare:
786 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
791 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
787 else:
792 else:
788 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
793 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
789
794
790 heads = []
795 heads = []
791 with open(fetch_heads_path) as f:
796 with open(fetch_heads_path) as f:
792 for line in f:
797 for line in f:
793 if ' not-for-merge ' in line:
798 if ' not-for-merge ' in line:
794 continue
799 continue
795 line = re.sub('\t.*', '', line, flags=re.DOTALL)
800 line = re.sub('\t.*', '', line, flags=re.DOTALL)
796 heads.append(line)
801 heads.append(line)
797
802
798 return heads
803 return heads
799
804
800 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
805 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
801 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
806 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
802
807
803 def _local_pull(self, repository_path, branch_name, ff_only=True):
808 def _local_pull(self, repository_path, branch_name, ff_only=True):
804 """
809 """
805 Pull a branch from a local repository.
810 Pull a branch from a local repository.
806 """
811 """
807 if self.bare:
812 if self.bare:
808 raise RepositoryError('Cannot pull into a bare git repository')
813 raise RepositoryError('Cannot pull into a bare git repository')
809 # N.B.(skreft): The --ff-only option is to make sure this is a
814 # N.B.(skreft): The --ff-only option is to make sure this is a
810 # fast-forward (i.e., we are only pulling new changes and there are no
815 # fast-forward (i.e., we are only pulling new changes and there are no
811 # conflicts with our current branch)
816 # conflicts with our current branch)
812 # Additionally, that option needs to go before --no-tags, otherwise git
817 # Additionally, that option needs to go before --no-tags, otherwise git
813 # pull complains about it being an unknown flag.
818 # pull complains about it being an unknown flag.
814 cmd = ['pull']
819 cmd = ['pull']
815 if ff_only:
820 if ff_only:
816 cmd.append('--ff-only')
821 cmd.append('--ff-only')
817 cmd.extend(['--no-tags', repository_path, branch_name])
822 cmd.extend(['--no-tags', repository_path, branch_name])
818 self.run_git_command(cmd, fail_on_stderr=False)
823 self.run_git_command(cmd, fail_on_stderr=False)
819
824
820 def _local_merge(self, merge_message, user_name, user_email, heads):
825 def _local_merge(self, merge_message, user_name, user_email, heads):
821 """
826 """
822 Merge the given head into the checked out branch.
827 Merge the given head into the checked out branch.
823
828
824 It will force a merge commit.
829 It will force a merge commit.
825
830
826 Currently it raises an error if the repo is empty, as it is not possible
831 Currently it raises an error if the repo is empty, as it is not possible
827 to create a merge commit in an empty repo.
832 to create a merge commit in an empty repo.
828
833
829 :param merge_message: The message to use for the merge commit.
834 :param merge_message: The message to use for the merge commit.
830 :param heads: the heads to merge.
835 :param heads: the heads to merge.
831 """
836 """
832 if self.bare:
837 if self.bare:
833 raise RepositoryError('Cannot merge into a bare git repository')
838 raise RepositoryError('Cannot merge into a bare git repository')
834
839
835 if not heads:
840 if not heads:
836 return
841 return
837
842
838 if self.is_empty():
843 if self.is_empty():
839 # TODO(skreft): do something more robust in this case.
844 # TODO(skreft): do something more robust in this case.
840 raise RepositoryError('Do not know how to merge into empty repositories yet')
845 raise RepositoryError('Do not know how to merge into empty repositories yet')
841 unresolved = None
846 unresolved = None
842
847
843 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
848 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
844 # commit message. We also specify the user who is doing the merge.
849 # commit message. We also specify the user who is doing the merge.
845 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
850 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
846 '-c', 'user.email=%s' % safe_str(user_email),
851 '-c', 'user.email=%s' % safe_str(user_email),
847 'merge', '--no-ff', '-m', safe_str(merge_message)]
852 'merge', '--no-ff', '-m', safe_str(merge_message)]
848
853
849 merge_cmd = cmd + heads
854 merge_cmd = cmd + heads
850
855
851 try:
856 try:
852 self.run_git_command(merge_cmd, fail_on_stderr=False)
857 self.run_git_command(merge_cmd, fail_on_stderr=False)
853 except RepositoryError:
858 except RepositoryError:
854 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
859 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
855 fail_on_stderr=False)[0].splitlines()
860 fail_on_stderr=False)[0].splitlines()
856 # NOTE(marcink): we add U notation for consistent with HG backend output
861 # NOTE(marcink): we add U notation for consistent with HG backend output
857 unresolved = ['U {}'.format(f) for f in files]
862 unresolved = ['U {}'.format(f) for f in files]
858
863
859 # Cleanup any merge leftovers
864 # Cleanup any merge leftovers
860 self._remote.invalidate_vcs_cache()
865 self._remote.invalidate_vcs_cache()
861 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
866 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
862
867
863 if unresolved:
868 if unresolved:
864 raise UnresolvedFilesInRepo(unresolved)
869 raise UnresolvedFilesInRepo(unresolved)
865 else:
870 else:
866 raise
871 raise
867
872
868 def _local_push(
873 def _local_push(
869 self, source_branch, repository_path, target_branch,
874 self, source_branch, repository_path, target_branch,
870 enable_hooks=False, rc_scm_data=None):
875 enable_hooks=False, rc_scm_data=None):
871 """
876 """
872 Push the source_branch to the given repository and target_branch.
877 Push the source_branch to the given repository and target_branch.
873
878
874 Currently it if the target_branch is not master and the target repo is
879 Currently it if the target_branch is not master and the target repo is
875 empty, the push will work, but then GitRepository won't be able to find
880 empty, the push will work, but then GitRepository won't be able to find
876 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
881 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
877 pointing to master, which does not exist).
882 pointing to master, which does not exist).
878
883
879 It does not run the hooks in the target repo.
884 It does not run the hooks in the target repo.
880 """
885 """
881 # TODO(skreft): deal with the case in which the target repo is empty,
886 # TODO(skreft): deal with the case in which the target repo is empty,
882 # and the target_branch is not master.
887 # and the target_branch is not master.
883 target_repo = GitRepository(repository_path)
888 target_repo = GitRepository(repository_path)
884 if (not target_repo.bare and
889 if (not target_repo.bare and
885 target_repo._current_branch() == target_branch):
890 target_repo._current_branch() == target_branch):
886 # Git prevents pushing to the checked out branch, so simulate it by
891 # Git prevents pushing to the checked out branch, so simulate it by
887 # pulling into the target repository.
892 # pulling into the target repository.
888 target_repo._local_pull(self.path, source_branch)
893 target_repo._local_pull(self.path, source_branch)
889 else:
894 else:
890 cmd = ['push', os.path.abspath(repository_path),
895 cmd = ['push', os.path.abspath(repository_path),
891 '%s:%s' % (source_branch, target_branch)]
896 '%s:%s' % (source_branch, target_branch)]
892 gitenv = {}
897 gitenv = {}
893 if rc_scm_data:
898 if rc_scm_data:
894 gitenv.update({'RC_SCM_DATA': rc_scm_data})
899 gitenv.update({'RC_SCM_DATA': rc_scm_data})
895
900
896 if not enable_hooks:
901 if not enable_hooks:
897 gitenv['RC_SKIP_HOOKS'] = '1'
902 gitenv['RC_SKIP_HOOKS'] = '1'
898 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
903 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
899
904
900 def _get_new_pr_branch(self, source_branch, target_branch):
905 def _get_new_pr_branch(self, source_branch, target_branch):
901 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
902 pr_branches = []
907 pr_branches = []
903 for branch in self.branches:
908 for branch in self.branches:
904 if branch.startswith(prefix):
909 if branch.startswith(prefix):
905 pr_branches.append(int(branch[len(prefix):]))
910 pr_branches.append(int(branch[len(prefix):]))
906
911
907 if not pr_branches:
912 if not pr_branches:
908 branch_id = 0
913 branch_id = 0
909 else:
914 else:
910 branch_id = max(pr_branches) + 1
915 branch_id = max(pr_branches) + 1
911
916
912 return '%s%d' % (prefix, branch_id)
917 return '%s%d' % (prefix, branch_id)
913
918
914 def _maybe_prepare_merge_workspace(
919 def _maybe_prepare_merge_workspace(
915 self, repo_id, workspace_id, target_ref, source_ref):
920 self, repo_id, workspace_id, target_ref, source_ref):
916 shadow_repository_path = self._get_shadow_repository_path(
921 shadow_repository_path = self._get_shadow_repository_path(
917 self.path, repo_id, workspace_id)
922 self.path, repo_id, workspace_id)
918 if not os.path.exists(shadow_repository_path):
923 if not os.path.exists(shadow_repository_path):
919 self._local_clone(
924 self._local_clone(
920 shadow_repository_path, target_ref.name, source_ref.name)
925 shadow_repository_path, target_ref.name, source_ref.name)
921 log.debug('Prepared %s shadow repository in %s',
926 log.debug('Prepared %s shadow repository in %s',
922 self.alias, shadow_repository_path)
927 self.alias, shadow_repository_path)
923
928
924 return shadow_repository_path
929 return shadow_repository_path
925
930
926 def _merge_repo(self, repo_id, workspace_id, target_ref,
931 def _merge_repo(self, repo_id, workspace_id, target_ref,
927 source_repo, source_ref, merge_message,
932 source_repo, source_ref, merge_message,
928 merger_name, merger_email, dry_run=False,
933 merger_name, merger_email, dry_run=False,
929 use_rebase=False, close_branch=False):
934 use_rebase=False, close_branch=False):
930
935
931 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
936 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
932 'rebase' if use_rebase else 'merge', dry_run)
937 'rebase' if use_rebase else 'merge', dry_run)
933 if target_ref.commit_id != self.branches[target_ref.name]:
938 if target_ref.commit_id != self.branches[target_ref.name]:
934 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
939 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
935 target_ref.commit_id, self.branches[target_ref.name])
940 target_ref.commit_id, self.branches[target_ref.name])
936 return MergeResponse(
941 return MergeResponse(
937 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
942 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
938 metadata={'target_ref': target_ref})
943 metadata={'target_ref': target_ref})
939
944
940 shadow_repository_path = self._maybe_prepare_merge_workspace(
945 shadow_repository_path = self._maybe_prepare_merge_workspace(
941 repo_id, workspace_id, target_ref, source_ref)
946 repo_id, workspace_id, target_ref, source_ref)
942 shadow_repo = self.get_shadow_instance(shadow_repository_path)
947 shadow_repo = self.get_shadow_instance(shadow_repository_path)
943
948
944 # checkout source, if it's different. Otherwise we could not
949 # checkout source, if it's different. Otherwise we could not
945 # fetch proper commits for merge testing
950 # fetch proper commits for merge testing
946 if source_ref.name != target_ref.name:
951 if source_ref.name != target_ref.name:
947 if shadow_repo.get_remote_ref(source_ref.name):
952 if shadow_repo.get_remote_ref(source_ref.name):
948 shadow_repo._checkout(source_ref.name, force=True)
953 shadow_repo._checkout(source_ref.name, force=True)
949
954
950 # checkout target, and fetch changes
955 # checkout target, and fetch changes
951 shadow_repo._checkout(target_ref.name, force=True)
956 shadow_repo._checkout(target_ref.name, force=True)
952
957
953 # fetch/reset pull the target, in case it is changed
958 # fetch/reset pull the target, in case it is changed
954 # this handles even force changes
959 # this handles even force changes
955 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
960 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
956 shadow_repo._local_reset(target_ref.name)
961 shadow_repo._local_reset(target_ref.name)
957
962
958 # Need to reload repo to invalidate the cache, or otherwise we cannot
963 # Need to reload repo to invalidate the cache, or otherwise we cannot
959 # retrieve the last target commit.
964 # retrieve the last target commit.
960 shadow_repo = self.get_shadow_instance(shadow_repository_path)
965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
961 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
966 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
962 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
967 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
963 target_ref, target_ref.commit_id,
968 target_ref, target_ref.commit_id,
964 shadow_repo.branches[target_ref.name])
969 shadow_repo.branches[target_ref.name])
965 return MergeResponse(
970 return MergeResponse(
966 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
971 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
967 metadata={'target_ref': target_ref})
972 metadata={'target_ref': target_ref})
968
973
969 # calculate new branch
974 # calculate new branch
970 pr_branch = shadow_repo._get_new_pr_branch(
975 pr_branch = shadow_repo._get_new_pr_branch(
971 source_ref.name, target_ref.name)
976 source_ref.name, target_ref.name)
972 log.debug('using pull-request merge branch: `%s`', pr_branch)
977 log.debug('using pull-request merge branch: `%s`', pr_branch)
973 # checkout to temp branch, and fetch changes
978 # checkout to temp branch, and fetch changes
974 shadow_repo._checkout(pr_branch, create=True)
979 shadow_repo._checkout(pr_branch, create=True)
975 try:
980 try:
976 shadow_repo._local_fetch(source_repo.path, source_ref.name)
981 shadow_repo._local_fetch(source_repo.path, source_ref.name)
977 except RepositoryError:
982 except RepositoryError:
978 log.exception('Failure when doing local fetch on '
983 log.exception('Failure when doing local fetch on '
979 'shadow repo: %s', shadow_repo)
984 'shadow repo: %s', shadow_repo)
980 return MergeResponse(
985 return MergeResponse(
981 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
986 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
982 metadata={'source_ref': source_ref})
987 metadata={'source_ref': source_ref})
983
988
984 merge_ref = None
989 merge_ref = None
985 merge_failure_reason = MergeFailureReason.NONE
990 merge_failure_reason = MergeFailureReason.NONE
986 metadata = {}
991 metadata = {}
987 try:
992 try:
988 shadow_repo._local_merge(merge_message, merger_name, merger_email,
993 shadow_repo._local_merge(merge_message, merger_name, merger_email,
989 [source_ref.commit_id])
994 [source_ref.commit_id])
990 merge_possible = True
995 merge_possible = True
991
996
992 # Need to invalidate the cache, or otherwise we
997 # Need to invalidate the cache, or otherwise we
993 # cannot retrieve the merge commit.
998 # cannot retrieve the merge commit.
994 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
999 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
995 merge_commit_id = shadow_repo.branches[pr_branch]
1000 merge_commit_id = shadow_repo.branches[pr_branch]
996
1001
997 # Set a reference pointing to the merge commit. This reference may
1002 # Set a reference pointing to the merge commit. This reference may
998 # be used to easily identify the last successful merge commit in
1003 # be used to easily identify the last successful merge commit in
999 # the shadow repository.
1004 # the shadow repository.
1000 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1005 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1001 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1006 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1002 except RepositoryError as e:
1007 except RepositoryError as e:
1003 log.exception('Failure when doing local merge on git shadow repo')
1008 log.exception('Failure when doing local merge on git shadow repo')
1004 if isinstance(e, UnresolvedFilesInRepo):
1009 if isinstance(e, UnresolvedFilesInRepo):
1005 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1010 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1006
1011
1007 merge_possible = False
1012 merge_possible = False
1008 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1013 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1009
1014
1010 if merge_possible and not dry_run:
1015 if merge_possible and not dry_run:
1011 try:
1016 try:
1012 shadow_repo._local_push(
1017 shadow_repo._local_push(
1013 pr_branch, self.path, target_ref.name, enable_hooks=True,
1018 pr_branch, self.path, target_ref.name, enable_hooks=True,
1014 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1019 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1015 merge_succeeded = True
1020 merge_succeeded = True
1016 except RepositoryError:
1021 except RepositoryError:
1017 log.exception(
1022 log.exception(
1018 'Failure when doing local push from the shadow '
1023 'Failure when doing local push from the shadow '
1019 'repository to the target repository at %s.', self.path)
1024 'repository to the target repository at %s.', self.path)
1020 merge_succeeded = False
1025 merge_succeeded = False
1021 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1026 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1022 metadata['target'] = 'git shadow repo'
1027 metadata['target'] = 'git shadow repo'
1023 metadata['merge_commit'] = pr_branch
1028 metadata['merge_commit'] = pr_branch
1024 else:
1029 else:
1025 merge_succeeded = False
1030 merge_succeeded = False
1026
1031
1027 return MergeResponse(
1032 return MergeResponse(
1028 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1033 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1029 metadata=metadata)
1034 metadata=metadata)
@@ -1,979 +1,986 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
302
300 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
301 return commit_id1
304 return commit_id1
302
305
303 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
304 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
305 other_path=repo2.path)
308 other_path=repo2.path)
306 return repo2[ancestors[0]].raw_id if ancestors else None
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
307
314
308 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
309 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
310 commits = []
317 commits = []
311 else:
318 else:
312 if merge:
319 if merge:
313 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
314 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
315 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
316 else:
323 else:
317 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
318 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
319 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
320
327
321 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
322 for idx in indexes]
329 for idx in indexes]
323
330
324 return commits
331 return commits
325
332
326 @staticmethod
333 @staticmethod
327 def check_url(url, config):
334 def check_url(url, config):
328 """
335 """
329 Function will check given url and try to verify if it's a valid
336 Function will check given url and try to verify if it's a valid
330 link. Sometimes it may happened that mercurial will issue basic
337 link. Sometimes it may happened that mercurial will issue basic
331 auth request that can cause whole API to hang when used from python
338 auth request that can cause whole API to hang when used from python
332 or other external calls.
339 or other external calls.
333
340
334 On failures it'll raise urllib2.HTTPError, exception is also thrown
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
335 when the return code is non 200
342 when the return code is non 200
336 """
343 """
337 # check first if it's not an local url
344 # check first if it's not an local url
338 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
339 return True
346 return True
340
347
341 # Request the _remote to verify the url
348 # Request the _remote to verify the url
342 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
343
350
344 @staticmethod
351 @staticmethod
345 def is_valid_repository(path):
352 def is_valid_repository(path):
346 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
347
354
348 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
349 """
356 """
350 Function will check for mercurial repository in given path. If there
357 Function will check for mercurial repository in given path. If there
351 is no repository in that path it will raise an exception unless
358 is no repository in that path it will raise an exception unless
352 `create` parameter is set to True - in that case repository would
359 `create` parameter is set to True - in that case repository would
353 be created.
360 be created.
354
361
355 If `src_url` is given, would try to clone repository from the
362 If `src_url` is given, would try to clone repository from the
356 location at given clone_point. Additionally it'll make update to
363 location at given clone_point. Additionally it'll make update to
357 working copy accordingly to `do_workspace_checkout` flag.
364 working copy accordingly to `do_workspace_checkout` flag.
358 """
365 """
359 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
360 raise RepositoryError(
367 raise RepositoryError(
361 "Cannot create repository at %s, location already exist"
368 "Cannot create repository at %s, location already exist"
362 % self.path)
369 % self.path)
363
370
364 if src_url:
371 if src_url:
365 url = str(self._get_url(src_url))
372 url = str(self._get_url(src_url))
366 MercurialRepository.check_url(url, self.config)
373 MercurialRepository.check_url(url, self.config)
367
374
368 self._remote.clone(url, self.path, do_workspace_checkout)
375 self._remote.clone(url, self.path, do_workspace_checkout)
369
376
370 # Don't try to create if we've already cloned repo
377 # Don't try to create if we've already cloned repo
371 create = False
378 create = False
372
379
373 if create:
380 if create:
374 os.makedirs(self.path, mode=0o755)
381 os.makedirs(self.path, mode=0o755)
375 self._remote.localrepository(create)
382 self._remote.localrepository(create)
376
383
377 @LazyProperty
384 @LazyProperty
378 def in_memory_commit(self):
385 def in_memory_commit(self):
379 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
380
387
381 @LazyProperty
388 @LazyProperty
382 def description(self):
389 def description(self):
383 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
384 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
385 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
386
393
387 @LazyProperty
394 @LazyProperty
388 def contact(self):
395 def contact(self):
389 contact = (
396 contact = (
390 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
391 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
392 return safe_unicode(contact or self.DEFAULT_CONTACT)
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
393
400
394 @LazyProperty
401 @LazyProperty
395 def last_change(self):
402 def last_change(self):
396 """
403 """
397 Returns last change made on this repository as
404 Returns last change made on this repository as
398 `datetime.datetime` object.
405 `datetime.datetime` object.
399 """
406 """
400 try:
407 try:
401 return self.get_commit().date
408 return self.get_commit().date
402 except RepositoryError:
409 except RepositoryError:
403 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
404 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
405
412
406 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
407 # fallback to filesystem
414 # fallback to filesystem
408 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
409 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
410 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
411 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
412 else:
419 else:
413 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
414
421
415 def _get_url(self, url):
422 def _get_url(self, url):
416 """
423 """
417 Returns normalized url. If schema is not given, would fall
424 Returns normalized url. If schema is not given, would fall
418 to filesystem
425 to filesystem
419 (``file:///``) schema.
426 (``file:///``) schema.
420 """
427 """
421 url = url.encode('utf8')
428 url = url.encode('utf8')
422 if url != 'default' and '://' not in url:
429 if url != 'default' and '://' not in url:
423 url = "file:" + urllib.pathname2url(url)
430 url = "file:" + urllib.pathname2url(url)
424 return url
431 return url
425
432
426 def get_hook_location(self):
433 def get_hook_location(self):
427 """
434 """
428 returns absolute path to location where hooks are stored
435 returns absolute path to location where hooks are stored
429 """
436 """
430 return os.path.join(self.path, '.hg', '.hgrc')
437 return os.path.join(self.path, '.hg', '.hgrc')
431
438
432 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
433 translate_tag=None, maybe_unreachable=False):
440 translate_tag=None, maybe_unreachable=False):
434 """
441 """
435 Returns ``MercurialCommit`` object representing repository's
442 Returns ``MercurialCommit`` object representing repository's
436 commit at the given `commit_id` or `commit_idx`.
443 commit at the given `commit_id` or `commit_idx`.
437 """
444 """
438 if self.is_empty():
445 if self.is_empty():
439 raise EmptyRepositoryError("There are no commits yet")
446 raise EmptyRepositoryError("There are no commits yet")
440
447
441 if commit_id is not None:
448 if commit_id is not None:
442 self._validate_commit_id(commit_id)
449 self._validate_commit_id(commit_id)
443 try:
450 try:
444 # we have cached idx, use it without contacting the remote
451 # we have cached idx, use it without contacting the remote
445 idx = self._commit_ids[commit_id]
452 idx = self._commit_ids[commit_id]
446 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
447 except KeyError:
454 except KeyError:
448 pass
455 pass
449
456
450 elif commit_idx is not None:
457 elif commit_idx is not None:
451 self._validate_commit_idx(commit_idx)
458 self._validate_commit_idx(commit_idx)
452 try:
459 try:
453 _commit_id = self.commit_ids[commit_idx]
460 _commit_id = self.commit_ids[commit_idx]
454 if commit_idx < 0:
461 if commit_idx < 0:
455 commit_idx = self.commit_ids.index(_commit_id)
462 commit_idx = self.commit_ids.index(_commit_id)
456
463
457 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
458 except IndexError:
465 except IndexError:
459 commit_id = commit_idx
466 commit_id = commit_idx
460 else:
467 else:
461 commit_id = "tip"
468 commit_id = "tip"
462
469
463 if isinstance(commit_id, unicode):
470 if isinstance(commit_id, unicode):
464 commit_id = safe_str(commit_id)
471 commit_id = safe_str(commit_id)
465
472
466 try:
473 try:
467 raw_id, idx = self._remote.lookup(commit_id, both=True)
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
468 except CommitDoesNotExistError:
475 except CommitDoesNotExistError:
469 msg = "Commit {} does not exist for `{}`".format(
476 msg = "Commit {} does not exist for `{}`".format(
470 *map(safe_str, [commit_id, self.name]))
477 *map(safe_str, [commit_id, self.name]))
471 raise CommitDoesNotExistError(msg)
478 raise CommitDoesNotExistError(msg)
472
479
473 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
474
481
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param pre_load: attributes to pre-load on each produced commit object
        :param translate_tags: accepted for interface parity with other
            backends; not used by this implementation — TODO confirm
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        # Resolve the optional start/end commit ids to indexes into the
        # repository's commit-id list; None means "open-ended".
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # Make the end index inclusive for the slice below.
        if end_pos is not None:
            end_pos += 1

        # Build a Mercurial revset expression from the requested filters.
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # Filtered case: ask the remote to evaluate the revset and use
            # the index-based generator over the resulting revision numbers.
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        # Apply the start/end window on top of the (possibly filtered) set.
        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
557
564
558 def pull(self, url, commit_ids=None):
565 def pull(self, url, commit_ids=None):
559 """
566 """
560 Pull changes from external location.
567 Pull changes from external location.
561
568
562 :param commit_ids: Optional. Can be set to a list of commit ids
569 :param commit_ids: Optional. Can be set to a list of commit ids
563 which shall be pulled from the other repository.
570 which shall be pulled from the other repository.
564 """
571 """
565 url = self._get_url(url)
572 url = self._get_url(url)
566 self._remote.pull(url, commit_ids=commit_ids)
573 self._remote.pull(url, commit_ids=commit_ids)
567 self._remote.invalidate_vcs_cache()
574 self._remote.invalidate_vcs_cache()
568
575
569 def fetch(self, url, commit_ids=None):
576 def fetch(self, url, commit_ids=None):
570 """
577 """
571 Backward compatibility with GIT fetch==pull
578 Backward compatibility with GIT fetch==pull
572 """
579 """
573 return self.pull(url, commit_ids=commit_ids)
580 return self.pull(url, commit_ids=commit_ids)
574
581
575 def push(self, url):
582 def push(self, url):
576 url = self._get_url(url)
583 url = self._get_url(url)
577 self._remote.sync_push(url)
584 self._remote.sync_push(url)
578
585
579 def _local_clone(self, clone_path):
586 def _local_clone(self, clone_path):
580 """
587 """
581 Create a local clone of the current repo.
588 Create a local clone of the current repo.
582 """
589 """
583 self._remote.clone(self.path, clone_path, update_after_clone=True,
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
584 hooks=False)
591 hooks=False)
585
592
586 def _update(self, revision, clean=False):
593 def _update(self, revision, clean=False):
587 """
594 """
588 Update the working copy to the specified revision.
595 Update the working copy to the specified revision.
589 """
596 """
590 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
591 self._remote.update(revision, clean=clean)
598 self._remote.update(revision, clean=clean)
592
599
593 def _identify(self):
600 def _identify(self):
594 """
601 """
595 Return the current state of the working directory.
602 Return the current state of the working directory.
596 """
603 """
597 return self._remote.identify().strip().rstrip('+')
604 return self._remote.identify().strip().rstrip('+')
598
605
599 def _heads(self, branch=None):
606 def _heads(self, branch=None):
600 """
607 """
601 Return the commit ids of the repository heads.
608 Return the commit ids of the repository heads.
602 """
609 """
603 return self._remote.heads(branch=branch).strip().split(' ')
610 return self._remote.heads(branch=branch).strip().split(' ')
604
611
605 def _ancestor(self, revision1, revision2):
612 def _ancestor(self, revision1, revision2):
606 """
613 """
607 Return the common ancestor of the two revisions.
614 Return the common ancestor of the two revisions.
608 """
615 """
609 return self._remote.ancestor(revision1, revision2)
616 return self._remote.ancestor(revision1, revision2)
610
617
611 def _local_push(
618 def _local_push(
612 self, revision, repository_path, push_branches=False,
619 self, revision, repository_path, push_branches=False,
613 enable_hooks=False):
620 enable_hooks=False):
614 """
621 """
615 Push the given revision to the specified repository.
622 Push the given revision to the specified repository.
616
623
617 :param push_branches: allow to create branches in the target repo.
624 :param push_branches: allow to create branches in the target repo.
618 """
625 """
619 self._remote.push(
626 self._remote.push(
620 [revision], repository_path, hooks=enable_hooks,
627 [revision], repository_path, hooks=enable_hooks,
621 push_branches=push_branches)
628 push_branches=push_branches)
622
629
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :param dry_run: passed through by callers; this method itself does not
            branch on it — TODO confirm intended.
        :raises UnresolvedFilesInRepo: when the merge/rebase leaves
            unresolved conflicts behind.
        """
        # Make sure the working copy is at the target before merging/rebasing.
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # Temporary bookmark so we can find/update to the rebased head.
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
695
702
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: accepted but not referenced in this body —
            presumably kept for signature symmetry with ``_local_merge``;
            TODO confirm.
        :param close_message: optional message; a default "Closing branch"
            message is generated when empty.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            # An empty commit with close_branch=True marks the branch closed.
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
717
724
718 def _is_the_same_branch(self, target_ref, source_ref):
725 def _is_the_same_branch(self, target_ref, source_ref):
719 return (
726 return (
720 self._get_branch_name(target_ref) ==
727 self._get_branch_name(target_ref) ==
721 self._get_branch_name(source_ref))
728 self._get_branch_name(source_ref))
722
729
723 def _get_branch_name(self, ref):
730 def _get_branch_name(self, ref):
724 if ref.type == 'branch':
731 if ref.type == 'branch':
725 return ref.name
732 return ref.name
726 return self._remote.ctx_branch(ref.commit_id)
733 return self._remote.ctx_branch(ref.commit_id)
727
734
728 def _maybe_prepare_merge_workspace(
735 def _maybe_prepare_merge_workspace(
729 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
736 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
730 shadow_repository_path = self._get_shadow_repository_path(
737 shadow_repository_path = self._get_shadow_repository_path(
731 self.path, repo_id, workspace_id)
738 self.path, repo_id, workspace_id)
732 if not os.path.exists(shadow_repository_path):
739 if not os.path.exists(shadow_repository_path):
733 self._local_clone(shadow_repository_path)
740 self._local_clone(shadow_repository_path)
734 log.debug(
741 log.debug(
735 'Prepared shadow repository in %s', shadow_repository_path)
742 'Prepared shadow repository in %s', shadow_repository_path)
736
743
737 return shadow_repository_path
744 return shadow_repository_path
738
745
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository, using an on-disk shadow repository as the workspace.

        The flow: sanity-check the target head, pull both refs into the
        shadow repo, optionally close the source branch, perform the local
        merge/rebase there and — unless ``dry_run`` — push the result (with
        hooks enabled) back into this repository.

        Returns a ``MergeResponse`` describing possibility, success, the
        merge reference and a failure reason plus metadata.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # The target commit must currently be a repository head, otherwise
        # the merge would not fast-forward the visible target.
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Mercurial branches may have multiple heads; refuse to merge
            # into an ambiguous target branch.
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                # Surface the list of conflicted files to the caller's metadata.
                if isinstance(e, UnresolvedFilesInRepo):
                    metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # Re-open the shadow repo with hooks enabled so the push
                    # into the origin runs the regular hook chain.
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                # Nothing needed pushing; the merge is already integrated.
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
889
896
890 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
897 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
891 config = self.config.copy()
898 config = self.config.copy()
892 if not enable_hooks:
899 if not enable_hooks:
893 config.clear_section('hooks')
900 config.clear_section('hooks')
894 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
901 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
895
902
896 def _validate_pull_reference(self, reference):
903 def _validate_pull_reference(self, reference):
897 if not (reference.name in self.bookmarks or
904 if not (reference.name in self.bookmarks or
898 reference.name in self.branches or
905 reference.name in self.branches or
899 self.get_commit(reference.commit_id)):
906 self.get_commit(reference.commit_id)):
900 raise CommitDoesNotExistError(
907 raise CommitDoesNotExistError(
901 'Unknown branch, bookmark or commit id')
908 'Unknown branch, bookmark or commit id')
902
909
903 def _local_pull(self, repository_path, reference):
910 def _local_pull(self, repository_path, reference):
904 """
911 """
905 Fetch a branch, bookmark or commit from a local repository.
912 Fetch a branch, bookmark or commit from a local repository.
906 """
913 """
907 repository_path = os.path.abspath(repository_path)
914 repository_path = os.path.abspath(repository_path)
908 if repository_path == self.path:
915 if repository_path == self.path:
909 raise ValueError('Cannot pull from the same repository')
916 raise ValueError('Cannot pull from the same repository')
910
917
911 reference_type_to_option_name = {
918 reference_type_to_option_name = {
912 'book': 'bookmark',
919 'book': 'bookmark',
913 'branch': 'branch',
920 'branch': 'branch',
914 }
921 }
915 option_name = reference_type_to_option_name.get(
922 option_name = reference_type_to_option_name.get(
916 reference.type, 'revision')
923 reference.type, 'revision')
917
924
918 if option_name == 'revision':
925 if option_name == 'revision':
919 ref = reference.commit_id
926 ref = reference.commit_id
920 else:
927 else:
921 ref = reference.name
928 ref = reference.name
922
929
923 options = {option_name: [ref]}
930 options = {option_name: [ref]}
924 self._remote.pull_cmd(repository_path, hooks=False, **options)
931 self._remote.pull_cmd(repository_path, hooks=False, **options)
925 self._remote.invalidate_vcs_cache()
932 self._remote.invalidate_vcs_cache()
926
933
927 def bookmark(self, bookmark, revision=None):
934 def bookmark(self, bookmark, revision=None):
928 if isinstance(bookmark, unicode):
935 if isinstance(bookmark, unicode):
929 bookmark = safe_str(bookmark)
936 bookmark = safe_str(bookmark)
930 self._remote.bookmark(bookmark, revision=revision)
937 self._remote.bookmark(bookmark, revision=revision)
931 self._remote.invalidate_vcs_cache()
938 self._remote.invalidate_vcs_cache()
932
939
933 def get_path_permissions(self, username):
940 def get_path_permissions(self, username):
934 hgacl_file = os.path.join(self.path, '.hg/hgacl')
941 hgacl_file = os.path.join(self.path, '.hg/hgacl')
935
942
936 def read_patterns(suffix):
943 def read_patterns(suffix):
937 svalue = None
944 svalue = None
938 for section, option in [
945 for section, option in [
939 ('narrowacl', username + suffix),
946 ('narrowacl', username + suffix),
940 ('narrowacl', 'default' + suffix),
947 ('narrowacl', 'default' + suffix),
941 ('narrowhgacl', username + suffix),
948 ('narrowhgacl', username + suffix),
942 ('narrowhgacl', 'default' + suffix)
949 ('narrowhgacl', 'default' + suffix)
943 ]:
950 ]:
944 try:
951 try:
945 svalue = hgacl.get(section, option)
952 svalue = hgacl.get(section, option)
946 break # stop at the first value we find
953 break # stop at the first value we find
947 except configparser.NoOptionError:
954 except configparser.NoOptionError:
948 pass
955 pass
949 if not svalue:
956 if not svalue:
950 return None
957 return None
951 result = ['/']
958 result = ['/']
952 for pattern in svalue.split():
959 for pattern in svalue.split():
953 result.append(pattern)
960 result.append(pattern)
954 if '*' not in pattern and '?' not in pattern:
961 if '*' not in pattern and '?' not in pattern:
955 result.append(pattern + '/*')
962 result.append(pattern + '/*')
956 return result
963 return result
957
964
958 if os.path.exists(hgacl_file):
965 if os.path.exists(hgacl_file):
959 try:
966 try:
960 hgacl = configparser.RawConfigParser()
967 hgacl = configparser.RawConfigParser()
961 hgacl.read(hgacl_file)
968 hgacl.read(hgacl_file)
962
969
963 includes = read_patterns('.includes')
970 includes = read_patterns('.includes')
964 excludes = read_patterns('.excludes')
971 excludes = read_patterns('.excludes')
965 return BasePathPermissionChecker.create_from_patterns(
972 return BasePathPermissionChecker.create_from_patterns(
966 includes, excludes)
973 includes, excludes)
967 except BaseException as e:
974 except BaseException as e:
968 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
975 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
969 hgacl_file, self.name, e)
976 hgacl_file, self.name, e)
970 raise exceptions.RepositoryRequirementError(msg)
977 raise exceptions.RepositoryRequirementError(msg)
971 else:
978 else:
972 return None
979 return None
973
980
974
981
975 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
982 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
976
983
977 def _commit_factory(self, commit_id):
984 def _commit_factory(self, commit_id):
978 return self.repo.get_commit(
985 return self.repo.get_commit(
979 commit_idx=commit_id, pre_load=self.pre_load)
986 commit_idx=commit_id, pre_load=self.pre_load)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now