##// END OF EJS Templates
shadow-repos: use numeric repo id for creation of shadow repos....
marcink -
r2810:a15bd3a8 default
parent child Browse files
Show More
@@ -1,905 +1,905 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 from rhodecode.api.utils import (
26 from rhodecode.api.utils import (
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 validate_repo_permissions, resolve_ref_or_error)
29 validate_repo_permissions, resolve_ref_or_error)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 from rhodecode.lib.base import vcs_operation_context
31 from rhodecode.lib.base import vcs_operation_context
32 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool
33 from rhodecode.model.changeset_status import ChangesetStatusModel
33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.comment import CommentsModel
34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 from rhodecode.model.settings import SettingsModel
37 from rhodecode.model.settings import SettingsModel
38 from rhodecode.model.validation_schema import Invalid
38 from rhodecode.model.validation_schema import Invalid
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 ReviewerListSchema)
40 ReviewerListSchema)
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
@jsonrpc_method()
def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Fetch a single pull request by its ID.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID from where
        the pull request was opened.
    :type repoid: str or int
    :param pullrequestid: ID of the requested pull request.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
        {
            "pull_request_id": "<pull_request_id>",
            "url": "<url>",
            "title": "<title>",
            "description": "<description>",
            "status" : "<status>",
            "created_on": "<date_time_created>",
            "updated_on": "<date_time_updated>",
            "commit_ids": ["<commit_id>", ...],
            "review_status": "<review_status>",
            "mergeable": {"status": "<bool>", "message": "<message>"},
            "source": {
                "clone_url": "<clone_url>",
                "repository": "<repository_name>",
                "reference": {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "target": {
                "clone_url": "<clone_url>",
                "repository": "<repository_name>",
                "reference": {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "merge": {
                "clone_url": "<clone_url>",
                "reference": {
                    "name": "<name>",
                    "type": "<type>",
                    "commit_id": "<commit_id>",
                }
            },
            "author": <user_obj>,
            "reviewers": [
                {"user": "<user_obj>", "review_status": "<review_status>"},
                ...
            ]
        },
        "error": null
    """

    pull_request = get_pull_request_or_error(pullrequestid)

    # An explicit repoid is validated (raises if unknown); otherwise the
    # pull request's own target repository is used.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Deny without leaking which of the two objects exists.
    has_read_access = PullRequestModel().check_user_read(
        pull_request, apiuser, api=True)
    if not has_read_access:
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    return pull_request.get_api_data()
137
137
138
138
@jsonrpc_method()
def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
    """
    List pull requests of the repository specified in `repoid`,
    filtered by review status.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param status: Only return pull requests with the specified status.
        Valid options are:
        * ``new`` (default)
        * ``open``
        * ``closed``
    :type status: str

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
        [
            {
                "pull_request_id": "<pull_request_id>",
                "url": "<url>",
                "title" : "<title>",
                "description": "<description>",
                "status": "<status>",
                "created_on": "<date_time_created>",
                "updated_on": "<date_time_updated>",
                "commit_ids": ["<commit_id>", ...],
                "review_status": "<review_status>",
                "mergeable": {"status": "<bool>", "message: "<message>"},
                "source": {
                    "clone_url": "<clone_url>",
                    "reference": {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "target": {
                    "clone_url": "<clone_url>",
                    "reference": {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "merge": {
                    "clone_url": "<clone_url>",
                    "reference": {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "author": <user_obj>,
                "reviewers": [
                    {"user": "<user_obj>", "review_status": "<review_status>"},
                    ...
                ]
            },
            ...
        ],
        "error": null
    """
    repo = get_repo_or_error(repoid)

    # Super-admins skip the per-repo permission check entirely.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    status = Optional.extract(status)
    return [
        pull_request.get_api_data()
        for pull_request in PullRequestModel().get_all(repo, statuses=[status])
    ]
234
234
235
235
@jsonrpc_method()
def merge_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Merge the pull request specified by `pullrequestid` into its target
    repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID of the
        target repository to which the |pr| is to be merged.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request which shall be merged.
    :type pullrequestid: int
    :param userid: Merge the pull request as this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "executed": "<bool>",
            "failure_reason": "<int>",
            "merge_commit_id": "<merge_commit_id>",
            "possible": "<bool>",
            "merge_ref": {
                "commit_id": "<commit_id>",
                "type": "<type>",
                "name": "<name>"
            }
        },
        "error": null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Acting on behalf of another user requires admin rights on the repo
    # or super-admin status.
    if not isinstance(userid, Optional):
        is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
            user=apiuser, repo_name=repo.repo_name)
        if has_superadmin_permission(apiuser) or is_repo_admin:
            apiuser = get_user_or_error(userid)
        else:
            raise JSONRPCError('userid is not the same as your user')

    check = MergeCheck.validate(
        pull_request, user=apiuser, translator=request.translate)
    if check.failed:
        # Collect every translated failure reason into a single message.
        translated = [request.translate(msg) for _, msg in check.errors]
        raise JSONRPCError(
            'merge not possible for following reasons: {}'.format(
                ','.join(translated)))

    target_repo = pull_request.target_repo
    extras = vcs_operation_context(
        request.environ, repo_name=target_repo.repo_name,
        username=apiuser.username, action='push',
        scm=target_repo.repo_type)
    merge_response = PullRequestModel().merge_repo(
        pull_request, apiuser, extras=extras)
    if merge_response.executed:
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, apiuser)

        Session().commit()

    # In previous versions the merge response directly contained the merge
    # commit id. It is now contained in the merge reference object. To be
    # backwards compatible we have to extract it again.
    merge_response = merge_response._asdict()
    merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id

    return merge_response
320
320
321
321
@jsonrpc_method()
def get_pull_request_comments(
        request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Return every comment (inline and general) attached to the pull request
    given by `pullrequestid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : [
            {
                "comment_author": {
                    "active": true,
                    "full_name_or_username": "Tom Gore",
                    "username": "admin"
                },
                "comment_created_on": "2017-01-02T18:43:45.533",
                "comment_f_path": null,
                "comment_id": 25,
                "comment_lineno": null,
                "comment_status": {
                    "status": "under_review",
                    "status_lbl": "Under Review"
                },
                "comment_text": "Example text",
                "comment_type": null,
                "pull_request_version": null
            }
        ],
        error : null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    repo = (get_repo_or_error(repoid) if Optional.extract(repoid)
            else pull_request.target_repo)

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    (pull_request_latest,
     pull_request_at_ver,
     pull_request_display_obj,
     at_version) = PullRequestModel().get_pr_version(
        pull_request.pull_request_id, version=None)

    # Map each version id to its 1-based ordinal so comments can be
    # labelled "v1", "v2", ... in creation order.
    versions = pull_request_display_obj.versions()
    ver_map = {
        ver.pull_request_version_id: cnt
        for cnt, ver in enumerate(versions, 1)
    }

    # GENERAL COMMENTS with versions #
    general_q = CommentsModel()._all_general_comments_of_pull_request(
        pull_request)
    general_comments = general_q.order_by(
        ChangesetComment.comment_id.asc()).all()

    # INLINE COMMENTS with versions #
    inline_q = CommentsModel()._all_inline_comments_of_pull_request(
        pull_request)
    inline_comments = inline_q.order_by(
        ChangesetComment.comment_id.asc()).all()

    data = []
    for comment in inline_comments + general_comments:
        full_data = comment.get_api_data()

        pr_version_id = None
        if comment.pull_request_version_id:
            pr_version_id = 'v{}'.format(
                ver_map[comment.pull_request_version_id])

        # sanitize some entries

        full_data['pull_request_version'] = pr_version_id

        # Flatten the author object to a plain JSON-friendly dict.
        author = full_data['comment_author']
        full_data['comment_author'] = {
            'username': author.username,
            'full_name_or_username': author.full_name_or_username,
            'active': author.active,
        }

        # Flatten the (possibly empty) status list to a small dict.
        status = full_data['comment_status']
        if status:
            full_data['comment_status'] = {
                'status': status[0].status,
                'status_lbl': status[0].status_lbl,
            }
        else:
            full_data['comment_status'] = {}

        data.append(full_data)
    return data
423
423
424
424
425 @jsonrpc_method()
425 @jsonrpc_method()
426 def comment_pull_request(
426 def comment_pull_request(
427 request, apiuser, pullrequestid, repoid=Optional(None),
427 request, apiuser, pullrequestid, repoid=Optional(None),
428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
430 resolves_comment_id=Optional(None),
430 resolves_comment_id=Optional(None),
431 userid=Optional(OAttr('apiuser'))):
431 userid=Optional(OAttr('apiuser'))):
432 """
432 """
433 Comment on the pull request specified with the `pullrequestid`,
433 Comment on the pull request specified with the `pullrequestid`,
434 in the |repo| specified by the `repoid`, and optionally change the
434 in the |repo| specified by the `repoid`, and optionally change the
435 review status.
435 review status.
436
436
437 :param apiuser: This is filled automatically from the |authtoken|.
437 :param apiuser: This is filled automatically from the |authtoken|.
438 :type apiuser: AuthUser
438 :type apiuser: AuthUser
439 :param repoid: Optional repository name or repository ID.
439 :param repoid: Optional repository name or repository ID.
440 :type repoid: str or int
440 :type repoid: str or int
441 :param pullrequestid: The pull request ID.
441 :param pullrequestid: The pull request ID.
442 :type pullrequestid: int
442 :type pullrequestid: int
443 :param commit_id: Specify the commit_id for which to set a comment. If
443 :param commit_id: Specify the commit_id for which to set a comment. If
444 given commit_id is different than latest in the PR status
444 given commit_id is different than latest in the PR status
445 change won't be performed.
445 change won't be performed.
446 :type commit_id: str
446 :type commit_id: str
447 :param message: The text content of the comment.
447 :param message: The text content of the comment.
448 :type message: str
448 :type message: str
449 :param status: (**Optional**) Set the approval status of the pull
449 :param status: (**Optional**) Set the approval status of the pull
450 request. One of: 'not_reviewed', 'approved', 'rejected',
450 request. One of: 'not_reviewed', 'approved', 'rejected',
451 'under_review'
451 'under_review'
452 :type status: str
452 :type status: str
453 :param comment_type: Comment type, one of: 'note', 'todo'
453 :param comment_type: Comment type, one of: 'note', 'todo'
454 :type comment_type: Optional(str), default: 'note'
454 :type comment_type: Optional(str), default: 'note'
455 :param userid: Comment on the pull request as this user
455 :param userid: Comment on the pull request as this user
456 :type userid: Optional(str or int)
456 :type userid: Optional(str or int)
457
457
458 Example output:
458 Example output:
459
459
460 .. code-block:: bash
460 .. code-block:: bash
461
461
462 id : <id_given_in_input>
462 id : <id_given_in_input>
463 result : {
463 result : {
464 "pull_request_id": "<Integer>",
464 "pull_request_id": "<Integer>",
465 "comment_id": "<Integer>",
465 "comment_id": "<Integer>",
466 "status": {"given": <given_status>,
466 "status": {"given": <given_status>,
467 "was_changed": <bool status_was_actually_changed> },
467 "was_changed": <bool status_was_actually_changed> },
468 },
468 },
469 error : null
469 error : null
470 """
470 """
471 pull_request = get_pull_request_or_error(pullrequestid)
471 pull_request = get_pull_request_or_error(pullrequestid)
472 if Optional.extract(repoid):
472 if Optional.extract(repoid):
473 repo = get_repo_or_error(repoid)
473 repo = get_repo_or_error(repoid)
474 else:
474 else:
475 repo = pull_request.target_repo
475 repo = pull_request.target_repo
476
476
477 if not isinstance(userid, Optional):
477 if not isinstance(userid, Optional):
478 if (has_superadmin_permission(apiuser) or
478 if (has_superadmin_permission(apiuser) or
479 HasRepoPermissionAnyApi('repository.admin')(
479 HasRepoPermissionAnyApi('repository.admin')(
480 user=apiuser, repo_name=repo.repo_name)):
480 user=apiuser, repo_name=repo.repo_name)):
481 apiuser = get_user_or_error(userid)
481 apiuser = get_user_or_error(userid)
482 else:
482 else:
483 raise JSONRPCError('userid is not the same as your user')
483 raise JSONRPCError('userid is not the same as your user')
484
484
485 if not PullRequestModel().check_user_read(
485 if not PullRequestModel().check_user_read(
486 pull_request, apiuser, api=True):
486 pull_request, apiuser, api=True):
487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
488 message = Optional.extract(message)
488 message = Optional.extract(message)
489 status = Optional.extract(status)
489 status = Optional.extract(status)
490 commit_id = Optional.extract(commit_id)
490 commit_id = Optional.extract(commit_id)
491 comment_type = Optional.extract(comment_type)
491 comment_type = Optional.extract(comment_type)
492 resolves_comment_id = Optional.extract(resolves_comment_id)
492 resolves_comment_id = Optional.extract(resolves_comment_id)
493
493
494 if not message and not status:
494 if not message and not status:
495 raise JSONRPCError(
495 raise JSONRPCError(
496 'Both message and status parameters are missing. '
496 'Both message and status parameters are missing. '
497 'At least one is required.')
497 'At least one is required.')
498
498
499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
500 status is not None):
500 status is not None):
501 raise JSONRPCError('Unknown comment status: `%s`' % status)
501 raise JSONRPCError('Unknown comment status: `%s`' % status)
502
502
503 if commit_id and commit_id not in pull_request.revisions:
503 if commit_id and commit_id not in pull_request.revisions:
504 raise JSONRPCError(
504 raise JSONRPCError(
505 'Invalid commit_id `%s` for this pull request.' % commit_id)
505 'Invalid commit_id `%s` for this pull request.' % commit_id)
506
506
507 allowed_to_change_status = PullRequestModel().check_user_change_status(
507 allowed_to_change_status = PullRequestModel().check_user_change_status(
508 pull_request, apiuser)
508 pull_request, apiuser)
509
509
510 # if commit_id is passed re-validated if user is allowed to change status
510 # if commit_id is passed re-validated if user is allowed to change status
511 # based on latest commit_id from the PR
511 # based on latest commit_id from the PR
512 if commit_id:
512 if commit_id:
513 commit_idx = pull_request.revisions.index(commit_id)
513 commit_idx = pull_request.revisions.index(commit_id)
514 if commit_idx != 0:
514 if commit_idx != 0:
515 allowed_to_change_status = False
515 allowed_to_change_status = False
516
516
517 if resolves_comment_id:
517 if resolves_comment_id:
518 comment = ChangesetComment.get(resolves_comment_id)
518 comment = ChangesetComment.get(resolves_comment_id)
519 if not comment:
519 if not comment:
520 raise JSONRPCError(
520 raise JSONRPCError(
521 'Invalid resolves_comment_id `%s` for this pull request.'
521 'Invalid resolves_comment_id `%s` for this pull request.'
522 % resolves_comment_id)
522 % resolves_comment_id)
523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
524 raise JSONRPCError(
524 raise JSONRPCError(
525 'Comment `%s` is wrong type for setting status to resolved.'
525 'Comment `%s` is wrong type for setting status to resolved.'
526 % resolves_comment_id)
526 % resolves_comment_id)
527
527
528 text = message
528 text = message
529 status_label = ChangesetStatus.get_status_lbl(status)
529 status_label = ChangesetStatus.get_status_lbl(status)
530 if status and allowed_to_change_status:
530 if status and allowed_to_change_status:
531 st_message = ('Status change %(transition_icon)s %(status)s'
531 st_message = ('Status change %(transition_icon)s %(status)s'
532 % {'transition_icon': '>', 'status': status_label})
532 % {'transition_icon': '>', 'status': status_label})
533 text = message or st_message
533 text = message or st_message
534
534
535 rc_config = SettingsModel().get_all_settings()
535 rc_config = SettingsModel().get_all_settings()
536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
537
537
538 status_change = status and allowed_to_change_status
538 status_change = status and allowed_to_change_status
539 comment = CommentsModel().create(
539 comment = CommentsModel().create(
540 text=text,
540 text=text,
541 repo=pull_request.target_repo.repo_id,
541 repo=pull_request.target_repo.repo_id,
542 user=apiuser.user_id,
542 user=apiuser.user_id,
543 pull_request=pull_request.pull_request_id,
543 pull_request=pull_request.pull_request_id,
544 f_path=None,
544 f_path=None,
545 line_no=None,
545 line_no=None,
546 status_change=(status_label if status_change else None),
546 status_change=(status_label if status_change else None),
547 status_change_type=(status if status_change else None),
547 status_change_type=(status if status_change else None),
548 closing_pr=False,
548 closing_pr=False,
549 renderer=renderer,
549 renderer=renderer,
550 comment_type=comment_type,
550 comment_type=comment_type,
551 resolves_comment_id=resolves_comment_id,
551 resolves_comment_id=resolves_comment_id,
552 auth_user=apiuser
552 auth_user=apiuser
553 )
553 )
554
554
555 if allowed_to_change_status and status:
555 if allowed_to_change_status and status:
556 ChangesetStatusModel().set_status(
556 ChangesetStatusModel().set_status(
557 pull_request.target_repo.repo_id,
557 pull_request.target_repo.repo_id,
558 status,
558 status,
559 apiuser.user_id,
559 apiuser.user_id,
560 comment,
560 comment,
561 pull_request=pull_request.pull_request_id
561 pull_request=pull_request.pull_request_id
562 )
562 )
563 Session().flush()
563 Session().flush()
564
564
565 Session().commit()
565 Session().commit()
566 data = {
566 data = {
567 'pull_request_id': pull_request.pull_request_id,
567 'pull_request_id': pull_request.pull_request_id,
568 'comment_id': comment.comment_id if comment else None,
568 'comment_id': comment.comment_id if comment else None,
569 'status': {'given': status, 'was_changed': status_change},
569 'status': {'given': status, 'was_changed': status_change},
570 }
570 }
571 return data
571 return data
572
572
573
573
@jsonrpc_method()
def create_pull_request(
        request, apiuser, source_repo, target_repo, source_ref, target_ref,
        title, description=Optional(''), reviewers=Optional(None)):
    """
    Creates a new pull request.

    Accepts refs in the following formats:

    * branch:<branch_name>:<sha>
    * branch:<branch_name>
    * bookmark:<bookmark_name>:<sha> (Mercurial only)
    * bookmark:<bookmark_name> (Mercurial only)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param source_repo: Set the source repository name.
    :type source_repo: str
    :param target_repo: Set the target repository name.
    :type target_repo: str
    :param source_ref: Set the source ref name.
    :type source_ref: str
    :param target_ref: Set the target ref name.
    :type target_ref: str
    :param title: Set the pull request title.
    :type title: str
    :param description: Set the pull request description.
    :type description: Optional(str)
    :param reviewers: Set the new pull request reviewers list.
        Reviewer defined by review rules will be added automatically to the
        defined list.
    :type reviewers: Optional(list)
        Accepts username strings or objects of the format:

        [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]

    :raises JSONRPCError: if refs cannot be resolved, no commits are found
        between the refs, no common ancestor exists, or reviewer validation
        fails.
    """

    # Resolve both repos; raises JSONRPCError if either is unknown.
    source_db_repo = get_repo_or_error(source_repo)
    target_db_repo = get_repo_or_error(target_repo)
    if not has_superadmin_permission(apiuser):
        # NOTE(review): only the SOURCE repo's permissions are validated
        # here; the target repo gets no explicit check — confirm intentional.
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)

    # Normalize the user-supplied refs (branch:/bookmark: forms) into fully
    # qualified refs, then pin them to concrete commits.
    full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
    full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
    source_commit = get_commit_or_error(full_source_ref, source_db_repo)
    target_commit = get_commit_or_error(full_target_ref, target_db_repo)
    source_scm = source_db_repo.scm_instance()
    target_scm = target_db_repo.scm_instance()

    # Commits that the PR would bring into the target (merge-style compare).
    commit_ranges = target_scm.compare(
        target_commit.raw_id, source_commit.raw_id, source_scm,
        merge=True, pre_load=[])

    ancestor = target_scm.get_common_ancestor(
        target_commit.raw_id, source_commit.raw_id, source_scm)

    # An empty PR or unrelated histories are both hard errors.
    if not commit_ranges:
        raise JSONRPCError('no commits found')

    if not ancestor:
        raise JSONRPCError('no common ancestor found')

    reviewer_objects = Optional.extract(reviewers) or []

    if reviewer_objects:
        # Validate the caller-supplied reviewer structures via colander,
        # then resolve each username to a concrete user id.
        schema = ReviewerListSchema()
        try:
            reviewer_objects = schema.deserialize(reviewer_objects)
        except Invalid as err:
            raise JSONRPCValidationError(colander_exc=err)

        # validate users
        for reviewer_object in reviewer_objects:
            user = get_user_or_error(reviewer_object['username'])
            reviewer_object['user_id'] = user.user_id

    get_default_reviewers_data, get_validated_reviewers = \
        PullRequestModel().get_reviewer_functions()

    # Reviewer rules derived from repo settings for this source/target pair.
    reviewer_rules = get_default_reviewers_data(
        apiuser.get_instance(), source_db_repo,
        source_commit, target_db_repo, target_commit)

    # specified rules are later re-validated, thus we can assume users will
    # eventually provide those that meet the reviewer criteria.
    if not reviewer_objects:
        reviewer_objects = reviewer_rules['reviewers']

    try:
        reviewers = get_validated_reviewers(
            reviewer_objects, reviewer_rules)
    except ValueError as e:
        raise JSONRPCError('Reviewers Validation: {}'.format(e))

    pull_request_model = PullRequestModel()
    pull_request = pull_request_model.create(
        created_by=apiuser.user_id,
        source_repo=source_repo,
        source_ref=full_source_ref,
        target_repo=target_repo,
        target_ref=full_target_ref,
        # reversed(reversed(...)) yields the commits back in their original
        # compare order, but as a lazy iterator rather than a list.
        revisions=reversed(
            [commit.raw_id for commit in reversed(commit_ranges)]),
        reviewers=reviewers,
        title=title,
        description=Optional.extract(description),
        auth_user=apiuser
    )

    # Persist the new PR before reporting its id back to the caller.
    Session().commit()
    data = {
        'msg': 'Created new pull request `{}`'.format(title),
        'pull_request_id': pull_request.pull_request_id,
    }
    return data
690
690
691
691
@jsonrpc_method()
def update_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        title=Optional(''), description=Optional(''), reviewers=Optional(None),
        update_commits=Optional(None)):
    """
    Updates a pull request.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int
    :param title: Set the pull request title.
    :type title: str
    :param description: Update pull request description.
    :type description: Optional(str)
    :param reviewers: Update pull request reviewers list with new value.
    :type reviewers: Optional(list)
        Accepts username strings or objects of the format:

        [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]

    :param update_commits: Trigger update of commits for this pull request
    :type update_commits: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "Updated pull request `63`",
            "pull_request": <pull_request_object>,
            "updated_reviewers": {
                "added": [
                    "username"
                ],
                "removed": []
            },
            "updated_commits": {
                "added": [
                    "<sha1_hash>"
                ],
                "common": [
                    "<sha1_hash>",
                    "<sha1_hash>",
                ],
                "removed": []
            }
        }
        error : null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    # NOTE(review): `repo` is resolved (validating repoid if given) but is
    # not referenced again below — confirm whether it is intentionally kept
    # only for the side effect of validation.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Permission + state guards: must be allowed to update, and the PR must
    # still be open.
    if not PullRequestModel().check_user_update(
            pull_request, apiuser, api=True):
        raise JSONRPCError(
            'pull request `%s` update failed, no permission to update.' % (
                pullrequestid,))
    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` update failed, pull request is closed' % (
                pullrequestid,))

    reviewer_objects = Optional.extract(reviewers) or []

    if reviewer_objects:
        # Validate reviewer payload shape, then resolve usernames to ids.
        schema = ReviewerListSchema()
        try:
            reviewer_objects = schema.deserialize(reviewer_objects)
        except Invalid as err:
            raise JSONRPCValidationError(colander_exc=err)

        # validate users
        for reviewer_object in reviewer_objects:
            user = get_user_or_error(reviewer_object['username'])
            reviewer_object['user_id'] = user.user_id

        get_default_reviewers_data, get_validated_reviewers = \
            PullRequestModel().get_reviewer_functions()

        # re-use stored rules
        reviewer_rules = pull_request.reviewer_data
        try:
            reviewers = get_validated_reviewers(
                reviewer_objects, reviewer_rules)
        except ValueError as e:
            raise JSONRPCError('Reviewers Validation: {}'.format(e))
    else:
        reviewers = []

    # Each sub-update (metadata, commits, reviewers) commits independently,
    # so a later failure does not roll back an earlier successful step.
    title = Optional.extract(title)
    description = Optional.extract(description)
    if title or description:
        PullRequestModel().edit(
            pull_request, title or pull_request.title,
            description or pull_request.description, apiuser)
        Session().commit()

    commit_changes = {"added": [], "common": [], "removed": []}
    if str2bool(Optional.extract(update_commits)):
        if PullRequestModel().has_valid_update_type(pull_request):
            update_response = PullRequestModel().update_commits(
                pull_request)
            commit_changes = update_response.changes or commit_changes
        Session().commit()

    reviewers_changes = {"added": [], "removed": []}
    if reviewers:
        added_reviewers, removed_reviewers = \
            PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)

        # Report reviewer deltas as sorted usernames for stable output.
        reviewers_changes['added'] = sorted(
            [get_user_or_error(n).username for n in added_reviewers])
        reviewers_changes['removed'] = sorted(
            [get_user_or_error(n).username for n in removed_reviewers])
        Session().commit()

    data = {
        'msg': 'Updated pull request `{}`'.format(
            pull_request.pull_request_id),
        'pull_request': pull_request.get_api_data(),
        'updated_commits': commit_changes,
        'updated_reviewers': reviewers_changes
    }

    return data
826
826
827
827
@jsonrpc_method()
def close_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser')), message=Optional('')):
    """
    Close the pull request specified by `pullrequestid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Repository name or repository ID to which the pull
        request belongs.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request to be closed.
    :type pullrequestid: int
    :param userid: Close the pull request as this user.
    :type userid: Optional(str or int)
    :param message: Optional message to close the Pull Request with. If not
        specified it will be generated automatically.
    :type message: Optional(str)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "pull_request_id": "<int>",
            "close_status": "<str:status_lbl>",
            "closed": "<bool>"
        },
        "error": null

    """
    _ = request.translate

    pull_request = get_pull_request_or_error(pullrequestid)
    # Resolve the repo from repoid when given, otherwise fall back to the
    # PR's own target repo (used below for the admin-permission check and
    # for closing the PR).
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not isinstance(userid, Optional):
        # Acting as another user requires super-admin or repository.admin
        # on the repo; otherwise the substitution is rejected.
        if (has_superadmin_permission(apiuser) or
            HasRepoPermissionAnyApi('repository.admin')(
                user=apiuser, repo_name=repo.repo_name)):
            apiuser = get_user_or_error(userid)
        else:
            raise JSONRPCError('userid is not the same as your user')

    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` is already closed' % (pullrequestid,))

    # only owner or admin or person with write permissions
    allowed_to_close = PullRequestModel().check_user_update(
        pull_request, apiuser, api=True)

    if not allowed_to_close:
        raise JSONRPCError(
            'pull request `%s` close failed, no permission to close.' % (
                pullrequestid,))

    # message we're using to close the PR, else it's automatically generated
    message = Optional.extract(message)

    # finally close the PR, with proper message comment
    comment, status = PullRequestModel().close_pull_request_with_comment(
        pull_request, apiuser, repo, message=message)
    status_lbl = ChangesetStatus.get_status_lbl(status)

    Session().commit()

    data = {
        'pull_request_id': pull_request.pull_request_id,
        'close_status': status_lbl,
        'closed': True,
    }
    return data
@@ -1,1203 +1,1203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 from rhodecode.tests.utils import AssertResponse
35 from rhodecode.tests.utils import AssertResponse
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 }[name].format(**kwargs)
56 }[name].format(**kwargs)
57
57
58 if params:
58 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
60 return base_url
61
61
62
62
63 @pytest.mark.usefixtures('app', 'autologin_user')
63 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.backends("git", "hg")
64 @pytest.mark.backends("git", "hg")
65 class TestPullrequestsView(object):
65 class TestPullrequestsView(object):
66
66
67 def test_index(self, backend):
67 def test_index(self, backend):
68 self.app.get(route_path(
68 self.app.get(route_path(
69 'pullrequest_new',
69 'pullrequest_new',
70 repo_name=backend.repo_name))
70 repo_name=backend.repo_name))
71
71
72 def test_option_menu_create_pull_request_exists(self, backend):
72 def test_option_menu_create_pull_request_exists(self, backend):
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75
75
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 'pullrequest_new', repo_name=repo_name)
77 'pullrequest_new', repo_name=repo_name)
78 response.mustcontain(create_pr_link)
78 response.mustcontain(create_pr_link)
79
79
80 def test_create_pr_form_with_raw_commit_id(self, backend):
80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 repo = backend.repo
81 repo = backend.repo
82
82
83 self.app.get(
83 self.app.get(
84 route_path('pullrequest_new',
84 route_path('pullrequest_new',
85 repo_name=repo.repo_name,
85 repo_name=repo.repo_name,
86 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 def test_show(self, pr_util, pr_merge_enabled):
90 def test_show(self, pr_util, pr_merge_enabled):
91 pull_request = pr_util.create_pull_request(
91 pull_request = pr_util.create_pull_request(
92 mergeable=pr_merge_enabled, enable_notifications=False)
92 mergeable=pr_merge_enabled, enable_notifications=False)
93
93
94 response = self.app.get(route_path(
94 response = self.app.get(route_path(
95 'pullrequest_show',
95 'pullrequest_show',
96 repo_name=pull_request.target_repo.scm_instance().name,
96 repo_name=pull_request.target_repo.scm_instance().name,
97 pull_request_id=pull_request.pull_request_id))
97 pull_request_id=pull_request.pull_request_id))
98
98
99 for commit_id in pull_request.revisions:
99 for commit_id in pull_request.revisions:
100 response.mustcontain(commit_id)
100 response.mustcontain(commit_id)
101
101
102 assert pull_request.target_ref_parts.type in response
102 assert pull_request.target_ref_parts.type in response
103 assert pull_request.target_ref_parts.name in response
103 assert pull_request.target_ref_parts.name in response
104 target_clone_url = pull_request.target_repo.clone_url()
104 target_clone_url = pull_request.target_repo.clone_url()
105 assert target_clone_url in response
105 assert target_clone_url in response
106
106
107 assert 'class="pull-request-merge"' in response
107 assert 'class="pull-request-merge"' in response
108 assert (
108 assert (
109 'Server-side pull request merging is disabled.'
109 'Server-side pull request merging is disabled.'
110 in response) != pr_merge_enabled
110 in response) != pr_merge_enabled
111
111
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 # Logout
113 # Logout
114 response = self.app.post(
114 response = self.app.post(
115 h.route_path('logout'),
115 h.route_path('logout'),
116 params={'csrf_token': csrf_token})
116 params={'csrf_token': csrf_token})
117 # Login as regular user
117 # Login as regular user
118 response = self.app.post(h.route_path('login'),
118 response = self.app.post(h.route_path('login'),
119 {'username': TEST_USER_REGULAR_LOGIN,
119 {'username': TEST_USER_REGULAR_LOGIN,
120 'password': 'test12'})
120 'password': 'test12'})
121
121
122 pull_request = pr_util.create_pull_request(
122 pull_request = pr_util.create_pull_request(
123 author=TEST_USER_REGULAR_LOGIN)
123 author=TEST_USER_REGULAR_LOGIN)
124
124
125 response = self.app.get(route_path(
125 response = self.app.get(route_path(
126 'pullrequest_show',
126 'pullrequest_show',
127 repo_name=pull_request.target_repo.scm_instance().name,
127 repo_name=pull_request.target_repo.scm_instance().name,
128 pull_request_id=pull_request.pull_request_id))
128 pull_request_id=pull_request.pull_request_id))
129
129
130 response.mustcontain('Server-side pull request merging is disabled.')
130 response.mustcontain('Server-side pull request merging is disabled.')
131
131
132 assert_response = response.assert_response()
132 assert_response = response.assert_response()
133 # for regular user without a merge permissions, we don't see it
133 # for regular user without a merge permissions, we don't see it
134 assert_response.no_element_exists('#close-pull-request-action')
134 assert_response.no_element_exists('#close-pull-request-action')
135
135
136 user_util.grant_user_permission_to_repo(
136 user_util.grant_user_permission_to_repo(
137 pull_request.target_repo,
137 pull_request.target_repo,
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 'repository.write')
139 'repository.write')
140 response = self.app.get(route_path(
140 response = self.app.get(route_path(
141 'pullrequest_show',
141 'pullrequest_show',
142 repo_name=pull_request.target_repo.scm_instance().name,
142 repo_name=pull_request.target_repo.scm_instance().name,
143 pull_request_id=pull_request.pull_request_id))
143 pull_request_id=pull_request.pull_request_id))
144
144
145 response.mustcontain('Server-side pull request merging is disabled.')
145 response.mustcontain('Server-side pull request merging is disabled.')
146
146
147 assert_response = response.assert_response()
147 assert_response = response.assert_response()
148 # now regular user has a merge permissions, we have CLOSE button
148 # now regular user has a merge permissions, we have CLOSE button
149 assert_response.one_element_exists('#close-pull-request-action')
149 assert_response.one_element_exists('#close-pull-request-action')
150
150
151 def test_show_invalid_commit_id(self, pr_util):
151 def test_show_invalid_commit_id(self, pr_util):
152 # Simulating invalid revisions which will cause a lookup error
152 # Simulating invalid revisions which will cause a lookup error
153 pull_request = pr_util.create_pull_request()
153 pull_request = pr_util.create_pull_request()
154 pull_request.revisions = ['invalid']
154 pull_request.revisions = ['invalid']
155 Session().add(pull_request)
155 Session().add(pull_request)
156 Session().commit()
156 Session().commit()
157
157
158 response = self.app.get(route_path(
158 response = self.app.get(route_path(
159 'pullrequest_show',
159 'pullrequest_show',
160 repo_name=pull_request.target_repo.scm_instance().name,
160 repo_name=pull_request.target_repo.scm_instance().name,
161 pull_request_id=pull_request.pull_request_id))
161 pull_request_id=pull_request.pull_request_id))
162
162
163 for commit_id in pull_request.revisions:
163 for commit_id in pull_request.revisions:
164 response.mustcontain(commit_id)
164 response.mustcontain(commit_id)
165
165
166 def test_show_invalid_source_reference(self, pr_util):
166 def test_show_invalid_source_reference(self, pr_util):
167 pull_request = pr_util.create_pull_request()
167 pull_request = pr_util.create_pull_request()
168 pull_request.source_ref = 'branch:b:invalid'
168 pull_request.source_ref = 'branch:b:invalid'
169 Session().add(pull_request)
169 Session().add(pull_request)
170 Session().commit()
170 Session().commit()
171
171
172 self.app.get(route_path(
172 self.app.get(route_path(
173 'pullrequest_show',
173 'pullrequest_show',
174 repo_name=pull_request.target_repo.scm_instance().name,
174 repo_name=pull_request.target_repo.scm_instance().name,
175 pull_request_id=pull_request.pull_request_id))
175 pull_request_id=pull_request.pull_request_id))
176
176
177 def test_edit_title_description(self, pr_util, csrf_token):
177 def test_edit_title_description(self, pr_util, csrf_token):
178 pull_request = pr_util.create_pull_request()
178 pull_request = pr_util.create_pull_request()
179 pull_request_id = pull_request.pull_request_id
179 pull_request_id = pull_request.pull_request_id
180
180
181 response = self.app.post(
181 response = self.app.post(
182 route_path('pullrequest_update',
182 route_path('pullrequest_update',
183 repo_name=pull_request.target_repo.repo_name,
183 repo_name=pull_request.target_repo.repo_name,
184 pull_request_id=pull_request_id),
184 pull_request_id=pull_request_id),
185 params={
185 params={
186 'edit_pull_request': 'true',
186 'edit_pull_request': 'true',
187 'title': 'New title',
187 'title': 'New title',
188 'description': 'New description',
188 'description': 'New description',
189 'csrf_token': csrf_token})
189 'csrf_token': csrf_token})
190
190
191 assert_session_flash(
191 assert_session_flash(
192 response, u'Pull request title & description updated.',
192 response, u'Pull request title & description updated.',
193 category='success')
193 category='success')
194
194
195 pull_request = PullRequest.get(pull_request_id)
195 pull_request = PullRequest.get(pull_request_id)
196 assert pull_request.title == 'New title'
196 assert pull_request.title == 'New title'
197 assert pull_request.description == 'New description'
197 assert pull_request.description == 'New description'
198
198
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 pull_request = pr_util.create_pull_request()
200 pull_request = pr_util.create_pull_request()
201 pull_request_id = pull_request.pull_request_id
201 pull_request_id = pull_request.pull_request_id
202 repo_name = pull_request.target_repo.repo_name
202 repo_name = pull_request.target_repo.repo_name
203 pr_util.close()
203 pr_util.close()
204
204
205 response = self.app.post(
205 response = self.app.post(
206 route_path('pullrequest_update',
206 route_path('pullrequest_update',
207 repo_name=repo_name, pull_request_id=pull_request_id),
207 repo_name=repo_name, pull_request_id=pull_request_id),
208 params={
208 params={
209 'edit_pull_request': 'true',
209 'edit_pull_request': 'true',
210 'title': 'New title',
210 'title': 'New title',
211 'description': 'New description',
211 'description': 'New description',
212 'csrf_token': csrf_token}, status=200)
212 'csrf_token': csrf_token}, status=200)
213 assert_session_flash(
213 assert_session_flash(
214 response, u'Cannot update closed pull requests.',
214 response, u'Cannot update closed pull requests.',
215 category='error')
215 category='error')
216
216
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219
219
220 pull_request = pr_util.create_pull_request()
220 pull_request = pr_util.create_pull_request()
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 Session().add(pull_request)
222 Session().add(pull_request)
223 Session().commit()
223 Session().commit()
224
224
225 pull_request_id = pull_request.pull_request_id
225 pull_request_id = pull_request.pull_request_id
226
226
227 response = self.app.post(
227 response = self.app.post(
228 route_path('pullrequest_update',
228 route_path('pullrequest_update',
229 repo_name=pull_request.target_repo.repo_name,
229 repo_name=pull_request.target_repo.repo_name,
230 pull_request_id=pull_request_id),
230 pull_request_id=pull_request_id),
231 params={'update_commits': 'true',
231 params={'update_commits': 'true',
232 'csrf_token': csrf_token})
232 'csrf_token': csrf_token})
233
233
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 UpdateFailureReason.MISSING_SOURCE_REF])
235 UpdateFailureReason.MISSING_SOURCE_REF])
236 assert_session_flash(response, expected_msg, category='error')
236 assert_session_flash(response, expected_msg, category='error')
237
237
238 def test_missing_target_reference(self, pr_util, csrf_token):
238 def test_missing_target_reference(self, pr_util, csrf_token):
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 pull_request = pr_util.create_pull_request(
240 pull_request = pr_util.create_pull_request(
241 approved=True, mergeable=True)
241 approved=True, mergeable=True)
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 Session().add(pull_request)
243 Session().add(pull_request)
244 Session().commit()
244 Session().commit()
245
245
246 pull_request_id = pull_request.pull_request_id
246 pull_request_id = pull_request.pull_request_id
247 pull_request_url = route_path(
247 pull_request_url = route_path(
248 'pullrequest_show',
248 'pullrequest_show',
249 repo_name=pull_request.target_repo.repo_name,
249 repo_name=pull_request.target_repo.repo_name,
250 pull_request_id=pull_request_id)
250 pull_request_id=pull_request_id)
251
251
252 response = self.app.get(pull_request_url)
252 response = self.app.get(pull_request_url)
253
253
254 assertr = AssertResponse(response)
254 assertr = AssertResponse(response)
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 MergeFailureReason.MISSING_TARGET_REF]
256 MergeFailureReason.MISSING_TARGET_REF]
257 assertr.element_contains(
257 assertr.element_contains(
258 'span[data-role="merge-message"]', str(expected_msg))
258 'span[data-role="merge-message"]', str(expected_msg))
259
259
260 def test_comment_and_close_pull_request_custom_message_approved(
260 def test_comment_and_close_pull_request_custom_message_approved(
261 self, pr_util, csrf_token, xhr_header):
261 self, pr_util, csrf_token, xhr_header):
262
262
263 pull_request = pr_util.create_pull_request(approved=True)
263 pull_request = pr_util.create_pull_request(approved=True)
264 pull_request_id = pull_request.pull_request_id
264 pull_request_id = pull_request.pull_request_id
265 author = pull_request.user_id
265 author = pull_request.user_id
266 repo = pull_request.target_repo.repo_id
266 repo = pull_request.target_repo.repo_id
267
267
268 self.app.post(
268 self.app.post(
269 route_path('pullrequest_comment_create',
269 route_path('pullrequest_comment_create',
270 repo_name=pull_request.target_repo.scm_instance().name,
270 repo_name=pull_request.target_repo.scm_instance().name,
271 pull_request_id=pull_request_id),
271 pull_request_id=pull_request_id),
272 params={
272 params={
273 'close_pull_request': '1',
273 'close_pull_request': '1',
274 'text': 'Closing a PR',
274 'text': 'Closing a PR',
275 'csrf_token': csrf_token},
275 'csrf_token': csrf_token},
276 extra_environ=xhr_header,)
276 extra_environ=xhr_header,)
277
277
278 journal = UserLog.query()\
278 journal = UserLog.query()\
279 .filter(UserLog.user_id == author)\
279 .filter(UserLog.user_id == author)\
280 .filter(UserLog.repository_id == repo) \
280 .filter(UserLog.repository_id == repo) \
281 .order_by('user_log_id') \
281 .order_by('user_log_id') \
282 .all()
282 .all()
283 assert journal[-1].action == 'repo.pull_request.close'
283 assert journal[-1].action == 'repo.pull_request.close'
284
284
285 pull_request = PullRequest.get(pull_request_id)
285 pull_request = PullRequest.get(pull_request_id)
286 assert pull_request.is_closed()
286 assert pull_request.is_closed()
287
287
288 status = ChangesetStatusModel().get_status(
288 status = ChangesetStatusModel().get_status(
289 pull_request.source_repo, pull_request=pull_request)
289 pull_request.source_repo, pull_request=pull_request)
290 assert status == ChangesetStatus.STATUS_APPROVED
290 assert status == ChangesetStatus.STATUS_APPROVED
291 comments = ChangesetComment().query() \
291 comments = ChangesetComment().query() \
292 .filter(ChangesetComment.pull_request == pull_request) \
292 .filter(ChangesetComment.pull_request == pull_request) \
293 .order_by(ChangesetComment.comment_id.asc())\
293 .order_by(ChangesetComment.comment_id.asc())\
294 .all()
294 .all()
295 assert comments[-1].text == 'Closing a PR'
295 assert comments[-1].text == 'Closing a PR'
296
296
297 def test_comment_force_close_pull_request_rejected(
297 def test_comment_force_close_pull_request_rejected(
298 self, pr_util, csrf_token, xhr_header):
298 self, pr_util, csrf_token, xhr_header):
299 pull_request = pr_util.create_pull_request()
299 pull_request = pr_util.create_pull_request()
300 pull_request_id = pull_request.pull_request_id
300 pull_request_id = pull_request.pull_request_id
301 PullRequestModel().update_reviewers(
301 PullRequestModel().update_reviewers(
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 pull_request.author)
303 pull_request.author)
304 author = pull_request.user_id
304 author = pull_request.user_id
305 repo = pull_request.target_repo.repo_id
305 repo = pull_request.target_repo.repo_id
306
306
307 self.app.post(
307 self.app.post(
308 route_path('pullrequest_comment_create',
308 route_path('pullrequest_comment_create',
309 repo_name=pull_request.target_repo.scm_instance().name,
309 repo_name=pull_request.target_repo.scm_instance().name,
310 pull_request_id=pull_request_id),
310 pull_request_id=pull_request_id),
311 params={
311 params={
312 'close_pull_request': '1',
312 'close_pull_request': '1',
313 'csrf_token': csrf_token},
313 'csrf_token': csrf_token},
314 extra_environ=xhr_header)
314 extra_environ=xhr_header)
315
315
316 pull_request = PullRequest.get(pull_request_id)
316 pull_request = PullRequest.get(pull_request_id)
317
317
318 journal = UserLog.query()\
318 journal = UserLog.query()\
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 .order_by('user_log_id') \
320 .order_by('user_log_id') \
321 .all()
321 .all()
322 assert journal[-1].action == 'repo.pull_request.close'
322 assert journal[-1].action == 'repo.pull_request.close'
323
323
324 # check only the latest status, not the review status
324 # check only the latest status, not the review status
325 status = ChangesetStatusModel().get_status(
325 status = ChangesetStatusModel().get_status(
326 pull_request.source_repo, pull_request=pull_request)
326 pull_request.source_repo, pull_request=pull_request)
327 assert status == ChangesetStatus.STATUS_REJECTED
327 assert status == ChangesetStatus.STATUS_REJECTED
328
328
329 def test_comment_and_close_pull_request(
329 def test_comment_and_close_pull_request(
330 self, pr_util, csrf_token, xhr_header):
330 self, pr_util, csrf_token, xhr_header):
331 pull_request = pr_util.create_pull_request()
331 pull_request = pr_util.create_pull_request()
332 pull_request_id = pull_request.pull_request_id
332 pull_request_id = pull_request.pull_request_id
333
333
334 response = self.app.post(
334 response = self.app.post(
335 route_path('pullrequest_comment_create',
335 route_path('pullrequest_comment_create',
336 repo_name=pull_request.target_repo.scm_instance().name,
336 repo_name=pull_request.target_repo.scm_instance().name,
337 pull_request_id=pull_request.pull_request_id),
337 pull_request_id=pull_request.pull_request_id),
338 params={
338 params={
339 'close_pull_request': 'true',
339 'close_pull_request': 'true',
340 'csrf_token': csrf_token},
340 'csrf_token': csrf_token},
341 extra_environ=xhr_header)
341 extra_environ=xhr_header)
342
342
343 assert response.json
343 assert response.json
344
344
345 pull_request = PullRequest.get(pull_request_id)
345 pull_request = PullRequest.get(pull_request_id)
346 assert pull_request.is_closed()
346 assert pull_request.is_closed()
347
347
348 # check only the latest status, not the review status
348 # check only the latest status, not the review status
349 status = ChangesetStatusModel().get_status(
349 status = ChangesetStatusModel().get_status(
350 pull_request.source_repo, pull_request=pull_request)
350 pull_request.source_repo, pull_request=pull_request)
351 assert status == ChangesetStatus.STATUS_REJECTED
351 assert status == ChangesetStatus.STATUS_REJECTED
352
352
353 def test_create_pull_request(self, backend, csrf_token):
353 def test_create_pull_request(self, backend, csrf_token):
354 commits = [
354 commits = [
355 {'message': 'ancestor'},
355 {'message': 'ancestor'},
356 {'message': 'change'},
356 {'message': 'change'},
357 {'message': 'change2'},
357 {'message': 'change2'},
358 ]
358 ]
359 commit_ids = backend.create_master_repo(commits)
359 commit_ids = backend.create_master_repo(commits)
360 target = backend.create_repo(heads=['ancestor'])
360 target = backend.create_repo(heads=['ancestor'])
361 source = backend.create_repo(heads=['change2'])
361 source = backend.create_repo(heads=['change2'])
362
362
363 response = self.app.post(
363 response = self.app.post(
364 route_path('pullrequest_create', repo_name=source.repo_name),
364 route_path('pullrequest_create', repo_name=source.repo_name),
365 [
365 [
366 ('source_repo', source.repo_name),
366 ('source_repo', source.repo_name),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 ('target_repo', target.repo_name),
368 ('target_repo', target.repo_name),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
371 ('pullrequest_desc', 'Description'),
371 ('pullrequest_desc', 'Description'),
372 ('pullrequest_title', 'Title'),
372 ('pullrequest_title', 'Title'),
373 ('__start__', 'review_members:sequence'),
373 ('__start__', 'review_members:sequence'),
374 ('__start__', 'reviewer:mapping'),
374 ('__start__', 'reviewer:mapping'),
375 ('user_id', '1'),
375 ('user_id', '1'),
376 ('__start__', 'reasons:sequence'),
376 ('__start__', 'reasons:sequence'),
377 ('reason', 'Some reason'),
377 ('reason', 'Some reason'),
378 ('__end__', 'reasons:sequence'),
378 ('__end__', 'reasons:sequence'),
379 ('__start__', 'rules:sequence'),
379 ('__start__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
381 ('mandatory', 'False'),
381 ('mandatory', 'False'),
382 ('__end__', 'reviewer:mapping'),
382 ('__end__', 'reviewer:mapping'),
383 ('__end__', 'review_members:sequence'),
383 ('__end__', 'review_members:sequence'),
384 ('__start__', 'revisions:sequence'),
384 ('__start__', 'revisions:sequence'),
385 ('revisions', commit_ids['change']),
385 ('revisions', commit_ids['change']),
386 ('revisions', commit_ids['change2']),
386 ('revisions', commit_ids['change2']),
387 ('__end__', 'revisions:sequence'),
387 ('__end__', 'revisions:sequence'),
388 ('user', ''),
388 ('user', ''),
389 ('csrf_token', csrf_token),
389 ('csrf_token', csrf_token),
390 ],
390 ],
391 status=302)
391 status=302)
392
392
393 location = response.headers['Location']
393 location = response.headers['Location']
394 pull_request_id = location.rsplit('/', 1)[1]
394 pull_request_id = location.rsplit('/', 1)[1]
395 assert pull_request_id != 'new'
395 assert pull_request_id != 'new'
396 pull_request = PullRequest.get(int(pull_request_id))
396 pull_request = PullRequest.get(int(pull_request_id))
397
397
398 # check that we have now both revisions
398 # check that we have now both revisions
399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
402 assert pull_request.target_ref == expected_target_ref
402 assert pull_request.target_ref == expected_target_ref
403
403
404 def test_reviewer_notifications(self, backend, csrf_token):
404 def test_reviewer_notifications(self, backend, csrf_token):
405 # We have to use the app.post for this test so it will create the
405 # We have to use the app.post for this test so it will create the
406 # notifications properly with the new PR
406 # notifications properly with the new PR
407 commits = [
407 commits = [
408 {'message': 'ancestor',
408 {'message': 'ancestor',
409 'added': [FileNode('file_A', content='content_of_ancestor')]},
409 'added': [FileNode('file_A', content='content_of_ancestor')]},
410 {'message': 'change',
410 {'message': 'change',
411 'added': [FileNode('file_a', content='content_of_change')]},
411 'added': [FileNode('file_a', content='content_of_change')]},
412 {'message': 'change-child'},
412 {'message': 'change-child'},
413 {'message': 'ancestor-child', 'parents': ['ancestor'],
413 {'message': 'ancestor-child', 'parents': ['ancestor'],
414 'added': [
414 'added': [
415 FileNode('file_B', content='content_of_ancestor_child')]},
415 FileNode('file_B', content='content_of_ancestor_child')]},
416 {'message': 'ancestor-child-2'},
416 {'message': 'ancestor-child-2'},
417 ]
417 ]
418 commit_ids = backend.create_master_repo(commits)
418 commit_ids = backend.create_master_repo(commits)
419 target = backend.create_repo(heads=['ancestor-child'])
419 target = backend.create_repo(heads=['ancestor-child'])
420 source = backend.create_repo(heads=['change'])
420 source = backend.create_repo(heads=['change'])
421
421
422 response = self.app.post(
422 response = self.app.post(
423 route_path('pullrequest_create', repo_name=source.repo_name),
423 route_path('pullrequest_create', repo_name=source.repo_name),
424 [
424 [
425 ('source_repo', source.repo_name),
425 ('source_repo', source.repo_name),
426 ('source_ref', 'branch:default:' + commit_ids['change']),
426 ('source_ref', 'branch:default:' + commit_ids['change']),
427 ('target_repo', target.repo_name),
427 ('target_repo', target.repo_name),
428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
429 ('common_ancestor', commit_ids['ancestor']),
429 ('common_ancestor', commit_ids['ancestor']),
430 ('pullrequest_desc', 'Description'),
430 ('pullrequest_desc', 'Description'),
431 ('pullrequest_title', 'Title'),
431 ('pullrequest_title', 'Title'),
432 ('__start__', 'review_members:sequence'),
432 ('__start__', 'review_members:sequence'),
433 ('__start__', 'reviewer:mapping'),
433 ('__start__', 'reviewer:mapping'),
434 ('user_id', '2'),
434 ('user_id', '2'),
435 ('__start__', 'reasons:sequence'),
435 ('__start__', 'reasons:sequence'),
436 ('reason', 'Some reason'),
436 ('reason', 'Some reason'),
437 ('__end__', 'reasons:sequence'),
437 ('__end__', 'reasons:sequence'),
438 ('__start__', 'rules:sequence'),
438 ('__start__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
440 ('mandatory', 'False'),
440 ('mandatory', 'False'),
441 ('__end__', 'reviewer:mapping'),
441 ('__end__', 'reviewer:mapping'),
442 ('__end__', 'review_members:sequence'),
442 ('__end__', 'review_members:sequence'),
443 ('__start__', 'revisions:sequence'),
443 ('__start__', 'revisions:sequence'),
444 ('revisions', commit_ids['change']),
444 ('revisions', commit_ids['change']),
445 ('__end__', 'revisions:sequence'),
445 ('__end__', 'revisions:sequence'),
446 ('user', ''),
446 ('user', ''),
447 ('csrf_token', csrf_token),
447 ('csrf_token', csrf_token),
448 ],
448 ],
449 status=302)
449 status=302)
450
450
451 location = response.headers['Location']
451 location = response.headers['Location']
452
452
453 pull_request_id = location.rsplit('/', 1)[1]
453 pull_request_id = location.rsplit('/', 1)[1]
454 assert pull_request_id != 'new'
454 assert pull_request_id != 'new'
455 pull_request = PullRequest.get(int(pull_request_id))
455 pull_request = PullRequest.get(int(pull_request_id))
456
456
457 # Check that a notification was made
457 # Check that a notification was made
458 notifications = Notification.query()\
458 notifications = Notification.query()\
459 .filter(Notification.created_by == pull_request.author.user_id,
459 .filter(Notification.created_by == pull_request.author.user_id,
460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
461 Notification.subject.contains(
461 Notification.subject.contains(
462 "wants you to review pull request #%s" % pull_request_id))
462 "wants you to review pull request #%s" % pull_request_id))
463 assert len(notifications.all()) == 1
463 assert len(notifications.all()) == 1
464
464
465 # Change reviewers and check that a notification was made
465 # Change reviewers and check that a notification was made
466 PullRequestModel().update_reviewers(
466 PullRequestModel().update_reviewers(
467 pull_request.pull_request_id, [(1, [], False, [])],
467 pull_request.pull_request_id, [(1, [], False, [])],
468 pull_request.author)
468 pull_request.author)
469 assert len(notifications.all()) == 2
469 assert len(notifications.all()) == 2
470
470
471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
472 csrf_token):
472 csrf_token):
473 commits = [
473 commits = [
474 {'message': 'ancestor',
474 {'message': 'ancestor',
475 'added': [FileNode('file_A', content='content_of_ancestor')]},
475 'added': [FileNode('file_A', content='content_of_ancestor')]},
476 {'message': 'change',
476 {'message': 'change',
477 'added': [FileNode('file_a', content='content_of_change')]},
477 'added': [FileNode('file_a', content='content_of_change')]},
478 {'message': 'change-child'},
478 {'message': 'change-child'},
479 {'message': 'ancestor-child', 'parents': ['ancestor'],
479 {'message': 'ancestor-child', 'parents': ['ancestor'],
480 'added': [
480 'added': [
481 FileNode('file_B', content='content_of_ancestor_child')]},
481 FileNode('file_B', content='content_of_ancestor_child')]},
482 {'message': 'ancestor-child-2'},
482 {'message': 'ancestor-child-2'},
483 ]
483 ]
484 commit_ids = backend.create_master_repo(commits)
484 commit_ids = backend.create_master_repo(commits)
485 target = backend.create_repo(heads=['ancestor-child'])
485 target = backend.create_repo(heads=['ancestor-child'])
486 source = backend.create_repo(heads=['change'])
486 source = backend.create_repo(heads=['change'])
487
487
488 response = self.app.post(
488 response = self.app.post(
489 route_path('pullrequest_create', repo_name=source.repo_name),
489 route_path('pullrequest_create', repo_name=source.repo_name),
490 [
490 [
491 ('source_repo', source.repo_name),
491 ('source_repo', source.repo_name),
492 ('source_ref', 'branch:default:' + commit_ids['change']),
492 ('source_ref', 'branch:default:' + commit_ids['change']),
493 ('target_repo', target.repo_name),
493 ('target_repo', target.repo_name),
494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
495 ('common_ancestor', commit_ids['ancestor']),
495 ('common_ancestor', commit_ids['ancestor']),
496 ('pullrequest_desc', 'Description'),
496 ('pullrequest_desc', 'Description'),
497 ('pullrequest_title', 'Title'),
497 ('pullrequest_title', 'Title'),
498 ('__start__', 'review_members:sequence'),
498 ('__start__', 'review_members:sequence'),
499 ('__start__', 'reviewer:mapping'),
499 ('__start__', 'reviewer:mapping'),
500 ('user_id', '1'),
500 ('user_id', '1'),
501 ('__start__', 'reasons:sequence'),
501 ('__start__', 'reasons:sequence'),
502 ('reason', 'Some reason'),
502 ('reason', 'Some reason'),
503 ('__end__', 'reasons:sequence'),
503 ('__end__', 'reasons:sequence'),
504 ('__start__', 'rules:sequence'),
504 ('__start__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
506 ('mandatory', 'False'),
506 ('mandatory', 'False'),
507 ('__end__', 'reviewer:mapping'),
507 ('__end__', 'reviewer:mapping'),
508 ('__end__', 'review_members:sequence'),
508 ('__end__', 'review_members:sequence'),
509 ('__start__', 'revisions:sequence'),
509 ('__start__', 'revisions:sequence'),
510 ('revisions', commit_ids['change']),
510 ('revisions', commit_ids['change']),
511 ('__end__', 'revisions:sequence'),
511 ('__end__', 'revisions:sequence'),
512 ('user', ''),
512 ('user', ''),
513 ('csrf_token', csrf_token),
513 ('csrf_token', csrf_token),
514 ],
514 ],
515 status=302)
515 status=302)
516
516
517 location = response.headers['Location']
517 location = response.headers['Location']
518
518
519 pull_request_id = location.rsplit('/', 1)[1]
519 pull_request_id = location.rsplit('/', 1)[1]
520 assert pull_request_id != 'new'
520 assert pull_request_id != 'new'
521 pull_request = PullRequest.get(int(pull_request_id))
521 pull_request = PullRequest.get(int(pull_request_id))
522
522
523 # target_ref has to point to the ancestor's commit_id in order to
523 # target_ref has to point to the ancestor's commit_id in order to
524 # show the correct diff
524 # show the correct diff
525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
526 assert pull_request.target_ref == expected_target_ref
526 assert pull_request.target_ref == expected_target_ref
527
527
528 # Check generated diff contents
528 # Check generated diff contents
529 response = response.follow()
529 response = response.follow()
530 assert 'content_of_ancestor' not in response.body
530 assert 'content_of_ancestor' not in response.body
531 assert 'content_of_ancestor-child' not in response.body
531 assert 'content_of_ancestor-child' not in response.body
532 assert 'content_of_change' in response.body
532 assert 'content_of_change' in response.body
533
533
534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
535 # Clear any previous calls to rcextensions
535 # Clear any previous calls to rcextensions
536 rhodecode.EXTENSIONS.calls.clear()
536 rhodecode.EXTENSIONS.calls.clear()
537
537
538 pull_request = pr_util.create_pull_request(
538 pull_request = pr_util.create_pull_request(
539 approved=True, mergeable=True)
539 approved=True, mergeable=True)
540 pull_request_id = pull_request.pull_request_id
540 pull_request_id = pull_request.pull_request_id
541 repo_name = pull_request.target_repo.scm_instance().name,
541 repo_name = pull_request.target_repo.scm_instance().name,
542
542
543 response = self.app.post(
543 response = self.app.post(
544 route_path('pullrequest_merge',
544 route_path('pullrequest_merge',
545 repo_name=str(repo_name[0]),
545 repo_name=str(repo_name[0]),
546 pull_request_id=pull_request_id),
546 pull_request_id=pull_request_id),
547 params={'csrf_token': csrf_token}).follow()
547 params={'csrf_token': csrf_token}).follow()
548
548
549 pull_request = PullRequest.get(pull_request_id)
549 pull_request = PullRequest.get(pull_request_id)
550
550
551 assert response.status_int == 200
551 assert response.status_int == 200
552 assert pull_request.is_closed()
552 assert pull_request.is_closed()
553 assert_pull_request_status(
553 assert_pull_request_status(
554 pull_request, ChangesetStatus.STATUS_APPROVED)
554 pull_request, ChangesetStatus.STATUS_APPROVED)
555
555
556 # Check the relevant log entries were added
556 # Check the relevant log entries were added
557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
558 actions = [log.action for log in user_logs]
558 actions = [log.action for log in user_logs]
559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
560 expected_actions = [
560 expected_actions = [
561 u'repo.pull_request.close',
561 u'repo.pull_request.close',
562 u'repo.pull_request.merge',
562 u'repo.pull_request.merge',
563 u'repo.pull_request.comment.create'
563 u'repo.pull_request.comment.create'
564 ]
564 ]
565 assert actions == expected_actions
565 assert actions == expected_actions
566
566
567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
568 actions = [log for log in user_logs]
568 actions = [log for log in user_logs]
569 assert actions[-1].action == 'user.push'
569 assert actions[-1].action == 'user.push'
570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
571
571
572 # Check post_push rcextension was really executed
572 # Check post_push rcextension was really executed
573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
574 assert len(push_calls) == 1
574 assert len(push_calls) == 1
575 unused_last_call_args, last_call_kwargs = push_calls[0]
575 unused_last_call_args, last_call_kwargs = push_calls[0]
576 assert last_call_kwargs['action'] == 'push'
576 assert last_call_kwargs['action'] == 'push'
577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
578
578
579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
580 pull_request = pr_util.create_pull_request(mergeable=False)
580 pull_request = pr_util.create_pull_request(mergeable=False)
581 pull_request_id = pull_request.pull_request_id
581 pull_request_id = pull_request.pull_request_id
582 pull_request = PullRequest.get(pull_request_id)
582 pull_request = PullRequest.get(pull_request_id)
583
583
584 response = self.app.post(
584 response = self.app.post(
585 route_path('pullrequest_merge',
585 route_path('pullrequest_merge',
586 repo_name=pull_request.target_repo.scm_instance().name,
586 repo_name=pull_request.target_repo.scm_instance().name,
587 pull_request_id=pull_request.pull_request_id),
587 pull_request_id=pull_request.pull_request_id),
588 params={'csrf_token': csrf_token}).follow()
588 params={'csrf_token': csrf_token}).follow()
589
589
590 assert response.status_int == 200
590 assert response.status_int == 200
591 response.mustcontain(
591 response.mustcontain(
592 'Merge is not currently possible because of below failed checks.')
592 'Merge is not currently possible because of below failed checks.')
593 response.mustcontain('Server-side pull request merging is disabled.')
593 response.mustcontain('Server-side pull request merging is disabled.')
594
594
595 @pytest.mark.skip_backends('svn')
595 @pytest.mark.skip_backends('svn')
596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
597 pull_request = pr_util.create_pull_request(mergeable=True)
597 pull_request = pr_util.create_pull_request(mergeable=True)
598 pull_request_id = pull_request.pull_request_id
598 pull_request_id = pull_request.pull_request_id
599 repo_name = pull_request.target_repo.scm_instance().name
599 repo_name = pull_request.target_repo.scm_instance().name
600
600
601 response = self.app.post(
601 response = self.app.post(
602 route_path('pullrequest_merge',
602 route_path('pullrequest_merge',
603 repo_name=repo_name,
603 repo_name=repo_name,
604 pull_request_id=pull_request_id),
604 pull_request_id=pull_request_id),
605 params={'csrf_token': csrf_token}).follow()
605 params={'csrf_token': csrf_token}).follow()
606
606
607 assert response.status_int == 200
607 assert response.status_int == 200
608
608
609 response.mustcontain(
609 response.mustcontain(
610 'Merge is not currently possible because of below failed checks.')
610 'Merge is not currently possible because of below failed checks.')
611 response.mustcontain('Pull request reviewer approval is pending.')
611 response.mustcontain('Pull request reviewer approval is pending.')
612
612
613 def test_merge_pull_request_renders_failure_reason(
613 def test_merge_pull_request_renders_failure_reason(
614 self, user_regular, csrf_token, pr_util):
614 self, user_regular, csrf_token, pr_util):
615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
616 pull_request_id = pull_request.pull_request_id
616 pull_request_id = pull_request.pull_request_id
617 repo_name = pull_request.target_repo.scm_instance().name
617 repo_name = pull_request.target_repo.scm_instance().name
618
618
619 model_patcher = mock.patch.multiple(
619 model_patcher = mock.patch.multiple(
620 PullRequestModel,
620 PullRequestModel,
621 merge=mock.Mock(return_value=MergeResponse(
621 merge_repo=mock.Mock(return_value=MergeResponse(
622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
624
624
625 with model_patcher:
625 with model_patcher:
626 response = self.app.post(
626 response = self.app.post(
627 route_path('pullrequest_merge',
627 route_path('pullrequest_merge',
628 repo_name=repo_name,
628 repo_name=repo_name,
629 pull_request_id=pull_request_id),
629 pull_request_id=pull_request_id),
630 params={'csrf_token': csrf_token}, status=302)
630 params={'csrf_token': csrf_token}, status=302)
631
631
632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
633 MergeFailureReason.PUSH_FAILED])
633 MergeFailureReason.PUSH_FAILED])
634
634
635 def test_update_source_revision(self, backend, csrf_token):
635 def test_update_source_revision(self, backend, csrf_token):
636 commits = [
636 commits = [
637 {'message': 'ancestor'},
637 {'message': 'ancestor'},
638 {'message': 'change'},
638 {'message': 'change'},
639 {'message': 'change-2'},
639 {'message': 'change-2'},
640 ]
640 ]
641 commit_ids = backend.create_master_repo(commits)
641 commit_ids = backend.create_master_repo(commits)
642 target = backend.create_repo(heads=['ancestor'])
642 target = backend.create_repo(heads=['ancestor'])
643 source = backend.create_repo(heads=['change'])
643 source = backend.create_repo(heads=['change'])
644
644
645 # create pr from a in source to A in target
645 # create pr from a in source to A in target
646 pull_request = PullRequest()
646 pull_request = PullRequest()
647 pull_request.source_repo = source
647 pull_request.source_repo = source
648 # TODO: johbo: Make sure that we write the source ref this way!
648 # TODO: johbo: Make sure that we write the source ref this way!
649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
651 pull_request.target_repo = target
651 pull_request.target_repo = target
652
652
653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
654 branch=backend.default_branch_name,
654 branch=backend.default_branch_name,
655 commit_id=commit_ids['ancestor'])
655 commit_id=commit_ids['ancestor'])
656 pull_request.revisions = [commit_ids['change']]
656 pull_request.revisions = [commit_ids['change']]
657 pull_request.title = u"Test"
657 pull_request.title = u"Test"
658 pull_request.description = u"Description"
658 pull_request.description = u"Description"
659 pull_request.author = UserModel().get_by_username(
659 pull_request.author = UserModel().get_by_username(
660 TEST_USER_ADMIN_LOGIN)
660 TEST_USER_ADMIN_LOGIN)
661 Session().add(pull_request)
661 Session().add(pull_request)
662 Session().commit()
662 Session().commit()
663 pull_request_id = pull_request.pull_request_id
663 pull_request_id = pull_request.pull_request_id
664
664
665 # source has ancestor - change - change-2
665 # source has ancestor - change - change-2
666 backend.pull_heads(source, heads=['change-2'])
666 backend.pull_heads(source, heads=['change-2'])
667
667
668 # update PR
668 # update PR
669 self.app.post(
669 self.app.post(
670 route_path('pullrequest_update',
670 route_path('pullrequest_update',
671 repo_name=target.repo_name,
671 repo_name=target.repo_name,
672 pull_request_id=pull_request_id),
672 pull_request_id=pull_request_id),
673 params={'update_commits': 'true',
673 params={'update_commits': 'true',
674 'csrf_token': csrf_token})
674 'csrf_token': csrf_token})
675
675
676 # check that we have now both revisions
676 # check that we have now both revisions
677 pull_request = PullRequest.get(pull_request_id)
677 pull_request = PullRequest.get(pull_request_id)
678 assert pull_request.revisions == [
678 assert pull_request.revisions == [
679 commit_ids['change-2'], commit_ids['change']]
679 commit_ids['change-2'], commit_ids['change']]
680
680
681 # TODO: johbo: this should be a test on its own
681 # TODO: johbo: this should be a test on its own
682 response = self.app.get(route_path(
682 response = self.app.get(route_path(
683 'pullrequest_new',
683 'pullrequest_new',
684 repo_name=target.repo_name))
684 repo_name=target.repo_name))
685 assert response.status_int == 200
685 assert response.status_int == 200
686 assert 'Pull request updated to' in response.body
686 assert 'Pull request updated to' in response.body
687 assert 'with 1 added, 0 removed commits.' in response.body
687 assert 'with 1 added, 0 removed commits.' in response.body
688
688
689 def test_update_target_revision(self, backend, csrf_token):
689 def test_update_target_revision(self, backend, csrf_token):
690 commits = [
690 commits = [
691 {'message': 'ancestor'},
691 {'message': 'ancestor'},
692 {'message': 'change'},
692 {'message': 'change'},
693 {'message': 'ancestor-new', 'parents': ['ancestor']},
693 {'message': 'ancestor-new', 'parents': ['ancestor']},
694 {'message': 'change-rebased'},
694 {'message': 'change-rebased'},
695 ]
695 ]
696 commit_ids = backend.create_master_repo(commits)
696 commit_ids = backend.create_master_repo(commits)
697 target = backend.create_repo(heads=['ancestor'])
697 target = backend.create_repo(heads=['ancestor'])
698 source = backend.create_repo(heads=['change'])
698 source = backend.create_repo(heads=['change'])
699
699
700 # create pr from a in source to A in target
700 # create pr from a in source to A in target
701 pull_request = PullRequest()
701 pull_request = PullRequest()
702 pull_request.source_repo = source
702 pull_request.source_repo = source
703 # TODO: johbo: Make sure that we write the source ref this way!
703 # TODO: johbo: Make sure that we write the source ref this way!
704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
706 pull_request.target_repo = target
706 pull_request.target_repo = target
707 # TODO: johbo: Target ref should be branch based, since tip can jump
707 # TODO: johbo: Target ref should be branch based, since tip can jump
708 # from branch to branch
708 # from branch to branch
709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
710 branch=backend.default_branch_name,
710 branch=backend.default_branch_name,
711 commit_id=commit_ids['ancestor'])
711 commit_id=commit_ids['ancestor'])
712 pull_request.revisions = [commit_ids['change']]
712 pull_request.revisions = [commit_ids['change']]
713 pull_request.title = u"Test"
713 pull_request.title = u"Test"
714 pull_request.description = u"Description"
714 pull_request.description = u"Description"
715 pull_request.author = UserModel().get_by_username(
715 pull_request.author = UserModel().get_by_username(
716 TEST_USER_ADMIN_LOGIN)
716 TEST_USER_ADMIN_LOGIN)
717 Session().add(pull_request)
717 Session().add(pull_request)
718 Session().commit()
718 Session().commit()
719 pull_request_id = pull_request.pull_request_id
719 pull_request_id = pull_request.pull_request_id
720
720
721 # target has ancestor - ancestor-new
721 # target has ancestor - ancestor-new
722 # source has ancestor - ancestor-new - change-rebased
722 # source has ancestor - ancestor-new - change-rebased
723 backend.pull_heads(target, heads=['ancestor-new'])
723 backend.pull_heads(target, heads=['ancestor-new'])
724 backend.pull_heads(source, heads=['change-rebased'])
724 backend.pull_heads(source, heads=['change-rebased'])
725
725
726 # update PR
726 # update PR
727 self.app.post(
727 self.app.post(
728 route_path('pullrequest_update',
728 route_path('pullrequest_update',
729 repo_name=target.repo_name,
729 repo_name=target.repo_name,
730 pull_request_id=pull_request_id),
730 pull_request_id=pull_request_id),
731 params={'update_commits': 'true',
731 params={'update_commits': 'true',
732 'csrf_token': csrf_token},
732 'csrf_token': csrf_token},
733 status=200)
733 status=200)
734
734
735 # check that we have now both revisions
735 # check that we have now both revisions
736 pull_request = PullRequest.get(pull_request_id)
736 pull_request = PullRequest.get(pull_request_id)
737 assert pull_request.revisions == [commit_ids['change-rebased']]
737 assert pull_request.revisions == [commit_ids['change-rebased']]
738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
739 branch=backend.default_branch_name,
739 branch=backend.default_branch_name,
740 commit_id=commit_ids['ancestor-new'])
740 commit_id=commit_ids['ancestor-new'])
741
741
742 # TODO: johbo: This should be a test on its own
742 # TODO: johbo: This should be a test on its own
743 response = self.app.get(route_path(
743 response = self.app.get(route_path(
744 'pullrequest_new',
744 'pullrequest_new',
745 repo_name=target.repo_name))
745 repo_name=target.repo_name))
746 assert response.status_int == 200
746 assert response.status_int == 200
747 assert 'Pull request updated to' in response.body
747 assert 'Pull request updated to' in response.body
748 assert 'with 1 added, 1 removed commits.' in response.body
748 assert 'with 1 added, 1 removed commits.' in response.body
749
749
750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
751 backend = backend_git
751 backend = backend_git
752 commits = [
752 commits = [
753 {'message': 'master-commit-1'},
753 {'message': 'master-commit-1'},
754 {'message': 'master-commit-2-change-1'},
754 {'message': 'master-commit-2-change-1'},
755 {'message': 'master-commit-3-change-2'},
755 {'message': 'master-commit-3-change-2'},
756
756
757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
758 {'message': 'feat-commit-2'},
758 {'message': 'feat-commit-2'},
759 ]
759 ]
760 commit_ids = backend.create_master_repo(commits)
760 commit_ids = backend.create_master_repo(commits)
761 target = backend.create_repo(heads=['master-commit-3-change-2'])
761 target = backend.create_repo(heads=['master-commit-3-change-2'])
762 source = backend.create_repo(heads=['feat-commit-2'])
762 source = backend.create_repo(heads=['feat-commit-2'])
763
763
764 # create pr from a in source to A in target
764 # create pr from a in source to A in target
765 pull_request = PullRequest()
765 pull_request = PullRequest()
766 pull_request.source_repo = source
766 pull_request.source_repo = source
767 # TODO: johbo: Make sure that we write the source ref this way!
767 # TODO: johbo: Make sure that we write the source ref this way!
768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
769 branch=backend.default_branch_name,
769 branch=backend.default_branch_name,
770 commit_id=commit_ids['master-commit-3-change-2'])
770 commit_id=commit_ids['master-commit-3-change-2'])
771
771
772 pull_request.target_repo = target
772 pull_request.target_repo = target
773 # TODO: johbo: Target ref should be branch based, since tip can jump
773 # TODO: johbo: Target ref should be branch based, since tip can jump
774 # from branch to branch
774 # from branch to branch
775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
776 branch=backend.default_branch_name,
776 branch=backend.default_branch_name,
777 commit_id=commit_ids['feat-commit-2'])
777 commit_id=commit_ids['feat-commit-2'])
778
778
779 pull_request.revisions = [
779 pull_request.revisions = [
780 commit_ids['feat-commit-1'],
780 commit_ids['feat-commit-1'],
781 commit_ids['feat-commit-2']
781 commit_ids['feat-commit-2']
782 ]
782 ]
783 pull_request.title = u"Test"
783 pull_request.title = u"Test"
784 pull_request.description = u"Description"
784 pull_request.description = u"Description"
785 pull_request.author = UserModel().get_by_username(
785 pull_request.author = UserModel().get_by_username(
786 TEST_USER_ADMIN_LOGIN)
786 TEST_USER_ADMIN_LOGIN)
787 Session().add(pull_request)
787 Session().add(pull_request)
788 Session().commit()
788 Session().commit()
789 pull_request_id = pull_request.pull_request_id
789 pull_request_id = pull_request.pull_request_id
790
790
791 # PR is created, now we simulate a force-push into target,
791 # PR is created, now we simulate a force-push into target,
792 # that drops a 2 last commits
792 # that drops a 2 last commits
793 vcsrepo = target.scm_instance()
793 vcsrepo = target.scm_instance()
794 vcsrepo.config.clear_section('hooks')
794 vcsrepo.config.clear_section('hooks')
795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
796
796
797 # update PR
797 # update PR
798 self.app.post(
798 self.app.post(
799 route_path('pullrequest_update',
799 route_path('pullrequest_update',
800 repo_name=target.repo_name,
800 repo_name=target.repo_name,
801 pull_request_id=pull_request_id),
801 pull_request_id=pull_request_id),
802 params={'update_commits': 'true',
802 params={'update_commits': 'true',
803 'csrf_token': csrf_token},
803 'csrf_token': csrf_token},
804 status=200)
804 status=200)
805
805
806 response = self.app.get(route_path(
806 response = self.app.get(route_path(
807 'pullrequest_new',
807 'pullrequest_new',
808 repo_name=target.repo_name))
808 repo_name=target.repo_name))
809 assert response.status_int == 200
809 assert response.status_int == 200
810 response.mustcontain('Pull request updated to')
810 response.mustcontain('Pull request updated to')
811 response.mustcontain('with 0 added, 0 removed commits.')
811 response.mustcontain('with 0 added, 0 removed commits.')
812
812
813 def test_update_of_ancestor_reference(self, backend, csrf_token):
813 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 commits = [
814 commits = [
815 {'message': 'ancestor'},
815 {'message': 'ancestor'},
816 {'message': 'change'},
816 {'message': 'change'},
817 {'message': 'change-2'},
817 {'message': 'change-2'},
818 {'message': 'ancestor-new', 'parents': ['ancestor']},
818 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 {'message': 'change-rebased'},
819 {'message': 'change-rebased'},
820 ]
820 ]
821 commit_ids = backend.create_master_repo(commits)
821 commit_ids = backend.create_master_repo(commits)
822 target = backend.create_repo(heads=['ancestor'])
822 target = backend.create_repo(heads=['ancestor'])
823 source = backend.create_repo(heads=['change'])
823 source = backend.create_repo(heads=['change'])
824
824
825 # create pr from a in source to A in target
825 # create pr from a in source to A in target
826 pull_request = PullRequest()
826 pull_request = PullRequest()
827 pull_request.source_repo = source
827 pull_request.source_repo = source
828 # TODO: johbo: Make sure that we write the source ref this way!
828 # TODO: johbo: Make sure that we write the source ref this way!
829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 branch=backend.default_branch_name,
830 branch=backend.default_branch_name,
831 commit_id=commit_ids['change'])
831 commit_id=commit_ids['change'])
832 pull_request.target_repo = target
832 pull_request.target_repo = target
833 # TODO: johbo: Target ref should be branch based, since tip can jump
833 # TODO: johbo: Target ref should be branch based, since tip can jump
834 # from branch to branch
834 # from branch to branch
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 branch=backend.default_branch_name,
836 branch=backend.default_branch_name,
837 commit_id=commit_ids['ancestor'])
837 commit_id=commit_ids['ancestor'])
838 pull_request.revisions = [commit_ids['change']]
838 pull_request.revisions = [commit_ids['change']]
839 pull_request.title = u"Test"
839 pull_request.title = u"Test"
840 pull_request.description = u"Description"
840 pull_request.description = u"Description"
841 pull_request.author = UserModel().get_by_username(
841 pull_request.author = UserModel().get_by_username(
842 TEST_USER_ADMIN_LOGIN)
842 TEST_USER_ADMIN_LOGIN)
843 Session().add(pull_request)
843 Session().add(pull_request)
844 Session().commit()
844 Session().commit()
845 pull_request_id = pull_request.pull_request_id
845 pull_request_id = pull_request.pull_request_id
846
846
847 # target has ancestor - ancestor-new
847 # target has ancestor - ancestor-new
848 # source has ancestor - ancestor-new - change-rebased
848 # source has ancestor - ancestor-new - change-rebased
849 backend.pull_heads(target, heads=['ancestor-new'])
849 backend.pull_heads(target, heads=['ancestor-new'])
850 backend.pull_heads(source, heads=['change-rebased'])
850 backend.pull_heads(source, heads=['change-rebased'])
851
851
852 # update PR
852 # update PR
853 self.app.post(
853 self.app.post(
854 route_path('pullrequest_update',
854 route_path('pullrequest_update',
855 repo_name=target.repo_name,
855 repo_name=target.repo_name,
856 pull_request_id=pull_request_id),
856 pull_request_id=pull_request_id),
857 params={'update_commits': 'true',
857 params={'update_commits': 'true',
858 'csrf_token': csrf_token},
858 'csrf_token': csrf_token},
859 status=200)
859 status=200)
860
860
861 # Expect the target reference to be updated correctly
861 # Expect the target reference to be updated correctly
862 pull_request = PullRequest.get(pull_request_id)
862 pull_request = PullRequest.get(pull_request_id)
863 assert pull_request.revisions == [commit_ids['change-rebased']]
863 assert pull_request.revisions == [commit_ids['change-rebased']]
864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
865 branch=backend.default_branch_name,
865 branch=backend.default_branch_name,
866 commit_id=commit_ids['ancestor-new'])
866 commit_id=commit_ids['ancestor-new'])
867 assert pull_request.target_ref == expected_target_ref
867 assert pull_request.target_ref == expected_target_ref
868
868
869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
870 branch_name = 'development'
870 branch_name = 'development'
871 commits = [
871 commits = [
872 {'message': 'initial-commit'},
872 {'message': 'initial-commit'},
873 {'message': 'old-feature'},
873 {'message': 'old-feature'},
874 {'message': 'new-feature', 'branch': branch_name},
874 {'message': 'new-feature', 'branch': branch_name},
875 ]
875 ]
876 repo = backend_git.create_repo(commits)
876 repo = backend_git.create_repo(commits)
877 commit_ids = backend_git.commit_ids
877 commit_ids = backend_git.commit_ids
878
878
879 pull_request = PullRequest()
879 pull_request = PullRequest()
880 pull_request.source_repo = repo
880 pull_request.source_repo = repo
881 pull_request.target_repo = repo
881 pull_request.target_repo = repo
882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
883 branch=branch_name, commit_id=commit_ids['new-feature'])
883 branch=branch_name, commit_id=commit_ids['new-feature'])
884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
885 branch=backend_git.default_branch_name,
885 branch=backend_git.default_branch_name,
886 commit_id=commit_ids['old-feature'])
886 commit_id=commit_ids['old-feature'])
887 pull_request.revisions = [commit_ids['new-feature']]
887 pull_request.revisions = [commit_ids['new-feature']]
888 pull_request.title = u"Test"
888 pull_request.title = u"Test"
889 pull_request.description = u"Description"
889 pull_request.description = u"Description"
890 pull_request.author = UserModel().get_by_username(
890 pull_request.author = UserModel().get_by_username(
891 TEST_USER_ADMIN_LOGIN)
891 TEST_USER_ADMIN_LOGIN)
892 Session().add(pull_request)
892 Session().add(pull_request)
893 Session().commit()
893 Session().commit()
894
894
895 vcs = repo.scm_instance()
895 vcs = repo.scm_instance()
896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
897
897
898 response = self.app.get(route_path(
898 response = self.app.get(route_path(
899 'pullrequest_show',
899 'pullrequest_show',
900 repo_name=repo.repo_name,
900 repo_name=repo.repo_name,
901 pull_request_id=pull_request.pull_request_id))
901 pull_request_id=pull_request.pull_request_id))
902
902
903 assert response.status_int == 200
903 assert response.status_int == 200
904 assert_response = AssertResponse(response)
904 assert_response = AssertResponse(response)
905 assert_response.element_contains(
905 assert_response.element_contains(
906 '#changeset_compare_view_content .alert strong',
906 '#changeset_compare_view_content .alert strong',
907 'Missing commits')
907 'Missing commits')
908 assert_response.element_contains(
908 assert_response.element_contains(
909 '#changeset_compare_view_content .alert',
909 '#changeset_compare_view_content .alert',
910 'This pull request cannot be displayed, because one or more'
910 'This pull request cannot be displayed, because one or more'
911 ' commits no longer exist in the source repository.')
911 ' commits no longer exist in the source repository.')
912
912
913 def test_strip_commits_from_pull_request(
913 def test_strip_commits_from_pull_request(
914 self, backend, pr_util, csrf_token):
914 self, backend, pr_util, csrf_token):
915 commits = [
915 commits = [
916 {'message': 'initial-commit'},
916 {'message': 'initial-commit'},
917 {'message': 'old-feature'},
917 {'message': 'old-feature'},
918 {'message': 'new-feature', 'parents': ['initial-commit']},
918 {'message': 'new-feature', 'parents': ['initial-commit']},
919 ]
919 ]
920 pull_request = pr_util.create_pull_request(
920 pull_request = pr_util.create_pull_request(
921 commits, target_head='initial-commit', source_head='new-feature',
921 commits, target_head='initial-commit', source_head='new-feature',
922 revisions=['new-feature'])
922 revisions=['new-feature'])
923
923
924 vcs = pr_util.source_repository.scm_instance()
924 vcs = pr_util.source_repository.scm_instance()
925 if backend.alias == 'git':
925 if backend.alias == 'git':
926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
927 else:
927 else:
928 vcs.strip(pr_util.commit_ids['new-feature'])
928 vcs.strip(pr_util.commit_ids['new-feature'])
929
929
930 response = self.app.get(route_path(
930 response = self.app.get(route_path(
931 'pullrequest_show',
931 'pullrequest_show',
932 repo_name=pr_util.target_repository.repo_name,
932 repo_name=pr_util.target_repository.repo_name,
933 pull_request_id=pull_request.pull_request_id))
933 pull_request_id=pull_request.pull_request_id))
934
934
935 assert response.status_int == 200
935 assert response.status_int == 200
936 assert_response = AssertResponse(response)
936 assert_response = AssertResponse(response)
937 assert_response.element_contains(
937 assert_response.element_contains(
938 '#changeset_compare_view_content .alert strong',
938 '#changeset_compare_view_content .alert strong',
939 'Missing commits')
939 'Missing commits')
940 assert_response.element_contains(
940 assert_response.element_contains(
941 '#changeset_compare_view_content .alert',
941 '#changeset_compare_view_content .alert',
942 'This pull request cannot be displayed, because one or more'
942 'This pull request cannot be displayed, because one or more'
943 ' commits no longer exist in the source repository.')
943 ' commits no longer exist in the source repository.')
944 assert_response.element_contains(
944 assert_response.element_contains(
945 '#update_commits',
945 '#update_commits',
946 'Update commits')
946 'Update commits')
947
947
948 def test_strip_commits_and_update(
948 def test_strip_commits_and_update(
949 self, backend, pr_util, csrf_token):
949 self, backend, pr_util, csrf_token):
950 commits = [
950 commits = [
951 {'message': 'initial-commit'},
951 {'message': 'initial-commit'},
952 {'message': 'old-feature'},
952 {'message': 'old-feature'},
953 {'message': 'new-feature', 'parents': ['old-feature']},
953 {'message': 'new-feature', 'parents': ['old-feature']},
954 ]
954 ]
955 pull_request = pr_util.create_pull_request(
955 pull_request = pr_util.create_pull_request(
956 commits, target_head='old-feature', source_head='new-feature',
956 commits, target_head='old-feature', source_head='new-feature',
957 revisions=['new-feature'], mergeable=True)
957 revisions=['new-feature'], mergeable=True)
958
958
959 vcs = pr_util.source_repository.scm_instance()
959 vcs = pr_util.source_repository.scm_instance()
960 if backend.alias == 'git':
960 if backend.alias == 'git':
961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
962 else:
962 else:
963 vcs.strip(pr_util.commit_ids['new-feature'])
963 vcs.strip(pr_util.commit_ids['new-feature'])
964
964
965 response = self.app.post(
965 response = self.app.post(
966 route_path('pullrequest_update',
966 route_path('pullrequest_update',
967 repo_name=pull_request.target_repo.repo_name,
967 repo_name=pull_request.target_repo.repo_name,
968 pull_request_id=pull_request.pull_request_id),
968 pull_request_id=pull_request.pull_request_id),
969 params={'update_commits': 'true',
969 params={'update_commits': 'true',
970 'csrf_token': csrf_token})
970 'csrf_token': csrf_token})
971
971
972 assert response.status_int == 200
972 assert response.status_int == 200
973 assert response.body == 'true'
973 assert response.body == 'true'
974
974
975 # Make sure that after update, it won't raise 500 errors
975 # Make sure that after update, it won't raise 500 errors
976 response = self.app.get(route_path(
976 response = self.app.get(route_path(
977 'pullrequest_show',
977 'pullrequest_show',
978 repo_name=pr_util.target_repository.repo_name,
978 repo_name=pr_util.target_repository.repo_name,
979 pull_request_id=pull_request.pull_request_id))
979 pull_request_id=pull_request.pull_request_id))
980
980
981 assert response.status_int == 200
981 assert response.status_int == 200
982 assert_response = AssertResponse(response)
982 assert_response = AssertResponse(response)
983 assert_response.element_contains(
983 assert_response.element_contains(
984 '#changeset_compare_view_content .alert strong',
984 '#changeset_compare_view_content .alert strong',
985 'Missing commits')
985 'Missing commits')
986
986
987 def test_branch_is_a_link(self, pr_util):
987 def test_branch_is_a_link(self, pr_util):
988 pull_request = pr_util.create_pull_request()
988 pull_request = pr_util.create_pull_request()
989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
990 pull_request.target_ref = 'branch:target:abcdef1234567890'
990 pull_request.target_ref = 'branch:target:abcdef1234567890'
991 Session().add(pull_request)
991 Session().add(pull_request)
992 Session().commit()
992 Session().commit()
993
993
994 response = self.app.get(route_path(
994 response = self.app.get(route_path(
995 'pullrequest_show',
995 'pullrequest_show',
996 repo_name=pull_request.target_repo.scm_instance().name,
996 repo_name=pull_request.target_repo.scm_instance().name,
997 pull_request_id=pull_request.pull_request_id))
997 pull_request_id=pull_request.pull_request_id))
998 assert response.status_int == 200
998 assert response.status_int == 200
999 assert_response = AssertResponse(response)
999 assert_response = AssertResponse(response)
1000
1000
1001 origin = assert_response.get_element('.pr-origininfo .tag')
1001 origin = assert_response.get_element('.pr-origininfo .tag')
1002 origin_children = origin.getchildren()
1002 origin_children = origin.getchildren()
1003 assert len(origin_children) == 1
1003 assert len(origin_children) == 1
1004 target = assert_response.get_element('.pr-targetinfo .tag')
1004 target = assert_response.get_element('.pr-targetinfo .tag')
1005 target_children = target.getchildren()
1005 target_children = target.getchildren()
1006 assert len(target_children) == 1
1006 assert len(target_children) == 1
1007
1007
1008 expected_origin_link = route_path(
1008 expected_origin_link = route_path(
1009 'repo_changelog',
1009 'repo_changelog',
1010 repo_name=pull_request.source_repo.scm_instance().name,
1010 repo_name=pull_request.source_repo.scm_instance().name,
1011 params=dict(branch='origin'))
1011 params=dict(branch='origin'))
1012 expected_target_link = route_path(
1012 expected_target_link = route_path(
1013 'repo_changelog',
1013 'repo_changelog',
1014 repo_name=pull_request.target_repo.scm_instance().name,
1014 repo_name=pull_request.target_repo.scm_instance().name,
1015 params=dict(branch='target'))
1015 params=dict(branch='target'))
1016 assert origin_children[0].attrib['href'] == expected_origin_link
1016 assert origin_children[0].attrib['href'] == expected_origin_link
1017 assert origin_children[0].text == 'branch: origin'
1017 assert origin_children[0].text == 'branch: origin'
1018 assert target_children[0].attrib['href'] == expected_target_link
1018 assert target_children[0].attrib['href'] == expected_target_link
1019 assert target_children[0].text == 'branch: target'
1019 assert target_children[0].text == 'branch: target'
1020
1020
1021 def test_bookmark_is_not_a_link(self, pr_util):
1021 def test_bookmark_is_not_a_link(self, pr_util):
1022 pull_request = pr_util.create_pull_request()
1022 pull_request = pr_util.create_pull_request()
1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1025 Session().add(pull_request)
1025 Session().add(pull_request)
1026 Session().commit()
1026 Session().commit()
1027
1027
1028 response = self.app.get(route_path(
1028 response = self.app.get(route_path(
1029 'pullrequest_show',
1029 'pullrequest_show',
1030 repo_name=pull_request.target_repo.scm_instance().name,
1030 repo_name=pull_request.target_repo.scm_instance().name,
1031 pull_request_id=pull_request.pull_request_id))
1031 pull_request_id=pull_request.pull_request_id))
1032 assert response.status_int == 200
1032 assert response.status_int == 200
1033 assert_response = AssertResponse(response)
1033 assert_response = AssertResponse(response)
1034
1034
1035 origin = assert_response.get_element('.pr-origininfo .tag')
1035 origin = assert_response.get_element('.pr-origininfo .tag')
1036 assert origin.text.strip() == 'bookmark: origin'
1036 assert origin.text.strip() == 'bookmark: origin'
1037 assert origin.getchildren() == []
1037 assert origin.getchildren() == []
1038
1038
1039 target = assert_response.get_element('.pr-targetinfo .tag')
1039 target = assert_response.get_element('.pr-targetinfo .tag')
1040 assert target.text.strip() == 'bookmark: target'
1040 assert target.text.strip() == 'bookmark: target'
1041 assert target.getchildren() == []
1041 assert target.getchildren() == []
1042
1042
1043 def test_tag_is_not_a_link(self, pr_util):
1043 def test_tag_is_not_a_link(self, pr_util):
1044 pull_request = pr_util.create_pull_request()
1044 pull_request = pr_util.create_pull_request()
1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1047 Session().add(pull_request)
1047 Session().add(pull_request)
1048 Session().commit()
1048 Session().commit()
1049
1049
1050 response = self.app.get(route_path(
1050 response = self.app.get(route_path(
1051 'pullrequest_show',
1051 'pullrequest_show',
1052 repo_name=pull_request.target_repo.scm_instance().name,
1052 repo_name=pull_request.target_repo.scm_instance().name,
1053 pull_request_id=pull_request.pull_request_id))
1053 pull_request_id=pull_request.pull_request_id))
1054 assert response.status_int == 200
1054 assert response.status_int == 200
1055 assert_response = AssertResponse(response)
1055 assert_response = AssertResponse(response)
1056
1056
1057 origin = assert_response.get_element('.pr-origininfo .tag')
1057 origin = assert_response.get_element('.pr-origininfo .tag')
1058 assert origin.text.strip() == 'tag: origin'
1058 assert origin.text.strip() == 'tag: origin'
1059 assert origin.getchildren() == []
1059 assert origin.getchildren() == []
1060
1060
1061 target = assert_response.get_element('.pr-targetinfo .tag')
1061 target = assert_response.get_element('.pr-targetinfo .tag')
1062 assert target.text.strip() == 'tag: target'
1062 assert target.text.strip() == 'tag: target'
1063 assert target.getchildren() == []
1063 assert target.getchildren() == []
1064
1064
1065 @pytest.mark.parametrize('mergeable', [True, False])
1065 @pytest.mark.parametrize('mergeable', [True, False])
1066 def test_shadow_repository_link(
1066 def test_shadow_repository_link(
1067 self, mergeable, pr_util, http_host_only_stub):
1067 self, mergeable, pr_util, http_host_only_stub):
1068 """
1068 """
1069 Check that the pull request summary page displays a link to the shadow
1069 Check that the pull request summary page displays a link to the shadow
1070 repository if the pull request is mergeable. If it is not mergeable
1070 repository if the pull request is mergeable. If it is not mergeable
1071 the link should not be displayed.
1071 the link should not be displayed.
1072 """
1072 """
1073 pull_request = pr_util.create_pull_request(
1073 pull_request = pr_util.create_pull_request(
1074 mergeable=mergeable, enable_notifications=False)
1074 mergeable=mergeable, enable_notifications=False)
1075 target_repo = pull_request.target_repo.scm_instance()
1075 target_repo = pull_request.target_repo.scm_instance()
1076 pr_id = pull_request.pull_request_id
1076 pr_id = pull_request.pull_request_id
1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1079
1079
1080 response = self.app.get(route_path(
1080 response = self.app.get(route_path(
1081 'pullrequest_show',
1081 'pullrequest_show',
1082 repo_name=target_repo.name,
1082 repo_name=target_repo.name,
1083 pull_request_id=pr_id))
1083 pull_request_id=pr_id))
1084
1084
1085 assertr = AssertResponse(response)
1085 assertr = AssertResponse(response)
1086 if mergeable:
1086 if mergeable:
1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1088 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1088 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1089 else:
1089 else:
1090 assertr.no_element_exists('.pr-mergeinfo')
1090 assertr.no_element_exists('.pr-mergeinfo')
1091
1091
1092
1092
1093 @pytest.mark.usefixtures('app')
1093 @pytest.mark.usefixtures('app')
1094 @pytest.mark.backends("git", "hg")
1094 @pytest.mark.backends("git", "hg")
1095 class TestPullrequestsControllerDelete(object):
1095 class TestPullrequestsControllerDelete(object):
1096 def test_pull_request_delete_button_permissions_admin(
1096 def test_pull_request_delete_button_permissions_admin(
1097 self, autologin_user, user_admin, pr_util):
1097 self, autologin_user, user_admin, pr_util):
1098 pull_request = pr_util.create_pull_request(
1098 pull_request = pr_util.create_pull_request(
1099 author=user_admin.username, enable_notifications=False)
1099 author=user_admin.username, enable_notifications=False)
1100
1100
1101 response = self.app.get(route_path(
1101 response = self.app.get(route_path(
1102 'pullrequest_show',
1102 'pullrequest_show',
1103 repo_name=pull_request.target_repo.scm_instance().name,
1103 repo_name=pull_request.target_repo.scm_instance().name,
1104 pull_request_id=pull_request.pull_request_id))
1104 pull_request_id=pull_request.pull_request_id))
1105
1105
1106 response.mustcontain('id="delete_pullrequest"')
1106 response.mustcontain('id="delete_pullrequest"')
1107 response.mustcontain('Confirm to delete this pull request')
1107 response.mustcontain('Confirm to delete this pull request')
1108
1108
1109 def test_pull_request_delete_button_permissions_owner(
1109 def test_pull_request_delete_button_permissions_owner(
1110 self, autologin_regular_user, user_regular, pr_util):
1110 self, autologin_regular_user, user_regular, pr_util):
1111 pull_request = pr_util.create_pull_request(
1111 pull_request = pr_util.create_pull_request(
1112 author=user_regular.username, enable_notifications=False)
1112 author=user_regular.username, enable_notifications=False)
1113
1113
1114 response = self.app.get(route_path(
1114 response = self.app.get(route_path(
1115 'pullrequest_show',
1115 'pullrequest_show',
1116 repo_name=pull_request.target_repo.scm_instance().name,
1116 repo_name=pull_request.target_repo.scm_instance().name,
1117 pull_request_id=pull_request.pull_request_id))
1117 pull_request_id=pull_request.pull_request_id))
1118
1118
1119 response.mustcontain('id="delete_pullrequest"')
1119 response.mustcontain('id="delete_pullrequest"')
1120 response.mustcontain('Confirm to delete this pull request')
1120 response.mustcontain('Confirm to delete this pull request')
1121
1121
1122 def test_pull_request_delete_button_permissions_forbidden(
1122 def test_pull_request_delete_button_permissions_forbidden(
1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1124 pull_request = pr_util.create_pull_request(
1124 pull_request = pr_util.create_pull_request(
1125 author=user_admin.username, enable_notifications=False)
1125 author=user_admin.username, enable_notifications=False)
1126
1126
1127 response = self.app.get(route_path(
1127 response = self.app.get(route_path(
1128 'pullrequest_show',
1128 'pullrequest_show',
1129 repo_name=pull_request.target_repo.scm_instance().name,
1129 repo_name=pull_request.target_repo.scm_instance().name,
1130 pull_request_id=pull_request.pull_request_id))
1130 pull_request_id=pull_request.pull_request_id))
1131 response.mustcontain(no=['id="delete_pullrequest"'])
1131 response.mustcontain(no=['id="delete_pullrequest"'])
1132 response.mustcontain(no=['Confirm to delete this pull request'])
1132 response.mustcontain(no=['Confirm to delete this pull request'])
1133
1133
1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1136 user_util):
1136 user_util):
1137
1137
1138 pull_request = pr_util.create_pull_request(
1138 pull_request = pr_util.create_pull_request(
1139 author=user_admin.username, enable_notifications=False)
1139 author=user_admin.username, enable_notifications=False)
1140
1140
1141 user_util.grant_user_permission_to_repo(
1141 user_util.grant_user_permission_to_repo(
1142 pull_request.target_repo, user_regular,
1142 pull_request.target_repo, user_regular,
1143 'repository.write')
1143 'repository.write')
1144
1144
1145 response = self.app.get(route_path(
1145 response = self.app.get(route_path(
1146 'pullrequest_show',
1146 'pullrequest_show',
1147 repo_name=pull_request.target_repo.scm_instance().name,
1147 repo_name=pull_request.target_repo.scm_instance().name,
1148 pull_request_id=pull_request.pull_request_id))
1148 pull_request_id=pull_request.pull_request_id))
1149
1149
1150 response.mustcontain('id="open_edit_pullrequest"')
1150 response.mustcontain('id="open_edit_pullrequest"')
1151 response.mustcontain('id="delete_pullrequest"')
1151 response.mustcontain('id="delete_pullrequest"')
1152 response.mustcontain(no=['Confirm to delete this pull request'])
1152 response.mustcontain(no=['Confirm to delete this pull request'])
1153
1153
1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1156
1156
1157 pull_request = pr_util.create_pull_request(
1157 pull_request = pr_util.create_pull_request(
1158 author=user_admin.username, enable_notifications=False)
1158 author=user_admin.username, enable_notifications=False)
1159
1159
1160 self.app.post(
1160 self.app.post(
1161 route_path(
1161 route_path(
1162 'pullrequest_comment_delete',
1162 'pullrequest_comment_delete',
1163 repo_name=pull_request.target_repo.scm_instance().name,
1163 repo_name=pull_request.target_repo.scm_instance().name,
1164 pull_request_id=pull_request.pull_request_id,
1164 pull_request_id=pull_request.pull_request_id,
1165 comment_id=1024404),
1165 comment_id=1024404),
1166 extra_environ=xhr_header,
1166 extra_environ=xhr_header,
1167 params={'csrf_token': csrf_token},
1167 params={'csrf_token': csrf_token},
1168 status=404
1168 status=404
1169 )
1169 )
1170
1170
1171 def test_delete_comment(
1171 def test_delete_comment(
1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1173
1173
1174 pull_request = pr_util.create_pull_request(
1174 pull_request = pr_util.create_pull_request(
1175 author=user_admin.username, enable_notifications=False)
1175 author=user_admin.username, enable_notifications=False)
1176 comment = pr_util.create_comment()
1176 comment = pr_util.create_comment()
1177 comment_id = comment.comment_id
1177 comment_id = comment.comment_id
1178
1178
1179 response = self.app.post(
1179 response = self.app.post(
1180 route_path(
1180 route_path(
1181 'pullrequest_comment_delete',
1181 'pullrequest_comment_delete',
1182 repo_name=pull_request.target_repo.scm_instance().name,
1182 repo_name=pull_request.target_repo.scm_instance().name,
1183 pull_request_id=pull_request.pull_request_id,
1183 pull_request_id=pull_request.pull_request_id,
1184 comment_id=comment_id),
1184 comment_id=comment_id),
1185 extra_environ=xhr_header,
1185 extra_environ=xhr_header,
1186 params={'csrf_token': csrf_token},
1186 params={'csrf_token': csrf_token},
1187 status=200
1187 status=200
1188 )
1188 )
1189 assert response.body == 'true'
1189 assert response.body == 'true'
1190
1190
1191
1191
1192 def assert_pull_request_status(pull_request, expected_status):
1192 def assert_pull_request_status(pull_request, expected_status):
1193 status = ChangesetStatusModel().calculated_review_status(
1193 status = ChangesetStatusModel().calculated_review_status(
1194 pull_request=pull_request)
1194 pull_request=pull_request)
1195 assert status == expected_status
1195 assert status == expected_status
1196
1196
1197
1197
1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1199 @pytest.mark.usefixtures("autologin_user")
1199 @pytest.mark.usefixtures("autologin_user")
1200 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1200 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1201 response = app.get(
1201 response = app.get(
1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1203
1203
@@ -1,1306 +1,1307 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34
34
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 RepositoryRequirementError, EmptyRepositoryError)
45 RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 ChangesetComment, ChangesetStatus, Repository)
49 ChangesetComment, ChangesetStatus, Repository)
50 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.forms import PullRequestForm
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59
59
60 def load_default_context(self):
60 def load_default_context(self):
61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64
64
65 return c
65 return c
66
66
67 def _get_pull_requests_list(
67 def _get_pull_requests_list(
68 self, repo_name, source, filter_type, opened_by, statuses):
68 self, repo_name, source, filter_type, opened_by, statuses):
69
69
70 draw, start, limit = self._extract_chunk(self.request)
70 draw, start, limit = self._extract_chunk(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 _render = self.request.get_partial_renderer(
72 _render = self.request.get_partial_renderer(
73 'rhodecode:templates/data_table/_dt_elements.mako')
73 'rhodecode:templates/data_table/_dt_elements.mako')
74
74
75 # pagination
75 # pagination
76
76
77 if filter_type == 'awaiting_review':
77 if filter_type == 'awaiting_review':
78 pull_requests = PullRequestModel().get_awaiting_review(
78 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, source=source, opened_by=opened_by,
79 repo_name, source=source, opened_by=opened_by,
80 statuses=statuses, offset=start, length=limit,
80 statuses=statuses, offset=start, length=limit,
81 order_by=order_by, order_dir=order_dir)
81 order_by=order_by, order_dir=order_dir)
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, source=source, statuses=statuses,
83 repo_name, source=source, statuses=statuses,
84 opened_by=opened_by)
84 opened_by=opened_by)
85 elif filter_type == 'awaiting_my_review':
85 elif filter_type == 'awaiting_my_review':
86 pull_requests = PullRequestModel().get_awaiting_my_review(
86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, source=source, opened_by=opened_by,
87 repo_name, source=source, opened_by=opened_by,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 offset=start, length=limit, order_by=order_by,
89 offset=start, length=limit, order_by=order_by,
90 order_dir=order_dir)
90 order_dir=order_dir)
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 statuses=statuses, opened_by=opened_by)
93 statuses=statuses, opened_by=opened_by)
94 else:
94 else:
95 pull_requests = PullRequestModel().get_all(
95 pull_requests = PullRequestModel().get_all(
96 repo_name, source=source, opened_by=opened_by,
96 repo_name, source=source, opened_by=opened_by,
97 statuses=statuses, offset=start, length=limit,
97 statuses=statuses, offset=start, length=limit,
98 order_by=order_by, order_dir=order_dir)
98 order_by=order_by, order_dir=order_dir)
99 pull_requests_total_count = PullRequestModel().count_all(
99 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, source=source, statuses=statuses,
100 repo_name, source=source, statuses=statuses,
101 opened_by=opened_by)
101 opened_by=opened_by)
102
102
103 data = []
103 data = []
104 comments_model = CommentsModel()
104 comments_model = CommentsModel()
105 for pr in pull_requests:
105 for pr in pull_requests:
106 comments = comments_model.get_all_comments(
106 comments = comments_model.get_all_comments(
107 self.db_repo.repo_id, pull_request=pr)
107 self.db_repo.repo_id, pull_request=pr)
108
108
109 data.append({
109 data.append({
110 'name': _render('pullrequest_name',
110 'name': _render('pullrequest_name',
111 pr.pull_request_id, pr.target_repo.repo_name),
111 pr.pull_request_id, pr.target_repo.repo_name),
112 'name_raw': pr.pull_request_id,
112 'name_raw': pr.pull_request_id,
113 'status': _render('pullrequest_status',
113 'status': _render('pullrequest_status',
114 pr.calculated_review_status()),
114 pr.calculated_review_status()),
115 'title': _render(
115 'title': _render(
116 'pullrequest_title', pr.title, pr.description),
116 'pullrequest_title', pr.title, pr.description),
117 'description': h.escape(pr.description),
117 'description': h.escape(pr.description),
118 'updated_on': _render('pullrequest_updated_on',
118 'updated_on': _render('pullrequest_updated_on',
119 h.datetime_to_time(pr.updated_on)),
119 h.datetime_to_time(pr.updated_on)),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'created_on': _render('pullrequest_updated_on',
121 'created_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.created_on)),
122 h.datetime_to_time(pr.created_on)),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'author': _render('pullrequest_author',
124 'author': _render('pullrequest_author',
125 pr.author.full_contact, ),
125 pr.author.full_contact, ),
126 'author_raw': pr.author.full_name,
126 'author_raw': pr.author.full_name,
127 'comments': _render('pullrequest_comments', len(comments)),
127 'comments': _render('pullrequest_comments', len(comments)),
128 'comments_raw': len(comments),
128 'comments_raw': len(comments),
129 'closed': pr.is_closed(),
129 'closed': pr.is_closed(),
130 })
130 })
131
131
132 data = ({
132 data = ({
133 'draw': draw,
133 'draw': draw,
134 'data': data,
134 'data': data,
135 'recordsTotal': pull_requests_total_count,
135 'recordsTotal': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
137 })
137 })
138 return data
138 return data
139
139
140 @LoginRequired()
140 @LoginRequired()
141 @HasRepoPermissionAnyDecorator(
141 @HasRepoPermissionAnyDecorator(
142 'repository.read', 'repository.write', 'repository.admin')
142 'repository.read', 'repository.write', 'repository.admin')
143 @view_config(
143 @view_config(
144 route_name='pullrequest_show_all', request_method='GET',
144 route_name='pullrequest_show_all', request_method='GET',
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 def pull_request_list(self):
146 def pull_request_list(self):
147 c = self.load_default_context()
147 c = self.load_default_context()
148
148
149 req_get = self.request.GET
149 req_get = self.request.GET
150 c.source = str2bool(req_get.get('source'))
150 c.source = str2bool(req_get.get('source'))
151 c.closed = str2bool(req_get.get('closed'))
151 c.closed = str2bool(req_get.get('closed'))
152 c.my = str2bool(req_get.get('my'))
152 c.my = str2bool(req_get.get('my'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155
155
156 c.active = 'open'
156 c.active = 'open'
157 if c.my:
157 if c.my:
158 c.active = 'my'
158 c.active = 'my'
159 if c.closed:
159 if c.closed:
160 c.active = 'closed'
160 c.active = 'closed'
161 if c.awaiting_review and not c.source:
161 if c.awaiting_review and not c.source:
162 c.active = 'awaiting'
162 c.active = 'awaiting'
163 if c.source and not c.awaiting_review:
163 if c.source and not c.awaiting_review:
164 c.active = 'source'
164 c.active = 'source'
165 if c.awaiting_my_review:
165 if c.awaiting_my_review:
166 c.active = 'awaiting_my'
166 c.active = 'awaiting_my'
167
167
168 return self._get_template_context(c)
168 return self._get_template_context(c)
169
169
170 @LoginRequired()
170 @LoginRequired()
171 @HasRepoPermissionAnyDecorator(
171 @HasRepoPermissionAnyDecorator(
172 'repository.read', 'repository.write', 'repository.admin')
172 'repository.read', 'repository.write', 'repository.admin')
173 @view_config(
173 @view_config(
174 route_name='pullrequest_show_all_data', request_method='GET',
174 route_name='pullrequest_show_all_data', request_method='GET',
175 renderer='json_ext', xhr=True)
175 renderer='json_ext', xhr=True)
176 def pull_request_list_data(self):
176 def pull_request_list_data(self):
177 self.load_default_context()
177 self.load_default_context()
178
178
179 # additional filters
179 # additional filters
180 req_get = self.request.GET
180 req_get = self.request.GET
181 source = str2bool(req_get.get('source'))
181 source = str2bool(req_get.get('source'))
182 closed = str2bool(req_get.get('closed'))
182 closed = str2bool(req_get.get('closed'))
183 my = str2bool(req_get.get('my'))
183 my = str2bool(req_get.get('my'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186
186
187 filter_type = 'awaiting_review' if awaiting_review \
187 filter_type = 'awaiting_review' if awaiting_review \
188 else 'awaiting_my_review' if awaiting_my_review \
188 else 'awaiting_my_review' if awaiting_my_review \
189 else None
189 else None
190
190
191 opened_by = None
191 opened_by = None
192 if my:
192 if my:
193 opened_by = [self._rhodecode_user.user_id]
193 opened_by = [self._rhodecode_user.user_id]
194
194
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 if closed:
196 if closed:
197 statuses = [PullRequest.STATUS_CLOSED]
197 statuses = [PullRequest.STATUS_CLOSED]
198
198
199 data = self._get_pull_requests_list(
199 data = self._get_pull_requests_list(
200 repo_name=self.db_repo_name, source=source,
200 repo_name=self.db_repo_name, source=source,
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202
202
203 return data
203 return data
204
204
205 def _is_diff_cache_enabled(self, target_repo):
205 def _is_diff_cache_enabled(self, target_repo):
206 caching_enabled = self._get_general_setting(
206 caching_enabled = self._get_general_setting(
207 target_repo, 'rhodecode_diff_cache')
207 target_repo, 'rhodecode_diff_cache')
208 log.debug('Diff caching enabled: %s', caching_enabled)
208 log.debug('Diff caching enabled: %s', caching_enabled)
209 return caching_enabled
209 return caching_enabled
210
210
211 def _get_diffset(self, source_repo_name, source_repo,
211 def _get_diffset(self, source_repo_name, source_repo,
212 source_ref_id, target_ref_id,
212 source_ref_id, target_ref_id,
213 target_commit, source_commit, diff_limit, file_limit,
213 target_commit, source_commit, diff_limit, file_limit,
214 fulldiff):
214 fulldiff):
215
215
216 vcs_diff = PullRequestModel().get_diff(
216 vcs_diff = PullRequestModel().get_diff(
217 source_repo, source_ref_id, target_ref_id)
217 source_repo, source_ref_id, target_ref_id)
218
218
219 diff_processor = diffs.DiffProcessor(
219 diff_processor = diffs.DiffProcessor(
220 vcs_diff, format='newdiff', diff_limit=diff_limit,
220 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 file_limit=file_limit, show_full_diff=fulldiff)
221 file_limit=file_limit, show_full_diff=fulldiff)
222
222
223 _parsed = diff_processor.prepare()
223 _parsed = diff_processor.prepare()
224
224
225 diffset = codeblocks.DiffSet(
225 diffset = codeblocks.DiffSet(
226 repo_name=self.db_repo_name,
226 repo_name=self.db_repo_name,
227 source_repo_name=source_repo_name,
227 source_repo_name=source_repo_name,
228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 )
230 )
231 diffset = self.path_filter.render_patchset_filtered(
231 diffset = self.path_filter.render_patchset_filtered(
232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233
233
234 return diffset
234 return diffset
235
235
236 @LoginRequired()
236 @LoginRequired()
237 @HasRepoPermissionAnyDecorator(
237 @HasRepoPermissionAnyDecorator(
238 'repository.read', 'repository.write', 'repository.admin')
238 'repository.read', 'repository.write', 'repository.admin')
239 @view_config(
239 @view_config(
240 route_name='pullrequest_show', request_method='GET',
240 route_name='pullrequest_show', request_method='GET',
241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
242 def pull_request_show(self):
242 def pull_request_show(self):
243 pull_request_id = self.request.matchdict['pull_request_id']
243 pull_request_id = self.request.matchdict['pull_request_id']
244
244
245 c = self.load_default_context()
245 c = self.load_default_context()
246
246
247 version = self.request.GET.get('version')
247 version = self.request.GET.get('version')
248 from_version = self.request.GET.get('from_version') or version
248 from_version = self.request.GET.get('from_version') or version
249 merge_checks = self.request.GET.get('merge_checks')
249 merge_checks = self.request.GET.get('merge_checks')
250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
252
252
253 (pull_request_latest,
253 (pull_request_latest,
254 pull_request_at_ver,
254 pull_request_at_ver,
255 pull_request_display_obj,
255 pull_request_display_obj,
256 at_version) = PullRequestModel().get_pr_version(
256 at_version) = PullRequestModel().get_pr_version(
257 pull_request_id, version=version)
257 pull_request_id, version=version)
258 pr_closed = pull_request_latest.is_closed()
258 pr_closed = pull_request_latest.is_closed()
259
259
260 if pr_closed and (version or from_version):
260 if pr_closed and (version or from_version):
261 # not allow to browse versions
261 # not allow to browse versions
262 raise HTTPFound(h.route_path(
262 raise HTTPFound(h.route_path(
263 'pullrequest_show', repo_name=self.db_repo_name,
263 'pullrequest_show', repo_name=self.db_repo_name,
264 pull_request_id=pull_request_id))
264 pull_request_id=pull_request_id))
265
265
266 versions = pull_request_display_obj.versions()
266 versions = pull_request_display_obj.versions()
267
267
268 c.at_version = at_version
268 c.at_version = at_version
269 c.at_version_num = (at_version
269 c.at_version_num = (at_version
270 if at_version and at_version != 'latest'
270 if at_version and at_version != 'latest'
271 else None)
271 else None)
272 c.at_version_pos = ChangesetComment.get_index_from_version(
272 c.at_version_pos = ChangesetComment.get_index_from_version(
273 c.at_version_num, versions)
273 c.at_version_num, versions)
274
274
275 (prev_pull_request_latest,
275 (prev_pull_request_latest,
276 prev_pull_request_at_ver,
276 prev_pull_request_at_ver,
277 prev_pull_request_display_obj,
277 prev_pull_request_display_obj,
278 prev_at_version) = PullRequestModel().get_pr_version(
278 prev_at_version) = PullRequestModel().get_pr_version(
279 pull_request_id, version=from_version)
279 pull_request_id, version=from_version)
280
280
281 c.from_version = prev_at_version
281 c.from_version = prev_at_version
282 c.from_version_num = (prev_at_version
282 c.from_version_num = (prev_at_version
283 if prev_at_version and prev_at_version != 'latest'
283 if prev_at_version and prev_at_version != 'latest'
284 else None)
284 else None)
285 c.from_version_pos = ChangesetComment.get_index_from_version(
285 c.from_version_pos = ChangesetComment.get_index_from_version(
286 c.from_version_num, versions)
286 c.from_version_num, versions)
287
287
288 # define if we're in COMPARE mode or VIEW at version mode
288 # define if we're in COMPARE mode or VIEW at version mode
289 compare = at_version != prev_at_version
289 compare = at_version != prev_at_version
290
290
291 # pull_requests repo_name we opened it against
291 # pull_requests repo_name we opened it against
292 # ie. target_repo must match
292 # ie. target_repo must match
293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
294 raise HTTPNotFound()
294 raise HTTPNotFound()
295
295
296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
297 pull_request_at_ver)
297 pull_request_at_ver)
298
298
299 c.pull_request = pull_request_display_obj
299 c.pull_request = pull_request_display_obj
300 c.pull_request_latest = pull_request_latest
300 c.pull_request_latest = pull_request_latest
301
301
302 if compare or (at_version and not at_version == 'latest'):
302 if compare or (at_version and not at_version == 'latest'):
303 c.allowed_to_change_status = False
303 c.allowed_to_change_status = False
304 c.allowed_to_update = False
304 c.allowed_to_update = False
305 c.allowed_to_merge = False
305 c.allowed_to_merge = False
306 c.allowed_to_delete = False
306 c.allowed_to_delete = False
307 c.allowed_to_comment = False
307 c.allowed_to_comment = False
308 c.allowed_to_close = False
308 c.allowed_to_close = False
309 else:
309 else:
310 can_change_status = PullRequestModel().check_user_change_status(
310 can_change_status = PullRequestModel().check_user_change_status(
311 pull_request_at_ver, self._rhodecode_user)
311 pull_request_at_ver, self._rhodecode_user)
312 c.allowed_to_change_status = can_change_status and not pr_closed
312 c.allowed_to_change_status = can_change_status and not pr_closed
313
313
314 c.allowed_to_update = PullRequestModel().check_user_update(
314 c.allowed_to_update = PullRequestModel().check_user_update(
315 pull_request_latest, self._rhodecode_user) and not pr_closed
315 pull_request_latest, self._rhodecode_user) and not pr_closed
316 c.allowed_to_merge = PullRequestModel().check_user_merge(
316 c.allowed_to_merge = PullRequestModel().check_user_merge(
317 pull_request_latest, self._rhodecode_user) and not pr_closed
317 pull_request_latest, self._rhodecode_user) and not pr_closed
318 c.allowed_to_delete = PullRequestModel().check_user_delete(
318 c.allowed_to_delete = PullRequestModel().check_user_delete(
319 pull_request_latest, self._rhodecode_user) and not pr_closed
319 pull_request_latest, self._rhodecode_user) and not pr_closed
320 c.allowed_to_comment = not pr_closed
320 c.allowed_to_comment = not pr_closed
321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
322
322
323 c.forbid_adding_reviewers = False
323 c.forbid_adding_reviewers = False
324 c.forbid_author_to_review = False
324 c.forbid_author_to_review = False
325 c.forbid_commit_author_to_review = False
325 c.forbid_commit_author_to_review = False
326
326
327 if pull_request_latest.reviewer_data and \
327 if pull_request_latest.reviewer_data and \
328 'rules' in pull_request_latest.reviewer_data:
328 'rules' in pull_request_latest.reviewer_data:
329 rules = pull_request_latest.reviewer_data['rules'] or {}
329 rules = pull_request_latest.reviewer_data['rules'] or {}
330 try:
330 try:
331 c.forbid_adding_reviewers = rules.get(
331 c.forbid_adding_reviewers = rules.get(
332 'forbid_adding_reviewers')
332 'forbid_adding_reviewers')
333 c.forbid_author_to_review = rules.get(
333 c.forbid_author_to_review = rules.get(
334 'forbid_author_to_review')
334 'forbid_author_to_review')
335 c.forbid_commit_author_to_review = rules.get(
335 c.forbid_commit_author_to_review = rules.get(
336 'forbid_commit_author_to_review')
336 'forbid_commit_author_to_review')
337 except Exception:
337 except Exception:
338 pass
338 pass
339
339
340 # check merge capabilities
340 # check merge capabilities
341 _merge_check = MergeCheck.validate(
341 _merge_check = MergeCheck.validate(
342 pull_request_latest, user=self._rhodecode_user,
342 pull_request_latest, user=self._rhodecode_user,
343 translator=self.request.translate, force_shadow_repo_refresh=force_refresh)
343 translator=self.request.translate,
344 force_shadow_repo_refresh=force_refresh)
344 c.pr_merge_errors = _merge_check.error_details
345 c.pr_merge_errors = _merge_check.error_details
345 c.pr_merge_possible = not _merge_check.failed
346 c.pr_merge_possible = not _merge_check.failed
346 c.pr_merge_message = _merge_check.merge_msg
347 c.pr_merge_message = _merge_check.merge_msg
347
348
348 c.pr_merge_info = MergeCheck.get_merge_conditions(
349 c.pr_merge_info = MergeCheck.get_merge_conditions(
349 pull_request_latest, translator=self.request.translate)
350 pull_request_latest, translator=self.request.translate)
350
351
351 c.pull_request_review_status = _merge_check.review_status
352 c.pull_request_review_status = _merge_check.review_status
352 if merge_checks:
353 if merge_checks:
353 self.request.override_renderer = \
354 self.request.override_renderer = \
354 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
355 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
355 return self._get_template_context(c)
356 return self._get_template_context(c)
356
357
357 comments_model = CommentsModel()
358 comments_model = CommentsModel()
358
359
359 # reviewers and statuses
360 # reviewers and statuses
360 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
361 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
361 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
362 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
362
363
363 # GENERAL COMMENTS with versions #
364 # GENERAL COMMENTS with versions #
364 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
365 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
365 q = q.order_by(ChangesetComment.comment_id.asc())
366 q = q.order_by(ChangesetComment.comment_id.asc())
366 general_comments = q
367 general_comments = q
367
368
368 # pick comments we want to render at current version
369 # pick comments we want to render at current version
369 c.comment_versions = comments_model.aggregate_comments(
370 c.comment_versions = comments_model.aggregate_comments(
370 general_comments, versions, c.at_version_num)
371 general_comments, versions, c.at_version_num)
371 c.comments = c.comment_versions[c.at_version_num]['until']
372 c.comments = c.comment_versions[c.at_version_num]['until']
372
373
373 # INLINE COMMENTS with versions #
374 # INLINE COMMENTS with versions #
374 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
375 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
375 q = q.order_by(ChangesetComment.comment_id.asc())
376 q = q.order_by(ChangesetComment.comment_id.asc())
376 inline_comments = q
377 inline_comments = q
377
378
378 c.inline_versions = comments_model.aggregate_comments(
379 c.inline_versions = comments_model.aggregate_comments(
379 inline_comments, versions, c.at_version_num, inline=True)
380 inline_comments, versions, c.at_version_num, inline=True)
380
381
381 # inject latest version
382 # inject latest version
382 latest_ver = PullRequest.get_pr_display_object(
383 latest_ver = PullRequest.get_pr_display_object(
383 pull_request_latest, pull_request_latest)
384 pull_request_latest, pull_request_latest)
384
385
385 c.versions = versions + [latest_ver]
386 c.versions = versions + [latest_ver]
386
387
387 # if we use version, then do not show later comments
388 # if we use version, then do not show later comments
388 # than current version
389 # than current version
389 display_inline_comments = collections.defaultdict(
390 display_inline_comments = collections.defaultdict(
390 lambda: collections.defaultdict(list))
391 lambda: collections.defaultdict(list))
391 for co in inline_comments:
392 for co in inline_comments:
392 if c.at_version_num:
393 if c.at_version_num:
393 # pick comments that are at least UPTO given version, so we
394 # pick comments that are at least UPTO given version, so we
394 # don't render comments for higher version
395 # don't render comments for higher version
395 should_render = co.pull_request_version_id and \
396 should_render = co.pull_request_version_id and \
396 co.pull_request_version_id <= c.at_version_num
397 co.pull_request_version_id <= c.at_version_num
397 else:
398 else:
398 # showing all, for 'latest'
399 # showing all, for 'latest'
399 should_render = True
400 should_render = True
400
401
401 if should_render:
402 if should_render:
402 display_inline_comments[co.f_path][co.line_no].append(co)
403 display_inline_comments[co.f_path][co.line_no].append(co)
403
404
404 # load diff data into template context, if we use compare mode then
405 # load diff data into template context, if we use compare mode then
405 # diff is calculated based on changes between versions of PR
406 # diff is calculated based on changes between versions of PR
406
407
407 source_repo = pull_request_at_ver.source_repo
408 source_repo = pull_request_at_ver.source_repo
408 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
409 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
409
410
410 target_repo = pull_request_at_ver.target_repo
411 target_repo = pull_request_at_ver.target_repo
411 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
412 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
412
413
413 if compare:
414 if compare:
414 # in compare switch the diff base to latest commit from prev version
415 # in compare switch the diff base to latest commit from prev version
415 target_ref_id = prev_pull_request_display_obj.revisions[0]
416 target_ref_id = prev_pull_request_display_obj.revisions[0]
416
417
417 # despite opening commits for bookmarks/branches/tags, we always
418 # despite opening commits for bookmarks/branches/tags, we always
418 # convert this to rev to prevent changes after bookmark or branch change
419 # convert this to rev to prevent changes after bookmark or branch change
419 c.source_ref_type = 'rev'
420 c.source_ref_type = 'rev'
420 c.source_ref = source_ref_id
421 c.source_ref = source_ref_id
421
422
422 c.target_ref_type = 'rev'
423 c.target_ref_type = 'rev'
423 c.target_ref = target_ref_id
424 c.target_ref = target_ref_id
424
425
425 c.source_repo = source_repo
426 c.source_repo = source_repo
426 c.target_repo = target_repo
427 c.target_repo = target_repo
427
428
428 c.commit_ranges = []
429 c.commit_ranges = []
429 source_commit = EmptyCommit()
430 source_commit = EmptyCommit()
430 target_commit = EmptyCommit()
431 target_commit = EmptyCommit()
431 c.missing_requirements = False
432 c.missing_requirements = False
432
433
433 source_scm = source_repo.scm_instance()
434 source_scm = source_repo.scm_instance()
434 target_scm = target_repo.scm_instance()
435 target_scm = target_repo.scm_instance()
435
436
436 shadow_scm = None
437 shadow_scm = None
437 try:
438 try:
438 shadow_scm = pull_request_latest.get_shadow_repo()
439 shadow_scm = pull_request_latest.get_shadow_repo()
439 except Exception:
440 except Exception:
440 log.debug('Failed to get shadow repo', exc_info=True)
441 log.debug('Failed to get shadow repo', exc_info=True)
441 # try first the existing source_repo, and then shadow
442 # try first the existing source_repo, and then shadow
442 # repo if we can obtain one
443 # repo if we can obtain one
443 commits_source_repo = source_scm or shadow_scm
444 commits_source_repo = source_scm or shadow_scm
444
445
445 c.commits_source_repo = commits_source_repo
446 c.commits_source_repo = commits_source_repo
446 c.ancestor = None # set it to None, to hide it from PR view
447 c.ancestor = None # set it to None, to hide it from PR view
447
448
448 # empty version means latest, so we keep this to prevent
449 # empty version means latest, so we keep this to prevent
449 # double caching
450 # double caching
450 version_normalized = version or 'latest'
451 version_normalized = version or 'latest'
451 from_version_normalized = from_version or 'latest'
452 from_version_normalized = from_version or 'latest'
452
453
453 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
454 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
454 target_repo)
455 target_repo)
455 cache_file_path = diff_cache_exist(
456 cache_file_path = diff_cache_exist(
456 cache_path, 'pull_request', pull_request_id, version_normalized,
457 cache_path, 'pull_request', pull_request_id, version_normalized,
457 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
458 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
458
459
459 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
460 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
460 force_recache = str2bool(self.request.GET.get('force_recache'))
461 force_recache = str2bool(self.request.GET.get('force_recache'))
461
462
462 cached_diff = None
463 cached_diff = None
463 if caching_enabled:
464 if caching_enabled:
464 cached_diff = load_cached_diff(cache_file_path)
465 cached_diff = load_cached_diff(cache_file_path)
465
466
466 has_proper_commit_cache = (
467 has_proper_commit_cache = (
467 cached_diff and cached_diff.get('commits')
468 cached_diff and cached_diff.get('commits')
468 and len(cached_diff.get('commits', [])) == 5
469 and len(cached_diff.get('commits', [])) == 5
469 and cached_diff.get('commits')[0]
470 and cached_diff.get('commits')[0]
470 and cached_diff.get('commits')[3])
471 and cached_diff.get('commits')[3])
471 if not force_recache and has_proper_commit_cache:
472 if not force_recache and has_proper_commit_cache:
472 diff_commit_cache = \
473 diff_commit_cache = \
473 (ancestor_commit, commit_cache, missing_requirements,
474 (ancestor_commit, commit_cache, missing_requirements,
474 source_commit, target_commit) = cached_diff['commits']
475 source_commit, target_commit) = cached_diff['commits']
475 else:
476 else:
476 diff_commit_cache = \
477 diff_commit_cache = \
477 (ancestor_commit, commit_cache, missing_requirements,
478 (ancestor_commit, commit_cache, missing_requirements,
478 source_commit, target_commit) = self.get_commits(
479 source_commit, target_commit) = self.get_commits(
479 commits_source_repo,
480 commits_source_repo,
480 pull_request_at_ver,
481 pull_request_at_ver,
481 source_commit,
482 source_commit,
482 source_ref_id,
483 source_ref_id,
483 source_scm,
484 source_scm,
484 target_commit,
485 target_commit,
485 target_ref_id,
486 target_ref_id,
486 target_scm)
487 target_scm)
487
488
488 # register our commit range
489 # register our commit range
489 for comm in commit_cache.values():
490 for comm in commit_cache.values():
490 c.commit_ranges.append(comm)
491 c.commit_ranges.append(comm)
491
492
492 c.missing_requirements = missing_requirements
493 c.missing_requirements = missing_requirements
493 c.ancestor_commit = ancestor_commit
494 c.ancestor_commit = ancestor_commit
494 c.statuses = source_repo.statuses(
495 c.statuses = source_repo.statuses(
495 [x.raw_id for x in c.commit_ranges])
496 [x.raw_id for x in c.commit_ranges])
496
497
497 # auto collapse if we have more than limit
498 # auto collapse if we have more than limit
498 collapse_limit = diffs.DiffProcessor._collapse_commits_over
499 collapse_limit = diffs.DiffProcessor._collapse_commits_over
499 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
500 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
500 c.compare_mode = compare
501 c.compare_mode = compare
501
502
502 # diff_limit is the old behavior, will cut off the whole diff
503 # diff_limit is the old behavior, will cut off the whole diff
503 # if the limit is applied otherwise will just hide the
504 # if the limit is applied otherwise will just hide the
504 # big files from the front-end
505 # big files from the front-end
505 diff_limit = c.visual.cut_off_limit_diff
506 diff_limit = c.visual.cut_off_limit_diff
506 file_limit = c.visual.cut_off_limit_file
507 file_limit = c.visual.cut_off_limit_file
507
508
508 c.missing_commits = False
509 c.missing_commits = False
509 if (c.missing_requirements
510 if (c.missing_requirements
510 or isinstance(source_commit, EmptyCommit)
511 or isinstance(source_commit, EmptyCommit)
511 or source_commit == target_commit):
512 or source_commit == target_commit):
512
513
513 c.missing_commits = True
514 c.missing_commits = True
514 else:
515 else:
515 c.inline_comments = display_inline_comments
516 c.inline_comments = display_inline_comments
516
517
517 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
518 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
518 if not force_recache and has_proper_diff_cache:
519 if not force_recache and has_proper_diff_cache:
519 c.diffset = cached_diff['diff']
520 c.diffset = cached_diff['diff']
520 (ancestor_commit, commit_cache, missing_requirements,
521 (ancestor_commit, commit_cache, missing_requirements,
521 source_commit, target_commit) = cached_diff['commits']
522 source_commit, target_commit) = cached_diff['commits']
522 else:
523 else:
523 c.diffset = self._get_diffset(
524 c.diffset = self._get_diffset(
524 c.source_repo.repo_name, commits_source_repo,
525 c.source_repo.repo_name, commits_source_repo,
525 source_ref_id, target_ref_id,
526 source_ref_id, target_ref_id,
526 target_commit, source_commit,
527 target_commit, source_commit,
527 diff_limit, file_limit, c.fulldiff)
528 diff_limit, file_limit, c.fulldiff)
528
529
529 # save cached diff
530 # save cached diff
530 if caching_enabled:
531 if caching_enabled:
531 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
532 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
532
533
533 c.limited_diff = c.diffset.limited_diff
534 c.limited_diff = c.diffset.limited_diff
534
535
535 # calculate removed files that are bound to comments
536 # calculate removed files that are bound to comments
536 comment_deleted_files = [
537 comment_deleted_files = [
537 fname for fname in display_inline_comments
538 fname for fname in display_inline_comments
538 if fname not in c.diffset.file_stats]
539 if fname not in c.diffset.file_stats]
539
540
540 c.deleted_files_comments = collections.defaultdict(dict)
541 c.deleted_files_comments = collections.defaultdict(dict)
541 for fname, per_line_comments in display_inline_comments.items():
542 for fname, per_line_comments in display_inline_comments.items():
542 if fname in comment_deleted_files:
543 if fname in comment_deleted_files:
543 c.deleted_files_comments[fname]['stats'] = 0
544 c.deleted_files_comments[fname]['stats'] = 0
544 c.deleted_files_comments[fname]['comments'] = list()
545 c.deleted_files_comments[fname]['comments'] = list()
545 for lno, comments in per_line_comments.items():
546 for lno, comments in per_line_comments.items():
546 c.deleted_files_comments[fname]['comments'].extend(
547 c.deleted_files_comments[fname]['comments'].extend(
547 comments)
548 comments)
548
549
549 # this is a hack to properly display links, when creating PR, the
550 # this is a hack to properly display links, when creating PR, the
550 # compare view and others uses different notation, and
551 # compare view and others uses different notation, and
551 # compare_commits.mako renders links based on the target_repo.
552 # compare_commits.mako renders links based on the target_repo.
552 # We need to swap that here to generate it properly on the html side
553 # We need to swap that here to generate it properly on the html side
553 c.target_repo = c.source_repo
554 c.target_repo = c.source_repo
554
555
555 c.commit_statuses = ChangesetStatus.STATUSES
556 c.commit_statuses = ChangesetStatus.STATUSES
556
557
557 c.show_version_changes = not pr_closed
558 c.show_version_changes = not pr_closed
558 if c.show_version_changes:
559 if c.show_version_changes:
559 cur_obj = pull_request_at_ver
560 cur_obj = pull_request_at_ver
560 prev_obj = prev_pull_request_at_ver
561 prev_obj = prev_pull_request_at_ver
561
562
562 old_commit_ids = prev_obj.revisions
563 old_commit_ids = prev_obj.revisions
563 new_commit_ids = cur_obj.revisions
564 new_commit_ids = cur_obj.revisions
564 commit_changes = PullRequestModel()._calculate_commit_id_changes(
565 commit_changes = PullRequestModel()._calculate_commit_id_changes(
565 old_commit_ids, new_commit_ids)
566 old_commit_ids, new_commit_ids)
566 c.commit_changes_summary = commit_changes
567 c.commit_changes_summary = commit_changes
567
568
568 # calculate the diff for commits between versions
569 # calculate the diff for commits between versions
569 c.commit_changes = []
570 c.commit_changes = []
570 mark = lambda cs, fw: list(
571 mark = lambda cs, fw: list(
571 h.itertools.izip_longest([], cs, fillvalue=fw))
572 h.itertools.izip_longest([], cs, fillvalue=fw))
572 for c_type, raw_id in mark(commit_changes.added, 'a') \
573 for c_type, raw_id in mark(commit_changes.added, 'a') \
573 + mark(commit_changes.removed, 'r') \
574 + mark(commit_changes.removed, 'r') \
574 + mark(commit_changes.common, 'c'):
575 + mark(commit_changes.common, 'c'):
575
576
576 if raw_id in commit_cache:
577 if raw_id in commit_cache:
577 commit = commit_cache[raw_id]
578 commit = commit_cache[raw_id]
578 else:
579 else:
579 try:
580 try:
580 commit = commits_source_repo.get_commit(raw_id)
581 commit = commits_source_repo.get_commit(raw_id)
581 except CommitDoesNotExistError:
582 except CommitDoesNotExistError:
582 # in case we fail extracting still use "dummy" commit
583 # in case we fail extracting still use "dummy" commit
583 # for display in commit diff
584 # for display in commit diff
584 commit = h.AttributeDict(
585 commit = h.AttributeDict(
585 {'raw_id': raw_id,
586 {'raw_id': raw_id,
586 'message': 'EMPTY or MISSING COMMIT'})
587 'message': 'EMPTY or MISSING COMMIT'})
587 c.commit_changes.append([c_type, commit])
588 c.commit_changes.append([c_type, commit])
588
589
589 # current user review statuses for each version
590 # current user review statuses for each version
590 c.review_versions = {}
591 c.review_versions = {}
591 if self._rhodecode_user.user_id in allowed_reviewers:
592 if self._rhodecode_user.user_id in allowed_reviewers:
592 for co in general_comments:
593 for co in general_comments:
593 if co.author.user_id == self._rhodecode_user.user_id:
594 if co.author.user_id == self._rhodecode_user.user_id:
594 status = co.status_change
595 status = co.status_change
595 if status:
596 if status:
596 _ver_pr = status[0].comment.pull_request_version_id
597 _ver_pr = status[0].comment.pull_request_version_id
597 c.review_versions[_ver_pr] = status[0]
598 c.review_versions[_ver_pr] = status[0]
598
599
599 return self._get_template_context(c)
600 return self._get_template_context(c)
600
601
    def get_commits(
            self, commits_source_repo, pull_request_at_ver, source_commit,
            source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
        """
        Load the commits that belong to a pull request version, plus the
        source/target boundary commits and their common ancestor.

        :param commits_source_repo: scm instance the PR revisions are read
            from (the source repo, or its shadow repo as fallback — chosen by
            the caller).
        :param pull_request_at_ver: pull request object (at the version being
            shown) providing ``.revisions``.
        :param source_commit: fallback value (typically ``EmptyCommit``)
            returned unchanged if the real source commit cannot be loaded.
        :param source_ref_id: commit id of the PR source ref.
        :param source_scm: scm instance used for ancestor calculation.
        :param target_commit: fallback value, same contract as
            ``source_commit``.
        :param target_ref_id: commit id of the PR target ref.
        :param target_scm: scm instance of the target repo, passed to
            ``get_common_ancestor``.
        :return: 5-tuple ``(ancestor_commit, commit_cache,
            missing_requirements, source_commit, target_commit)`` where
            ``commit_cache`` is an OrderedDict keyed by raw commit id, in PR
            revision order, and ``ancestor_commit`` may be ``None``.
        """
        # ordered so the commit list renders in the PR's revision order
        commit_cache = collections.OrderedDict()
        missing_requirements = False
        try:
            # preload commonly displayed attributes in one backend call
            pre_load = ["author", "branch", "date", "message"]
            show_revs = pull_request_at_ver.revisions
            for rev in show_revs:
                comm = commits_source_repo.get_commit(
                    commit_id=rev, pre_load=pre_load)
                commit_cache[comm.raw_id] = comm

            # Order here matters, we first need to get target, and then
            # the source
            target_commit = commits_source_repo.get_commit(
                commit_id=safe_str(target_ref_id))

            source_commit = commits_source_repo.get_commit(
                commit_id=safe_str(source_ref_id))
        except CommitDoesNotExistError:
            # missing commits: keep the fallback (Empty) commit objects and
            # whatever was cached so far; caller renders a degraded view
            log.warning(
                'Failed to get commit from `{}` repo'.format(
                    commits_source_repo), exc_info=True)
        except RepositoryRequirementError:
            # e.g. hg repo needing an extension we don't have; flag it so the
            # caller can show the "missing requirements" state
            log.warning(
                'Failed to get all required data from repo', exc_info=True)
            missing_requirements = True
        ancestor_commit = None
        try:
            ancestor_id = source_scm.get_common_ancestor(
                source_commit.raw_id, target_commit.raw_id, target_scm)
            ancestor_commit = source_scm.get_commit(ancestor_id)
        except Exception:
            # ancestor is best-effort only (EmptyCommit fallbacks above have
            # no real raw_id, lookups can fail) — a missing ancestor is fine
            ancestor_commit = None
        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
637
638
638 def assure_not_empty_repo(self):
639 def assure_not_empty_repo(self):
639 _ = self.request.translate
640 _ = self.request.translate
640
641
641 try:
642 try:
642 self.db_repo.scm_instance().get_commit()
643 self.db_repo.scm_instance().get_commit()
643 except EmptyRepositoryError:
644 except EmptyRepositoryError:
644 h.flash(h.literal(_('There are no commits yet')),
645 h.flash(h.literal(_('There are no commits yet')),
645 category='warning')
646 category='warning')
646 raise HTTPFound(
647 raise HTTPFound(
647 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
648 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
648
649
649 @LoginRequired()
650 @LoginRequired()
650 @NotAnonymous()
651 @NotAnonymous()
651 @HasRepoPermissionAnyDecorator(
652 @HasRepoPermissionAnyDecorator(
652 'repository.read', 'repository.write', 'repository.admin')
653 'repository.read', 'repository.write', 'repository.admin')
653 @view_config(
654 @view_config(
654 route_name='pullrequest_new', request_method='GET',
655 route_name='pullrequest_new', request_method='GET',
655 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
656 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
656 def pull_request_new(self):
657 def pull_request_new(self):
657 _ = self.request.translate
658 _ = self.request.translate
658 c = self.load_default_context()
659 c = self.load_default_context()
659
660
660 self.assure_not_empty_repo()
661 self.assure_not_empty_repo()
661 source_repo = self.db_repo
662 source_repo = self.db_repo
662
663
663 commit_id = self.request.GET.get('commit')
664 commit_id = self.request.GET.get('commit')
664 branch_ref = self.request.GET.get('branch')
665 branch_ref = self.request.GET.get('branch')
665 bookmark_ref = self.request.GET.get('bookmark')
666 bookmark_ref = self.request.GET.get('bookmark')
666
667
667 try:
668 try:
668 source_repo_data = PullRequestModel().generate_repo_data(
669 source_repo_data = PullRequestModel().generate_repo_data(
669 source_repo, commit_id=commit_id,
670 source_repo, commit_id=commit_id,
670 branch=branch_ref, bookmark=bookmark_ref,
671 branch=branch_ref, bookmark=bookmark_ref,
671 translator=self.request.translate)
672 translator=self.request.translate)
672 except CommitDoesNotExistError as e:
673 except CommitDoesNotExistError as e:
673 log.exception(e)
674 log.exception(e)
674 h.flash(_('Commit does not exist'), 'error')
675 h.flash(_('Commit does not exist'), 'error')
675 raise HTTPFound(
676 raise HTTPFound(
676 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
677 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
677
678
678 default_target_repo = source_repo
679 default_target_repo = source_repo
679
680
680 if source_repo.parent:
681 if source_repo.parent:
681 parent_vcs_obj = source_repo.parent.scm_instance()
682 parent_vcs_obj = source_repo.parent.scm_instance()
682 if parent_vcs_obj and not parent_vcs_obj.is_empty():
683 if parent_vcs_obj and not parent_vcs_obj.is_empty():
683 # change default if we have a parent repo
684 # change default if we have a parent repo
684 default_target_repo = source_repo.parent
685 default_target_repo = source_repo.parent
685
686
686 target_repo_data = PullRequestModel().generate_repo_data(
687 target_repo_data = PullRequestModel().generate_repo_data(
687 default_target_repo, translator=self.request.translate)
688 default_target_repo, translator=self.request.translate)
688
689
689 selected_source_ref = source_repo_data['refs']['selected_ref']
690 selected_source_ref = source_repo_data['refs']['selected_ref']
690 title_source_ref = ''
691 title_source_ref = ''
691 if selected_source_ref:
692 if selected_source_ref:
692 title_source_ref = selected_source_ref.split(':', 2)[1]
693 title_source_ref = selected_source_ref.split(':', 2)[1]
693 c.default_title = PullRequestModel().generate_pullrequest_title(
694 c.default_title = PullRequestModel().generate_pullrequest_title(
694 source=source_repo.repo_name,
695 source=source_repo.repo_name,
695 source_ref=title_source_ref,
696 source_ref=title_source_ref,
696 target=default_target_repo.repo_name
697 target=default_target_repo.repo_name
697 )
698 )
698
699
699 c.default_repo_data = {
700 c.default_repo_data = {
700 'source_repo_name': source_repo.repo_name,
701 'source_repo_name': source_repo.repo_name,
701 'source_refs_json': json.dumps(source_repo_data),
702 'source_refs_json': json.dumps(source_repo_data),
702 'target_repo_name': default_target_repo.repo_name,
703 'target_repo_name': default_target_repo.repo_name,
703 'target_refs_json': json.dumps(target_repo_data),
704 'target_refs_json': json.dumps(target_repo_data),
704 }
705 }
705 c.default_source_ref = selected_source_ref
706 c.default_source_ref = selected_source_ref
706
707
707 return self._get_template_context(c)
708 return self._get_template_context(c)
708
709
709 @LoginRequired()
710 @LoginRequired()
710 @NotAnonymous()
711 @NotAnonymous()
711 @HasRepoPermissionAnyDecorator(
712 @HasRepoPermissionAnyDecorator(
712 'repository.read', 'repository.write', 'repository.admin')
713 'repository.read', 'repository.write', 'repository.admin')
713 @view_config(
714 @view_config(
714 route_name='pullrequest_repo_refs', request_method='GET',
715 route_name='pullrequest_repo_refs', request_method='GET',
715 renderer='json_ext', xhr=True)
716 renderer='json_ext', xhr=True)
716 def pull_request_repo_refs(self):
717 def pull_request_repo_refs(self):
717 self.load_default_context()
718 self.load_default_context()
718 target_repo_name = self.request.matchdict['target_repo_name']
719 target_repo_name = self.request.matchdict['target_repo_name']
719 repo = Repository.get_by_repo_name(target_repo_name)
720 repo = Repository.get_by_repo_name(target_repo_name)
720 if not repo:
721 if not repo:
721 raise HTTPNotFound()
722 raise HTTPNotFound()
722
723
723 target_perm = HasRepoPermissionAny(
724 target_perm = HasRepoPermissionAny(
724 'repository.read', 'repository.write', 'repository.admin')(
725 'repository.read', 'repository.write', 'repository.admin')(
725 target_repo_name)
726 target_repo_name)
726 if not target_perm:
727 if not target_perm:
727 raise HTTPNotFound()
728 raise HTTPNotFound()
728
729
729 return PullRequestModel().generate_repo_data(
730 return PullRequestModel().generate_repo_data(
730 repo, translator=self.request.translate)
731 repo, translator=self.request.translate)
731
732
732 @LoginRequired()
733 @LoginRequired()
733 @NotAnonymous()
734 @NotAnonymous()
734 @HasRepoPermissionAnyDecorator(
735 @HasRepoPermissionAnyDecorator(
735 'repository.read', 'repository.write', 'repository.admin')
736 'repository.read', 'repository.write', 'repository.admin')
736 @view_config(
737 @view_config(
737 route_name='pullrequest_repo_destinations', request_method='GET',
738 route_name='pullrequest_repo_destinations', request_method='GET',
738 renderer='json_ext', xhr=True)
739 renderer='json_ext', xhr=True)
739 def pull_request_repo_destinations(self):
740 def pull_request_repo_destinations(self):
740 _ = self.request.translate
741 _ = self.request.translate
741 filter_query = self.request.GET.get('query')
742 filter_query = self.request.GET.get('query')
742
743
743 query = Repository.query() \
744 query = Repository.query() \
744 .order_by(func.length(Repository.repo_name)) \
745 .order_by(func.length(Repository.repo_name)) \
745 .filter(
746 .filter(
746 or_(Repository.repo_name == self.db_repo.repo_name,
747 or_(Repository.repo_name == self.db_repo.repo_name,
747 Repository.fork_id == self.db_repo.repo_id))
748 Repository.fork_id == self.db_repo.repo_id))
748
749
749 if filter_query:
750 if filter_query:
750 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
751 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
751 query = query.filter(
752 query = query.filter(
752 Repository.repo_name.ilike(ilike_expression))
753 Repository.repo_name.ilike(ilike_expression))
753
754
754 add_parent = False
755 add_parent = False
755 if self.db_repo.parent:
756 if self.db_repo.parent:
756 if filter_query in self.db_repo.parent.repo_name:
757 if filter_query in self.db_repo.parent.repo_name:
757 parent_vcs_obj = self.db_repo.parent.scm_instance()
758 parent_vcs_obj = self.db_repo.parent.scm_instance()
758 if parent_vcs_obj and not parent_vcs_obj.is_empty():
759 if parent_vcs_obj and not parent_vcs_obj.is_empty():
759 add_parent = True
760 add_parent = True
760
761
761 limit = 20 - 1 if add_parent else 20
762 limit = 20 - 1 if add_parent else 20
762 all_repos = query.limit(limit).all()
763 all_repos = query.limit(limit).all()
763 if add_parent:
764 if add_parent:
764 all_repos += [self.db_repo.parent]
765 all_repos += [self.db_repo.parent]
765
766
766 repos = []
767 repos = []
767 for obj in ScmModel().get_repos(all_repos):
768 for obj in ScmModel().get_repos(all_repos):
768 repos.append({
769 repos.append({
769 'id': obj['name'],
770 'id': obj['name'],
770 'text': obj['name'],
771 'text': obj['name'],
771 'type': 'repo',
772 'type': 'repo',
772 'repo_id': obj['dbrepo']['repo_id'],
773 'repo_id': obj['dbrepo']['repo_id'],
773 'repo_type': obj['dbrepo']['repo_type'],
774 'repo_type': obj['dbrepo']['repo_type'],
774 'private': obj['dbrepo']['private'],
775 'private': obj['dbrepo']['private'],
775
776
776 })
777 })
777
778
778 data = {
779 data = {
779 'more': False,
780 'more': False,
780 'results': [{
781 'results': [{
781 'text': _('Repositories'),
782 'text': _('Repositories'),
782 'children': repos
783 'children': repos
783 }] if repos else []
784 }] if repos else []
784 }
785 }
785 return data
786 return data
786
787
787 @LoginRequired()
788 @LoginRequired()
788 @NotAnonymous()
789 @NotAnonymous()
789 @HasRepoPermissionAnyDecorator(
790 @HasRepoPermissionAnyDecorator(
790 'repository.read', 'repository.write', 'repository.admin')
791 'repository.read', 'repository.write', 'repository.admin')
791 @CSRFRequired()
792 @CSRFRequired()
792 @view_config(
793 @view_config(
793 route_name='pullrequest_create', request_method='POST',
794 route_name='pullrequest_create', request_method='POST',
794 renderer=None)
795 renderer=None)
795 def pull_request_create(self):
796 def pull_request_create(self):
796 _ = self.request.translate
797 _ = self.request.translate
797 self.assure_not_empty_repo()
798 self.assure_not_empty_repo()
798 self.load_default_context()
799 self.load_default_context()
799
800
800 controls = peppercorn.parse(self.request.POST.items())
801 controls = peppercorn.parse(self.request.POST.items())
801
802
802 try:
803 try:
803 form = PullRequestForm(
804 form = PullRequestForm(
804 self.request.translate, self.db_repo.repo_id)()
805 self.request.translate, self.db_repo.repo_id)()
805 _form = form.to_python(controls)
806 _form = form.to_python(controls)
806 except formencode.Invalid as errors:
807 except formencode.Invalid as errors:
807 if errors.error_dict.get('revisions'):
808 if errors.error_dict.get('revisions'):
808 msg = 'Revisions: %s' % errors.error_dict['revisions']
809 msg = 'Revisions: %s' % errors.error_dict['revisions']
809 elif errors.error_dict.get('pullrequest_title'):
810 elif errors.error_dict.get('pullrequest_title'):
810 msg = errors.error_dict.get('pullrequest_title')
811 msg = errors.error_dict.get('pullrequest_title')
811 else:
812 else:
812 msg = _('Error creating pull request: {}').format(errors)
813 msg = _('Error creating pull request: {}').format(errors)
813 log.exception(msg)
814 log.exception(msg)
814 h.flash(msg, 'error')
815 h.flash(msg, 'error')
815
816
816 # would rather just go back to form ...
817 # would rather just go back to form ...
817 raise HTTPFound(
818 raise HTTPFound(
818 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
819 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
819
820
820 source_repo = _form['source_repo']
821 source_repo = _form['source_repo']
821 source_ref = _form['source_ref']
822 source_ref = _form['source_ref']
822 target_repo = _form['target_repo']
823 target_repo = _form['target_repo']
823 target_ref = _form['target_ref']
824 target_ref = _form['target_ref']
824 commit_ids = _form['revisions'][::-1]
825 commit_ids = _form['revisions'][::-1]
825
826
826 # find the ancestor for this pr
827 # find the ancestor for this pr
827 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
828 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
828 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
829 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
829
830
830 # re-check permissions again here
831 # re-check permissions again here
831 # source_repo we must have read permissions
832 # source_repo we must have read permissions
832
833
833 source_perm = HasRepoPermissionAny(
834 source_perm = HasRepoPermissionAny(
834 'repository.read',
835 'repository.read',
835 'repository.write', 'repository.admin')(source_db_repo.repo_name)
836 'repository.write', 'repository.admin')(source_db_repo.repo_name)
836 if not source_perm:
837 if not source_perm:
837 msg = _('Not Enough permissions to source repo `{}`.'.format(
838 msg = _('Not Enough permissions to source repo `{}`.'.format(
838 source_db_repo.repo_name))
839 source_db_repo.repo_name))
839 h.flash(msg, category='error')
840 h.flash(msg, category='error')
840 # copy the args back to redirect
841 # copy the args back to redirect
841 org_query = self.request.GET.mixed()
842 org_query = self.request.GET.mixed()
842 raise HTTPFound(
843 raise HTTPFound(
843 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
844 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
844 _query=org_query))
845 _query=org_query))
845
846
846 # target repo we must have read permissions, and also later on
847 # target repo we must have read permissions, and also later on
847 # we want to check branch permissions here
848 # we want to check branch permissions here
848 target_perm = HasRepoPermissionAny(
849 target_perm = HasRepoPermissionAny(
849 'repository.read',
850 'repository.read',
850 'repository.write', 'repository.admin')(target_db_repo.repo_name)
851 'repository.write', 'repository.admin')(target_db_repo.repo_name)
851 if not target_perm:
852 if not target_perm:
852 msg = _('Not Enough permissions to target repo `{}`.'.format(
853 msg = _('Not Enough permissions to target repo `{}`.'.format(
853 target_db_repo.repo_name))
854 target_db_repo.repo_name))
854 h.flash(msg, category='error')
855 h.flash(msg, category='error')
855 # copy the args back to redirect
856 # copy the args back to redirect
856 org_query = self.request.GET.mixed()
857 org_query = self.request.GET.mixed()
857 raise HTTPFound(
858 raise HTTPFound(
858 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
859 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
859 _query=org_query))
860 _query=org_query))
860
861
861 source_scm = source_db_repo.scm_instance()
862 source_scm = source_db_repo.scm_instance()
862 target_scm = target_db_repo.scm_instance()
863 target_scm = target_db_repo.scm_instance()
863
864
864 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
865 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
865 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
866 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
866
867
867 ancestor = source_scm.get_common_ancestor(
868 ancestor = source_scm.get_common_ancestor(
868 source_commit.raw_id, target_commit.raw_id, target_scm)
869 source_commit.raw_id, target_commit.raw_id, target_scm)
869
870
870 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
871 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
871 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
872 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
872
873
873 pullrequest_title = _form['pullrequest_title']
874 pullrequest_title = _form['pullrequest_title']
874 title_source_ref = source_ref.split(':', 2)[1]
875 title_source_ref = source_ref.split(':', 2)[1]
875 if not pullrequest_title:
876 if not pullrequest_title:
876 pullrequest_title = PullRequestModel().generate_pullrequest_title(
877 pullrequest_title = PullRequestModel().generate_pullrequest_title(
877 source=source_repo,
878 source=source_repo,
878 source_ref=title_source_ref,
879 source_ref=title_source_ref,
879 target=target_repo
880 target=target_repo
880 )
881 )
881
882
882 description = _form['pullrequest_desc']
883 description = _form['pullrequest_desc']
883
884
884 get_default_reviewers_data, validate_default_reviewers = \
885 get_default_reviewers_data, validate_default_reviewers = \
885 PullRequestModel().get_reviewer_functions()
886 PullRequestModel().get_reviewer_functions()
886
887
887 # recalculate reviewers logic, to make sure we can validate this
888 # recalculate reviewers logic, to make sure we can validate this
888 reviewer_rules = get_default_reviewers_data(
889 reviewer_rules = get_default_reviewers_data(
889 self._rhodecode_db_user, source_db_repo,
890 self._rhodecode_db_user, source_db_repo,
890 source_commit, target_db_repo, target_commit)
891 source_commit, target_db_repo, target_commit)
891
892
892 given_reviewers = _form['review_members']
893 given_reviewers = _form['review_members']
893 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
894 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
894
895
895 try:
896 try:
896 pull_request = PullRequestModel().create(
897 pull_request = PullRequestModel().create(
897 self._rhodecode_user.user_id, source_repo, source_ref,
898 self._rhodecode_user.user_id, source_repo, source_ref,
898 target_repo, target_ref, commit_ids, reviewers,
899 target_repo, target_ref, commit_ids, reviewers,
899 pullrequest_title, description, reviewer_rules,
900 pullrequest_title, description, reviewer_rules,
900 auth_user=self._rhodecode_user
901 auth_user=self._rhodecode_user
901 )
902 )
902 Session().commit()
903 Session().commit()
903
904
904 h.flash(_('Successfully opened new pull request'),
905 h.flash(_('Successfully opened new pull request'),
905 category='success')
906 category='success')
906 except Exception:
907 except Exception:
907 msg = _('Error occurred during creation of this pull request.')
908 msg = _('Error occurred during creation of this pull request.')
908 log.exception(msg)
909 log.exception(msg)
909 h.flash(msg, category='error')
910 h.flash(msg, category='error')
910
911
911 # copy the args back to redirect
912 # copy the args back to redirect
912 org_query = self.request.GET.mixed()
913 org_query = self.request.GET.mixed()
913 raise HTTPFound(
914 raise HTTPFound(
914 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
915 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
915 _query=org_query))
916 _query=org_query))
916
917
917 raise HTTPFound(
918 raise HTTPFound(
918 h.route_path('pullrequest_show', repo_name=target_repo,
919 h.route_path('pullrequest_show', repo_name=target_repo,
919 pull_request_id=pull_request.pull_request_id))
920 pull_request_id=pull_request.pull_request_id))
920
921
921 @LoginRequired()
922 @LoginRequired()
922 @NotAnonymous()
923 @NotAnonymous()
923 @HasRepoPermissionAnyDecorator(
924 @HasRepoPermissionAnyDecorator(
924 'repository.read', 'repository.write', 'repository.admin')
925 'repository.read', 'repository.write', 'repository.admin')
925 @CSRFRequired()
926 @CSRFRequired()
926 @view_config(
927 @view_config(
927 route_name='pullrequest_update', request_method='POST',
928 route_name='pullrequest_update', request_method='POST',
928 renderer='json_ext')
929 renderer='json_ext')
929 def pull_request_update(self):
930 def pull_request_update(self):
930 pull_request = PullRequest.get_or_404(
931 pull_request = PullRequest.get_or_404(
931 self.request.matchdict['pull_request_id'])
932 self.request.matchdict['pull_request_id'])
932 _ = self.request.translate
933 _ = self.request.translate
933
934
934 self.load_default_context()
935 self.load_default_context()
935
936
936 if pull_request.is_closed():
937 if pull_request.is_closed():
937 log.debug('update: forbidden because pull request is closed')
938 log.debug('update: forbidden because pull request is closed')
938 msg = _(u'Cannot update closed pull requests.')
939 msg = _(u'Cannot update closed pull requests.')
939 h.flash(msg, category='error')
940 h.flash(msg, category='error')
940 return True
941 return True
941
942
942 # only owner or admin can update it
943 # only owner or admin can update it
943 allowed_to_update = PullRequestModel().check_user_update(
944 allowed_to_update = PullRequestModel().check_user_update(
944 pull_request, self._rhodecode_user)
945 pull_request, self._rhodecode_user)
945 if allowed_to_update:
946 if allowed_to_update:
946 controls = peppercorn.parse(self.request.POST.items())
947 controls = peppercorn.parse(self.request.POST.items())
947
948
948 if 'review_members' in controls:
949 if 'review_members' in controls:
949 self._update_reviewers(
950 self._update_reviewers(
950 pull_request, controls['review_members'],
951 pull_request, controls['review_members'],
951 pull_request.reviewer_data)
952 pull_request.reviewer_data)
952 elif str2bool(self.request.POST.get('update_commits', 'false')):
953 elif str2bool(self.request.POST.get('update_commits', 'false')):
953 self._update_commits(pull_request)
954 self._update_commits(pull_request)
954 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
955 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
955 self._edit_pull_request(pull_request)
956 self._edit_pull_request(pull_request)
956 else:
957 else:
957 raise HTTPBadRequest()
958 raise HTTPBadRequest()
958 return True
959 return True
959 raise HTTPForbidden()
960 raise HTTPForbidden()
960
961
961 def _edit_pull_request(self, pull_request):
962 def _edit_pull_request(self, pull_request):
962 _ = self.request.translate
963 _ = self.request.translate
963 try:
964 try:
964 PullRequestModel().edit(
965 PullRequestModel().edit(
965 pull_request, self.request.POST.get('title'),
966 pull_request, self.request.POST.get('title'),
966 self.request.POST.get('description'), self._rhodecode_user)
967 self.request.POST.get('description'), self._rhodecode_user)
967 except ValueError:
968 except ValueError:
968 msg = _(u'Cannot update closed pull requests.')
969 msg = _(u'Cannot update closed pull requests.')
969 h.flash(msg, category='error')
970 h.flash(msg, category='error')
970 return
971 return
971 else:
972 else:
972 Session().commit()
973 Session().commit()
973
974
974 msg = _(u'Pull request title & description updated.')
975 msg = _(u'Pull request title & description updated.')
975 h.flash(msg, category='success')
976 h.flash(msg, category='success')
976 return
977 return
977
978
978 def _update_commits(self, pull_request):
979 def _update_commits(self, pull_request):
979 _ = self.request.translate
980 _ = self.request.translate
980 resp = PullRequestModel().update_commits(pull_request)
981 resp = PullRequestModel().update_commits(pull_request)
981
982
982 if resp.executed:
983 if resp.executed:
983
984
984 if resp.target_changed and resp.source_changed:
985 if resp.target_changed and resp.source_changed:
985 changed = 'target and source repositories'
986 changed = 'target and source repositories'
986 elif resp.target_changed and not resp.source_changed:
987 elif resp.target_changed and not resp.source_changed:
987 changed = 'target repository'
988 changed = 'target repository'
988 elif not resp.target_changed and resp.source_changed:
989 elif not resp.target_changed and resp.source_changed:
989 changed = 'source repository'
990 changed = 'source repository'
990 else:
991 else:
991 changed = 'nothing'
992 changed = 'nothing'
992
993
993 msg = _(
994 msg = _(
994 u'Pull request updated to "{source_commit_id}" with '
995 u'Pull request updated to "{source_commit_id}" with '
995 u'{count_added} added, {count_removed} removed commits. '
996 u'{count_added} added, {count_removed} removed commits. '
996 u'Source of changes: {change_source}')
997 u'Source of changes: {change_source}')
997 msg = msg.format(
998 msg = msg.format(
998 source_commit_id=pull_request.source_ref_parts.commit_id,
999 source_commit_id=pull_request.source_ref_parts.commit_id,
999 count_added=len(resp.changes.added),
1000 count_added=len(resp.changes.added),
1000 count_removed=len(resp.changes.removed),
1001 count_removed=len(resp.changes.removed),
1001 change_source=changed)
1002 change_source=changed)
1002 h.flash(msg, category='success')
1003 h.flash(msg, category='success')
1003
1004
1004 channel = '/repo${}$/pr/{}'.format(
1005 channel = '/repo${}$/pr/{}'.format(
1005 pull_request.target_repo.repo_name,
1006 pull_request.target_repo.repo_name,
1006 pull_request.pull_request_id)
1007 pull_request.pull_request_id)
1007 message = msg + (
1008 message = msg + (
1008 ' - <a onclick="window.location.reload()">'
1009 ' - <a onclick="window.location.reload()">'
1009 '<strong>{}</strong></a>'.format(_('Reload page')))
1010 '<strong>{}</strong></a>'.format(_('Reload page')))
1010 channelstream.post_message(
1011 channelstream.post_message(
1011 channel, message, self._rhodecode_user.username,
1012 channel, message, self._rhodecode_user.username,
1012 registry=self.request.registry)
1013 registry=self.request.registry)
1013 else:
1014 else:
1014 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1015 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1015 warning_reasons = [
1016 warning_reasons = [
1016 UpdateFailureReason.NO_CHANGE,
1017 UpdateFailureReason.NO_CHANGE,
1017 UpdateFailureReason.WRONG_REF_TYPE,
1018 UpdateFailureReason.WRONG_REF_TYPE,
1018 ]
1019 ]
1019 category = 'warning' if resp.reason in warning_reasons else 'error'
1020 category = 'warning' if resp.reason in warning_reasons else 'error'
1020 h.flash(msg, category=category)
1021 h.flash(msg, category=category)
1021
1022
1022 @LoginRequired()
1023 @LoginRequired()
1023 @NotAnonymous()
1024 @NotAnonymous()
1024 @HasRepoPermissionAnyDecorator(
1025 @HasRepoPermissionAnyDecorator(
1025 'repository.read', 'repository.write', 'repository.admin')
1026 'repository.read', 'repository.write', 'repository.admin')
1026 @CSRFRequired()
1027 @CSRFRequired()
1027 @view_config(
1028 @view_config(
1028 route_name='pullrequest_merge', request_method='POST',
1029 route_name='pullrequest_merge', request_method='POST',
1029 renderer='json_ext')
1030 renderer='json_ext')
1030 def pull_request_merge(self):
1031 def pull_request_merge(self):
1031 """
1032 """
1032 Merge will perform a server-side merge of the specified
1033 Merge will perform a server-side merge of the specified
1033 pull request, if the pull request is approved and mergeable.
1034 pull request, if the pull request is approved and mergeable.
1034 After successful merging, the pull request is automatically
1035 After successful merging, the pull request is automatically
1035 closed, with a relevant comment.
1036 closed, with a relevant comment.
1036 """
1037 """
1037 pull_request = PullRequest.get_or_404(
1038 pull_request = PullRequest.get_or_404(
1038 self.request.matchdict['pull_request_id'])
1039 self.request.matchdict['pull_request_id'])
1039
1040
1040 self.load_default_context()
1041 self.load_default_context()
1041 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1042 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1042 translator=self.request.translate)
1043 translator=self.request.translate)
1043 merge_possible = not check.failed
1044 merge_possible = not check.failed
1044
1045
1045 for err_type, error_msg in check.errors:
1046 for err_type, error_msg in check.errors:
1046 h.flash(error_msg, category=err_type)
1047 h.flash(error_msg, category=err_type)
1047
1048
1048 if merge_possible:
1049 if merge_possible:
1049 log.debug("Pre-conditions checked, trying to merge.")
1050 log.debug("Pre-conditions checked, trying to merge.")
1050 extras = vcs_operation_context(
1051 extras = vcs_operation_context(
1051 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1052 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1052 username=self._rhodecode_db_user.username, action='push',
1053 username=self._rhodecode_db_user.username, action='push',
1053 scm=pull_request.target_repo.repo_type)
1054 scm=pull_request.target_repo.repo_type)
1054 self._merge_pull_request(
1055 self._merge_pull_request(
1055 pull_request, self._rhodecode_db_user, extras)
1056 pull_request, self._rhodecode_db_user, extras)
1056 else:
1057 else:
1057 log.debug("Pre-conditions failed, NOT merging.")
1058 log.debug("Pre-conditions failed, NOT merging.")
1058
1059
1059 raise HTTPFound(
1060 raise HTTPFound(
1060 h.route_path('pullrequest_show',
1061 h.route_path('pullrequest_show',
1061 repo_name=pull_request.target_repo.repo_name,
1062 repo_name=pull_request.target_repo.repo_name,
1062 pull_request_id=pull_request.pull_request_id))
1063 pull_request_id=pull_request.pull_request_id))
1063
1064
1064 def _merge_pull_request(self, pull_request, user, extras):
1065 def _merge_pull_request(self, pull_request, user, extras):
1065 _ = self.request.translate
1066 _ = self.request.translate
1066 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1067 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1067
1068
1068 if merge_resp.executed:
1069 if merge_resp.executed:
1069 log.debug("The merge was successful, closing the pull request.")
1070 log.debug("The merge was successful, closing the pull request.")
1070 PullRequestModel().close_pull_request(
1071 PullRequestModel().close_pull_request(
1071 pull_request.pull_request_id, user)
1072 pull_request.pull_request_id, user)
1072 Session().commit()
1073 Session().commit()
1073 msg = _('Pull request was successfully merged and closed.')
1074 msg = _('Pull request was successfully merged and closed.')
1074 h.flash(msg, category='success')
1075 h.flash(msg, category='success')
1075 else:
1076 else:
1076 log.debug(
1077 log.debug(
1077 "The merge was not successful. Merge response: %s",
1078 "The merge was not successful. Merge response: %s",
1078 merge_resp)
1079 merge_resp)
1079 msg = PullRequestModel().merge_status_message(
1080 msg = PullRequestModel().merge_status_message(
1080 merge_resp.failure_reason)
1081 merge_resp.failure_reason)
1081 h.flash(msg, category='error')
1082 h.flash(msg, category='error')
1082
1083
1083 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1084 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1084 _ = self.request.translate
1085 _ = self.request.translate
1085 get_default_reviewers_data, validate_default_reviewers = \
1086 get_default_reviewers_data, validate_default_reviewers = \
1086 PullRequestModel().get_reviewer_functions()
1087 PullRequestModel().get_reviewer_functions()
1087
1088
1088 try:
1089 try:
1089 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1090 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1090 except ValueError as e:
1091 except ValueError as e:
1091 log.error('Reviewers Validation: {}'.format(e))
1092 log.error('Reviewers Validation: {}'.format(e))
1092 h.flash(e, category='error')
1093 h.flash(e, category='error')
1093 return
1094 return
1094
1095
1095 PullRequestModel().update_reviewers(
1096 PullRequestModel().update_reviewers(
1096 pull_request, reviewers, self._rhodecode_user)
1097 pull_request, reviewers, self._rhodecode_user)
1097 h.flash(_('Pull request reviewers updated.'), category='success')
1098 h.flash(_('Pull request reviewers updated.'), category='success')
1098 Session().commit()
1099 Session().commit()
1099
1100
1100 @LoginRequired()
1101 @LoginRequired()
1101 @NotAnonymous()
1102 @NotAnonymous()
1102 @HasRepoPermissionAnyDecorator(
1103 @HasRepoPermissionAnyDecorator(
1103 'repository.read', 'repository.write', 'repository.admin')
1104 'repository.read', 'repository.write', 'repository.admin')
1104 @CSRFRequired()
1105 @CSRFRequired()
1105 @view_config(
1106 @view_config(
1106 route_name='pullrequest_delete', request_method='POST',
1107 route_name='pullrequest_delete', request_method='POST',
1107 renderer='json_ext')
1108 renderer='json_ext')
1108 def pull_request_delete(self):
1109 def pull_request_delete(self):
1109 _ = self.request.translate
1110 _ = self.request.translate
1110
1111
1111 pull_request = PullRequest.get_or_404(
1112 pull_request = PullRequest.get_or_404(
1112 self.request.matchdict['pull_request_id'])
1113 self.request.matchdict['pull_request_id'])
1113 self.load_default_context()
1114 self.load_default_context()
1114
1115
1115 pr_closed = pull_request.is_closed()
1116 pr_closed = pull_request.is_closed()
1116 allowed_to_delete = PullRequestModel().check_user_delete(
1117 allowed_to_delete = PullRequestModel().check_user_delete(
1117 pull_request, self._rhodecode_user) and not pr_closed
1118 pull_request, self._rhodecode_user) and not pr_closed
1118
1119
1119 # only owner can delete it !
1120 # only owner can delete it !
1120 if allowed_to_delete:
1121 if allowed_to_delete:
1121 PullRequestModel().delete(pull_request, self._rhodecode_user)
1122 PullRequestModel().delete(pull_request, self._rhodecode_user)
1122 Session().commit()
1123 Session().commit()
1123 h.flash(_('Successfully deleted pull request'),
1124 h.flash(_('Successfully deleted pull request'),
1124 category='success')
1125 category='success')
1125 raise HTTPFound(h.route_path('pullrequest_show_all',
1126 raise HTTPFound(h.route_path('pullrequest_show_all',
1126 repo_name=self.db_repo_name))
1127 repo_name=self.db_repo_name))
1127
1128
1128 log.warning('user %s tried to delete pull request without access',
1129 log.warning('user %s tried to delete pull request without access',
1129 self._rhodecode_user)
1130 self._rhodecode_user)
1130 raise HTTPNotFound()
1131 raise HTTPNotFound()
1131
1132
1132 @LoginRequired()
1133 @LoginRequired()
1133 @NotAnonymous()
1134 @NotAnonymous()
1134 @HasRepoPermissionAnyDecorator(
1135 @HasRepoPermissionAnyDecorator(
1135 'repository.read', 'repository.write', 'repository.admin')
1136 'repository.read', 'repository.write', 'repository.admin')
1136 @CSRFRequired()
1137 @CSRFRequired()
1137 @view_config(
1138 @view_config(
1138 route_name='pullrequest_comment_create', request_method='POST',
1139 route_name='pullrequest_comment_create', request_method='POST',
1139 renderer='json_ext')
1140 renderer='json_ext')
1140 def pull_request_comment_create(self):
1141 def pull_request_comment_create(self):
1141 _ = self.request.translate
1142 _ = self.request.translate
1142
1143
1143 pull_request = PullRequest.get_or_404(
1144 pull_request = PullRequest.get_or_404(
1144 self.request.matchdict['pull_request_id'])
1145 self.request.matchdict['pull_request_id'])
1145 pull_request_id = pull_request.pull_request_id
1146 pull_request_id = pull_request.pull_request_id
1146
1147
1147 if pull_request.is_closed():
1148 if pull_request.is_closed():
1148 log.debug('comment: forbidden because pull request is closed')
1149 log.debug('comment: forbidden because pull request is closed')
1149 raise HTTPForbidden()
1150 raise HTTPForbidden()
1150
1151
1151 allowed_to_comment = PullRequestModel().check_user_comment(
1152 allowed_to_comment = PullRequestModel().check_user_comment(
1152 pull_request, self._rhodecode_user)
1153 pull_request, self._rhodecode_user)
1153 if not allowed_to_comment:
1154 if not allowed_to_comment:
1154 log.debug(
1155 log.debug(
1155 'comment: forbidden because pull request is from forbidden repo')
1156 'comment: forbidden because pull request is from forbidden repo')
1156 raise HTTPForbidden()
1157 raise HTTPForbidden()
1157
1158
1158 c = self.load_default_context()
1159 c = self.load_default_context()
1159
1160
1160 status = self.request.POST.get('changeset_status', None)
1161 status = self.request.POST.get('changeset_status', None)
1161 text = self.request.POST.get('text')
1162 text = self.request.POST.get('text')
1162 comment_type = self.request.POST.get('comment_type')
1163 comment_type = self.request.POST.get('comment_type')
1163 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1164 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1164 close_pull_request = self.request.POST.get('close_pull_request')
1165 close_pull_request = self.request.POST.get('close_pull_request')
1165
1166
1166 # the logic here should work like following, if we submit close
1167 # the logic here should work like following, if we submit close
1167 # pr comment, use `close_pull_request_with_comment` function
1168 # pr comment, use `close_pull_request_with_comment` function
1168 # else handle regular comment logic
1169 # else handle regular comment logic
1169
1170
1170 if close_pull_request:
1171 if close_pull_request:
1171 # only owner or admin or person with write permissions
1172 # only owner or admin or person with write permissions
1172 allowed_to_close = PullRequestModel().check_user_update(
1173 allowed_to_close = PullRequestModel().check_user_update(
1173 pull_request, self._rhodecode_user)
1174 pull_request, self._rhodecode_user)
1174 if not allowed_to_close:
1175 if not allowed_to_close:
1175 log.debug('comment: forbidden because not allowed to close '
1176 log.debug('comment: forbidden because not allowed to close '
1176 'pull request %s', pull_request_id)
1177 'pull request %s', pull_request_id)
1177 raise HTTPForbidden()
1178 raise HTTPForbidden()
1178 comment, status = PullRequestModel().close_pull_request_with_comment(
1179 comment, status = PullRequestModel().close_pull_request_with_comment(
1179 pull_request, self._rhodecode_user, self.db_repo, message=text)
1180 pull_request, self._rhodecode_user, self.db_repo, message=text)
1180 Session().flush()
1181 Session().flush()
1181 events.trigger(
1182 events.trigger(
1182 events.PullRequestCommentEvent(pull_request, comment))
1183 events.PullRequestCommentEvent(pull_request, comment))
1183
1184
1184 else:
1185 else:
1185 # regular comment case, could be inline, or one with status.
1186 # regular comment case, could be inline, or one with status.
1186 # for that one we check also permissions
1187 # for that one we check also permissions
1187
1188
1188 allowed_to_change_status = PullRequestModel().check_user_change_status(
1189 allowed_to_change_status = PullRequestModel().check_user_change_status(
1189 pull_request, self._rhodecode_user)
1190 pull_request, self._rhodecode_user)
1190
1191
1191 if status and allowed_to_change_status:
1192 if status and allowed_to_change_status:
1192 message = (_('Status change %(transition_icon)s %(status)s')
1193 message = (_('Status change %(transition_icon)s %(status)s')
1193 % {'transition_icon': '>',
1194 % {'transition_icon': '>',
1194 'status': ChangesetStatus.get_status_lbl(status)})
1195 'status': ChangesetStatus.get_status_lbl(status)})
1195 text = text or message
1196 text = text or message
1196
1197
1197 comment = CommentsModel().create(
1198 comment = CommentsModel().create(
1198 text=text,
1199 text=text,
1199 repo=self.db_repo.repo_id,
1200 repo=self.db_repo.repo_id,
1200 user=self._rhodecode_user.user_id,
1201 user=self._rhodecode_user.user_id,
1201 pull_request=pull_request,
1202 pull_request=pull_request,
1202 f_path=self.request.POST.get('f_path'),
1203 f_path=self.request.POST.get('f_path'),
1203 line_no=self.request.POST.get('line'),
1204 line_no=self.request.POST.get('line'),
1204 status_change=(ChangesetStatus.get_status_lbl(status)
1205 status_change=(ChangesetStatus.get_status_lbl(status)
1205 if status and allowed_to_change_status else None),
1206 if status and allowed_to_change_status else None),
1206 status_change_type=(status
1207 status_change_type=(status
1207 if status and allowed_to_change_status else None),
1208 if status and allowed_to_change_status else None),
1208 comment_type=comment_type,
1209 comment_type=comment_type,
1209 resolves_comment_id=resolves_comment_id,
1210 resolves_comment_id=resolves_comment_id,
1210 auth_user=self._rhodecode_user
1211 auth_user=self._rhodecode_user
1211 )
1212 )
1212
1213
1213 if allowed_to_change_status:
1214 if allowed_to_change_status:
1214 # calculate old status before we change it
1215 # calculate old status before we change it
1215 old_calculated_status = pull_request.calculated_review_status()
1216 old_calculated_status = pull_request.calculated_review_status()
1216
1217
1217 # get status if set !
1218 # get status if set !
1218 if status:
1219 if status:
1219 ChangesetStatusModel().set_status(
1220 ChangesetStatusModel().set_status(
1220 self.db_repo.repo_id,
1221 self.db_repo.repo_id,
1221 status,
1222 status,
1222 self._rhodecode_user.user_id,
1223 self._rhodecode_user.user_id,
1223 comment,
1224 comment,
1224 pull_request=pull_request
1225 pull_request=pull_request
1225 )
1226 )
1226
1227
1227 Session().flush()
1228 Session().flush()
1228 # this is somehow required to get access to some relationship
1229 # this is somehow required to get access to some relationship
1229 # loaded on comment
1230 # loaded on comment
1230 Session().refresh(comment)
1231 Session().refresh(comment)
1231
1232
1232 events.trigger(
1233 events.trigger(
1233 events.PullRequestCommentEvent(pull_request, comment))
1234 events.PullRequestCommentEvent(pull_request, comment))
1234
1235
1235 # we now calculate the status of pull request, and based on that
1236 # we now calculate the status of pull request, and based on that
1236 # calculation we set the commits status
1237 # calculation we set the commits status
1237 calculated_status = pull_request.calculated_review_status()
1238 calculated_status = pull_request.calculated_review_status()
1238 if old_calculated_status != calculated_status:
1239 if old_calculated_status != calculated_status:
1239 PullRequestModel()._trigger_pull_request_hook(
1240 PullRequestModel()._trigger_pull_request_hook(
1240 pull_request, self._rhodecode_user, 'review_status_change')
1241 pull_request, self._rhodecode_user, 'review_status_change')
1241
1242
1242 Session().commit()
1243 Session().commit()
1243
1244
1244 data = {
1245 data = {
1245 'target_id': h.safeid(h.safe_unicode(
1246 'target_id': h.safeid(h.safe_unicode(
1246 self.request.POST.get('f_path'))),
1247 self.request.POST.get('f_path'))),
1247 }
1248 }
1248 if comment:
1249 if comment:
1249 c.co = comment
1250 c.co = comment
1250 rendered_comment = render(
1251 rendered_comment = render(
1251 'rhodecode:templates/changeset/changeset_comment_block.mako',
1252 'rhodecode:templates/changeset/changeset_comment_block.mako',
1252 self._get_template_context(c), self.request)
1253 self._get_template_context(c), self.request)
1253
1254
1254 data.update(comment.get_dict())
1255 data.update(comment.get_dict())
1255 data.update({'rendered_text': rendered_comment})
1256 data.update({'rendered_text': rendered_comment})
1256
1257
1257 return data
1258 return data
1258
1259
1259 @LoginRequired()
1260 @LoginRequired()
1260 @NotAnonymous()
1261 @NotAnonymous()
1261 @HasRepoPermissionAnyDecorator(
1262 @HasRepoPermissionAnyDecorator(
1262 'repository.read', 'repository.write', 'repository.admin')
1263 'repository.read', 'repository.write', 'repository.admin')
1263 @CSRFRequired()
1264 @CSRFRequired()
1264 @view_config(
1265 @view_config(
1265 route_name='pullrequest_comment_delete', request_method='POST',
1266 route_name='pullrequest_comment_delete', request_method='POST',
1266 renderer='json_ext')
1267 renderer='json_ext')
1267 def pull_request_comment_delete(self):
1268 def pull_request_comment_delete(self):
1268 pull_request = PullRequest.get_or_404(
1269 pull_request = PullRequest.get_or_404(
1269 self.request.matchdict['pull_request_id'])
1270 self.request.matchdict['pull_request_id'])
1270
1271
1271 comment = ChangesetComment.get_or_404(
1272 comment = ChangesetComment.get_or_404(
1272 self.request.matchdict['comment_id'])
1273 self.request.matchdict['comment_id'])
1273 comment_id = comment.comment_id
1274 comment_id = comment.comment_id
1274
1275
1275 if pull_request.is_closed():
1276 if pull_request.is_closed():
1276 log.debug('comment: forbidden because pull request is closed')
1277 log.debug('comment: forbidden because pull request is closed')
1277 raise HTTPForbidden()
1278 raise HTTPForbidden()
1278
1279
1279 if not comment:
1280 if not comment:
1280 log.debug('Comment with id:%s not found, skipping', comment_id)
1281 log.debug('Comment with id:%s not found, skipping', comment_id)
1281 # comment already deleted in another call probably
1282 # comment already deleted in another call probably
1282 return True
1283 return True
1283
1284
1284 if comment.pull_request.is_closed():
1285 if comment.pull_request.is_closed():
1285 # don't allow deleting comments on closed pull request
1286 # don't allow deleting comments on closed pull request
1286 raise HTTPForbidden()
1287 raise HTTPForbidden()
1287
1288
1288 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1289 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1289 super_admin = h.HasPermissionAny('hg.admin')()
1290 super_admin = h.HasPermissionAny('hg.admin')()
1290 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1291 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1291 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1292 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1292 comment_repo_admin = is_repo_admin and is_repo_comment
1293 comment_repo_admin = is_repo_admin and is_repo_comment
1293
1294
1294 if super_admin or comment_owner or comment_repo_admin:
1295 if super_admin or comment_owner or comment_repo_admin:
1295 old_calculated_status = comment.pull_request.calculated_review_status()
1296 old_calculated_status = comment.pull_request.calculated_review_status()
1296 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1297 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1297 Session().commit()
1298 Session().commit()
1298 calculated_status = comment.pull_request.calculated_review_status()
1299 calculated_status = comment.pull_request.calculated_review_status()
1299 if old_calculated_status != calculated_status:
1300 if old_calculated_status != calculated_status:
1300 PullRequestModel()._trigger_pull_request_hook(
1301 PullRequestModel()._trigger_pull_request_hook(
1301 comment.pull_request, self._rhodecode_user, 'review_status_change')
1302 comment.pull_request, self._rhodecode_user, 'review_status_change')
1302 return True
1303 return True
1303 else:
1304 else:
1304 log.warning('No permissions for user %s to delete comment_id: %s',
1305 log.warning('No permissions for user %s to delete comment_id: %s',
1305 self._rhodecode_db_user, comment_id)
1306 self._rhodecode_db_user, comment_id)
1306 raise HTTPNotFound()
1307 raise HTTPNotFound()
@@ -1,673 +1,674 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 It's implemented with basic auth function
23 It's implemented with basic auth function
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import importlib
29 import importlib
30 from functools import wraps
30 from functools import wraps
31 from StringIO import StringIO
31 from StringIO import StringIO
32 from lxml import etree
32 from lxml import etree
33
33
34 import time
34 import time
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36
36
37 from pyramid.httpexceptions import (
37 from pyramid.httpexceptions import (
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 from zope.cachedescriptors.property import Lazy as LazyProperty
39 from zope.cachedescriptors.property import Lazy as LazyProperty
40
40
41 import rhodecode
41 import rhodecode
42 from rhodecode.authentication.base import (
42 from rhodecode.authentication.base import (
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
44 from rhodecode.lib import caches
44 from rhodecode.lib import caches
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
46 from rhodecode.lib.base import (
46 from rhodecode.lib.base import (
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 from rhodecode.lib.middleware import appenlight
50 from rhodecode.lib.middleware import appenlight
51 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.middleware.utils import scm_app_http
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 from rhodecode.lib.vcs.backends import base
55 from rhodecode.lib.vcs.backends import base
56
56
57 from rhodecode.model import meta
57 from rhodecode.model import meta
58 from rhodecode.model.db import User, Repository, PullRequest
58 from rhodecode.model.db import User, Repository, PullRequest
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.pull_request import PullRequestModel
60 from rhodecode.model.pull_request import PullRequestModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
def extract_svn_txn_id(acl_repo_name, data):
    """
    Extract the svn txn_id from XML data submitted during POST operations
    and turn it into a cache key scoped to the given repository.

    Returns None when no txn_id can be found or the XML cannot be parsed.
    """
    txn_pat = re.compile(r'/txn/(?P<txn_id>.*)')
    try:
        root = etree.fromstring(data)
        for element in root:
            if element.tag != '{DAV:}source':
                continue
            for child in element:
                if child.tag != '{DAV:}href':
                    continue
                found = txn_pat.search(child.text)
                if not found:
                    continue
                svn_tx_id = found.groupdict()['txn_id']
                return caches.compute_key_from_params(
                    acl_repo_name, svn_tx_id)
    except Exception:
        # best-effort helper: swallow and log parse/lookup failures
        log.exception('Failed to extract txn_id')
86
86
87
87
def initialize_generator(factory):
    """
    Initializes the returned generator by draining its first element.

    This can be used to give a generator an initializer, which is the code
    up to the first yield statement. This decorator enforces that the first
    produced element has the value ``"__init__"`` to make its special
    purpose very explicit in the using code.

    :param factory: generator function to wrap
    :raises ValueError: if the generator yields nothing, or its first
        element is not ``"__init__"``
    """

    @wraps(factory)
    def wrapper(*args, **kwargs):
        gen = factory(*args, **kwargs)
        try:
            # use the builtin next() instead of the Python-2-only
            # gen.next() method so the code is portable to Python 3
            init = next(gen)
        except StopIteration:
            raise ValueError('Generator must yield at least one element.')
        if init != "__init__":
            raise ValueError('First yielded element must be "__init__".')
        return gen
    return wrapper
109
109
110
110
class SimpleVCS(object):
    """Common functionality for SCM HTTP handlers."""

    SCM = 'unknown'

    # repository name variants, populated per-request by set_repo_names()
    acl_repo_name = None
    url_repo_name = None
    vcs_repo_name = None
    rc_extras = {}

    # Requests to shadow repositories must be handled differently from
    # requests to normal repositories, so we need to tell them apart.
    # This regex matches only URLs that point at a shadow repository.
    shadow_repo_re = re.compile(
        '(?P<groups>(?:{slug_pat}/)*)'  # repo groups
        '(?P<target>{slug_pat})/'  # target repo
        'pull-request/(?P<pr_id>\d+)/'  # pull request
        'repository$'  # shadow repo
        .format(slug_pat=SLUG_RE.pattern))
131
131
132 def __init__(self, config, registry):
132 def __init__(self, config, registry):
133 self.registry = registry
133 self.registry = registry
134 self.config = config
134 self.config = config
135 # re-populated by specialized middleware
135 # re-populated by specialized middleware
136 self.repo_vcs_config = base.Config()
136 self.repo_vcs_config = base.Config()
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
138
138
139 registry.rhodecode_settings = self.rhodecode_settings
139 registry.rhodecode_settings = self.rhodecode_settings
140 # authenticate this VCS request using authfunc
140 # authenticate this VCS request using authfunc
141 auth_ret_code_detection = \
141 auth_ret_code_detection = \
142 str2bool(self.config.get('auth_ret_code_detection', False))
142 str2bool(self.config.get('auth_ret_code_detection', False))
143 self.authenticate = BasicAuth(
143 self.authenticate = BasicAuth(
144 '', authenticate, registry, config.get('auth_ret_code'),
144 '', authenticate, registry, config.get('auth_ret_code'),
145 auth_ret_code_detection)
145 auth_ret_code_detection)
146 self.ip_addr = '0.0.0.0'
146 self.ip_addr = '0.0.0.0'
147
147
148 @LazyProperty
148 @LazyProperty
149 def global_vcs_config(self):
149 def global_vcs_config(self):
150 try:
150 try:
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
152 except Exception:
152 except Exception:
153 return base.Config()
153 return base.Config()
154
154
155 @property
155 @property
156 def base_path(self):
156 def base_path(self):
157 settings_path = self.repo_vcs_config.get(
157 settings_path = self.repo_vcs_config.get(
158 *VcsSettingsModel.PATH_SETTING)
158 *VcsSettingsModel.PATH_SETTING)
159
159
160 if not settings_path:
160 if not settings_path:
161 settings_path = self.global_vcs_config.get(
161 settings_path = self.global_vcs_config.get(
162 *VcsSettingsModel.PATH_SETTING)
162 *VcsSettingsModel.PATH_SETTING)
163
163
164 if not settings_path:
164 if not settings_path:
165 # try, maybe we passed in explicitly as config option
165 # try, maybe we passed in explicitly as config option
166 settings_path = self.config.get('base_path')
166 settings_path = self.config.get('base_path')
167
167
168 if not settings_path:
168 if not settings_path:
169 raise ValueError('FATAL: base_path is empty')
169 raise ValueError('FATAL: base_path is empty')
170 return settings_path
170 return settings_path
171
171
172 def set_repo_names(self, environ):
172 def set_repo_names(self, environ):
173 """
173 """
174 This will populate the attributes acl_repo_name, url_repo_name,
174 This will populate the attributes acl_repo_name, url_repo_name,
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
176 shadow) repositories all names are equal. In case of requests to a
176 shadow) repositories all names are equal. In case of requests to a
177 shadow repository the acl-name points to the target repo of the pull
177 shadow repository the acl-name points to the target repo of the pull
178 request and the vcs-name points to the shadow repo file system path.
178 request and the vcs-name points to the shadow repo file system path.
179 The url-name is always the URL used by the vcs client program.
179 The url-name is always the URL used by the vcs client program.
180
180
181 Example in case of a shadow repo:
181 Example in case of a shadow repo:
182 acl_repo_name = RepoGroup/MyRepo
182 acl_repo_name = RepoGroup/MyRepo
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
185 """
185 """
186 # First we set the repo name from URL for all attributes. This is the
186 # First we set the repo name from URL for all attributes. This is the
187 # default if handling normal (non shadow) repo requests.
187 # default if handling normal (non shadow) repo requests.
188 self.url_repo_name = self._get_repository_name(environ)
188 self.url_repo_name = self._get_repository_name(environ)
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
190 self.is_shadow_repo = False
190 self.is_shadow_repo = False
191
191
192 # Check if this is a request to a shadow repository.
192 # Check if this is a request to a shadow repository.
193 match = self.shadow_repo_re.match(self.url_repo_name)
193 match = self.shadow_repo_re.match(self.url_repo_name)
194 if match:
194 if match:
195 match_dict = match.groupdict()
195 match_dict = match.groupdict()
196
196
197 # Build acl repo name from regex match.
197 # Build acl repo name from regex match.
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
199 groups=match_dict['groups'] or '',
199 groups=match_dict['groups'] or '',
200 target=match_dict['target']))
200 target=match_dict['target']))
201
201
202 # Retrieve pull request instance by ID from regex match.
202 # Retrieve pull request instance by ID from regex match.
203 pull_request = PullRequest.get(match_dict['pr_id'])
203 pull_request = PullRequest.get(match_dict['pr_id'])
204
204
205 # Only proceed if we got a pull request and if acl repo name from
205 # Only proceed if we got a pull request and if acl repo name from
206 # URL equals the target repo name of the pull request.
206 # URL equals the target repo name of the pull request.
207 if pull_request and (acl_repo_name ==
207 if pull_request and \
208 pull_request.target_repo.repo_name):
208 (acl_repo_name == pull_request.target_repo.repo_name):
209 repo_id = pull_request.target_repo.repo_id
209 # Get file system path to shadow repository.
210 # Get file system path to shadow repository.
210 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 target_vcs = pull_request.target_repo.scm_instance()
212 target_vcs = pull_request.target_repo.scm_instance()
212 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 workspace_id)
214 repo_id, workspace_id)
214
215
215 # Store names for later usage.
216 # Store names for later usage.
216 self.vcs_repo_name = vcs_repo_name
217 self.vcs_repo_name = vcs_repo_name
217 self.acl_repo_name = acl_repo_name
218 self.acl_repo_name = acl_repo_name
218 self.is_shadow_repo = True
219 self.is_shadow_repo = True
219
220
220 log.debug('Setting all VCS repository names: %s', {
221 log.debug('Setting all VCS repository names: %s', {
221 'acl_repo_name': self.acl_repo_name,
222 'acl_repo_name': self.acl_repo_name,
222 'url_repo_name': self.url_repo_name,
223 'url_repo_name': self.url_repo_name,
223 'vcs_repo_name': self.vcs_repo_name,
224 'vcs_repo_name': self.vcs_repo_name,
224 })
225 })
225
226
226 @property
227 @property
227 def scm_app(self):
228 def scm_app(self):
228 custom_implementation = self.config['vcs.scm_app_implementation']
229 custom_implementation = self.config['vcs.scm_app_implementation']
229 if custom_implementation == 'http':
230 if custom_implementation == 'http':
230 log.info('Using HTTP implementation of scm app.')
231 log.info('Using HTTP implementation of scm app.')
231 scm_app_impl = scm_app_http
232 scm_app_impl = scm_app_http
232 else:
233 else:
233 log.info('Using custom implementation of scm_app: "{}"'.format(
234 log.info('Using custom implementation of scm_app: "{}"'.format(
234 custom_implementation))
235 custom_implementation))
235 scm_app_impl = importlib.import_module(custom_implementation)
236 scm_app_impl = importlib.import_module(custom_implementation)
236 return scm_app_impl
237 return scm_app_impl
237
238
238 def _get_by_id(self, repo_name):
239 def _get_by_id(self, repo_name):
239 """
240 """
240 Gets a special pattern _<ID> from clone url and tries to replace it
241 Gets a special pattern _<ID> from clone url and tries to replace it
241 with a repository_name for support of _<ID> non changeable urls
242 with a repository_name for support of _<ID> non changeable urls
242 """
243 """
243
244
244 data = repo_name.split('/')
245 data = repo_name.split('/')
245 if len(data) >= 2:
246 if len(data) >= 2:
246 from rhodecode.model.repo import RepoModel
247 from rhodecode.model.repo import RepoModel
247 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 if by_id_match:
249 if by_id_match:
249 data[1] = by_id_match.repo_name
250 data[1] = by_id_match.repo_name
250
251
251 return safe_str('/'.join(data))
252 return safe_str('/'.join(data))
252
253
253 def _invalidate_cache(self, repo_name):
254 def _invalidate_cache(self, repo_name):
254 """
255 """
255 Set's cache for this repository for invalidation on next access
256 Set's cache for this repository for invalidation on next access
256
257
257 :param repo_name: full repo name, also a cache key
258 :param repo_name: full repo name, also a cache key
258 """
259 """
259 ScmModel().mark_for_invalidation(repo_name)
260 ScmModel().mark_for_invalidation(repo_name)
260
261
261 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 db_repo = Repository.get_by_repo_name(repo_name)
263 db_repo = Repository.get_by_repo_name(repo_name)
263 if not db_repo:
264 if not db_repo:
264 log.debug('Repository `%s` not found inside the database.',
265 log.debug('Repository `%s` not found inside the database.',
265 repo_name)
266 repo_name)
266 return False
267 return False
267
268
268 if db_repo.repo_type != scm_type:
269 if db_repo.repo_type != scm_type:
269 log.warning(
270 log.warning(
270 'Repository `%s` have incorrect scm_type, expected %s got %s',
271 'Repository `%s` have incorrect scm_type, expected %s got %s',
271 repo_name, db_repo.repo_type, scm_type)
272 repo_name, db_repo.repo_type, scm_type)
272 return False
273 return False
273
274
274 config = db_repo._config
275 config = db_repo._config
275 config.set('extensions', 'largefiles', '')
276 config.set('extensions', 'largefiles', '')
276 return is_valid_repo(
277 return is_valid_repo(
277 repo_name, base_path,
278 repo_name, base_path,
278 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279
280
280 def valid_and_active_user(self, user):
281 def valid_and_active_user(self, user):
281 """
282 """
282 Checks if that user is not empty, and if it's actually object it checks
283 Checks if that user is not empty, and if it's actually object it checks
283 if he's active.
284 if he's active.
284
285
285 :param user: user object or None
286 :param user: user object or None
286 :return: boolean
287 :return: boolean
287 """
288 """
288 if user is None:
289 if user is None:
289 return False
290 return False
290
291
291 elif user.active:
292 elif user.active:
292 return True
293 return True
293
294
294 return False
295 return False
295
296
296 @property
297 @property
297 def is_shadow_repo_dir(self):
298 def is_shadow_repo_dir(self):
298 return os.path.isdir(self.vcs_repo_name)
299 return os.path.isdir(self.vcs_repo_name)
299
300
300 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 """
303 """
303 Checks permissions using action (push/pull) user and repository
304 Checks permissions using action (push/pull) user and repository
304 name. If plugin_cache and ttl is set it will use the plugin which
305 name. If plugin_cache and ttl is set it will use the plugin which
305 authenticated the user to store the cached permissions result for N
306 authenticated the user to store the cached permissions result for N
306 amount of seconds as in cache_ttl
307 amount of seconds as in cache_ttl
307
308
308 :param action: push or pull action
309 :param action: push or pull action
309 :param user: user instance
310 :param user: user instance
310 :param repo_name: repository name
311 :param repo_name: repository name
311 """
312 """
312
313
313 # get instance of cache manager configured for a namespace
314 # get instance of cache manager configured for a namespace
314 cache_manager = get_perms_cache_manager(
315 cache_manager = get_perms_cache_manager(
315 custom_ttl=cache_ttl, suffix=user.user_id)
316 custom_ttl=cache_ttl, suffix=user.user_id)
316 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
317 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
317 plugin_id, plugin_cache_active, cache_ttl)
318 plugin_id, plugin_cache_active, cache_ttl)
318
319
319 # for environ based password can be empty, but then the validation is
320 # for environ based password can be empty, but then the validation is
320 # on the server that fills in the env data needed for authentication
321 # on the server that fills in the env data needed for authentication
321 _perm_calc_hash = caches.compute_key_from_params(
322 _perm_calc_hash = caches.compute_key_from_params(
322 plugin_id, action, user.user_id, repo_name, ip_addr)
323 plugin_id, action, user.user_id, repo_name, ip_addr)
323
324
324 # _authenticate is a wrapper for .auth() method of plugin.
325 # _authenticate is a wrapper for .auth() method of plugin.
325 # it checks if .auth() sends proper data.
326 # it checks if .auth() sends proper data.
326 # For RhodeCodeExternalAuthPlugin it also maps users to
327 # For RhodeCodeExternalAuthPlugin it also maps users to
327 # Database and maps the attributes returned from .auth()
328 # Database and maps the attributes returned from .auth()
328 # to RhodeCode database. If this function returns data
329 # to RhodeCode database. If this function returns data
329 # then auth is correct.
330 # then auth is correct.
330 start = time.time()
331 start = time.time()
331 log.debug('Running plugin `%s` permissions check', plugin_id)
332 log.debug('Running plugin `%s` permissions check', plugin_id)
332
333
333 def perm_func():
334 def perm_func():
334 """
335 """
335 This function is used internally in Cache of Beaker to calculate
336 This function is used internally in Cache of Beaker to calculate
336 Results
337 Results
337 """
338 """
338 log.debug('auth: calculating permission access now...')
339 log.debug('auth: calculating permission access now...')
339 # check IP
340 # check IP
340 inherit = user.inherit_default_permissions
341 inherit = user.inherit_default_permissions
341 ip_allowed = AuthUser.check_ip_allowed(
342 ip_allowed = AuthUser.check_ip_allowed(
342 user.user_id, ip_addr, inherit_from_default=inherit)
343 user.user_id, ip_addr, inherit_from_default=inherit)
343 if ip_allowed:
344 if ip_allowed:
344 log.info('Access for IP:%s allowed', ip_addr)
345 log.info('Access for IP:%s allowed', ip_addr)
345 else:
346 else:
346 return False
347 return False
347
348
348 if action == 'push':
349 if action == 'push':
349 perms = ('repository.write', 'repository.admin')
350 perms = ('repository.write', 'repository.admin')
350 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
351 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
351 return False
352 return False
352
353
353 else:
354 else:
354 # any other action need at least read permission
355 # any other action need at least read permission
355 perms = (
356 perms = (
356 'repository.read', 'repository.write', 'repository.admin')
357 'repository.read', 'repository.write', 'repository.admin')
357 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
358 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
358 return False
359 return False
359
360
360 return True
361 return True
361
362
362 if plugin_cache_active:
363 if plugin_cache_active:
363 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
364 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
364 perm_result = cache_manager.get(
365 perm_result = cache_manager.get(
365 _perm_calc_hash, createfunc=perm_func)
366 _perm_calc_hash, createfunc=perm_func)
366 else:
367 else:
367 perm_result = perm_func()
368 perm_result = perm_func()
368
369
369 auth_time = time.time() - start
370 auth_time = time.time() - start
370 log.debug('Permissions for plugin `%s` completed in %.3fs, '
371 log.debug('Permissions for plugin `%s` completed in %.3fs, '
371 'expiration time of fetched cache %.1fs.',
372 'expiration time of fetched cache %.1fs.',
372 plugin_id, auth_time, cache_ttl)
373 plugin_id, auth_time, cache_ttl)
373
374
374 return perm_result
375 return perm_result
375
376
376 def _check_ssl(self, environ, start_response):
377 def _check_ssl(self, environ, start_response):
377 """
378 """
378 Checks the SSL check flag and returns False if SSL is not present
379 Checks the SSL check flag and returns False if SSL is not present
379 and required True otherwise
380 and required True otherwise
380 """
381 """
381 org_proto = environ['wsgi._org_proto']
382 org_proto = environ['wsgi._org_proto']
382 # check if we have SSL required ! if not it's a bad request !
383 # check if we have SSL required ! if not it's a bad request !
383 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 if require_ssl and org_proto == 'http':
385 if require_ssl and org_proto == 'http':
385 log.debug(
386 log.debug(
386 'Bad request: detected protocol is `%s` and '
387 'Bad request: detected protocol is `%s` and '
387 'SSL/HTTPS is required.', org_proto)
388 'SSL/HTTPS is required.', org_proto)
388 return False
389 return False
389 return True
390 return True
390
391
391 def _get_default_cache_ttl(self):
392 def _get_default_cache_ttl(self):
392 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
393 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
393 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
394 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
394 plugin_settings = plugin.get_settings()
395 plugin_settings = plugin.get_settings()
395 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
396 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
396 plugin_settings) or (False, 0)
397 plugin_settings) or (False, 0)
397 return plugin_cache_active, cache_ttl
398 return plugin_cache_active, cache_ttl
398
399
399 def __call__(self, environ, start_response):
400 def __call__(self, environ, start_response):
400 try:
401 try:
401 return self._handle_request(environ, start_response)
402 return self._handle_request(environ, start_response)
402 except Exception:
403 except Exception:
403 log.exception("Exception while handling request")
404 log.exception("Exception while handling request")
404 appenlight.track_exception(environ)
405 appenlight.track_exception(environ)
405 return HTTPInternalServerError()(environ, start_response)
406 return HTTPInternalServerError()(environ, start_response)
406 finally:
407 finally:
407 meta.Session.remove()
408 meta.Session.remove()
408
409
409 def _handle_request(self, environ, start_response):
410 def _handle_request(self, environ, start_response):
410
411
411 if not self._check_ssl(environ, start_response):
412 if not self._check_ssl(environ, start_response):
412 reason = ('SSL required, while RhodeCode was unable '
413 reason = ('SSL required, while RhodeCode was unable '
413 'to detect this as SSL request')
414 'to detect this as SSL request')
414 log.debug('User not allowed to proceed, %s', reason)
415 log.debug('User not allowed to proceed, %s', reason)
415 return HTTPNotAcceptable(reason)(environ, start_response)
416 return HTTPNotAcceptable(reason)(environ, start_response)
416
417
417 if not self.url_repo_name:
418 if not self.url_repo_name:
418 log.warning('Repository name is empty: %s', self.url_repo_name)
419 log.warning('Repository name is empty: %s', self.url_repo_name)
419 # failed to get repo name, we fail now
420 # failed to get repo name, we fail now
420 return HTTPNotFound()(environ, start_response)
421 return HTTPNotFound()(environ, start_response)
421 log.debug('Extracted repo name is %s', self.url_repo_name)
422 log.debug('Extracted repo name is %s', self.url_repo_name)
422
423
423 ip_addr = get_ip_addr(environ)
424 ip_addr = get_ip_addr(environ)
424 user_agent = get_user_agent(environ)
425 user_agent = get_user_agent(environ)
425 username = None
426 username = None
426
427
427 # skip passing error to error controller
428 # skip passing error to error controller
428 environ['pylons.status_code_redirect'] = True
429 environ['pylons.status_code_redirect'] = True
429
430
430 # ======================================================================
431 # ======================================================================
431 # GET ACTION PULL or PUSH
432 # GET ACTION PULL or PUSH
432 # ======================================================================
433 # ======================================================================
433 action = self._get_action(environ)
434 action = self._get_action(environ)
434
435
435 # ======================================================================
436 # ======================================================================
436 # Check if this is a request to a shadow repository of a pull request.
437 # Check if this is a request to a shadow repository of a pull request.
437 # In this case only pull action is allowed.
438 # In this case only pull action is allowed.
438 # ======================================================================
439 # ======================================================================
439 if self.is_shadow_repo and action != 'pull':
440 if self.is_shadow_repo and action != 'pull':
440 reason = 'Only pull action is allowed for shadow repositories.'
441 reason = 'Only pull action is allowed for shadow repositories.'
441 log.debug('User not allowed to proceed, %s', reason)
442 log.debug('User not allowed to proceed, %s', reason)
442 return HTTPNotAcceptable(reason)(environ, start_response)
443 return HTTPNotAcceptable(reason)(environ, start_response)
443
444
444 # Check if the shadow repo actually exists, in case someone refers
445 # Check if the shadow repo actually exists, in case someone refers
445 # to it, and it has been deleted because of successful merge.
446 # to it, and it has been deleted because of successful merge.
446 if self.is_shadow_repo and not self.is_shadow_repo_dir:
447 if self.is_shadow_repo and not self.is_shadow_repo_dir:
447 log.debug(
448 log.debug(
448 'Shadow repo detected, and shadow repo dir `%s` is missing',
449 'Shadow repo detected, and shadow repo dir `%s` is missing',
449 self.is_shadow_repo_dir)
450 self.is_shadow_repo_dir)
450 return HTTPNotFound()(environ, start_response)
451 return HTTPNotFound()(environ, start_response)
451
452
452 # ======================================================================
453 # ======================================================================
453 # CHECK ANONYMOUS PERMISSION
454 # CHECK ANONYMOUS PERMISSION
454 # ======================================================================
455 # ======================================================================
455 if action in ['pull', 'push']:
456 if action in ['pull', 'push']:
456 anonymous_user = User.get_default_user()
457 anonymous_user = User.get_default_user()
457 username = anonymous_user.username
458 username = anonymous_user.username
458 if anonymous_user.active:
459 if anonymous_user.active:
459 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
460 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
460 # ONLY check permissions if the user is activated
461 # ONLY check permissions if the user is activated
461 anonymous_perm = self._check_permission(
462 anonymous_perm = self._check_permission(
462 action, anonymous_user, self.acl_repo_name, ip_addr,
463 action, anonymous_user, self.acl_repo_name, ip_addr,
463 plugin_id='anonymous_access',
464 plugin_id='anonymous_access',
464 plugin_cache_active=plugin_cache_active,
465 plugin_cache_active=plugin_cache_active,
465 cache_ttl=cache_ttl,
466 cache_ttl=cache_ttl,
466 )
467 )
467 else:
468 else:
468 anonymous_perm = False
469 anonymous_perm = False
469
470
470 if not anonymous_user.active or not anonymous_perm:
471 if not anonymous_user.active or not anonymous_perm:
471 if not anonymous_user.active:
472 if not anonymous_user.active:
472 log.debug('Anonymous access is disabled, running '
473 log.debug('Anonymous access is disabled, running '
473 'authentication')
474 'authentication')
474
475
475 if not anonymous_perm:
476 if not anonymous_perm:
476 log.debug('Not enough credentials to access this '
477 log.debug('Not enough credentials to access this '
477 'repository as anonymous user')
478 'repository as anonymous user')
478
479
479 username = None
480 username = None
480 # ==============================================================
481 # ==============================================================
481 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 # ==============================================================
484 # ==============================================================
484
485
485 # try to auth based on environ, container auth methods
486 # try to auth based on environ, container auth methods
486 log.debug('Running PRE-AUTH for container based authentication')
487 log.debug('Running PRE-AUTH for container based authentication')
487 pre_auth = authenticate(
488 pre_auth = authenticate(
488 '', '', environ, VCS_TYPE, registry=self.registry,
489 '', '', environ, VCS_TYPE, registry=self.registry,
489 acl_repo_name=self.acl_repo_name)
490 acl_repo_name=self.acl_repo_name)
490 if pre_auth and pre_auth.get('username'):
491 if pre_auth and pre_auth.get('username'):
491 username = pre_auth['username']
492 username = pre_auth['username']
492 log.debug('PRE-AUTH got %s as username', username)
493 log.debug('PRE-AUTH got %s as username', username)
493 if pre_auth:
494 if pre_auth:
494 log.debug('PRE-AUTH successful from %s',
495 log.debug('PRE-AUTH successful from %s',
495 pre_auth.get('auth_data', {}).get('_plugin'))
496 pre_auth.get('auth_data', {}).get('_plugin'))
496
497
497 # If not authenticated by the container, running basic auth
498 # If not authenticated by the container, running basic auth
498 # before inject the calling repo_name for special scope checks
499 # before inject the calling repo_name for special scope checks
499 self.authenticate.acl_repo_name = self.acl_repo_name
500 self.authenticate.acl_repo_name = self.acl_repo_name
500
501
501 plugin_cache_active, cache_ttl = False, 0
502 plugin_cache_active, cache_ttl = False, 0
502 plugin = None
503 plugin = None
503 if not username:
504 if not username:
504 self.authenticate.realm = self.authenticate.get_rc_realm()
505 self.authenticate.realm = self.authenticate.get_rc_realm()
505
506
506 try:
507 try:
507 auth_result = self.authenticate(environ)
508 auth_result = self.authenticate(environ)
508 except (UserCreationError, NotAllowedToCreateUserError) as e:
509 except (UserCreationError, NotAllowedToCreateUserError) as e:
509 log.error(e)
510 log.error(e)
510 reason = safe_str(e)
511 reason = safe_str(e)
511 return HTTPNotAcceptable(reason)(environ, start_response)
512 return HTTPNotAcceptable(reason)(environ, start_response)
512
513
513 if isinstance(auth_result, dict):
514 if isinstance(auth_result, dict):
514 AUTH_TYPE.update(environ, 'basic')
515 AUTH_TYPE.update(environ, 'basic')
515 REMOTE_USER.update(environ, auth_result['username'])
516 REMOTE_USER.update(environ, auth_result['username'])
516 username = auth_result['username']
517 username = auth_result['username']
517 plugin = auth_result.get('auth_data', {}).get('_plugin')
518 plugin = auth_result.get('auth_data', {}).get('_plugin')
518 log.info(
519 log.info(
519 'MAIN-AUTH successful for user `%s` from %s plugin',
520 'MAIN-AUTH successful for user `%s` from %s plugin',
520 username, plugin)
521 username, plugin)
521
522
522 plugin_cache_active, cache_ttl = auth_result.get(
523 plugin_cache_active, cache_ttl = auth_result.get(
523 'auth_data', {}).get('_ttl_cache') or (False, 0)
524 'auth_data', {}).get('_ttl_cache') or (False, 0)
524 else:
525 else:
525 return auth_result.wsgi_application(
526 return auth_result.wsgi_application(
526 environ, start_response)
527 environ, start_response)
527
528
528
529
529 # ==============================================================
530 # ==============================================================
530 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
531 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
531 # ==============================================================
532 # ==============================================================
532 user = User.get_by_username(username)
533 user = User.get_by_username(username)
533 if not self.valid_and_active_user(user):
534 if not self.valid_and_active_user(user):
534 return HTTPForbidden()(environ, start_response)
535 return HTTPForbidden()(environ, start_response)
535 username = user.username
536 username = user.username
536 user.update_lastactivity()
537 user.update_lastactivity()
537 meta.Session().commit()
538 meta.Session().commit()
538
539
539 # check user attributes for password change flag
540 # check user attributes for password change flag
540 user_obj = user
541 user_obj = user
541 if user_obj and user_obj.username != User.DEFAULT_USER and \
542 if user_obj and user_obj.username != User.DEFAULT_USER and \
542 user_obj.user_data.get('force_password_change'):
543 user_obj.user_data.get('force_password_change'):
543 reason = 'password change required'
544 reason = 'password change required'
544 log.debug('User not allowed to authenticate, %s', reason)
545 log.debug('User not allowed to authenticate, %s', reason)
545 return HTTPNotAcceptable(reason)(environ, start_response)
546 return HTTPNotAcceptable(reason)(environ, start_response)
546
547
547 # check permissions for this repository
548 # check permissions for this repository
548 perm = self._check_permission(
549 perm = self._check_permission(
549 action, user, self.acl_repo_name, ip_addr,
550 action, user, self.acl_repo_name, ip_addr,
550 plugin, plugin_cache_active, cache_ttl)
551 plugin, plugin_cache_active, cache_ttl)
551 if not perm:
552 if not perm:
552 return HTTPForbidden()(environ, start_response)
553 return HTTPForbidden()(environ, start_response)
553
554
554 # extras are injected into UI object and later available
555 # extras are injected into UI object and later available
555 # in hooks executed by RhodeCode
556 # in hooks executed by RhodeCode
556 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
557 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
557 extras = vcs_operation_context(
558 extras = vcs_operation_context(
558 environ, repo_name=self.acl_repo_name, username=username,
559 environ, repo_name=self.acl_repo_name, username=username,
559 action=action, scm=self.SCM, check_locking=check_locking,
560 action=action, scm=self.SCM, check_locking=check_locking,
560 is_shadow_repo=self.is_shadow_repo
561 is_shadow_repo=self.is_shadow_repo
561 )
562 )
562
563
563 # ======================================================================
564 # ======================================================================
564 # REQUEST HANDLING
565 # REQUEST HANDLING
565 # ======================================================================
566 # ======================================================================
566 repo_path = os.path.join(
567 repo_path = os.path.join(
567 safe_str(self.base_path), safe_str(self.vcs_repo_name))
568 safe_str(self.base_path), safe_str(self.vcs_repo_name))
568 log.debug('Repository path is %s', repo_path)
569 log.debug('Repository path is %s', repo_path)
569
570
570 fix_PATH()
571 fix_PATH()
571
572
572 log.info(
573 log.info(
573 '%s action on %s repo "%s" by "%s" from %s %s',
574 '%s action on %s repo "%s" by "%s" from %s %s',
574 action, self.SCM, safe_str(self.url_repo_name),
575 action, self.SCM, safe_str(self.url_repo_name),
575 safe_str(username), ip_addr, user_agent)
576 safe_str(username), ip_addr, user_agent)
576
577
577 return self._generate_vcs_response(
578 return self._generate_vcs_response(
578 environ, start_response, repo_path, extras, action)
579 environ, start_response, repo_path, extras, action)
579
580
580 @initialize_generator
581 @initialize_generator
581 def _generate_vcs_response(
582 def _generate_vcs_response(
582 self, environ, start_response, repo_path, extras, action):
583 self, environ, start_response, repo_path, extras, action):
583 """
584 """
584 Returns a generator for the response content.
585 Returns a generator for the response content.
585
586
586 This method is implemented as a generator, so that it can trigger
587 This method is implemented as a generator, so that it can trigger
587 the cache validation after all content sent back to the client. It
588 the cache validation after all content sent back to the client. It
588 also handles the locking exceptions which will be triggered when
589 also handles the locking exceptions which will be triggered when
589 the first chunk is produced by the underlying WSGI application.
590 the first chunk is produced by the underlying WSGI application.
590 """
591 """
591 txn_id = ''
592 txn_id = ''
592 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 # case for SVN, we want to re-use the callback daemon port
594 # case for SVN, we want to re-use the callback daemon port
594 # so we use the txn_id, for this we peek the body, and still save
595 # so we use the txn_id, for this we peek the body, and still save
595 # it as wsgi.input
596 # it as wsgi.input
596 data = environ['wsgi.input'].read()
597 data = environ['wsgi.input'].read()
597 environ['wsgi.input'] = StringIO(data)
598 environ['wsgi.input'] = StringIO(data)
598 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
599 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
599
600
600 callback_daemon, extras = self._prepare_callback_daemon(
601 callback_daemon, extras = self._prepare_callback_daemon(
601 extras, environ, action, txn_id=txn_id)
602 extras, environ, action, txn_id=txn_id)
602 log.debug('HOOKS extras is %s', extras)
603 log.debug('HOOKS extras is %s', extras)
603
604
604 config = self._create_config(extras, self.acl_repo_name)
605 config = self._create_config(extras, self.acl_repo_name)
605 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 with callback_daemon:
607 with callback_daemon:
607 app.rc_extras = extras
608 app.rc_extras = extras
608
609
609 try:
610 try:
610 response = app(environ, start_response)
611 response = app(environ, start_response)
611 finally:
612 finally:
612 # This statement works together with the decorator
613 # This statement works together with the decorator
613 # "initialize_generator" above. The decorator ensures that
614 # "initialize_generator" above. The decorator ensures that
614 # we hit the first yield statement before the generator is
615 # we hit the first yield statement before the generator is
615 # returned back to the WSGI server. This is needed to
616 # returned back to the WSGI server. This is needed to
616 # ensure that the call to "app" above triggers the
617 # ensure that the call to "app" above triggers the
617 # needed callback to "start_response" before the
618 # needed callback to "start_response" before the
618 # generator is actually used.
619 # generator is actually used.
619 yield "__init__"
620 yield "__init__"
620
621
621 # iter content
622 # iter content
622 for chunk in response:
623 for chunk in response:
623 yield chunk
624 yield chunk
624
625
625 try:
626 try:
626 # invalidate cache on push
627 # invalidate cache on push
627 if action == 'push':
628 if action == 'push':
628 self._invalidate_cache(self.url_repo_name)
629 self._invalidate_cache(self.url_repo_name)
629 finally:
630 finally:
630 meta.Session.remove()
631 meta.Session.remove()
631
632
632 def _get_repository_name(self, environ):
633 def _get_repository_name(self, environ):
633 """Get repository name out of the environmnent
634 """Get repository name out of the environmnent
634
635
635 :param environ: WSGI environment
636 :param environ: WSGI environment
636 """
637 """
637 raise NotImplementedError()
638 raise NotImplementedError()
638
639
639 def _get_action(self, environ):
640 def _get_action(self, environ):
640 """Map request commands into a pull or push command.
641 """Map request commands into a pull or push command.
641
642
642 :param environ: WSGI environment
643 :param environ: WSGI environment
643 """
644 """
644 raise NotImplementedError()
645 raise NotImplementedError()
645
646
646 def _create_wsgi_app(self, repo_path, repo_name, config):
647 def _create_wsgi_app(self, repo_path, repo_name, config):
647 """Return the WSGI app that will finally handle the request."""
648 """Return the WSGI app that will finally handle the request."""
648 raise NotImplementedError()
649 raise NotImplementedError()
649
650
650 def _create_config(self, extras, repo_name):
651 def _create_config(self, extras, repo_name):
651 """Create a safe config representation."""
652 """Create a safe config representation."""
652 raise NotImplementedError()
653 raise NotImplementedError()
653
654
654 def _should_use_callback_daemon(self, extras, environ, action):
655 def _should_use_callback_daemon(self, extras, environ, action):
655 return True
656 return True
656
657
657 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 if not self._should_use_callback_daemon(extras, environ, action):
660 if not self._should_use_callback_daemon(extras, environ, action):
660 # disable callback daemon for actions that don't require it
661 # disable callback daemon for actions that don't require it
661 direct_calls = True
662 direct_calls = True
662
663
663 return prepare_callback_daemon(
664 return prepare_callback_daemon(
664 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
665 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
665 use_direct_calls=direct_calls, txn_id=txn_id)
666 use_direct_calls=direct_calls, txn_id=txn_id)
666
667
667
668
668 def _should_check_locking(query_string):
669 def _should_check_locking(query_string):
669 # this is kind of hacky, but due to how mercurial handles client-server
670 # this is kind of hacky, but due to how mercurial handles client-server
670 # server see all operation on commit; bookmarks, phases and
671 # server see all operation on commit; bookmarks, phases and
671 # obsolescence marker in different transaction, we don't want to check
672 # obsolescence marker in different transaction, we don't want to check
672 # locking on those
673 # locking on those
673 return query_string not in ['cmd=listkeys']
674 return query_string not in ['cmd=listkeys']
@@ -1,1730 +1,1746 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34 import shutil
34 import shutil
35
35
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
37
37
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 FILEMODE_DEFAULT = 0100644
53 FILEMODE_DEFAULT = 0100644
54 FILEMODE_EXECUTABLE = 0100755
54 FILEMODE_EXECUTABLE = 0100755
55
55
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 MergeResponse = collections.namedtuple(
57 MergeResponse = collections.namedtuple(
58 'MergeResponse',
58 'MergeResponse',
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
60
61
61
62 class MergeFailureReason(object):
62 class MergeFailureReason(object):
63 """
63 """
64 Enumeration with all the reasons why the server side merge could fail.
64 Enumeration with all the reasons why the server side merge could fail.
65
65
66 DO NOT change the number of the reasons, as they may be stored in the
66 DO NOT change the number of the reasons, as they may be stored in the
67 database.
67 database.
68
68
69 Changing the name of a reason is acceptable and encouraged to deprecate old
69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 reasons.
70 reasons.
71 """
71 """
72
72
73 # Everything went well.
73 # Everything went well.
74 NONE = 0
74 NONE = 0
75
75
76 # An unexpected exception was raised. Check the logs for more details.
76 # An unexpected exception was raised. Check the logs for more details.
77 UNKNOWN = 1
77 UNKNOWN = 1
78
78
79 # The merge was not successful, there are conflicts.
79 # The merge was not successful, there are conflicts.
80 MERGE_FAILED = 2
80 MERGE_FAILED = 2
81
81
82 # The merge succeeded but we could not push it to the target repository.
82 # The merge succeeded but we could not push it to the target repository.
83 PUSH_FAILED = 3
83 PUSH_FAILED = 3
84
84
85 # The specified target is not a head in the target repository.
85 # The specified target is not a head in the target repository.
86 TARGET_IS_NOT_HEAD = 4
86 TARGET_IS_NOT_HEAD = 4
87
87
88 # The source repository contains more branches than the target. Pushing
88 # The source repository contains more branches than the target. Pushing
89 # the merge will create additional branches in the target.
89 # the merge will create additional branches in the target.
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91
91
92 # The target reference has multiple heads. That does not allow to correctly
92 # The target reference has multiple heads. That does not allow to correctly
93 # identify the target location. This could only happen for mercurial
93 # identify the target location. This could only happen for mercurial
94 # branches.
94 # branches.
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96
96
97 # The target repository is locked
97 # The target repository is locked
98 TARGET_IS_LOCKED = 7
98 TARGET_IS_LOCKED = 7
99
99
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # A involved commit could not be found.
101 # A involved commit could not be found.
102 _DEPRECATED_MISSING_COMMIT = 8
102 _DEPRECATED_MISSING_COMMIT = 8
103
103
104 # The target repo reference is missing.
104 # The target repo reference is missing.
105 MISSING_TARGET_REF = 9
105 MISSING_TARGET_REF = 9
106
106
107 # The source repo reference is missing.
107 # The source repo reference is missing.
108 MISSING_SOURCE_REF = 10
108 MISSING_SOURCE_REF = 10
109
109
110 # The merge was not successful, there are conflicts related to sub
110 # The merge was not successful, there are conflicts related to sub
111 # repositories.
111 # repositories.
112 SUBREPO_MERGE_FAILED = 11
112 SUBREPO_MERGE_FAILED = 11
113
113
114
114
115 class UpdateFailureReason(object):
115 class UpdateFailureReason(object):
116 """
116 """
117 Enumeration with all the reasons why the pull request update could fail.
117 Enumeration with all the reasons why the pull request update could fail.
118
118
119 DO NOT change the number of the reasons, as they may be stored in the
119 DO NOT change the number of the reasons, as they may be stored in the
120 database.
120 database.
121
121
122 Changing the name of a reason is acceptable and encouraged to deprecate old
122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 reasons.
123 reasons.
124 """
124 """
125
125
126 # Everything went well.
126 # Everything went well.
127 NONE = 0
127 NONE = 0
128
128
129 # An unexpected exception was raised. Check the logs for more details.
129 # An unexpected exception was raised. Check the logs for more details.
130 UNKNOWN = 1
130 UNKNOWN = 1
131
131
132 # The pull request is up to date.
132 # The pull request is up to date.
133 NO_CHANGE = 2
133 NO_CHANGE = 2
134
134
135 # The pull request has a reference type that is not supported for update.
135 # The pull request has a reference type that is not supported for update.
136 WRONG_REF_TYPE = 3
136 WRONG_REF_TYPE = 3
137
137
138 # Update failed because the target reference is missing.
138 # Update failed because the target reference is missing.
139 MISSING_TARGET_REF = 4
139 MISSING_TARGET_REF = 4
140
140
141 # Update failed because the source reference is missing.
141 # Update failed because the source reference is missing.
142 MISSING_SOURCE_REF = 5
142 MISSING_SOURCE_REF = 5
143
143
144
144
145 class BaseRepository(object):
145 class BaseRepository(object):
146 """
146 """
147 Base Repository for final backends
147 Base Repository for final backends
148
148
149 .. attribute:: DEFAULT_BRANCH_NAME
149 .. attribute:: DEFAULT_BRANCH_NAME
150
150
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
152
152
153 .. attribute:: commit_ids
153 .. attribute:: commit_ids
154
154
155 list of all available commit ids, in ascending order
155 list of all available commit ids, in ascending order
156
156
157 .. attribute:: path
157 .. attribute:: path
158
158
159 absolute path to the repository
159 absolute path to the repository
160
160
161 .. attribute:: bookmarks
161 .. attribute:: bookmarks
162
162
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
164 there are no bookmarks or the backend implementation does not support
165 bookmarks.
165 bookmarks.
166
166
167 .. attribute:: tags
167 .. attribute:: tags
168
168
169 Mapping from name to :term:`Commit ID` of the tag.
169 Mapping from name to :term:`Commit ID` of the tag.
170
170
171 """
171 """
172
172
173 DEFAULT_BRANCH_NAME = None
173 DEFAULT_BRANCH_NAME = None
174 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_CONTACT = u"Unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
176 EMPTY_COMMIT_ID = '0' * 40
176 EMPTY_COMMIT_ID = '0' * 40
177
177
178 path = None
178 path = None
179 _remote = None
179 _remote = None
180
180
181 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 def __init__(self, repo_path, config=None, create=False, **kwargs):
182 """
182 """
183 Initializes repository. Raises RepositoryError if repository could
183 Initializes repository. Raises RepositoryError if repository could
184 not be find at the given ``repo_path`` or directory at ``repo_path``
184 not be find at the given ``repo_path`` or directory at ``repo_path``
185 exists and ``create`` is set to True.
185 exists and ``create`` is set to True.
186
186
187 :param repo_path: local path of the repository
187 :param repo_path: local path of the repository
188 :param config: repository configuration
188 :param config: repository configuration
189 :param create=False: if set to True, would try to create repository.
189 :param create=False: if set to True, would try to create repository.
190 :param src_url=None: if set, should be proper url from which repository
190 :param src_url=None: if set, should be proper url from which repository
191 would be cloned; requires ``create`` parameter to be set to True -
191 would be cloned; requires ``create`` parameter to be set to True -
192 raises RepositoryError if src_url is set and create evaluates to
192 raises RepositoryError if src_url is set and create evaluates to
193 False
193 False
194 """
194 """
195 raise NotImplementedError
195 raise NotImplementedError
196
196
197 def __repr__(self):
197 def __repr__(self):
198 return '<%s at %s>' % (self.__class__.__name__, self.path)
198 return '<%s at %s>' % (self.__class__.__name__, self.path)
199
199
200 def __len__(self):
200 def __len__(self):
201 return self.count()
201 return self.count()
202
202
203 def __eq__(self, other):
203 def __eq__(self, other):
204 same_instance = isinstance(other, self.__class__)
204 same_instance = isinstance(other, self.__class__)
205 return same_instance and other.path == self.path
205 return same_instance and other.path == self.path
206
206
207 def __ne__(self, other):
207 def __ne__(self, other):
208 return not self.__eq__(other)
208 return not self.__eq__(other)
209
209
210 def get_create_shadow_cache_pr_path(self, db_repo):
210 def get_create_shadow_cache_pr_path(self, db_repo):
211 path = db_repo.cached_diffs_dir
211 path = db_repo.cached_diffs_dir
212 if not os.path.exists(path):
212 if not os.path.exists(path):
213 os.makedirs(path, 0755)
213 os.makedirs(path, 0755)
214 return path
214 return path
215
215
216 @classmethod
216 @classmethod
217 def get_default_config(cls, default=None):
217 def get_default_config(cls, default=None):
218 config = Config()
218 config = Config()
219 if default and isinstance(default, list):
219 if default and isinstance(default, list):
220 for section, key, val in default:
220 for section, key, val in default:
221 config.set(section, key, val)
221 config.set(section, key, val)
222 return config
222 return config
223
223
224 @LazyProperty
224 @LazyProperty
225 def EMPTY_COMMIT(self):
225 def EMPTY_COMMIT(self):
226 return EmptyCommit(self.EMPTY_COMMIT_ID)
226 return EmptyCommit(self.EMPTY_COMMIT_ID)
227
227
228 @LazyProperty
228 @LazyProperty
229 def alias(self):
229 def alias(self):
230 for k, v in settings.BACKENDS.items():
230 for k, v in settings.BACKENDS.items():
231 if v.split('.')[-1] == str(self.__class__.__name__):
231 if v.split('.')[-1] == str(self.__class__.__name__):
232 return k
232 return k
233
233
234 @LazyProperty
234 @LazyProperty
235 def name(self):
235 def name(self):
236 return safe_unicode(os.path.basename(self.path))
236 return safe_unicode(os.path.basename(self.path))
237
237
238 @LazyProperty
238 @LazyProperty
239 def description(self):
239 def description(self):
240 raise NotImplementedError
240 raise NotImplementedError
241
241
242 def refs(self):
242 def refs(self):
243 """
243 """
244 returns a `dict` with branches, bookmarks, tags, and closed_branches
244 returns a `dict` with branches, bookmarks, tags, and closed_branches
245 for this repository
245 for this repository
246 """
246 """
247 return dict(
247 return dict(
248 branches=self.branches,
248 branches=self.branches,
249 branches_closed=self.branches_closed,
249 branches_closed=self.branches_closed,
250 tags=self.tags,
250 tags=self.tags,
251 bookmarks=self.bookmarks
251 bookmarks=self.bookmarks
252 )
252 )
253
253
254 @LazyProperty
254 @LazyProperty
255 def branches(self):
255 def branches(self):
256 """
256 """
257 A `dict` which maps branch names to commit ids.
257 A `dict` which maps branch names to commit ids.
258 """
258 """
259 raise NotImplementedError
259 raise NotImplementedError
260
260
261 @LazyProperty
261 @LazyProperty
262 def branches_closed(self):
262 def branches_closed(self):
263 """
263 """
264 A `dict` which maps tags names to commit ids.
264 A `dict` which maps tags names to commit ids.
265 """
265 """
266 raise NotImplementedError
266 raise NotImplementedError
267
267
268 @LazyProperty
268 @LazyProperty
269 def bookmarks(self):
269 def bookmarks(self):
270 """
270 """
271 A `dict` which maps tags names to commit ids.
271 A `dict` which maps tags names to commit ids.
272 """
272 """
273 raise NotImplementedError
273 raise NotImplementedError
274
274
275 @LazyProperty
275 @LazyProperty
276 def tags(self):
276 def tags(self):
277 """
277 """
278 A `dict` which maps tags names to commit ids.
278 A `dict` which maps tags names to commit ids.
279 """
279 """
280 raise NotImplementedError
280 raise NotImplementedError
281
281
282 @LazyProperty
282 @LazyProperty
283 def size(self):
283 def size(self):
284 """
284 """
285 Returns combined size in bytes for all repository files
285 Returns combined size in bytes for all repository files
286 """
286 """
287 tip = self.get_commit()
287 tip = self.get_commit()
288 return tip.size
288 return tip.size
289
289
290 def size_at_commit(self, commit_id):
290 def size_at_commit(self, commit_id):
291 commit = self.get_commit(commit_id)
291 commit = self.get_commit(commit_id)
292 return commit.size
292 return commit.size
293
293
294 def is_empty(self):
294 def is_empty(self):
295 return not bool(self.commit_ids)
295 return not bool(self.commit_ids)
296
296
297 @staticmethod
297 @staticmethod
298 def check_url(url, config):
298 def check_url(url, config):
299 """
299 """
300 Function will check given url and try to verify if it's a valid
300 Function will check given url and try to verify if it's a valid
301 link.
301 link.
302 """
302 """
303 raise NotImplementedError
303 raise NotImplementedError
304
304
305 @staticmethod
305 @staticmethod
306 def is_valid_repository(path):
306 def is_valid_repository(path):
307 """
307 """
308 Check if given `path` contains a valid repository of this backend
308 Check if given `path` contains a valid repository of this backend
309 """
309 """
310 raise NotImplementedError
310 raise NotImplementedError
311
311
312 # ==========================================================================
312 # ==========================================================================
313 # COMMITS
313 # COMMITS
314 # ==========================================================================
314 # ==========================================================================
315
315
316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
317 """
317 """
318 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
318 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
319 are both None, most recent commit is returned.
319 are both None, most recent commit is returned.
320
320
321 :param pre_load: Optional. List of commit attributes to load.
321 :param pre_load: Optional. List of commit attributes to load.
322
322
323 :raises ``EmptyRepositoryError``: if there are no commits
323 :raises ``EmptyRepositoryError``: if there are no commits
324 """
324 """
325 raise NotImplementedError
325 raise NotImplementedError
326
326
327 def __iter__(self):
327 def __iter__(self):
328 for commit_id in self.commit_ids:
328 for commit_id in self.commit_ids:
329 yield self.get_commit(commit_id=commit_id)
329 yield self.get_commit(commit_id=commit_id)
330
330
331 def get_commits(
331 def get_commits(
332 self, start_id=None, end_id=None, start_date=None, end_date=None,
332 self, start_id=None, end_id=None, start_date=None, end_date=None,
333 branch_name=None, show_hidden=False, pre_load=None):
333 branch_name=None, show_hidden=False, pre_load=None):
334 """
334 """
335 Returns iterator of `BaseCommit` objects from start to end
335 Returns iterator of `BaseCommit` objects from start to end
336 not inclusive. This should behave just like a list, ie. end is not
336 not inclusive. This should behave just like a list, ie. end is not
337 inclusive.
337 inclusive.
338
338
339 :param start_id: None or str, must be a valid commit id
339 :param start_id: None or str, must be a valid commit id
340 :param end_id: None or str, must be a valid commit id
340 :param end_id: None or str, must be a valid commit id
341 :param start_date:
341 :param start_date:
342 :param end_date:
342 :param end_date:
343 :param branch_name:
343 :param branch_name:
344 :param show_hidden:
344 :param show_hidden:
345 :param pre_load:
345 :param pre_load:
346 """
346 """
347 raise NotImplementedError
347 raise NotImplementedError
348
348
349 def __getitem__(self, key):
349 def __getitem__(self, key):
350 """
350 """
351 Allows index based access to the commit objects of this repository.
351 Allows index based access to the commit objects of this repository.
352 """
352 """
353 pre_load = ["author", "branch", "date", "message", "parents"]
353 pre_load = ["author", "branch", "date", "message", "parents"]
354 if isinstance(key, slice):
354 if isinstance(key, slice):
355 return self._get_range(key, pre_load)
355 return self._get_range(key, pre_load)
356 return self.get_commit(commit_idx=key, pre_load=pre_load)
356 return self.get_commit(commit_idx=key, pre_load=pre_load)
357
357
358 def _get_range(self, slice_obj, pre_load):
358 def _get_range(self, slice_obj, pre_load):
359 for commit_id in self.commit_ids.__getitem__(slice_obj):
359 for commit_id in self.commit_ids.__getitem__(slice_obj):
360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
361
361
362 def count(self):
362 def count(self):
363 return len(self.commit_ids)
363 return len(self.commit_ids)
364
364
365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
366 """
366 """
367 Creates and returns a tag for the given ``commit_id``.
367 Creates and returns a tag for the given ``commit_id``.
368
368
369 :param name: name for new tag
369 :param name: name for new tag
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
371 :param commit_id: commit id for which new tag would be created
371 :param commit_id: commit id for which new tag would be created
372 :param message: message of the tag's commit
372 :param message: message of the tag's commit
373 :param date: date of tag's commit
373 :param date: date of tag's commit
374
374
375 :raises TagAlreadyExistError: if tag with same name already exists
375 :raises TagAlreadyExistError: if tag with same name already exists
376 """
376 """
377 raise NotImplementedError
377 raise NotImplementedError
378
378
379 def remove_tag(self, name, user, message=None, date=None):
379 def remove_tag(self, name, user, message=None, date=None):
380 """
380 """
381 Removes tag with the given ``name``.
381 Removes tag with the given ``name``.
382
382
383 :param name: name of the tag to be removed
383 :param name: name of the tag to be removed
384 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
384 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
385 :param message: message of the tag's removal commit
385 :param message: message of the tag's removal commit
386 :param date: date of tag's removal commit
386 :param date: date of tag's removal commit
387
387
388 :raises TagDoesNotExistError: if tag with given name does not exists
388 :raises TagDoesNotExistError: if tag with given name does not exists
389 """
389 """
390 raise NotImplementedError
390 raise NotImplementedError
391
391
392 def get_diff(
392 def get_diff(
393 self, commit1, commit2, path=None, ignore_whitespace=False,
393 self, commit1, commit2, path=None, ignore_whitespace=False,
394 context=3, path1=None):
394 context=3, path1=None):
395 """
395 """
396 Returns (git like) *diff*, as plain text. Shows changes introduced by
396 Returns (git like) *diff*, as plain text. Shows changes introduced by
397 `commit2` since `commit1`.
397 `commit2` since `commit1`.
398
398
399 :param commit1: Entry point from which diff is shown. Can be
399 :param commit1: Entry point from which diff is shown. Can be
400 ``self.EMPTY_COMMIT`` - in this case, patch showing all
400 ``self.EMPTY_COMMIT`` - in this case, patch showing all
401 the changes since empty state of the repository until `commit2`
401 the changes since empty state of the repository until `commit2`
402 :param commit2: Until which commit changes should be shown.
402 :param commit2: Until which commit changes should be shown.
403 :param path: Can be set to a path of a file to create a diff of that
403 :param path: Can be set to a path of a file to create a diff of that
404 file. If `path1` is also set, this value is only associated to
404 file. If `path1` is also set, this value is only associated to
405 `commit2`.
405 `commit2`.
406 :param ignore_whitespace: If set to ``True``, would not show whitespace
406 :param ignore_whitespace: If set to ``True``, would not show whitespace
407 changes. Defaults to ``False``.
407 changes. Defaults to ``False``.
408 :param context: How many lines before/after changed lines should be
408 :param context: How many lines before/after changed lines should be
409 shown. Defaults to ``3``.
409 shown. Defaults to ``3``.
410 :param path1: Can be set to a path to associate with `commit1`. This
410 :param path1: Can be set to a path to associate with `commit1`. This
411 parameter works only for backends which support diff generation for
411 parameter works only for backends which support diff generation for
412 different paths. Other backends will raise a `ValueError` if `path1`
412 different paths. Other backends will raise a `ValueError` if `path1`
413 is set and has a different value than `path`.
413 is set and has a different value than `path`.
414 :param file_path: filter this diff by given path pattern
414 :param file_path: filter this diff by given path pattern
415 """
415 """
416 raise NotImplementedError
416 raise NotImplementedError
417
417
418 def strip(self, commit_id, branch=None):
418 def strip(self, commit_id, branch=None):
419 """
419 """
420 Strip given commit_id from the repository
420 Strip given commit_id from the repository
421 """
421 """
422 raise NotImplementedError
422 raise NotImplementedError
423
423
424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
425 """
425 """
426 Return a latest common ancestor commit if one exists for this repo
426 Return a latest common ancestor commit if one exists for this repo
427 `commit_id1` vs `commit_id2` from `repo2`.
427 `commit_id1` vs `commit_id2` from `repo2`.
428
428
429 :param commit_id1: Commit it from this repository to use as a
429 :param commit_id1: Commit it from this repository to use as a
430 target for the comparison.
430 target for the comparison.
431 :param commit_id2: Source commit id to use for comparison.
431 :param commit_id2: Source commit id to use for comparison.
432 :param repo2: Source repository to use for comparison.
432 :param repo2: Source repository to use for comparison.
433 """
433 """
434 raise NotImplementedError
434 raise NotImplementedError
435
435
436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
437 """
437 """
438 Compare this repository's revision `commit_id1` with `commit_id2`.
438 Compare this repository's revision `commit_id1` with `commit_id2`.
439
439
440 Returns a tuple(commits, ancestor) that would be merged from
440 Returns a tuple(commits, ancestor) that would be merged from
441 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
441 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
442 will be returned as ancestor.
442 will be returned as ancestor.
443
443
444 :param commit_id1: Commit it from this repository to use as a
444 :param commit_id1: Commit it from this repository to use as a
445 target for the comparison.
445 target for the comparison.
446 :param commit_id2: Source commit id to use for comparison.
446 :param commit_id2: Source commit id to use for comparison.
447 :param repo2: Source repository to use for comparison.
447 :param repo2: Source repository to use for comparison.
448 :param merge: If set to ``True`` will do a merge compare which also
448 :param merge: If set to ``True`` will do a merge compare which also
449 returns the common ancestor.
449 returns the common ancestor.
450 :param pre_load: Optional. List of commit attributes to load.
450 :param pre_load: Optional. List of commit attributes to load.
451 """
451 """
452 raise NotImplementedError
452 raise NotImplementedError
453
453
454 def merge(self, target_ref, source_repo, source_ref, workspace_id,
454 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
455 user_name='', user_email='', message='', dry_run=False,
455 user_name='', user_email='', message='', dry_run=False,
456 use_rebase=False, close_branch=False):
456 use_rebase=False, close_branch=False):
457 """
457 """
458 Merge the revisions specified in `source_ref` from `source_repo`
458 Merge the revisions specified in `source_ref` from `source_repo`
459 onto the `target_ref` of this repository.
459 onto the `target_ref` of this repository.
460
460
461 `source_ref` and `target_ref` are named tupls with the following
461 `source_ref` and `target_ref` are named tupls with the following
462 fields `type`, `name` and `commit_id`.
462 fields `type`, `name` and `commit_id`.
463
463
464 Returns a MergeResponse named tuple with the following fields
464 Returns a MergeResponse named tuple with the following fields
465 'possible', 'executed', 'source_commit', 'target_commit',
465 'possible', 'executed', 'source_commit', 'target_commit',
466 'merge_commit'.
466 'merge_commit'.
467
467
468 :param repo_id: `repo_id` target repo id.
469 :param workspace_id: `workspace_id` unique identifier.
468 :param target_ref: `target_ref` points to the commit on top of which
470 :param target_ref: `target_ref` points to the commit on top of which
469 the `source_ref` should be merged.
471 the `source_ref` should be merged.
470 :param source_repo: The repository that contains the commits to be
472 :param source_repo: The repository that contains the commits to be
471 merged.
473 merged.
472 :param source_ref: `source_ref` points to the topmost commit from
474 :param source_ref: `source_ref` points to the topmost commit from
473 the `source_repo` which should be merged.
475 the `source_repo` which should be merged.
474 :param workspace_id: `workspace_id` unique identifier.
475 :param user_name: Merge commit `user_name`.
476 :param user_name: Merge commit `user_name`.
476 :param user_email: Merge commit `user_email`.
477 :param user_email: Merge commit `user_email`.
477 :param message: Merge commit `message`.
478 :param message: Merge commit `message`.
478 :param dry_run: If `True` the merge will not take place.
479 :param dry_run: If `True` the merge will not take place.
479 :param use_rebase: If `True` commits from the source will be rebased
480 :param use_rebase: If `True` commits from the source will be rebased
480 on top of the target instead of being merged.
481 on top of the target instead of being merged.
481 :param close_branch: If `True` branch will be close before merging it
482 :param close_branch: If `True` branch will be close before merging it
482 """
483 """
483 if dry_run:
484 if dry_run:
484 message = message or 'dry_run_merge_message'
485 message = message or 'dry_run_merge_message'
485 user_email = user_email or 'dry-run-merge@rhodecode.com'
486 user_email = user_email or 'dry-run-merge@rhodecode.com'
486 user_name = user_name or 'Dry-Run User'
487 user_name = user_name or 'Dry-Run User'
487 else:
488 else:
488 if not user_name:
489 if not user_name:
489 raise ValueError('user_name cannot be empty')
490 raise ValueError('user_name cannot be empty')
490 if not user_email:
491 if not user_email:
491 raise ValueError('user_email cannot be empty')
492 raise ValueError('user_email cannot be empty')
492 if not message:
493 if not message:
493 raise ValueError('message cannot be empty')
494 raise ValueError('message cannot be empty')
494
495
495 shadow_repository_path = self._maybe_prepare_merge_workspace(
496 workspace_id, target_ref, source_ref)
497
498 try:
496 try:
499 return self._merge_repo(
497 return self._merge_repo(
500 shadow_repository_path, target_ref, source_repo,
498 repo_id, workspace_id, target_ref, source_repo,
501 source_ref, message, user_name, user_email, dry_run=dry_run,
499 source_ref, message, user_name, user_email, dry_run=dry_run,
502 use_rebase=use_rebase, close_branch=close_branch)
500 use_rebase=use_rebase, close_branch=close_branch)
503 except RepositoryError:
501 except RepositoryError:
504 log.exception(
502 log.exception(
505 'Unexpected failure when running merge, dry-run=%s',
503 'Unexpected failure when running merge, dry-run=%s',
506 dry_run)
504 dry_run)
507 return MergeResponse(
505 return MergeResponse(
508 False, False, None, MergeFailureReason.UNKNOWN)
506 False, False, None, MergeFailureReason.UNKNOWN)
509
507
510 def _merge_repo(self, shadow_repository_path, target_ref,
508 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 source_repo, source_ref, merge_message,
509 source_repo, source_ref, merge_message,
512 merger_name, merger_email, dry_run=False,
510 merger_name, merger_email, dry_run=False,
513 use_rebase=False, close_branch=False):
511 use_rebase=False, close_branch=False):
514 """Internal implementation of merge."""
512 """Internal implementation of merge."""
515 raise NotImplementedError
513 raise NotImplementedError
516
514
517 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
515 def _maybe_prepare_merge_workspace(
516 self, repo_id, workspace_id, target_ref, source_ref):
518 """
517 """
519 Create the merge workspace.
518 Create the merge workspace.
520
519
521 :param workspace_id: `workspace_id` unique identifier.
520 :param workspace_id: `workspace_id` unique identifier.
522 """
521 """
523 raise NotImplementedError
522 raise NotImplementedError
524
523
525 def _get_shadow_repository_path(self, workspace_id):
524 def _get_legacy_shadow_repository_path(self, workspace_id):
526 raise NotImplementedError
525 """
526 Legacy version that was used before. We still need it for
527 backward compat
528 """
529 return os.path.join(
530 os.path.dirname(self.path),
531 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
527
532
528 def cleanup_merge_workspace(self, workspace_id):
533 def _get_shadow_repository_path(self, repo_id, workspace_id):
534 # The name of the shadow repository must start with '.', so it is
535 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
536 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
537 if os.path.exists(legacy_repository_path):
538 return legacy_repository_path
539 else:
540 return os.path.join(
541 os.path.dirname(self.path),
542 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
543
544 def cleanup_merge_workspace(self, repo_id, workspace_id):
529 """
545 """
530 Remove merge workspace.
546 Remove merge workspace.
531
547
532 This function MUST not fail in case there is no workspace associated to
548 This function MUST not fail in case there is no workspace associated to
533 the given `workspace_id`.
549 the given `workspace_id`.
534
550
535 :param workspace_id: `workspace_id` unique identifier.
551 :param workspace_id: `workspace_id` unique identifier.
536 """
552 """
537 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
553 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
538 shadow_repository_path_del = '{}.{}.delete'.format(
554 shadow_repository_path_del = '{}.{}.delete'.format(
539 shadow_repository_path, time.time())
555 shadow_repository_path, time.time())
540
556
541 # move the shadow repo, so it never conflicts with the one used.
557 # move the shadow repo, so it never conflicts with the one used.
542 # we use this method because shutil.rmtree had some edge case problems
558 # we use this method because shutil.rmtree had some edge case problems
543 # removing symlinked repositories
559 # removing symlinked repositories
544 if not os.path.isdir(shadow_repository_path):
560 if not os.path.isdir(shadow_repository_path):
545 return
561 return
546
562
547 shutil.move(shadow_repository_path, shadow_repository_path_del)
563 shutil.move(shadow_repository_path, shadow_repository_path_del)
548 try:
564 try:
549 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
565 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
550 except Exception:
566 except Exception:
551 log.exception('Failed to gracefully remove shadow repo under %s',
567 log.exception('Failed to gracefully remove shadow repo under %s',
552 shadow_repository_path_del)
568 shadow_repository_path_del)
553 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
569 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
554
570
555 # ========== #
571 # ========== #
556 # COMMIT API #
572 # COMMIT API #
557 # ========== #
573 # ========== #
558
574
559 @LazyProperty
575 @LazyProperty
560 def in_memory_commit(self):
576 def in_memory_commit(self):
561 """
577 """
562 Returns :class:`InMemoryCommit` object for this repository.
578 Returns :class:`InMemoryCommit` object for this repository.
563 """
579 """
564 raise NotImplementedError
580 raise NotImplementedError
565
581
566 # ======================== #
582 # ======================== #
567 # UTILITIES FOR SUBCLASSES #
583 # UTILITIES FOR SUBCLASSES #
568 # ======================== #
584 # ======================== #
569
585
570 def _validate_diff_commits(self, commit1, commit2):
586 def _validate_diff_commits(self, commit1, commit2):
571 """
587 """
572 Validates that the given commits are related to this repository.
588 Validates that the given commits are related to this repository.
573
589
574 Intended as a utility for sub classes to have a consistent validation
590 Intended as a utility for sub classes to have a consistent validation
575 of input parameters in methods like :meth:`get_diff`.
591 of input parameters in methods like :meth:`get_diff`.
576 """
592 """
577 self._validate_commit(commit1)
593 self._validate_commit(commit1)
578 self._validate_commit(commit2)
594 self._validate_commit(commit2)
579 if (isinstance(commit1, EmptyCommit) and
595 if (isinstance(commit1, EmptyCommit) and
580 isinstance(commit2, EmptyCommit)):
596 isinstance(commit2, EmptyCommit)):
581 raise ValueError("Cannot compare two empty commits")
597 raise ValueError("Cannot compare two empty commits")
582
598
583 def _validate_commit(self, commit):
599 def _validate_commit(self, commit):
584 if not isinstance(commit, BaseCommit):
600 if not isinstance(commit, BaseCommit):
585 raise TypeError(
601 raise TypeError(
586 "%s is not of type BaseCommit" % repr(commit))
602 "%s is not of type BaseCommit" % repr(commit))
587 if commit.repository != self and not isinstance(commit, EmptyCommit):
603 if commit.repository != self and not isinstance(commit, EmptyCommit):
588 raise ValueError(
604 raise ValueError(
589 "Commit %s must be a valid commit from this repository %s, "
605 "Commit %s must be a valid commit from this repository %s, "
590 "related to this repository instead %s." %
606 "related to this repository instead %s." %
591 (commit, self, commit.repository))
607 (commit, self, commit.repository))
592
608
593 def _validate_commit_id(self, commit_id):
609 def _validate_commit_id(self, commit_id):
594 if not isinstance(commit_id, basestring):
610 if not isinstance(commit_id, basestring):
595 raise TypeError("commit_id must be a string value")
611 raise TypeError("commit_id must be a string value")
596
612
597 def _validate_commit_idx(self, commit_idx):
613 def _validate_commit_idx(self, commit_idx):
598 if not isinstance(commit_idx, (int, long)):
614 if not isinstance(commit_idx, (int, long)):
599 raise TypeError("commit_idx must be a numeric value")
615 raise TypeError("commit_idx must be a numeric value")
600
616
601 def _validate_branch_name(self, branch_name):
617 def _validate_branch_name(self, branch_name):
602 if branch_name and branch_name not in self.branches_all:
618 if branch_name and branch_name not in self.branches_all:
603 msg = ("Branch %s not found in %s" % (branch_name, self))
619 msg = ("Branch %s not found in %s" % (branch_name, self))
604 raise BranchDoesNotExistError(msg)
620 raise BranchDoesNotExistError(msg)
605
621
606 #
622 #
607 # Supporting deprecated API parts
623 # Supporting deprecated API parts
608 # TODO: johbo: consider to move this into a mixin
624 # TODO: johbo: consider to move this into a mixin
609 #
625 #
610
626
611 @property
627 @property
612 def EMPTY_CHANGESET(self):
628 def EMPTY_CHANGESET(self):
613 warnings.warn(
629 warnings.warn(
614 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
630 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
615 return self.EMPTY_COMMIT_ID
631 return self.EMPTY_COMMIT_ID
616
632
617 @property
633 @property
618 def revisions(self):
634 def revisions(self):
619 warnings.warn("Use commits attribute instead", DeprecationWarning)
635 warnings.warn("Use commits attribute instead", DeprecationWarning)
620 return self.commit_ids
636 return self.commit_ids
621
637
622 @revisions.setter
638 @revisions.setter
623 def revisions(self, value):
639 def revisions(self, value):
624 warnings.warn("Use commits attribute instead", DeprecationWarning)
640 warnings.warn("Use commits attribute instead", DeprecationWarning)
625 self.commit_ids = value
641 self.commit_ids = value
626
642
627 def get_changeset(self, revision=None, pre_load=None):
643 def get_changeset(self, revision=None, pre_load=None):
628 warnings.warn("Use get_commit instead", DeprecationWarning)
644 warnings.warn("Use get_commit instead", DeprecationWarning)
629 commit_id = None
645 commit_id = None
630 commit_idx = None
646 commit_idx = None
631 if isinstance(revision, basestring):
647 if isinstance(revision, basestring):
632 commit_id = revision
648 commit_id = revision
633 else:
649 else:
634 commit_idx = revision
650 commit_idx = revision
635 return self.get_commit(
651 return self.get_commit(
636 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
652 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
637
653
638 def get_changesets(
654 def get_changesets(
639 self, start=None, end=None, start_date=None, end_date=None,
655 self, start=None, end=None, start_date=None, end_date=None,
640 branch_name=None, pre_load=None):
656 branch_name=None, pre_load=None):
641 warnings.warn("Use get_commits instead", DeprecationWarning)
657 warnings.warn("Use get_commits instead", DeprecationWarning)
642 start_id = self._revision_to_commit(start)
658 start_id = self._revision_to_commit(start)
643 end_id = self._revision_to_commit(end)
659 end_id = self._revision_to_commit(end)
644 return self.get_commits(
660 return self.get_commits(
645 start_id=start_id, end_id=end_id, start_date=start_date,
661 start_id=start_id, end_id=end_id, start_date=start_date,
646 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
662 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
647
663
648 def _revision_to_commit(self, revision):
664 def _revision_to_commit(self, revision):
649 """
665 """
650 Translates a revision to a commit_id
666 Translates a revision to a commit_id
651
667
652 Helps to support the old changeset based API which allows to use
668 Helps to support the old changeset based API which allows to use
653 commit ids and commit indices interchangeable.
669 commit ids and commit indices interchangeable.
654 """
670 """
655 if revision is None:
671 if revision is None:
656 return revision
672 return revision
657
673
658 if isinstance(revision, basestring):
674 if isinstance(revision, basestring):
659 commit_id = revision
675 commit_id = revision
660 else:
676 else:
661 commit_id = self.commit_ids[revision]
677 commit_id = self.commit_ids[revision]
662 return commit_id
678 return commit_id
663
679
664 @property
680 @property
665 def in_memory_changeset(self):
681 def in_memory_changeset(self):
666 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
682 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
667 return self.in_memory_commit
683 return self.in_memory_commit
668
684
669 def get_path_permissions(self, username):
685 def get_path_permissions(self, username):
670 """
686 """
671 Returns a path permission checker or None if not supported
687 Returns a path permission checker or None if not supported
672
688
673 :param username: session user name
689 :param username: session user name
674 :return: an instance of BasePathPermissionChecker or None
690 :return: an instance of BasePathPermissionChecker or None
675 """
691 """
676 return None
692 return None
677
693
678 def install_hooks(self, force=False):
694 def install_hooks(self, force=False):
679 return self._remote.install_hooks(force)
695 return self._remote.install_hooks(force)
680
696
681
697
682 class BaseCommit(object):
698 class BaseCommit(object):
683 """
699 """
684 Each backend should implement it's commit representation.
700 Each backend should implement it's commit representation.
685
701
686 **Attributes**
702 **Attributes**
687
703
688 ``repository``
704 ``repository``
689 repository object within which commit exists
705 repository object within which commit exists
690
706
691 ``id``
707 ``id``
692 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
708 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
693 just ``tip``.
709 just ``tip``.
694
710
695 ``raw_id``
711 ``raw_id``
696 raw commit representation (i.e. full 40 length sha for git
712 raw commit representation (i.e. full 40 length sha for git
697 backend)
713 backend)
698
714
699 ``short_id``
715 ``short_id``
700 shortened (if apply) version of ``raw_id``; it would be simple
716 shortened (if apply) version of ``raw_id``; it would be simple
701 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
717 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
702 as ``raw_id`` for subversion
718 as ``raw_id`` for subversion
703
719
704 ``idx``
720 ``idx``
705 commit index
721 commit index
706
722
707 ``files``
723 ``files``
708 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
724 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
709
725
710 ``dirs``
726 ``dirs``
711 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
727 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
712
728
713 ``nodes``
729 ``nodes``
714 combined list of ``Node`` objects
730 combined list of ``Node`` objects
715
731
716 ``author``
732 ``author``
717 author of the commit, as unicode
733 author of the commit, as unicode
718
734
719 ``message``
735 ``message``
720 message of the commit, as unicode
736 message of the commit, as unicode
721
737
722 ``parents``
738 ``parents``
723 list of parent commits
739 list of parent commits
724
740
725 """
741 """
726
742
727 branch = None
743 branch = None
728 """
744 """
729 Depending on the backend this should be set to the branch name of the
745 Depending on the backend this should be set to the branch name of the
730 commit. Backends not supporting branches on commits should leave this
746 commit. Backends not supporting branches on commits should leave this
731 value as ``None``.
747 value as ``None``.
732 """
748 """
733
749
734 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
750 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
735 """
751 """
736 This template is used to generate a default prefix for repository archives
752 This template is used to generate a default prefix for repository archives
737 if no prefix has been specified.
753 if no prefix has been specified.
738 """
754 """
739
755
740 def __str__(self):
756 def __str__(self):
741 return '<%s at %s:%s>' % (
757 return '<%s at %s:%s>' % (
742 self.__class__.__name__, self.idx, self.short_id)
758 self.__class__.__name__, self.idx, self.short_id)
743
759
744 def __repr__(self):
760 def __repr__(self):
745 return self.__str__()
761 return self.__str__()
746
762
747 def __unicode__(self):
763 def __unicode__(self):
748 return u'%s:%s' % (self.idx, self.short_id)
764 return u'%s:%s' % (self.idx, self.short_id)
749
765
750 def __eq__(self, other):
766 def __eq__(self, other):
751 same_instance = isinstance(other, self.__class__)
767 same_instance = isinstance(other, self.__class__)
752 return same_instance and self.raw_id == other.raw_id
768 return same_instance and self.raw_id == other.raw_id
753
769
754 def __json__(self):
770 def __json__(self):
755 parents = []
771 parents = []
756 try:
772 try:
757 for parent in self.parents:
773 for parent in self.parents:
758 parents.append({'raw_id': parent.raw_id})
774 parents.append({'raw_id': parent.raw_id})
759 except NotImplementedError:
775 except NotImplementedError:
760 # empty commit doesn't have parents implemented
776 # empty commit doesn't have parents implemented
761 pass
777 pass
762
778
763 return {
779 return {
764 'short_id': self.short_id,
780 'short_id': self.short_id,
765 'raw_id': self.raw_id,
781 'raw_id': self.raw_id,
766 'revision': self.idx,
782 'revision': self.idx,
767 'message': self.message,
783 'message': self.message,
768 'date': self.date,
784 'date': self.date,
769 'author': self.author,
785 'author': self.author,
770 'parents': parents,
786 'parents': parents,
771 'branch': self.branch
787 'branch': self.branch
772 }
788 }
773
789
774 def __getstate__(self):
790 def __getstate__(self):
775 d = self.__dict__.copy()
791 d = self.__dict__.copy()
776 d.pop('_remote', None)
792 d.pop('_remote', None)
777 d.pop('repository', None)
793 d.pop('repository', None)
778 return d
794 return d
779
795
780 def _get_refs(self):
796 def _get_refs(self):
781 return {
797 return {
782 'branches': [self.branch] if self.branch else [],
798 'branches': [self.branch] if self.branch else [],
783 'bookmarks': getattr(self, 'bookmarks', []),
799 'bookmarks': getattr(self, 'bookmarks', []),
784 'tags': self.tags
800 'tags': self.tags
785 }
801 }
786
802
787 @LazyProperty
803 @LazyProperty
788 def last(self):
804 def last(self):
789 """
805 """
790 ``True`` if this is last commit in repository, ``False``
806 ``True`` if this is last commit in repository, ``False``
791 otherwise; trying to access this attribute while there is no
807 otherwise; trying to access this attribute while there is no
792 commits would raise `EmptyRepositoryError`
808 commits would raise `EmptyRepositoryError`
793 """
809 """
794 if self.repository is None:
810 if self.repository is None:
795 raise CommitError("Cannot check if it's most recent commit")
811 raise CommitError("Cannot check if it's most recent commit")
796 return self.raw_id == self.repository.commit_ids[-1]
812 return self.raw_id == self.repository.commit_ids[-1]
797
813
798 @LazyProperty
814 @LazyProperty
799 def parents(self):
815 def parents(self):
800 """
816 """
801 Returns list of parent commits.
817 Returns list of parent commits.
802 """
818 """
803 raise NotImplementedError
819 raise NotImplementedError
804
820
805 @property
821 @property
806 def merge(self):
822 def merge(self):
807 """
823 """
808 Returns boolean if commit is a merge.
824 Returns boolean if commit is a merge.
809 """
825 """
810 return len(self.parents) > 1
826 return len(self.parents) > 1
811
827
812 @LazyProperty
828 @LazyProperty
813 def children(self):
829 def children(self):
814 """
830 """
815 Returns list of child commits.
831 Returns list of child commits.
816 """
832 """
817 raise NotImplementedError
833 raise NotImplementedError
818
834
819 @LazyProperty
835 @LazyProperty
820 def id(self):
836 def id(self):
821 """
837 """
822 Returns string identifying this commit.
838 Returns string identifying this commit.
823 """
839 """
824 raise NotImplementedError
840 raise NotImplementedError
825
841
826 @LazyProperty
842 @LazyProperty
827 def raw_id(self):
843 def raw_id(self):
828 """
844 """
829 Returns raw string identifying this commit.
845 Returns raw string identifying this commit.
830 """
846 """
831 raise NotImplementedError
847 raise NotImplementedError
832
848
833 @LazyProperty
849 @LazyProperty
834 def short_id(self):
850 def short_id(self):
835 """
851 """
836 Returns shortened version of ``raw_id`` attribute, as string,
852 Returns shortened version of ``raw_id`` attribute, as string,
837 identifying this commit, useful for presentation to users.
853 identifying this commit, useful for presentation to users.
838 """
854 """
839 raise NotImplementedError
855 raise NotImplementedError
840
856
841 @LazyProperty
857 @LazyProperty
842 def idx(self):
858 def idx(self):
843 """
859 """
844 Returns integer identifying this commit.
860 Returns integer identifying this commit.
845 """
861 """
846 raise NotImplementedError
862 raise NotImplementedError
847
863
848 @LazyProperty
864 @LazyProperty
849 def committer(self):
865 def committer(self):
850 """
866 """
851 Returns committer for this commit
867 Returns committer for this commit
852 """
868 """
853 raise NotImplementedError
869 raise NotImplementedError
854
870
855 @LazyProperty
871 @LazyProperty
856 def committer_name(self):
872 def committer_name(self):
857 """
873 """
858 Returns committer name for this commit
874 Returns committer name for this commit
859 """
875 """
860
876
861 return author_name(self.committer)
877 return author_name(self.committer)
862
878
863 @LazyProperty
879 @LazyProperty
864 def committer_email(self):
880 def committer_email(self):
865 """
881 """
866 Returns committer email address for this commit
882 Returns committer email address for this commit
867 """
883 """
868
884
869 return author_email(self.committer)
885 return author_email(self.committer)
870
886
871 @LazyProperty
887 @LazyProperty
872 def author(self):
888 def author(self):
873 """
889 """
874 Returns author for this commit
890 Returns author for this commit
875 """
891 """
876
892
877 raise NotImplementedError
893 raise NotImplementedError
878
894
879 @LazyProperty
895 @LazyProperty
880 def author_name(self):
896 def author_name(self):
881 """
897 """
882 Returns author name for this commit
898 Returns author name for this commit
883 """
899 """
884
900
885 return author_name(self.author)
901 return author_name(self.author)
886
902
887 @LazyProperty
903 @LazyProperty
888 def author_email(self):
904 def author_email(self):
889 """
905 """
890 Returns author email address for this commit
906 Returns author email address for this commit
891 """
907 """
892
908
893 return author_email(self.author)
909 return author_email(self.author)
894
910
895 def get_file_mode(self, path):
911 def get_file_mode(self, path):
896 """
912 """
897 Returns stat mode of the file at `path`.
913 Returns stat mode of the file at `path`.
898 """
914 """
899 raise NotImplementedError
915 raise NotImplementedError
900
916
901 def is_link(self, path):
917 def is_link(self, path):
902 """
918 """
903 Returns ``True`` if given `path` is a symlink
919 Returns ``True`` if given `path` is a symlink
904 """
920 """
905 raise NotImplementedError
921 raise NotImplementedError
906
922
907 def get_file_content(self, path):
923 def get_file_content(self, path):
908 """
924 """
909 Returns content of the file at the given `path`.
925 Returns content of the file at the given `path`.
910 """
926 """
911 raise NotImplementedError
927 raise NotImplementedError
912
928
913 def get_file_size(self, path):
929 def get_file_size(self, path):
914 """
930 """
915 Returns size of the file at the given `path`.
931 Returns size of the file at the given `path`.
916 """
932 """
917 raise NotImplementedError
933 raise NotImplementedError
918
934
919 def get_file_commit(self, path, pre_load=None):
935 def get_file_commit(self, path, pre_load=None):
920 """
936 """
921 Returns last commit of the file at the given `path`.
937 Returns last commit of the file at the given `path`.
922
938
923 :param pre_load: Optional. List of commit attributes to load.
939 :param pre_load: Optional. List of commit attributes to load.
924 """
940 """
925 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
941 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
926 if not commits:
942 if not commits:
927 raise RepositoryError(
943 raise RepositoryError(
928 'Failed to fetch history for path {}. '
944 'Failed to fetch history for path {}. '
929 'Please check if such path exists in your repository'.format(
945 'Please check if such path exists in your repository'.format(
930 path))
946 path))
931 return commits[0]
947 return commits[0]
932
948
933 def get_file_history(self, path, limit=None, pre_load=None):
949 def get_file_history(self, path, limit=None, pre_load=None):
934 """
950 """
935 Returns history of file as reversed list of :class:`BaseCommit`
951 Returns history of file as reversed list of :class:`BaseCommit`
936 objects for which file at given `path` has been modified.
952 objects for which file at given `path` has been modified.
937
953
938 :param limit: Optional. Allows to limit the size of the returned
954 :param limit: Optional. Allows to limit the size of the returned
939 history. This is intended as a hint to the underlying backend, so
955 history. This is intended as a hint to the underlying backend, so
940 that it can apply optimizations depending on the limit.
956 that it can apply optimizations depending on the limit.
941 :param pre_load: Optional. List of commit attributes to load.
957 :param pre_load: Optional. List of commit attributes to load.
942 """
958 """
943 raise NotImplementedError
959 raise NotImplementedError
944
960
945 def get_file_annotate(self, path, pre_load=None):
961 def get_file_annotate(self, path, pre_load=None):
946 """
962 """
947 Returns a generator of four element tuples with
963 Returns a generator of four element tuples with
948 lineno, sha, commit lazy loader and line
964 lineno, sha, commit lazy loader and line
949
965
950 :param pre_load: Optional. List of commit attributes to load.
966 :param pre_load: Optional. List of commit attributes to load.
951 """
967 """
952 raise NotImplementedError
968 raise NotImplementedError
953
969
954 def get_nodes(self, path):
970 def get_nodes(self, path):
955 """
971 """
956 Returns combined ``DirNode`` and ``FileNode`` objects list representing
972 Returns combined ``DirNode`` and ``FileNode`` objects list representing
957 state of commit at the given ``path``.
973 state of commit at the given ``path``.
958
974
959 :raises ``CommitError``: if node at the given ``path`` is not
975 :raises ``CommitError``: if node at the given ``path`` is not
960 instance of ``DirNode``
976 instance of ``DirNode``
961 """
977 """
962 raise NotImplementedError
978 raise NotImplementedError
963
979
964 def get_node(self, path):
980 def get_node(self, path):
965 """
981 """
966 Returns ``Node`` object from the given ``path``.
982 Returns ``Node`` object from the given ``path``.
967
983
968 :raises ``NodeDoesNotExistError``: if there is no node at the given
984 :raises ``NodeDoesNotExistError``: if there is no node at the given
969 ``path``
985 ``path``
970 """
986 """
971 raise NotImplementedError
987 raise NotImplementedError
972
988
973 def get_largefile_node(self, path):
989 def get_largefile_node(self, path):
974 """
990 """
975 Returns the path to largefile from Mercurial/Git-lfs storage.
991 Returns the path to largefile from Mercurial/Git-lfs storage.
976 or None if it's not a largefile node
992 or None if it's not a largefile node
977 """
993 """
978 return None
994 return None
979
995
980 def archive_repo(self, file_path, kind='tgz', subrepos=None,
996 def archive_repo(self, file_path, kind='tgz', subrepos=None,
981 prefix=None, write_metadata=False, mtime=None):
997 prefix=None, write_metadata=False, mtime=None):
982 """
998 """
983 Creates an archive containing the contents of the repository.
999 Creates an archive containing the contents of the repository.
984
1000
985 :param file_path: path to the file which to create the archive.
1001 :param file_path: path to the file which to create the archive.
986 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1002 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
987 :param prefix: name of root directory in archive.
1003 :param prefix: name of root directory in archive.
988 Default is repository name and commit's short_id joined with dash:
1004 Default is repository name and commit's short_id joined with dash:
989 ``"{repo_name}-{short_id}"``.
1005 ``"{repo_name}-{short_id}"``.
990 :param write_metadata: write a metadata file into archive.
1006 :param write_metadata: write a metadata file into archive.
991 :param mtime: custom modification time for archive creation, defaults
1007 :param mtime: custom modification time for archive creation, defaults
992 to time.time() if not given.
1008 to time.time() if not given.
993
1009
994 :raise VCSError: If prefix has a problem.
1010 :raise VCSError: If prefix has a problem.
995 """
1011 """
996 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1012 allowed_kinds = settings.ARCHIVE_SPECS.keys()
997 if kind not in allowed_kinds:
1013 if kind not in allowed_kinds:
998 raise ImproperArchiveTypeError(
1014 raise ImproperArchiveTypeError(
999 'Archive kind (%s) not supported use one of %s' %
1015 'Archive kind (%s) not supported use one of %s' %
1000 (kind, allowed_kinds))
1016 (kind, allowed_kinds))
1001
1017
1002 prefix = self._validate_archive_prefix(prefix)
1018 prefix = self._validate_archive_prefix(prefix)
1003
1019
1004 mtime = mtime or time.mktime(self.date.timetuple())
1020 mtime = mtime or time.mktime(self.date.timetuple())
1005
1021
1006 file_info = []
1022 file_info = []
1007 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1023 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1008 for _r, _d, files in cur_rev.walk('/'):
1024 for _r, _d, files in cur_rev.walk('/'):
1009 for f in files:
1025 for f in files:
1010 f_path = os.path.join(prefix, f.path)
1026 f_path = os.path.join(prefix, f.path)
1011 file_info.append(
1027 file_info.append(
1012 (f_path, f.mode, f.is_link(), f.raw_bytes))
1028 (f_path, f.mode, f.is_link(), f.raw_bytes))
1013
1029
1014 if write_metadata:
1030 if write_metadata:
1015 metadata = [
1031 metadata = [
1016 ('repo_name', self.repository.name),
1032 ('repo_name', self.repository.name),
1017 ('rev', self.raw_id),
1033 ('rev', self.raw_id),
1018 ('create_time', mtime),
1034 ('create_time', mtime),
1019 ('branch', self.branch),
1035 ('branch', self.branch),
1020 ('tags', ','.join(self.tags)),
1036 ('tags', ','.join(self.tags)),
1021 ]
1037 ]
1022 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1038 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1023 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1039 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1024
1040
1025 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1041 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1026
1042
1027 def _validate_archive_prefix(self, prefix):
1043 def _validate_archive_prefix(self, prefix):
1028 if prefix is None:
1044 if prefix is None:
1029 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1045 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1030 repo_name=safe_str(self.repository.name),
1046 repo_name=safe_str(self.repository.name),
1031 short_id=self.short_id)
1047 short_id=self.short_id)
1032 elif not isinstance(prefix, str):
1048 elif not isinstance(prefix, str):
1033 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1049 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1034 elif prefix.startswith('/'):
1050 elif prefix.startswith('/'):
1035 raise VCSError("Prefix cannot start with leading slash")
1051 raise VCSError("Prefix cannot start with leading slash")
1036 elif prefix.strip() == '':
1052 elif prefix.strip() == '':
1037 raise VCSError("Prefix cannot be empty")
1053 raise VCSError("Prefix cannot be empty")
1038 return prefix
1054 return prefix
1039
1055
1040 @LazyProperty
1056 @LazyProperty
1041 def root(self):
1057 def root(self):
1042 """
1058 """
1043 Returns ``RootNode`` object for this commit.
1059 Returns ``RootNode`` object for this commit.
1044 """
1060 """
1045 return self.get_node('')
1061 return self.get_node('')
1046
1062
1047 def next(self, branch=None):
1063 def next(self, branch=None):
1048 """
1064 """
1049 Returns next commit from current, if branch is gives it will return
1065 Returns next commit from current, if branch is gives it will return
1050 next commit belonging to this branch
1066 next commit belonging to this branch
1051
1067
1052 :param branch: show commits within the given named branch
1068 :param branch: show commits within the given named branch
1053 """
1069 """
1054 indexes = xrange(self.idx + 1, self.repository.count())
1070 indexes = xrange(self.idx + 1, self.repository.count())
1055 return self._find_next(indexes, branch)
1071 return self._find_next(indexes, branch)
1056
1072
1057 def prev(self, branch=None):
1073 def prev(self, branch=None):
1058 """
1074 """
1059 Returns previous commit from current, if branch is gives it will
1075 Returns previous commit from current, if branch is gives it will
1060 return previous commit belonging to this branch
1076 return previous commit belonging to this branch
1061
1077
1062 :param branch: show commit within the given named branch
1078 :param branch: show commit within the given named branch
1063 """
1079 """
1064 indexes = xrange(self.idx - 1, -1, -1)
1080 indexes = xrange(self.idx - 1, -1, -1)
1065 return self._find_next(indexes, branch)
1081 return self._find_next(indexes, branch)
1066
1082
1067 def _find_next(self, indexes, branch=None):
1083 def _find_next(self, indexes, branch=None):
1068 if branch and self.branch != branch:
1084 if branch and self.branch != branch:
1069 raise VCSError('Branch option used on commit not belonging '
1085 raise VCSError('Branch option used on commit not belonging '
1070 'to that branch')
1086 'to that branch')
1071
1087
1072 for next_idx in indexes:
1088 for next_idx in indexes:
1073 commit = self.repository.get_commit(commit_idx=next_idx)
1089 commit = self.repository.get_commit(commit_idx=next_idx)
1074 if branch and branch != commit.branch:
1090 if branch and branch != commit.branch:
1075 continue
1091 continue
1076 return commit
1092 return commit
1077 raise CommitDoesNotExistError
1093 raise CommitDoesNotExistError
1078
1094
1079 def diff(self, ignore_whitespace=True, context=3):
1095 def diff(self, ignore_whitespace=True, context=3):
1080 """
1096 """
1081 Returns a `Diff` object representing the change made by this commit.
1097 Returns a `Diff` object representing the change made by this commit.
1082 """
1098 """
1083 parent = (
1099 parent = (
1084 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1100 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1085 diff = self.repository.get_diff(
1101 diff = self.repository.get_diff(
1086 parent, self,
1102 parent, self,
1087 ignore_whitespace=ignore_whitespace,
1103 ignore_whitespace=ignore_whitespace,
1088 context=context)
1104 context=context)
1089 return diff
1105 return diff
1090
1106
1091 @LazyProperty
1107 @LazyProperty
1092 def added(self):
1108 def added(self):
1093 """
1109 """
1094 Returns list of added ``FileNode`` objects.
1110 Returns list of added ``FileNode`` objects.
1095 """
1111 """
1096 raise NotImplementedError
1112 raise NotImplementedError
1097
1113
1098 @LazyProperty
1114 @LazyProperty
1099 def changed(self):
1115 def changed(self):
1100 """
1116 """
1101 Returns list of modified ``FileNode`` objects.
1117 Returns list of modified ``FileNode`` objects.
1102 """
1118 """
1103 raise NotImplementedError
1119 raise NotImplementedError
1104
1120
1105 @LazyProperty
1121 @LazyProperty
1106 def removed(self):
1122 def removed(self):
1107 """
1123 """
1108 Returns list of removed ``FileNode`` objects.
1124 Returns list of removed ``FileNode`` objects.
1109 """
1125 """
1110 raise NotImplementedError
1126 raise NotImplementedError
1111
1127
1112 @LazyProperty
1128 @LazyProperty
1113 def size(self):
1129 def size(self):
1114 """
1130 """
1115 Returns total number of bytes from contents of all filenodes.
1131 Returns total number of bytes from contents of all filenodes.
1116 """
1132 """
1117 return sum((node.size for node in self.get_filenodes_generator()))
1133 return sum((node.size for node in self.get_filenodes_generator()))
1118
1134
1119 def walk(self, topurl=''):
1135 def walk(self, topurl=''):
1120 """
1136 """
1121 Similar to os.walk method. Insted of filesystem it walks through
1137 Similar to os.walk method. Insted of filesystem it walks through
1122 commit starting at given ``topurl``. Returns generator of tuples
1138 commit starting at given ``topurl``. Returns generator of tuples
1123 (topnode, dirnodes, filenodes).
1139 (topnode, dirnodes, filenodes).
1124 """
1140 """
1125 topnode = self.get_node(topurl)
1141 topnode = self.get_node(topurl)
1126 if not topnode.is_dir():
1142 if not topnode.is_dir():
1127 return
1143 return
1128 yield (topnode, topnode.dirs, topnode.files)
1144 yield (topnode, topnode.dirs, topnode.files)
1129 for dirnode in topnode.dirs:
1145 for dirnode in topnode.dirs:
1130 for tup in self.walk(dirnode.path):
1146 for tup in self.walk(dirnode.path):
1131 yield tup
1147 yield tup
1132
1148
1133 def get_filenodes_generator(self):
1149 def get_filenodes_generator(self):
1134 """
1150 """
1135 Returns generator that yields *all* file nodes.
1151 Returns generator that yields *all* file nodes.
1136 """
1152 """
1137 for topnode, dirs, files in self.walk():
1153 for topnode, dirs, files in self.walk():
1138 for node in files:
1154 for node in files:
1139 yield node
1155 yield node
1140
1156
1141 #
1157 #
1142 # Utilities for sub classes to support consistent behavior
1158 # Utilities for sub classes to support consistent behavior
1143 #
1159 #
1144
1160
1145 def no_node_at_path(self, path):
1161 def no_node_at_path(self, path):
1146 return NodeDoesNotExistError(
1162 return NodeDoesNotExistError(
1147 u"There is no file nor directory at the given path: "
1163 u"There is no file nor directory at the given path: "
1148 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1164 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1149
1165
1150 def _fix_path(self, path):
1166 def _fix_path(self, path):
1151 """
1167 """
1152 Paths are stored without trailing slash so we need to get rid off it if
1168 Paths are stored without trailing slash so we need to get rid off it if
1153 needed.
1169 needed.
1154 """
1170 """
1155 return path.rstrip('/')
1171 return path.rstrip('/')
1156
1172
1157 #
1173 #
1158 # Deprecated API based on changesets
1174 # Deprecated API based on changesets
1159 #
1175 #
1160
1176
1161 @property
1177 @property
1162 def revision(self):
1178 def revision(self):
1163 warnings.warn("Use idx instead", DeprecationWarning)
1179 warnings.warn("Use idx instead", DeprecationWarning)
1164 return self.idx
1180 return self.idx
1165
1181
1166 @revision.setter
1182 @revision.setter
1167 def revision(self, value):
1183 def revision(self, value):
1168 warnings.warn("Use idx instead", DeprecationWarning)
1184 warnings.warn("Use idx instead", DeprecationWarning)
1169 self.idx = value
1185 self.idx = value
1170
1186
1171 def get_file_changeset(self, path):
1187 def get_file_changeset(self, path):
1172 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1188 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1173 return self.get_file_commit(path)
1189 return self.get_file_commit(path)
1174
1190
1175
1191
1176 class BaseChangesetClass(type):
1192 class BaseChangesetClass(type):
1177
1193
1178 def __instancecheck__(self, instance):
1194 def __instancecheck__(self, instance):
1179 return isinstance(instance, BaseCommit)
1195 return isinstance(instance, BaseCommit)
1180
1196
1181
1197
1182 class BaseChangeset(BaseCommit):
1198 class BaseChangeset(BaseCommit):
1183
1199
1184 __metaclass__ = BaseChangesetClass
1200 __metaclass__ = BaseChangesetClass
1185
1201
1186 def __new__(cls, *args, **kwargs):
1202 def __new__(cls, *args, **kwargs):
1187 warnings.warn(
1203 warnings.warn(
1188 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1204 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1189 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1205 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1190
1206
1191
1207
1192 class BaseInMemoryCommit(object):
1208 class BaseInMemoryCommit(object):
1193 """
1209 """
1194 Represents differences between repository's state (most recent head) and
1210 Represents differences between repository's state (most recent head) and
1195 changes made *in place*.
1211 changes made *in place*.
1196
1212
1197 **Attributes**
1213 **Attributes**
1198
1214
1199 ``repository``
1215 ``repository``
1200 repository object for this in-memory-commit
1216 repository object for this in-memory-commit
1201
1217
1202 ``added``
1218 ``added``
1203 list of ``FileNode`` objects marked as *added*
1219 list of ``FileNode`` objects marked as *added*
1204
1220
1205 ``changed``
1221 ``changed``
1206 list of ``FileNode`` objects marked as *changed*
1222 list of ``FileNode`` objects marked as *changed*
1207
1223
1208 ``removed``
1224 ``removed``
1209 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1225 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1210 *removed*
1226 *removed*
1211
1227
1212 ``parents``
1228 ``parents``
1213 list of :class:`BaseCommit` instances representing parents of
1229 list of :class:`BaseCommit` instances representing parents of
1214 in-memory commit. Should always be 2-element sequence.
1230 in-memory commit. Should always be 2-element sequence.
1215
1231
1216 """
1232 """
1217
1233
1218 def __init__(self, repository):
1234 def __init__(self, repository):
1219 self.repository = repository
1235 self.repository = repository
1220 self.added = []
1236 self.added = []
1221 self.changed = []
1237 self.changed = []
1222 self.removed = []
1238 self.removed = []
1223 self.parents = []
1239 self.parents = []
1224
1240
1225 def add(self, *filenodes):
1241 def add(self, *filenodes):
1226 """
1242 """
1227 Marks given ``FileNode`` objects as *to be committed*.
1243 Marks given ``FileNode`` objects as *to be committed*.
1228
1244
1229 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1245 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1230 latest commit
1246 latest commit
1231 :raises ``NodeAlreadyAddedError``: if node with same path is already
1247 :raises ``NodeAlreadyAddedError``: if node with same path is already
1232 marked as *added*
1248 marked as *added*
1233 """
1249 """
1234 # Check if not already marked as *added* first
1250 # Check if not already marked as *added* first
1235 for node in filenodes:
1251 for node in filenodes:
1236 if node.path in (n.path for n in self.added):
1252 if node.path in (n.path for n in self.added):
1237 raise NodeAlreadyAddedError(
1253 raise NodeAlreadyAddedError(
1238 "Such FileNode %s is already marked for addition"
1254 "Such FileNode %s is already marked for addition"
1239 % node.path)
1255 % node.path)
1240 for node in filenodes:
1256 for node in filenodes:
1241 self.added.append(node)
1257 self.added.append(node)
1242
1258
1243 def change(self, *filenodes):
1259 def change(self, *filenodes):
1244 """
1260 """
1245 Marks given ``FileNode`` objects to be *changed* in next commit.
1261 Marks given ``FileNode`` objects to be *changed* in next commit.
1246
1262
1247 :raises ``EmptyRepositoryError``: if there are no commits yet
1263 :raises ``EmptyRepositoryError``: if there are no commits yet
1248 :raises ``NodeAlreadyExistsError``: if node with same path is already
1264 :raises ``NodeAlreadyExistsError``: if node with same path is already
1249 marked to be *changed*
1265 marked to be *changed*
1250 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1266 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1251 marked to be *removed*
1267 marked to be *removed*
1252 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1268 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1253 commit
1269 commit
1254 :raises ``NodeNotChangedError``: if node hasn't really be changed
1270 :raises ``NodeNotChangedError``: if node hasn't really be changed
1255 """
1271 """
1256 for node in filenodes:
1272 for node in filenodes:
1257 if node.path in (n.path for n in self.removed):
1273 if node.path in (n.path for n in self.removed):
1258 raise NodeAlreadyRemovedError(
1274 raise NodeAlreadyRemovedError(
1259 "Node at %s is already marked as removed" % node.path)
1275 "Node at %s is already marked as removed" % node.path)
1260 try:
1276 try:
1261 self.repository.get_commit()
1277 self.repository.get_commit()
1262 except EmptyRepositoryError:
1278 except EmptyRepositoryError:
1263 raise EmptyRepositoryError(
1279 raise EmptyRepositoryError(
1264 "Nothing to change - try to *add* new nodes rather than "
1280 "Nothing to change - try to *add* new nodes rather than "
1265 "changing them")
1281 "changing them")
1266 for node in filenodes:
1282 for node in filenodes:
1267 if node.path in (n.path for n in self.changed):
1283 if node.path in (n.path for n in self.changed):
1268 raise NodeAlreadyChangedError(
1284 raise NodeAlreadyChangedError(
1269 "Node at '%s' is already marked as changed" % node.path)
1285 "Node at '%s' is already marked as changed" % node.path)
1270 self.changed.append(node)
1286 self.changed.append(node)
1271
1287
1272 def remove(self, *filenodes):
1288 def remove(self, *filenodes):
1273 """
1289 """
1274 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1290 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1275 *removed* in next commit.
1291 *removed* in next commit.
1276
1292
1277 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1293 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1278 be *removed*
1294 be *removed*
1279 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1295 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1280 be *changed*
1296 be *changed*
1281 """
1297 """
1282 for node in filenodes:
1298 for node in filenodes:
1283 if node.path in (n.path for n in self.removed):
1299 if node.path in (n.path for n in self.removed):
1284 raise NodeAlreadyRemovedError(
1300 raise NodeAlreadyRemovedError(
1285 "Node is already marked to for removal at %s" % node.path)
1301 "Node is already marked to for removal at %s" % node.path)
1286 if node.path in (n.path for n in self.changed):
1302 if node.path in (n.path for n in self.changed):
1287 raise NodeAlreadyChangedError(
1303 raise NodeAlreadyChangedError(
1288 "Node is already marked to be changed at %s" % node.path)
1304 "Node is already marked to be changed at %s" % node.path)
1289 # We only mark node as *removed* - real removal is done by
1305 # We only mark node as *removed* - real removal is done by
1290 # commit method
1306 # commit method
1291 self.removed.append(node)
1307 self.removed.append(node)
1292
1308
1293 def reset(self):
1309 def reset(self):
1294 """
1310 """
1295 Resets this instance to initial state (cleans ``added``, ``changed``
1311 Resets this instance to initial state (cleans ``added``, ``changed``
1296 and ``removed`` lists).
1312 and ``removed`` lists).
1297 """
1313 """
1298 self.added = []
1314 self.added = []
1299 self.changed = []
1315 self.changed = []
1300 self.removed = []
1316 self.removed = []
1301 self.parents = []
1317 self.parents = []
1302
1318
1303 def get_ipaths(self):
1319 def get_ipaths(self):
1304 """
1320 """
1305 Returns generator of paths from nodes marked as added, changed or
1321 Returns generator of paths from nodes marked as added, changed or
1306 removed.
1322 removed.
1307 """
1323 """
1308 for node in itertools.chain(self.added, self.changed, self.removed):
1324 for node in itertools.chain(self.added, self.changed, self.removed):
1309 yield node.path
1325 yield node.path
1310
1326
1311 def get_paths(self):
1327 def get_paths(self):
1312 """
1328 """
1313 Returns list of paths from nodes marked as added, changed or removed.
1329 Returns list of paths from nodes marked as added, changed or removed.
1314 """
1330 """
1315 return list(self.get_ipaths())
1331 return list(self.get_ipaths())
1316
1332
1317 def check_integrity(self, parents=None):
1333 def check_integrity(self, parents=None):
1318 """
1334 """
1319 Checks in-memory commit's integrity. Also, sets parents if not
1335 Checks in-memory commit's integrity. Also, sets parents if not
1320 already set.
1336 already set.
1321
1337
1322 :raises CommitError: if any error occurs (i.e.
1338 :raises CommitError: if any error occurs (i.e.
1323 ``NodeDoesNotExistError``).
1339 ``NodeDoesNotExistError``).
1324 """
1340 """
1325 if not self.parents:
1341 if not self.parents:
1326 parents = parents or []
1342 parents = parents or []
1327 if len(parents) == 0:
1343 if len(parents) == 0:
1328 try:
1344 try:
1329 parents = [self.repository.get_commit(), None]
1345 parents = [self.repository.get_commit(), None]
1330 except EmptyRepositoryError:
1346 except EmptyRepositoryError:
1331 parents = [None, None]
1347 parents = [None, None]
1332 elif len(parents) == 1:
1348 elif len(parents) == 1:
1333 parents += [None]
1349 parents += [None]
1334 self.parents = parents
1350 self.parents = parents
1335
1351
1336 # Local parents, only if not None
1352 # Local parents, only if not None
1337 parents = [p for p in self.parents if p]
1353 parents = [p for p in self.parents if p]
1338
1354
1339 # Check nodes marked as added
1355 # Check nodes marked as added
1340 for p in parents:
1356 for p in parents:
1341 for node in self.added:
1357 for node in self.added:
1342 try:
1358 try:
1343 p.get_node(node.path)
1359 p.get_node(node.path)
1344 except NodeDoesNotExistError:
1360 except NodeDoesNotExistError:
1345 pass
1361 pass
1346 else:
1362 else:
1347 raise NodeAlreadyExistsError(
1363 raise NodeAlreadyExistsError(
1348 "Node `%s` already exists at %s" % (node.path, p))
1364 "Node `%s` already exists at %s" % (node.path, p))
1349
1365
1350 # Check nodes marked as changed
1366 # Check nodes marked as changed
1351 missing = set(self.changed)
1367 missing = set(self.changed)
1352 not_changed = set(self.changed)
1368 not_changed = set(self.changed)
1353 if self.changed and not parents:
1369 if self.changed and not parents:
1354 raise NodeDoesNotExistError(str(self.changed[0].path))
1370 raise NodeDoesNotExistError(str(self.changed[0].path))
1355 for p in parents:
1371 for p in parents:
1356 for node in self.changed:
1372 for node in self.changed:
1357 try:
1373 try:
1358 old = p.get_node(node.path)
1374 old = p.get_node(node.path)
1359 missing.remove(node)
1375 missing.remove(node)
1360 # if content actually changed, remove node from not_changed
1376 # if content actually changed, remove node from not_changed
1361 if old.content != node.content:
1377 if old.content != node.content:
1362 not_changed.remove(node)
1378 not_changed.remove(node)
1363 except NodeDoesNotExistError:
1379 except NodeDoesNotExistError:
1364 pass
1380 pass
1365 if self.changed and missing:
1381 if self.changed and missing:
1366 raise NodeDoesNotExistError(
1382 raise NodeDoesNotExistError(
1367 "Node `%s` marked as modified but missing in parents: %s"
1383 "Node `%s` marked as modified but missing in parents: %s"
1368 % (node.path, parents))
1384 % (node.path, parents))
1369
1385
1370 if self.changed and not_changed:
1386 if self.changed and not_changed:
1371 raise NodeNotChangedError(
1387 raise NodeNotChangedError(
1372 "Node `%s` wasn't actually changed (parents: %s)"
1388 "Node `%s` wasn't actually changed (parents: %s)"
1373 % (not_changed.pop().path, parents))
1389 % (not_changed.pop().path, parents))
1374
1390
1375 # Check nodes marked as removed
1391 # Check nodes marked as removed
1376 if self.removed and not parents:
1392 if self.removed and not parents:
1377 raise NodeDoesNotExistError(
1393 raise NodeDoesNotExistError(
1378 "Cannot remove node at %s as there "
1394 "Cannot remove node at %s as there "
1379 "were no parents specified" % self.removed[0].path)
1395 "were no parents specified" % self.removed[0].path)
1380 really_removed = set()
1396 really_removed = set()
1381 for p in parents:
1397 for p in parents:
1382 for node in self.removed:
1398 for node in self.removed:
1383 try:
1399 try:
1384 p.get_node(node.path)
1400 p.get_node(node.path)
1385 really_removed.add(node)
1401 really_removed.add(node)
1386 except CommitError:
1402 except CommitError:
1387 pass
1403 pass
1388 not_removed = set(self.removed) - really_removed
1404 not_removed = set(self.removed) - really_removed
1389 if not_removed:
1405 if not_removed:
1390 # TODO: johbo: This code branch does not seem to be covered
1406 # TODO: johbo: This code branch does not seem to be covered
1391 raise NodeDoesNotExistError(
1407 raise NodeDoesNotExistError(
1392 "Cannot remove node at %s from "
1408 "Cannot remove node at %s from "
1393 "following parents: %s" % (not_removed, parents))
1409 "following parents: %s" % (not_removed, parents))
1394
1410
1395 def commit(
1411 def commit(
1396 self, message, author, parents=None, branch=None, date=None,
1412 self, message, author, parents=None, branch=None, date=None,
1397 **kwargs):
1413 **kwargs):
1398 """
1414 """
1399 Performs in-memory commit (doesn't check workdir in any way) and
1415 Performs in-memory commit (doesn't check workdir in any way) and
1400 returns newly created :class:`BaseCommit`. Updates repository's
1416 returns newly created :class:`BaseCommit`. Updates repository's
1401 attribute `commits`.
1417 attribute `commits`.
1402
1418
1403 .. note::
1419 .. note::
1404
1420
1405 While overriding this method each backend's should call
1421 While overriding this method each backend's should call
1406 ``self.check_integrity(parents)`` in the first place.
1422 ``self.check_integrity(parents)`` in the first place.
1407
1423
1408 :param message: message of the commit
1424 :param message: message of the commit
1409 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1425 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1410 :param parents: single parent or sequence of parents from which commit
1426 :param parents: single parent or sequence of parents from which commit
1411 would be derived
1427 would be derived
1412 :param date: ``datetime.datetime`` instance. Defaults to
1428 :param date: ``datetime.datetime`` instance. Defaults to
1413 ``datetime.datetime.now()``.
1429 ``datetime.datetime.now()``.
1414 :param branch: branch name, as string. If none given, default backend's
1430 :param branch: branch name, as string. If none given, default backend's
1415 branch would be used.
1431 branch would be used.
1416
1432
1417 :raises ``CommitError``: if any error occurs while committing
1433 :raises ``CommitError``: if any error occurs while committing
1418 """
1434 """
1419 raise NotImplementedError
1435 raise NotImplementedError
1420
1436
1421
1437
1422 class BaseInMemoryChangesetClass(type):
1438 class BaseInMemoryChangesetClass(type):
1423
1439
1424 def __instancecheck__(self, instance):
1440 def __instancecheck__(self, instance):
1425 return isinstance(instance, BaseInMemoryCommit)
1441 return isinstance(instance, BaseInMemoryCommit)
1426
1442
1427
1443
1428 class BaseInMemoryChangeset(BaseInMemoryCommit):
1444 class BaseInMemoryChangeset(BaseInMemoryCommit):
1429
1445
1430 __metaclass__ = BaseInMemoryChangesetClass
1446 __metaclass__ = BaseInMemoryChangesetClass
1431
1447
1432 def __new__(cls, *args, **kwargs):
1448 def __new__(cls, *args, **kwargs):
1433 warnings.warn(
1449 warnings.warn(
1434 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1450 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1435 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1451 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1436
1452
1437
1453
1438 class EmptyCommit(BaseCommit):
1454 class EmptyCommit(BaseCommit):
1439 """
1455 """
1440 An dummy empty commit. It's possible to pass hash when creating
1456 An dummy empty commit. It's possible to pass hash when creating
1441 an EmptyCommit
1457 an EmptyCommit
1442 """
1458 """
1443
1459
1444 def __init__(
1460 def __init__(
1445 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1461 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1446 message='', author='', date=None):
1462 message='', author='', date=None):
1447 self._empty_commit_id = commit_id
1463 self._empty_commit_id = commit_id
1448 # TODO: johbo: Solve idx parameter, default value does not make
1464 # TODO: johbo: Solve idx parameter, default value does not make
1449 # too much sense
1465 # too much sense
1450 self.idx = idx
1466 self.idx = idx
1451 self.message = message
1467 self.message = message
1452 self.author = author
1468 self.author = author
1453 self.date = date or datetime.datetime.fromtimestamp(0)
1469 self.date = date or datetime.datetime.fromtimestamp(0)
1454 self.repository = repo
1470 self.repository = repo
1455 self.alias = alias
1471 self.alias = alias
1456
1472
1457 @LazyProperty
1473 @LazyProperty
1458 def raw_id(self):
1474 def raw_id(self):
1459 """
1475 """
1460 Returns raw string identifying this commit, useful for web
1476 Returns raw string identifying this commit, useful for web
1461 representation.
1477 representation.
1462 """
1478 """
1463
1479
1464 return self._empty_commit_id
1480 return self._empty_commit_id
1465
1481
1466 @LazyProperty
1482 @LazyProperty
1467 def branch(self):
1483 def branch(self):
1468 if self.alias:
1484 if self.alias:
1469 from rhodecode.lib.vcs.backends import get_backend
1485 from rhodecode.lib.vcs.backends import get_backend
1470 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1486 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1471
1487
1472 @LazyProperty
1488 @LazyProperty
1473 def short_id(self):
1489 def short_id(self):
1474 return self.raw_id[:12]
1490 return self.raw_id[:12]
1475
1491
1476 @LazyProperty
1492 @LazyProperty
1477 def id(self):
1493 def id(self):
1478 return self.raw_id
1494 return self.raw_id
1479
1495
1480 def get_file_commit(self, path):
1496 def get_file_commit(self, path):
1481 return self
1497 return self
1482
1498
1483 def get_file_content(self, path):
1499 def get_file_content(self, path):
1484 return u''
1500 return u''
1485
1501
1486 def get_file_size(self, path):
1502 def get_file_size(self, path):
1487 return 0
1503 return 0
1488
1504
1489
1505
1490 class EmptyChangesetClass(type):
1506 class EmptyChangesetClass(type):
1491
1507
1492 def __instancecheck__(self, instance):
1508 def __instancecheck__(self, instance):
1493 return isinstance(instance, EmptyCommit)
1509 return isinstance(instance, EmptyCommit)
1494
1510
1495
1511
1496 class EmptyChangeset(EmptyCommit):
1512 class EmptyChangeset(EmptyCommit):
1497
1513
1498 __metaclass__ = EmptyChangesetClass
1514 __metaclass__ = EmptyChangesetClass
1499
1515
1500 def __new__(cls, *args, **kwargs):
1516 def __new__(cls, *args, **kwargs):
1501 warnings.warn(
1517 warnings.warn(
1502 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1518 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1503 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1519 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1504
1520
1505 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1521 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1506 alias=None, revision=-1, message='', author='', date=None):
1522 alias=None, revision=-1, message='', author='', date=None):
1507 if requested_revision is not None:
1523 if requested_revision is not None:
1508 warnings.warn(
1524 warnings.warn(
1509 "Parameter requested_revision not supported anymore",
1525 "Parameter requested_revision not supported anymore",
1510 DeprecationWarning)
1526 DeprecationWarning)
1511 super(EmptyChangeset, self).__init__(
1527 super(EmptyChangeset, self).__init__(
1512 commit_id=cs, repo=repo, alias=alias, idx=revision,
1528 commit_id=cs, repo=repo, alias=alias, idx=revision,
1513 message=message, author=author, date=date)
1529 message=message, author=author, date=date)
1514
1530
1515 @property
1531 @property
1516 def revision(self):
1532 def revision(self):
1517 warnings.warn("Use idx instead", DeprecationWarning)
1533 warnings.warn("Use idx instead", DeprecationWarning)
1518 return self.idx
1534 return self.idx
1519
1535
1520 @revision.setter
1536 @revision.setter
1521 def revision(self, value):
1537 def revision(self, value):
1522 warnings.warn("Use idx instead", DeprecationWarning)
1538 warnings.warn("Use idx instead", DeprecationWarning)
1523 self.idx = value
1539 self.idx = value
1524
1540
1525
1541
1526 class EmptyRepository(BaseRepository):
1542 class EmptyRepository(BaseRepository):
1527 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1543 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1528 pass
1544 pass
1529
1545
1530 def get_diff(self, *args, **kwargs):
1546 def get_diff(self, *args, **kwargs):
1531 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1547 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1532 return GitDiff('')
1548 return GitDiff('')
1533
1549
1534
1550
1535 class CollectionGenerator(object):
1551 class CollectionGenerator(object):
1536
1552
1537 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1553 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1538 self.repo = repo
1554 self.repo = repo
1539 self.commit_ids = commit_ids
1555 self.commit_ids = commit_ids
1540 # TODO: (oliver) this isn't currently hooked up
1556 # TODO: (oliver) this isn't currently hooked up
1541 self.collection_size = None
1557 self.collection_size = None
1542 self.pre_load = pre_load
1558 self.pre_load = pre_load
1543
1559
1544 def __len__(self):
1560 def __len__(self):
1545 if self.collection_size is not None:
1561 if self.collection_size is not None:
1546 return self.collection_size
1562 return self.collection_size
1547 return self.commit_ids.__len__()
1563 return self.commit_ids.__len__()
1548
1564
1549 def __iter__(self):
1565 def __iter__(self):
1550 for commit_id in self.commit_ids:
1566 for commit_id in self.commit_ids:
1551 # TODO: johbo: Mercurial passes in commit indices or commit ids
1567 # TODO: johbo: Mercurial passes in commit indices or commit ids
1552 yield self._commit_factory(commit_id)
1568 yield self._commit_factory(commit_id)
1553
1569
1554 def _commit_factory(self, commit_id):
1570 def _commit_factory(self, commit_id):
1555 """
1571 """
1556 Allows backends to override the way commits are generated.
1572 Allows backends to override the way commits are generated.
1557 """
1573 """
1558 return self.repo.get_commit(commit_id=commit_id,
1574 return self.repo.get_commit(commit_id=commit_id,
1559 pre_load=self.pre_load)
1575 pre_load=self.pre_load)
1560
1576
1561 def __getslice__(self, i, j):
1577 def __getslice__(self, i, j):
1562 """
1578 """
1563 Returns an iterator of sliced repository
1579 Returns an iterator of sliced repository
1564 """
1580 """
1565 commit_ids = self.commit_ids[i:j]
1581 commit_ids = self.commit_ids[i:j]
1566 return self.__class__(
1582 return self.__class__(
1567 self.repo, commit_ids, pre_load=self.pre_load)
1583 self.repo, commit_ids, pre_load=self.pre_load)
1568
1584
1569 def __repr__(self):
1585 def __repr__(self):
1570 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1586 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1571
1587
1572
1588
1573 class Config(object):
1589 class Config(object):
1574 """
1590 """
1575 Represents the configuration for a repository.
1591 Represents the configuration for a repository.
1576
1592
1577 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1593 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1578 standard library. It implements only the needed subset.
1594 standard library. It implements only the needed subset.
1579 """
1595 """
1580
1596
1581 def __init__(self):
1597 def __init__(self):
1582 self._values = {}
1598 self._values = {}
1583
1599
1584 def copy(self):
1600 def copy(self):
1585 clone = Config()
1601 clone = Config()
1586 for section, values in self._values.items():
1602 for section, values in self._values.items():
1587 clone._values[section] = values.copy()
1603 clone._values[section] = values.copy()
1588 return clone
1604 return clone
1589
1605
1590 def __repr__(self):
1606 def __repr__(self):
1591 return '<Config(%s sections) at %s>' % (
1607 return '<Config(%s sections) at %s>' % (
1592 len(self._values), hex(id(self)))
1608 len(self._values), hex(id(self)))
1593
1609
1594 def items(self, section):
1610 def items(self, section):
1595 return self._values.get(section, {}).iteritems()
1611 return self._values.get(section, {}).iteritems()
1596
1612
1597 def get(self, section, option):
1613 def get(self, section, option):
1598 return self._values.get(section, {}).get(option)
1614 return self._values.get(section, {}).get(option)
1599
1615
1600 def set(self, section, option, value):
1616 def set(self, section, option, value):
1601 section_values = self._values.setdefault(section, {})
1617 section_values = self._values.setdefault(section, {})
1602 section_values[option] = value
1618 section_values[option] = value
1603
1619
1604 def clear_section(self, section):
1620 def clear_section(self, section):
1605 self._values[section] = {}
1621 self._values[section] = {}
1606
1622
1607 def serialize(self):
1623 def serialize(self):
1608 """
1624 """
1609 Creates a list of three tuples (section, key, value) representing
1625 Creates a list of three tuples (section, key, value) representing
1610 this config object.
1626 this config object.
1611 """
1627 """
1612 items = []
1628 items = []
1613 for section in self._values:
1629 for section in self._values:
1614 for option, value in self._values[section].items():
1630 for option, value in self._values[section].items():
1615 items.append(
1631 items.append(
1616 (safe_str(section), safe_str(option), safe_str(value)))
1632 (safe_str(section), safe_str(option), safe_str(value)))
1617 return items
1633 return items
1618
1634
1619
1635
1620 class Diff(object):
1636 class Diff(object):
1621 """
1637 """
1622 Represents a diff result from a repository backend.
1638 Represents a diff result from a repository backend.
1623
1639
1624 Subclasses have to provide a backend specific value for
1640 Subclasses have to provide a backend specific value for
1625 :attr:`_header_re` and :attr:`_meta_re`.
1641 :attr:`_header_re` and :attr:`_meta_re`.
1626 """
1642 """
1627 _meta_re = None
1643 _meta_re = None
1628 _header_re = None
1644 _header_re = None
1629
1645
1630 def __init__(self, raw_diff):
1646 def __init__(self, raw_diff):
1631 self.raw = raw_diff
1647 self.raw = raw_diff
1632
1648
1633 def chunks(self):
1649 def chunks(self):
1634 """
1650 """
1635 split the diff in chunks of separate --git a/file b/file chunks
1651 split the diff in chunks of separate --git a/file b/file chunks
1636 to make diffs consistent we must prepend with \n, and make sure
1652 to make diffs consistent we must prepend with \n, and make sure
1637 we can detect last chunk as this was also has special rule
1653 we can detect last chunk as this was also has special rule
1638 """
1654 """
1639
1655
1640 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1656 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1641 header = diff_parts[0]
1657 header = diff_parts[0]
1642
1658
1643 if self._meta_re:
1659 if self._meta_re:
1644 match = self._meta_re.match(header)
1660 match = self._meta_re.match(header)
1645
1661
1646 chunks = diff_parts[1:]
1662 chunks = diff_parts[1:]
1647 total_chunks = len(chunks)
1663 total_chunks = len(chunks)
1648
1664
1649 return (
1665 return (
1650 DiffChunk(chunk, self, cur_chunk == total_chunks)
1666 DiffChunk(chunk, self, cur_chunk == total_chunks)
1651 for cur_chunk, chunk in enumerate(chunks, start=1))
1667 for cur_chunk, chunk in enumerate(chunks, start=1))
1652
1668
1653
1669
1654 class DiffChunk(object):
1670 class DiffChunk(object):
1655
1671
1656 def __init__(self, chunk, diff, last_chunk):
1672 def __init__(self, chunk, diff, last_chunk):
1657 self._diff = diff
1673 self._diff = diff
1658
1674
1659 # since we split by \ndiff --git that part is lost from original diff
1675 # since we split by \ndiff --git that part is lost from original diff
1660 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1676 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1661 if not last_chunk:
1677 if not last_chunk:
1662 chunk += '\n'
1678 chunk += '\n'
1663
1679
1664 match = self._diff._header_re.match(chunk)
1680 match = self._diff._header_re.match(chunk)
1665 self.header = match.groupdict()
1681 self.header = match.groupdict()
1666 self.diff = chunk[match.end():]
1682 self.diff = chunk[match.end():]
1667 self.raw = chunk
1683 self.raw = chunk
1668
1684
1669
1685
1670 class BasePathPermissionChecker(object):
1686 class BasePathPermissionChecker(object):
1671
1687
1672 @staticmethod
1688 @staticmethod
1673 def create_from_patterns(includes, excludes):
1689 def create_from_patterns(includes, excludes):
1674 if includes and '*' in includes and not excludes:
1690 if includes and '*' in includes and not excludes:
1675 return AllPathPermissionChecker()
1691 return AllPathPermissionChecker()
1676 elif excludes and '*' in excludes:
1692 elif excludes and '*' in excludes:
1677 return NonePathPermissionChecker()
1693 return NonePathPermissionChecker()
1678 else:
1694 else:
1679 return PatternPathPermissionChecker(includes, excludes)
1695 return PatternPathPermissionChecker(includes, excludes)
1680
1696
1681 @property
1697 @property
1682 def has_full_access(self):
1698 def has_full_access(self):
1683 raise NotImplemented()
1699 raise NotImplemented()
1684
1700
1685 def has_access(self, path):
1701 def has_access(self, path):
1686 raise NotImplemented()
1702 raise NotImplemented()
1687
1703
1688
1704
1689 class AllPathPermissionChecker(BasePathPermissionChecker):
1705 class AllPathPermissionChecker(BasePathPermissionChecker):
1690
1706
1691 @property
1707 @property
1692 def has_full_access(self):
1708 def has_full_access(self):
1693 return True
1709 return True
1694
1710
1695 def has_access(self, path):
1711 def has_access(self, path):
1696 return True
1712 return True
1697
1713
1698
1714
1699 class NonePathPermissionChecker(BasePathPermissionChecker):
1715 class NonePathPermissionChecker(BasePathPermissionChecker):
1700
1716
1701 @property
1717 @property
1702 def has_full_access(self):
1718 def has_full_access(self):
1703 return False
1719 return False
1704
1720
1705 def has_access(self, path):
1721 def has_access(self, path):
1706 return False
1722 return False
1707
1723
1708
1724
1709 class PatternPathPermissionChecker(BasePathPermissionChecker):
1725 class PatternPathPermissionChecker(BasePathPermissionChecker):
1710
1726
1711 def __init__(self, includes, excludes):
1727 def __init__(self, includes, excludes):
1712 self.includes = includes
1728 self.includes = includes
1713 self.excludes = excludes
1729 self.excludes = excludes
1714 self.includes_re = [] if not includes else [
1730 self.includes_re = [] if not includes else [
1715 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1731 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1716 self.excludes_re = [] if not excludes else [
1732 self.excludes_re = [] if not excludes else [
1717 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1733 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1718
1734
1719 @property
1735 @property
1720 def has_full_access(self):
1736 def has_full_access(self):
1721 return '*' in self.includes and not self.excludes
1737 return '*' in self.includes and not self.excludes
1722
1738
1723 def has_access(self, path):
1739 def has_access(self, path):
1724 for regex in self.excludes_re:
1740 for regex in self.excludes_re:
1725 if regex.match(path):
1741 if regex.match(path):
1726 return False
1742 return False
1727 for regex in self.includes_re:
1743 for regex in self.includes_re:
1728 if regex.match(path):
1744 if regex.match(path):
1729 return True
1745 return True
1730 return False
1746 return False
@@ -1,1006 +1,1006 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
# Matches either an abbreviated (12 hex chars) or a full (40 hex chars)
# commit sha. NOTE: the previous pattern '^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$'
# misplaced the brackets: '[' became part of a character class and ']' a
# literal, leaving the 12-char alternative unanchored at end-of-string.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
50
50
51
51
52 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
53 """
53 """
54 Git repository backend.
54 Git repository backend.
55 """
55 """
56 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
def __init__(self, repo_path, config=None, create=False, src_url=None,
             update_after_clone=False, with_wire=None, bare=False):
    """
    Initialize a git repository backend rooted at ``repo_path``.

    :param repo_path: filesystem path of the repository
    :param config: optional config; falls back to ``get_default_config()``
    :param create: if True, create (or clone) the repository on disk
    :param src_url: optional url to clone from when ``create`` is set
    :param update_after_clone: passed through to ``clone()``
    :param with_wire: extra wire options passed to the remote connection
    :param bare: create the repository as a bare repo (no working tree)
    """
    self.path = safe_str(os.path.abspath(repo_path))
    self.config = config if config else self.get_default_config()
    # remote proxy that executes git operations out of process
    self._remote = connection.Git(
        self.path, self.config, with_wire=with_wire)

    self._init_repo(create, src_url, update_after_clone, bare)

    # caches
    self._commit_ids = {}
72
72
@LazyProperty
def bare(self):
    """Whether this repository is bare, as reported by the remote."""
    return self._remote.bare()
76
76
@LazyProperty
def head(self):
    """Current HEAD, as reported by the remote."""
    return self._remote.head()
80
80
@LazyProperty
def commit_ids(self):
    """
    List of commit ids, in ascending order. Implemented as a lazy
    attribute so external tools may inject commit ids from a cache.
    """
    all_ids = self._get_all_commit_ids()
    self._rebuild_cache(all_ids)
    return all_ids
90
90
def _rebuild_cache(self, commit_ids):
    """Rebuild the commit-id -> position lookup used for fast indexing."""
    self._commit_ids = {
        commit_id: index for index, commit_id in enumerate(commit_ids)}
94
94
def run_git_command(self, cmd, **opts):
    """
    Runs given ``cmd`` as git command and returns tuple
    (stdout, stderr).

    :param cmd: git command to be executed, as a list of arguments
    :param opts: env options to pass into Subprocess command
    :raises ValueError: if ``cmd`` is not a list
    """
    if not isinstance(cmd, list):
        raise ValueError('cmd must be a list, got %s instead' % type(cmd))

    skip_stderr_log = opts.pop('skip_stderr_log', False)
    stdout, stderr = self._remote.run_git_command(cmd, **opts)
    if stderr and not skip_stderr_log:
        log.debug('Stderr output of git command "%s":\n%s', cmd, stderr)
    return stdout, stderr
111
111
@staticmethod
def check_url(url, config):
    """
    Verify that ``url`` points at a reachable git repository.

    Local directories and ``file:`` urls are accepted as-is; anything
    else is delegated to the remote for verification. Sometimes git may
    issue a basic-auth request that can cause the whole API to hang when
    used from python or other external calls.

    On failures it'll raise urllib2.HTTPError, exception is also thrown
    when the return code is non 200.
    """
    # local paths need no remote round-trip
    if os.path.isdir(url) or url.startswith('file:'):
        return True

    # strip a vcs prefix such as 'git+' from e.g. 'git+http://host/repo'
    if '+' in url.split('://', 1)[0]:
        url = url.split('+', 1)[1]

    # Request the _remote to verify the url
    return connection.Git.check_url(url, config.serialize())
132
132
@staticmethod
def is_valid_repository(path):
    """Return True if ``path`` looks like a git repository (incl. bare)."""
    if os.path.isdir(os.path.join(path, '.git')):
        return True
    # bare repositories have no '.git' dir; try opening the path itself
    try:
        GitRepository(path)
    except VCSError:
        return False
    return True
144
144
def _init_repo(self, create, src_url=None, update_after_clone=False,
               bare=False):
    """
    Create or validate the on-disk repository.

    When ``create`` is set, either clones from ``src_url`` or initializes
    a fresh (optionally bare) repository; otherwise verifies that
    ``self.path`` already holds a git repository.

    :raises RepositoryError: if the target already exists on create, if
        the path is not a git repository, or on underlying OS errors
    """
    if create and os.path.exists(self.path):
        raise RepositoryError(
            "Cannot create repository at %s, location already exist"
            % self.path)

    try:
        if create and src_url:
            # validate the source before attempting a clone
            GitRepository.check_url(src_url, self.config)
            self.clone(src_url, update_after_clone, bare)
        elif create:
            os.makedirs(self.path, mode=0755)  # Python 2 octal literal

            if bare:
                self._remote.init_bare()
            else:
                self._remote.init()
        else:
            if not self._remote.assert_correct_path():
                raise RepositoryError(
                    'Path "%s" does not contain a Git repository' %
                    (self.path,))

    # TODO: johbo: check if we have to translate the OSError here
    except OSError as err:
        raise RepositoryError(err)
172
172
def _get_all_commit_ids(self, filters=None):
    """
    Return all commit ids (oldest first) via ``git rev-list``.

    :param filters: optional dict with ``since``/``until`` dates and/or
        a ``branch_name`` to limit the walk
    """
    # we must check if this repo is not empty, since later command
    # fails if it is. And it's cheaper to ask than throw the subprocess
    # errors
    try:
        self._remote.head()
    except KeyError:
        # no HEAD ref -> empty repository
        return []

    rev_filter = ['--branches', '--tags']
    extra_filter = []

    if filters:
        if filters.get('since'):
            extra_filter.append('--since=%s' % (filters['since']))
        if filters.get('until'):
            extra_filter.append('--until=%s' % (filters['until']))
        if filters.get('branch_name'):
            # limit the walk to the named branch (tags stay included)
            rev_filter = ['--tags']
            extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

    # if filters.get('start') or filters.get('end'):
    #     # skip is offset, max-count is limit
    #     if filters.get('start'):
    #         extra_filter += ' --skip=%s' % filters['start']
    #     if filters.get('end'):
    #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

    cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
    try:
        output, __ = self.run_git_command(cmd)
    except RepositoryError:
        # Can be raised for empty repositories
        return []
    return output.splitlines()
209
209
def _get_commit_id(self, commit_id_or_idx):
    """
    Resolve ``commit_id_or_idx`` (numeric index, sha, full ref path,
    branch name, or tag name) to a commit sha.

    :raises EmptyRepositoryError: if the repository has no commits
    :raises CommitDoesNotExistError: if nothing matches
    """
    def is_null(value):
        # true when the argument consists entirely of '0' characters
        # (closes over the outer ``commit_id_or_idx``)
        return len(value) == commit_id_or_idx.count('0')

    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    # sentinel values all mean "most recent commit"
    if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
        return self.commit_ids[-1]

    is_bstr = isinstance(commit_id_or_idx, (str, unicode))
    # short all-digit strings, ints and all-zero shas are treated as
    # positions into ``commit_ids``
    if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
        or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
        try:
            commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
        except Exception:
            msg = "Commit %s does not exist for %s" % (
                commit_id_or_idx, self)
            raise CommitDoesNotExistError(msg)

    elif is_bstr:
        # check full path ref, eg. refs/heads/master
        ref_id = self._refs.get(commit_id_or_idx)
        if ref_id:
            return ref_id

        # check branch name
        # NOTE(review): ``branch_ids`` is never used — candidate for removal
        branch_ids = self.branches.values()
        ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
        if ref_id:
            return ref_id

        # check tag name
        ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
        if ref_id:
            return ref_id

        if (not SHA_PATTERN.match(commit_id_or_idx) or
            commit_id_or_idx not in self.commit_ids):
            msg = "Commit %s does not exist for %s" % (
                commit_id_or_idx, self)
            raise CommitDoesNotExistError(msg)

    # Ensure we return full id
    if not SHA_PATTERN.match(str(commit_id_or_idx)):
        raise CommitDoesNotExistError(
            "Given commit id %s not recognized" % commit_id_or_idx)
    return commit_id_or_idx
258
258
def get_hook_location(self):
    """Return the absolute path of the directory where hooks live."""
    if self.bare:
        return os.path.join(self.path, 'hooks')
    return os.path.join(self.path, '.git', 'hooks')
267
267
@LazyProperty
def last_change(self):
    """
    Returns last change made on this repository as
    `datetime.datetime` object.
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        # no commits yet: fall back to filesystem modification time
        tz_offset = makedate()[1]
        return utcdate_fromtimestamp(self._get_fs_mtime(), tz_offset)
279
279
def _get_fs_mtime(self):
    """Filesystem fallback: mtime of the index file, or HEAD if absent."""
    # bare repos keep index/HEAD at the top level, non-bare under .git/
    git_dir = self.path if self.bare else os.path.join(self.path, '.git')
    index_path = os.path.join(git_dir, 'index')
    head_path = os.path.join(git_dir, 'HEAD')
    if os.path.exists(index_path):
        return os.stat(index_path).st_mtime
    return os.stat(head_path).st_mtime
289
289
@LazyProperty
def description(self):
    """Repository description, falling back to the backend default."""
    raw_description = self._remote.get_description()
    return safe_unicode(raw_description or self.DEFAULT_DESCRIPTION)
294
294
def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
    """
    Return an OrderedDict of (ref name, sha) for refs starting with
    ``prefix``, sorted by name.

    :param prefix: only refs starting with this string are included
    :param reverse: sort names in descending order
    :param strip_prefix: drop ``prefix`` from the returned names
    """
    if self.is_empty():
        return OrderedDict()

    prefix_len = len(prefix) if strip_prefix else 0
    matched = [
        (safe_unicode(ref[prefix_len:]), sha)
        for ref, sha in self._refs.iteritems()
        if ref.startswith(prefix)]

    return OrderedDict(
        sorted(matched, key=lambda entry: entry[0], reverse=reverse))
311
311
def _get_branches(self):
    """Return mapping of branch name -> sha from 'refs/heads/'."""
    return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
314
314
@LazyProperty
def branches(self):
    """Mapping of branch name -> commit sha."""
    return self._get_branches()
318
318
@LazyProperty
def branches_closed(self):
    """Always empty: git has no closed-branch concept."""
    return {}
322
322
@LazyProperty
def bookmarks(self):
    """Always empty: bookmarks are a Mercurial concept, absent in git."""
    return {}
326
326
@LazyProperty
def branches_all(self):
    """Union of open and closed branches (closed is always empty here)."""
    combined = dict(self.branches)
    combined.update(self.branches_closed)
    return combined
333
333
@LazyProperty
def tags(self):
    """Mapping of tag name -> commit sha."""
    return self._get_tags()
337
337
def _get_tags(self):
    """Return tags from 'refs/tags/', sorted in reverse name order."""
    return self._get_refs_entries(
        prefix='refs/tags/', strip_prefix=True, reverse=True)
341
341
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    # TODO: fix this method to apply annotated tags correct with message
    """
    Creates and returns a tag for the given ``commit_id``.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)
    commit = self.get_commit(commit_id=commit_id)
    message = message or "Added tag %s for commit %s" % (
        name, commit.raw_id)
    # lightweight tag only; message/date are not recorded (see TODO)
    self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

    # refresh cached refs and tags so the new tag is visible immediately
    self._refs = self._get_refs()
    self.tags = self._get_tags()
    return commit
366
366
def remove_tag(self, name, user, message=None, date=None):
    """
    Removes tag with the given ``name``.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)
    # tags are stored as loose ref files; deleting the file removes the tag
    tagpath = vcspath.join(
        self._remote.get_refs_path(), 'refs', 'tags', name)
    try:
        os.remove(tagpath)
        # refresh cached refs and tags so the removal is visible immediately
        self._refs = self._get_refs()
        self.tags = self._get_tags()
    except OSError as e:
        raise RepositoryError(e.strerror)
388
388
def _get_refs(self):
    """Fetch the refs mapping (ref path -> sha) from the remote."""
    return self._remote.get_refs()
391
391
@LazyProperty
def _refs(self):
    """Cached refs mapping (ref path -> sha)."""
    return self._get_refs()
395
395
@property
def _ref_tree(self):
    """Nested dict view of ``_refs``, keyed by '/'-separated path parts."""
    node = tree = {}
    for ref, sha in self._refs.iteritems():
        path = ref.split('/')
        # descend/create intermediate nodes for all but the last segment
        for bit in path[:-1]:
            node = node.setdefault(bit, {})
        node[path[-1]] = sha
        # reset the cursor back to the root for the next ref
        node = tree
    return tree
406
406
def get_remote_ref(self, ref_name):
    """Return the sha of ``origin/<ref_name>``, or None if unavailable."""
    key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
    try:
        return self._refs[key]
    except Exception:
        # best-effort lookup: missing ref (or any failure) yields None
        return None
413
413
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
    """
    Returns `GitCommit` object representing commit from git repository
    at the given `commit_id` or head (most recent commit) if None given.

    :param commit_id: commit id (sha, ref or index-like string)
    :param commit_idx: numeric index into ``commit_ids``
    :param pre_load: optional attributes to pre-load on the commit
    :raises RepositoryError: if the resolved id is unknown
    """
    if commit_id is not None:
        self._validate_commit_id(commit_id)
    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        commit_id = commit_idx
    commit_id = self._get_commit_id(commit_id)
    try:
        # Need to call remote to translate id for tagging scenario
        commit_id = self._remote.get_object(commit_id)["commit_id"]
        idx = self._commit_ids[commit_id]
    except KeyError:
        raise RepositoryError("Cannot get object with id %s" % commit_id)

    return GitCommit(self, commit_id, idx, pre_load=pre_load)
433
433
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None):
    """
    Returns generator of `GitCommit` objects from start to end (both
    are inclusive), in ascending date order.

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
        ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
        ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
        branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
        Mercurial evolve
    :raise BranchDoesNotExistError: If given `branch_name` does not
        exist.
    :raise CommitDoesNotExistError: If commits for given `start` or
        `end` could not be found.

    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")
    self._validate_branch_name(branch_name)

    if start_id is not None:
        self._validate_commit_id(start_id)
    if end_id is not None:
        self._validate_commit_id(end_id)

    # resolve ids to positions in the full commit list; None = open end
    start_raw_id = self._get_commit_id(start_id)
    start_pos = self._commit_ids[start_raw_id] if start_id else None
    end_raw_id = self._get_commit_id(end_id)
    end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

    if None not in [start_id, end_id] and start_pos > end_pos:
        raise RepositoryError(
            "Start commit '%s' cannot be after end commit '%s'" %
            (start_id, end_id))

    if end_pos is not None:
        # make the slice inclusive of the end commit
        end_pos += 1

    filter_ = []
    if branch_name:
        filter_.append({'branch_name': branch_name})
    if start_date and not end_date:
        filter_.append({'since': start_date})
    if end_date and not start_date:
        filter_.append({'until': end_date})
    if start_date and end_date:
        filter_.append({'since': start_date})
        filter_.append({'until': end_date})

    # if start_pos or end_pos:
    #     filter_.append({'start': start_pos})
    #     filter_.append({'end': end_pos})

    if filter_:
        # delegate date/branch filtering to `git rev-list`
        revfilters = {
            'branch_name': branch_name,
            'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
            'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
            'start': start_pos,
            'end': end_pos,
        }
        commit_ids = self._get_all_commit_ids(filters=revfilters)

        # pure python stuff, it's slow due to walker walking whole repo
        # def get_revs(walker):
        #     for walker_entry in walker:
        #         yield walker_entry.commit.id
        # revfilters = {}
        # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
    else:
        commit_ids = self.commit_ids

    if start_pos or end_pos:
        # positional slicing applies on top of any rev-list filtering
        commit_ids = commit_ids[start_pos: end_pos]

    return CollectionGenerator(self, commit_ids, pre_load=pre_load)
517
517
518 def get_diff(
518 def get_diff(
519 self, commit1, commit2, path='', ignore_whitespace=False,
519 self, commit1, commit2, path='', ignore_whitespace=False,
520 context=3, path1=None):
520 context=3, path1=None):
521 """
521 """
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 ``commit2`` since ``commit1``.
523 ``commit2`` since ``commit1``.
524
524
525 :param commit1: Entry point from which diff is shown. Can be
525 :param commit1: Entry point from which diff is shown. Can be
526 ``self.EMPTY_COMMIT`` - in this case, patch showing all
526 ``self.EMPTY_COMMIT`` - in this case, patch showing all
527 the changes since empty state of the repository until ``commit2``
527 the changes since empty state of the repository until ``commit2``
528 :param commit2: Until which commits changes should be shown.
528 :param commit2: Until which commits changes should be shown.
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
530 changes. Defaults to ``False``.
530 changes. Defaults to ``False``.
531 :param context: How many lines before/after changed lines should be
531 :param context: How many lines before/after changed lines should be
532 shown. Defaults to ``3``.
532 shown. Defaults to ``3``.
533 """
533 """
534 self._validate_diff_commits(commit1, commit2)
534 self._validate_diff_commits(commit1, commit2)
535 if path1 is not None and path1 != path:
535 if path1 is not None and path1 != path:
536 raise ValueError("Diff of two different paths not supported.")
536 raise ValueError("Diff of two different paths not supported.")
537
537
538 flags = [
538 flags = [
539 '-U%s' % context, '--full-index', '--binary', '-p',
539 '-U%s' % context, '--full-index', '--binary', '-p',
540 '-M', '--abbrev=40']
540 '-M', '--abbrev=40']
541 if ignore_whitespace:
541 if ignore_whitespace:
542 flags.append('-w')
542 flags.append('-w')
543
543
544 if commit1 == self.EMPTY_COMMIT:
544 if commit1 == self.EMPTY_COMMIT:
545 cmd = ['show'] + flags + [commit2.raw_id]
545 cmd = ['show'] + flags + [commit2.raw_id]
546 else:
546 else:
547 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
547 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
548
548
549 if path:
549 if path:
550 cmd.extend(['--', path])
550 cmd.extend(['--', path])
551
551
552 stdout, __ = self.run_git_command(cmd)
552 stdout, __ = self.run_git_command(cmd)
553 # If we used 'show' command, strip first few lines (until actual diff
553 # If we used 'show' command, strip first few lines (until actual diff
554 # starts)
554 # starts)
555 if commit1 == self.EMPTY_COMMIT:
555 if commit1 == self.EMPTY_COMMIT:
556 lines = stdout.splitlines()
556 lines = stdout.splitlines()
557 x = 0
557 x = 0
558 for line in lines:
558 for line in lines:
559 if line.startswith('diff'):
559 if line.startswith('diff'):
560 break
560 break
561 x += 1
561 x += 1
562 # Append new line just like 'diff' command do
562 # Append new line just like 'diff' command do
563 stdout = '\n'.join(lines[x:]) + '\n'
563 stdout = '\n'.join(lines[x:]) + '\n'
564 return GitDiff(stdout)
564 return GitDiff(stdout)
565
565
566 def strip(self, commit_id, branch_name):
566 def strip(self, commit_id, branch_name):
567 commit = self.get_commit(commit_id=commit_id)
567 commit = self.get_commit(commit_id=commit_id)
568 if commit.merge:
568 if commit.merge:
569 raise Exception('Cannot reset to merge commit')
569 raise Exception('Cannot reset to merge commit')
570
570
571 # parent is going to be the new head now
571 # parent is going to be the new head now
572 commit = commit.parents[0]
572 commit = commit.parents[0]
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574
574
575 self.commit_ids = self._get_all_commit_ids()
575 self.commit_ids = self._get_all_commit_ids()
576 self._rebuild_cache(self.commit_ids)
576 self._rebuild_cache(self.commit_ids)
577
577
578 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
578 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
579 if commit_id1 == commit_id2:
579 if commit_id1 == commit_id2:
580 return commit_id1
580 return commit_id1
581
581
582 if self != repo2:
582 if self != repo2:
583 commits = self._remote.get_missing_revs(
583 commits = self._remote.get_missing_revs(
584 commit_id1, commit_id2, repo2.path)
584 commit_id1, commit_id2, repo2.path)
585 if commits:
585 if commits:
586 commit = repo2.get_commit(commits[-1])
586 commit = repo2.get_commit(commits[-1])
587 if commit.parents:
587 if commit.parents:
588 ancestor_id = commit.parents[0].raw_id
588 ancestor_id = commit.parents[0].raw_id
589 else:
589 else:
590 ancestor_id = None
590 ancestor_id = None
591 else:
591 else:
592 # no commits from other repo, ancestor_id is the commit_id2
592 # no commits from other repo, ancestor_id is the commit_id2
593 ancestor_id = commit_id2
593 ancestor_id = commit_id2
594 else:
594 else:
595 output, __ = self.run_git_command(
595 output, __ = self.run_git_command(
596 ['merge-base', commit_id1, commit_id2])
596 ['merge-base', commit_id1, commit_id2])
597 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
597 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
598
598
599 return ancestor_id
599 return ancestor_id
600
600
601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 repo1 = self
602 repo1 = self
603 ancestor_id = None
603 ancestor_id = None
604
604
605 if commit_id1 == commit_id2:
605 if commit_id1 == commit_id2:
606 commits = []
606 commits = []
607 elif repo1 != repo2:
607 elif repo1 != repo2:
608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 repo2.path)
609 repo2.path)
610 commits = [
610 commits = [
611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 for commit_id in reversed(missing_ids)]
612 for commit_id in reversed(missing_ids)]
613 else:
613 else:
614 output, __ = repo1.run_git_command(
614 output, __ = repo1.run_git_command(
615 ['log', '--reverse', '--pretty=format: %H', '-s',
615 ['log', '--reverse', '--pretty=format: %H', '-s',
616 '%s..%s' % (commit_id1, commit_id2)])
616 '%s..%s' % (commit_id1, commit_id2)])
617 commits = [
617 commits = [
618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620
620
621 return commits
621 return commits
622
622
623 @LazyProperty
623 @LazyProperty
624 def in_memory_commit(self):
624 def in_memory_commit(self):
625 """
625 """
626 Returns ``GitInMemoryCommit`` object for this repository.
626 Returns ``GitInMemoryCommit`` object for this repository.
627 """
627 """
628 return GitInMemoryCommit(self)
628 return GitInMemoryCommit(self)
629
629
630 def clone(self, url, update_after_clone=True, bare=False):
630 def clone(self, url, update_after_clone=True, bare=False):
631 """
631 """
632 Tries to clone commits from external location.
632 Tries to clone commits from external location.
633
633
634 :param update_after_clone: If set to ``False``, git won't checkout
634 :param update_after_clone: If set to ``False``, git won't checkout
635 working directory
635 working directory
636 :param bare: If set to ``True``, repository would be cloned into
636 :param bare: If set to ``True``, repository would be cloned into
637 *bare* git repository (no working directory at all).
637 *bare* git repository (no working directory at all).
638 """
638 """
639 # init_bare and init expect empty dir created to proceed
639 # init_bare and init expect empty dir created to proceed
640 if not os.path.exists(self.path):
640 if not os.path.exists(self.path):
641 os.mkdir(self.path)
641 os.mkdir(self.path)
642
642
643 if bare:
643 if bare:
644 self._remote.init_bare()
644 self._remote.init_bare()
645 else:
645 else:
646 self._remote.init()
646 self._remote.init()
647
647
648 deferred = '^{}'
648 deferred = '^{}'
649 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
649 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
650
650
651 return self._remote.clone(
651 return self._remote.clone(
652 url, deferred, valid_refs, update_after_clone)
652 url, deferred, valid_refs, update_after_clone)
653
653
654 def pull(self, url, commit_ids=None):
654 def pull(self, url, commit_ids=None):
655 """
655 """
656 Tries to pull changes from external location. We use fetch here since
656 Tries to pull changes from external location. We use fetch here since
657 pull in get does merges and we want to be compatible with hg backend so
657 pull in get does merges and we want to be compatible with hg backend so
658 pull == fetch in this case
658 pull == fetch in this case
659 """
659 """
660 self.fetch(url, commit_ids=commit_ids)
660 self.fetch(url, commit_ids=commit_ids)
661
661
662 def fetch(self, url, commit_ids=None):
662 def fetch(self, url, commit_ids=None):
663 """
663 """
664 Tries to fetch changes from external location.
664 Tries to fetch changes from external location.
665 """
665 """
666 refs = None
666 refs = None
667
667
668 if commit_ids is not None:
668 if commit_ids is not None:
669 remote_refs = self._remote.get_remote_refs(url)
669 remote_refs = self._remote.get_remote_refs(url)
670 refs = [
670 refs = [
671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
672 self._remote.fetch(url, refs=refs)
672 self._remote.fetch(url, refs=refs)
673
673
674 def push(self, url):
674 def push(self, url):
675 refs = None
675 refs = None
676 self._remote.sync_push(url, refs=refs)
676 self._remote.sync_push(url, refs=refs)
677
677
678 def set_refs(self, ref_name, commit_id):
678 def set_refs(self, ref_name, commit_id):
679 self._remote.set_refs(ref_name, commit_id)
679 self._remote.set_refs(ref_name, commit_id)
680
680
681 def remove_ref(self, ref_name):
681 def remove_ref(self, ref_name):
682 self._remote.remove_ref(ref_name)
682 self._remote.remove_ref(ref_name)
683
683
684 def _update_server_info(self):
684 def _update_server_info(self):
685 """
685 """
686 runs gits update-server-info command in this repo instance
686 runs gits update-server-info command in this repo instance
687 """
687 """
688 self._remote.update_server_info()
688 self._remote.update_server_info()
689
689
690 def _current_branch(self):
690 def _current_branch(self):
691 """
691 """
692 Return the name of the current branch.
692 Return the name of the current branch.
693
693
694 It only works for non bare repositories (i.e. repositories with a
694 It only works for non bare repositories (i.e. repositories with a
695 working copy)
695 working copy)
696 """
696 """
697 if self.bare:
697 if self.bare:
698 raise RepositoryError('Bare git repos do not have active branches')
698 raise RepositoryError('Bare git repos do not have active branches')
699
699
700 if self.is_empty():
700 if self.is_empty():
701 return None
701 return None
702
702
703 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
703 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
704 return stdout.strip()
704 return stdout.strip()
705
705
706 def _checkout(self, branch_name, create=False, force=False):
706 def _checkout(self, branch_name, create=False, force=False):
707 """
707 """
708 Checkout a branch in the working directory.
708 Checkout a branch in the working directory.
709
709
710 It tries to create the branch if create is True, failing if the branch
710 It tries to create the branch if create is True, failing if the branch
711 already exists.
711 already exists.
712
712
713 It only works for non bare repositories (i.e. repositories with a
713 It only works for non bare repositories (i.e. repositories with a
714 working copy)
714 working copy)
715 """
715 """
716 if self.bare:
716 if self.bare:
717 raise RepositoryError('Cannot checkout branches in a bare git repo')
717 raise RepositoryError('Cannot checkout branches in a bare git repo')
718
718
719 cmd = ['checkout']
719 cmd = ['checkout']
720 if force:
720 if force:
721 cmd.append('-f')
721 cmd.append('-f')
722 if create:
722 if create:
723 cmd.append('-b')
723 cmd.append('-b')
724 cmd.append(branch_name)
724 cmd.append(branch_name)
725 self.run_git_command(cmd, fail_on_stderr=False)
725 self.run_git_command(cmd, fail_on_stderr=False)
726
726
727 def _identify(self):
727 def _identify(self):
728 """
728 """
729 Return the current state of the working directory.
729 Return the current state of the working directory.
730 """
730 """
731 if self.bare:
731 if self.bare:
732 raise RepositoryError('Bare git repos do not have active branches')
732 raise RepositoryError('Bare git repos do not have active branches')
733
733
734 if self.is_empty():
734 if self.is_empty():
735 return None
735 return None
736
736
737 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
737 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
738 return stdout.strip()
738 return stdout.strip()
739
739
740 def _local_clone(self, clone_path, branch_name, source_branch=None):
740 def _local_clone(self, clone_path, branch_name, source_branch=None):
741 """
741 """
742 Create a local clone of the current repo.
742 Create a local clone of the current repo.
743 """
743 """
744 # N.B.(skreft): the --branch option is required as otherwise the shallow
744 # N.B.(skreft): the --branch option is required as otherwise the shallow
745 # clone will only fetch the active branch.
745 # clone will only fetch the active branch.
746 cmd = ['clone', '--branch', branch_name,
746 cmd = ['clone', '--branch', branch_name,
747 self.path, os.path.abspath(clone_path)]
747 self.path, os.path.abspath(clone_path)]
748
748
749 self.run_git_command(cmd, fail_on_stderr=False)
749 self.run_git_command(cmd, fail_on_stderr=False)
750
750
751 # if we get the different source branch, make sure we also fetch it for
751 # if we get the different source branch, make sure we also fetch it for
752 # merge conditions
752 # merge conditions
753 if source_branch and source_branch != branch_name:
753 if source_branch and source_branch != branch_name:
754 # check if the ref exists.
754 # check if the ref exists.
755 shadow_repo = GitRepository(os.path.abspath(clone_path))
755 shadow_repo = GitRepository(os.path.abspath(clone_path))
756 if shadow_repo.get_remote_ref(source_branch):
756 if shadow_repo.get_remote_ref(source_branch):
757 cmd = ['fetch', self.path, source_branch]
757 cmd = ['fetch', self.path, source_branch]
758 self.run_git_command(cmd, fail_on_stderr=False)
758 self.run_git_command(cmd, fail_on_stderr=False)
759
759
760 def _local_fetch(self, repository_path, branch_name, use_origin=False):
760 def _local_fetch(self, repository_path, branch_name, use_origin=False):
761 """
761 """
762 Fetch a branch from a local repository.
762 Fetch a branch from a local repository.
763 """
763 """
764 repository_path = os.path.abspath(repository_path)
764 repository_path = os.path.abspath(repository_path)
765 if repository_path == self.path:
765 if repository_path == self.path:
766 raise ValueError('Cannot fetch from the same repository')
766 raise ValueError('Cannot fetch from the same repository')
767
767
768 if use_origin:
768 if use_origin:
769 branch_name = '+{branch}:refs/heads/{branch}'.format(
769 branch_name = '+{branch}:refs/heads/{branch}'.format(
770 branch=branch_name)
770 branch=branch_name)
771
771
772 cmd = ['fetch', '--no-tags', '--update-head-ok',
772 cmd = ['fetch', '--no-tags', '--update-head-ok',
773 repository_path, branch_name]
773 repository_path, branch_name]
774 self.run_git_command(cmd, fail_on_stderr=False)
774 self.run_git_command(cmd, fail_on_stderr=False)
775
775
776 def _local_reset(self, branch_name):
776 def _local_reset(self, branch_name):
777 branch_name = '{}'.format(branch_name)
777 branch_name = '{}'.format(branch_name)
778 cmd = ['reset', '--hard', branch_name]
778 cmd = ['reset', '--hard', branch_name]
779 self.run_git_command(cmd, fail_on_stderr=False)
779 self.run_git_command(cmd, fail_on_stderr=False)
780
780
781 def _last_fetch_heads(self):
781 def _last_fetch_heads(self):
782 """
782 """
783 Return the last fetched heads that need merging.
783 Return the last fetched heads that need merging.
784
784
785 The algorithm is defined at
785 The algorithm is defined at
786 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
786 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
787 """
787 """
788 if not self.bare:
788 if not self.bare:
789 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
789 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
790 else:
790 else:
791 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
791 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
792
792
793 heads = []
793 heads = []
794 with open(fetch_heads_path) as f:
794 with open(fetch_heads_path) as f:
795 for line in f:
795 for line in f:
796 if ' not-for-merge ' in line:
796 if ' not-for-merge ' in line:
797 continue
797 continue
798 line = re.sub('\t.*', '', line, flags=re.DOTALL)
798 line = re.sub('\t.*', '', line, flags=re.DOTALL)
799 heads.append(line)
799 heads.append(line)
800
800
801 return heads
801 return heads
802
802
803 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
803 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
804 return GitRepository(shadow_repository_path)
804 return GitRepository(shadow_repository_path)
805
805
806 def _local_pull(self, repository_path, branch_name, ff_only=True):
806 def _local_pull(self, repository_path, branch_name, ff_only=True):
807 """
807 """
808 Pull a branch from a local repository.
808 Pull a branch from a local repository.
809 """
809 """
810 if self.bare:
810 if self.bare:
811 raise RepositoryError('Cannot pull into a bare git repository')
811 raise RepositoryError('Cannot pull into a bare git repository')
812 # N.B.(skreft): The --ff-only option is to make sure this is a
812 # N.B.(skreft): The --ff-only option is to make sure this is a
813 # fast-forward (i.e., we are only pulling new changes and there are no
813 # fast-forward (i.e., we are only pulling new changes and there are no
814 # conflicts with our current branch)
814 # conflicts with our current branch)
815 # Additionally, that option needs to go before --no-tags, otherwise git
815 # Additionally, that option needs to go before --no-tags, otherwise git
816 # pull complains about it being an unknown flag.
816 # pull complains about it being an unknown flag.
817 cmd = ['pull']
817 cmd = ['pull']
818 if ff_only:
818 if ff_only:
819 cmd.append('--ff-only')
819 cmd.append('--ff-only')
820 cmd.extend(['--no-tags', repository_path, branch_name])
820 cmd.extend(['--no-tags', repository_path, branch_name])
821 self.run_git_command(cmd, fail_on_stderr=False)
821 self.run_git_command(cmd, fail_on_stderr=False)
822
822
823 def _local_merge(self, merge_message, user_name, user_email, heads):
823 def _local_merge(self, merge_message, user_name, user_email, heads):
824 """
824 """
825 Merge the given head into the checked out branch.
825 Merge the given head into the checked out branch.
826
826
827 It will force a merge commit.
827 It will force a merge commit.
828
828
829 Currently it raises an error if the repo is empty, as it is not possible
829 Currently it raises an error if the repo is empty, as it is not possible
830 to create a merge commit in an empty repo.
830 to create a merge commit in an empty repo.
831
831
832 :param merge_message: The message to use for the merge commit.
832 :param merge_message: The message to use for the merge commit.
833 :param heads: the heads to merge.
833 :param heads: the heads to merge.
834 """
834 """
835 if self.bare:
835 if self.bare:
836 raise RepositoryError('Cannot merge into a bare git repository')
836 raise RepositoryError('Cannot merge into a bare git repository')
837
837
838 if not heads:
838 if not heads:
839 return
839 return
840
840
841 if self.is_empty():
841 if self.is_empty():
842 # TODO(skreft): do somehting more robust in this case.
842 # TODO(skreft): do somehting more robust in this case.
843 raise RepositoryError(
843 raise RepositoryError(
844 'Do not know how to merge into empty repositories yet')
844 'Do not know how to merge into empty repositories yet')
845
845
846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
847 # commit message. We also specify the user who is doing the merge.
847 # commit message. We also specify the user who is doing the merge.
848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
849 '-c', 'user.email=%s' % safe_str(user_email),
849 '-c', 'user.email=%s' % safe_str(user_email),
850 'merge', '--no-ff', '-m', safe_str(merge_message)]
850 'merge', '--no-ff', '-m', safe_str(merge_message)]
851 cmd.extend(heads)
851 cmd.extend(heads)
852 try:
852 try:
853 output = self.run_git_command(cmd, fail_on_stderr=False)
853 output = self.run_git_command(cmd, fail_on_stderr=False)
854 except RepositoryError:
854 except RepositoryError:
855 # Cleanup any merge leftovers
855 # Cleanup any merge leftovers
856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
857 raise
857 raise
858
858
859 def _local_push(
859 def _local_push(
860 self, source_branch, repository_path, target_branch,
860 self, source_branch, repository_path, target_branch,
861 enable_hooks=False, rc_scm_data=None):
861 enable_hooks=False, rc_scm_data=None):
862 """
862 """
863 Push the source_branch to the given repository and target_branch.
863 Push the source_branch to the given repository and target_branch.
864
864
865 Currently it if the target_branch is not master and the target repo is
865 Currently it if the target_branch is not master and the target repo is
866 empty, the push will work, but then GitRepository won't be able to find
866 empty, the push will work, but then GitRepository won't be able to find
867 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
867 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
868 pointing to master, which does not exist).
868 pointing to master, which does not exist).
869
869
870 It does not run the hooks in the target repo.
870 It does not run the hooks in the target repo.
871 """
871 """
872 # TODO(skreft): deal with the case in which the target repo is empty,
872 # TODO(skreft): deal with the case in which the target repo is empty,
873 # and the target_branch is not master.
873 # and the target_branch is not master.
874 target_repo = GitRepository(repository_path)
874 target_repo = GitRepository(repository_path)
875 if (not target_repo.bare and
875 if (not target_repo.bare and
876 target_repo._current_branch() == target_branch):
876 target_repo._current_branch() == target_branch):
877 # Git prevents pushing to the checked out branch, so simulate it by
877 # Git prevents pushing to the checked out branch, so simulate it by
878 # pulling into the target repository.
878 # pulling into the target repository.
879 target_repo._local_pull(self.path, source_branch)
879 target_repo._local_pull(self.path, source_branch)
880 else:
880 else:
881 cmd = ['push', os.path.abspath(repository_path),
881 cmd = ['push', os.path.abspath(repository_path),
882 '%s:%s' % (source_branch, target_branch)]
882 '%s:%s' % (source_branch, target_branch)]
883 gitenv = {}
883 gitenv = {}
884 if rc_scm_data:
884 if rc_scm_data:
885 gitenv.update({'RC_SCM_DATA': rc_scm_data})
885 gitenv.update({'RC_SCM_DATA': rc_scm_data})
886
886
887 if not enable_hooks:
887 if not enable_hooks:
888 gitenv['RC_SKIP_HOOKS'] = '1'
888 gitenv['RC_SKIP_HOOKS'] = '1'
889 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
889 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
890
890
891 def _get_new_pr_branch(self, source_branch, target_branch):
891 def _get_new_pr_branch(self, source_branch, target_branch):
892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
893 pr_branches = []
893 pr_branches = []
894 for branch in self.branches:
894 for branch in self.branches:
895 if branch.startswith(prefix):
895 if branch.startswith(prefix):
896 pr_branches.append(int(branch[len(prefix):]))
896 pr_branches.append(int(branch[len(prefix):]))
897
897
898 if not pr_branches:
898 if not pr_branches:
899 branch_id = 0
899 branch_id = 0
900 else:
900 else:
901 branch_id = max(pr_branches) + 1
901 branch_id = max(pr_branches) + 1
902
902
903 return '%s%d' % (prefix, branch_id)
903 return '%s%d' % (prefix, branch_id)
904
904
905 def _merge_repo(self, shadow_repository_path, target_ref,
905 def _maybe_prepare_merge_workspace(
906 self, repo_id, workspace_id, target_ref, source_ref):
907 shadow_repository_path = self._get_shadow_repository_path(
908 repo_id, workspace_id)
909 if not os.path.exists(shadow_repository_path):
910 self._local_clone(
911 shadow_repository_path, target_ref.name, source_ref.name)
912 log.debug(
913 'Prepared shadow repository in %s', shadow_repository_path)
914
915 return shadow_repository_path
916
917 def _merge_repo(self, repo_id, workspace_id, target_ref,
906 source_repo, source_ref, merge_message,
918 source_repo, source_ref, merge_message,
907 merger_name, merger_email, dry_run=False,
919 merger_name, merger_email, dry_run=False,
908 use_rebase=False, close_branch=False):
920 use_rebase=False, close_branch=False):
909 if target_ref.commit_id != self.branches[target_ref.name]:
921 if target_ref.commit_id != self.branches[target_ref.name]:
910 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
911 target_ref.commit_id, self.branches[target_ref.name])
923 target_ref.commit_id, self.branches[target_ref.name])
912 return MergeResponse(
924 return MergeResponse(
913 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
914
926
915 shadow_repo = GitRepository(shadow_repository_path)
927 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 repo_id, workspace_id, target_ref, source_ref)
929 shadow_repo = self._get_shadow_instance(shadow_repository_path)
930
916 # checkout source, if it's different. Otherwise we could not
931 # checkout source, if it's different. Otherwise we could not
917 # fetch proper commits for merge testing
932 # fetch proper commits for merge testing
918 if source_ref.name != target_ref.name:
933 if source_ref.name != target_ref.name:
919 if shadow_repo.get_remote_ref(source_ref.name):
934 if shadow_repo.get_remote_ref(source_ref.name):
920 shadow_repo._checkout(source_ref.name, force=True)
935 shadow_repo._checkout(source_ref.name, force=True)
921
936
922 # checkout target, and fetch changes
937 # checkout target, and fetch changes
923 shadow_repo._checkout(target_ref.name, force=True)
938 shadow_repo._checkout(target_ref.name, force=True)
924
939
925 # fetch/reset pull the target, in case it is changed
940 # fetch/reset pull the target, in case it is changed
926 # this handles even force changes
941 # this handles even force changes
927 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
942 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
928 shadow_repo._local_reset(target_ref.name)
943 shadow_repo._local_reset(target_ref.name)
929
944
930 # Need to reload repo to invalidate the cache, or otherwise we cannot
945 # Need to reload repo to invalidate the cache, or otherwise we cannot
931 # retrieve the last target commit.
946 # retrieve the last target commit.
932 shadow_repo = GitRepository(shadow_repository_path)
947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
933 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
948 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
934 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
949 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
935 target_ref, target_ref.commit_id,
950 target_ref, target_ref.commit_id,
936 shadow_repo.branches[target_ref.name])
951 shadow_repo.branches[target_ref.name])
937 return MergeResponse(
952 return MergeResponse(
938 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
953 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
939
954
940 # calculate new branch
955 # calculate new branch
941 pr_branch = shadow_repo._get_new_pr_branch(
956 pr_branch = shadow_repo._get_new_pr_branch(
942 source_ref.name, target_ref.name)
957 source_ref.name, target_ref.name)
943 log.debug('using pull-request merge branch: `%s`', pr_branch)
958 log.debug('using pull-request merge branch: `%s`', pr_branch)
944 # checkout to temp branch, and fetch changes
959 # checkout to temp branch, and fetch changes
945 shadow_repo._checkout(pr_branch, create=True)
960 shadow_repo._checkout(pr_branch, create=True)
946 try:
961 try:
947 shadow_repo._local_fetch(source_repo.path, source_ref.name)
962 shadow_repo._local_fetch(source_repo.path, source_ref.name)
948 except RepositoryError:
963 except RepositoryError:
949 log.exception('Failure when doing local fetch on git shadow repo')
964 log.exception('Failure when doing local fetch on git shadow repo')
950 return MergeResponse(
965 return MergeResponse(
951 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
966 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
952
967
953 merge_ref = None
968 merge_ref = None
954 merge_failure_reason = MergeFailureReason.NONE
969 merge_failure_reason = MergeFailureReason.NONE
955 try:
970 try:
956 shadow_repo._local_merge(merge_message, merger_name, merger_email,
971 shadow_repo._local_merge(merge_message, merger_name, merger_email,
957 [source_ref.commit_id])
972 [source_ref.commit_id])
958 merge_possible = True
973 merge_possible = True
959
974
960 # Need to reload repo to invalidate the cache, or otherwise we
975 # Need to reload repo to invalidate the cache, or otherwise we
961 # cannot retrieve the merge commit.
976 # cannot retrieve the merge commit.
962 shadow_repo = GitRepository(shadow_repository_path)
977 shadow_repo = GitRepository(shadow_repository_path)
963 merge_commit_id = shadow_repo.branches[pr_branch]
978 merge_commit_id = shadow_repo.branches[pr_branch]
964
979
965 # Set a reference pointing to the merge commit. This reference may
980 # Set a reference pointing to the merge commit. This reference may
966 # be used to easily identify the last successful merge commit in
981 # be used to easily identify the last successful merge commit in
967 # the shadow repository.
982 # the shadow repository.
968 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
983 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
969 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
984 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
970 except RepositoryError:
985 except RepositoryError:
971 log.exception('Failure when doing local merge on git shadow repo')
986 log.exception('Failure when doing local merge on git shadow repo')
972 merge_possible = False
987 merge_possible = False
973 merge_failure_reason = MergeFailureReason.MERGE_FAILED
988 merge_failure_reason = MergeFailureReason.MERGE_FAILED
974
989
975 if merge_possible and not dry_run:
990 if merge_possible and not dry_run:
976 try:
991 try:
977 shadow_repo._local_push(
992 shadow_repo._local_push(
978 pr_branch, self.path, target_ref.name, enable_hooks=True,
993 pr_branch, self.path, target_ref.name, enable_hooks=True,
979 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
994 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
980 merge_succeeded = True
995 merge_succeeded = True
981 except RepositoryError:
996 except RepositoryError:
982 log.exception(
997 log.exception(
983 'Failure when doing local push on git shadow repo')
998 'Failure when doing local push on git shadow repo')
984 merge_succeeded = False
999 merge_succeeded = False
985 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1000 merge_failure_reason = MergeFailureReason.PUSH_FAILED
986 else:
1001 else:
987 merge_succeeded = False
1002 merge_succeeded = False
988
1003
989 return MergeResponse(
1004 return MergeResponse(
990 merge_possible, merge_succeeded, merge_ref,
1005 merge_possible, merge_succeeded, merge_ref,
991 merge_failure_reason)
1006 merge_failure_reason)
992
993 def _get_shadow_repository_path(self, workspace_id):
994 # The name of the shadow repository must start with '.', so it is
995 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
996 return os.path.join(
997 os.path.dirname(self.path),
998 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
999
1000 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
1001 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
1002 if not os.path.exists(shadow_repository_path):
1003 self._local_clone(
1004 shadow_repository_path, target_ref.name, source_ref.name)
1005
1006 return shadow_repository_path
@@ -1,918 +1,915 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 update_after_clone=False, with_wire=None):
60 update_after_clone=False, with_wire=None):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param update_after_clone=False: sets update of working copy after
70 :param update_after_clone=False: sets update of working copy after
71 making a clone
71 making a clone
72 """
72 """
73
73
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 # mercurial since 4.4.X requires certain configuration to be present
75 # mercurial since 4.4.X requires certain configuration to be present
76 # because sometimes we init the repos with config we need to meet
76 # because sometimes we init the repos with config we need to meet
77 # special requirements
77 # special requirements
78 self.config = config if config else self.get_default_config(
78 self.config = config if config else self.get_default_config(
79 default=[('extensions', 'largefiles', '1')])
79 default=[('extensions', 'largefiles', '1')])
80
80
81 self._remote = connection.Hg(
81 self._remote = connection.Hg(
82 self.path, self.config, with_wire=with_wire)
82 self.path, self.config, with_wire=with_wire)
83
83
84 self._init_repo(create, src_url, update_after_clone)
84 self._init_repo(create, src_url, update_after_clone)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def commit_ids(self):
90 def commit_ids(self):
91 """
91 """
92 Returns list of commit ids, in ascending order. Being lazy
92 Returns list of commit ids, in ascending order. Being lazy
93 attribute allows external tools to inject shas from cache.
93 attribute allows external tools to inject shas from cache.
94 """
94 """
95 commit_ids = self._get_all_commit_ids()
95 commit_ids = self._get_all_commit_ids()
96 self._rebuild_cache(commit_ids)
96 self._rebuild_cache(commit_ids)
97 return commit_ids
97 return commit_ids
98
98
99 def _rebuild_cache(self, commit_ids):
99 def _rebuild_cache(self, commit_ids):
100 self._commit_ids = dict((commit_id, index)
100 self._commit_ids = dict((commit_id, index)
101 for index, commit_id in enumerate(commit_ids))
101 for index, commit_id in enumerate(commit_ids))
102
102
103 @LazyProperty
103 @LazyProperty
104 def branches(self):
104 def branches(self):
105 return self._get_branches()
105 return self._get_branches()
106
106
107 @LazyProperty
107 @LazyProperty
108 def branches_closed(self):
108 def branches_closed(self):
109 return self._get_branches(active=False, closed=True)
109 return self._get_branches(active=False, closed=True)
110
110
111 @LazyProperty
111 @LazyProperty
112 def branches_all(self):
112 def branches_all(self):
113 all_branches = {}
113 all_branches = {}
114 all_branches.update(self.branches)
114 all_branches.update(self.branches)
115 all_branches.update(self.branches_closed)
115 all_branches.update(self.branches_closed)
116 return all_branches
116 return all_branches
117
117
118 def _get_branches(self, active=True, closed=False):
118 def _get_branches(self, active=True, closed=False):
119 """
119 """
120 Gets branches for this repository
120 Gets branches for this repository
121 Returns only not closed active branches by default
121 Returns only not closed active branches by default
122
122
123 :param active: return also active branches
123 :param active: return also active branches
124 :param closed: return also closed branches
124 :param closed: return also closed branches
125
125
126 """
126 """
127 if self.is_empty():
127 if self.is_empty():
128 return {}
128 return {}
129
129
130 def get_name(ctx):
130 def get_name(ctx):
131 return ctx[0]
131 return ctx[0]
132
132
133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 self._remote.branches(active, closed).items()]
134 self._remote.branches(active, closed).items()]
135
135
136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137
137
138 @LazyProperty
138 @LazyProperty
139 def tags(self):
139 def tags(self):
140 """
140 """
141 Gets tags for this repository
141 Gets tags for this repository
142 """
142 """
143 return self._get_tags()
143 return self._get_tags()
144
144
145 def _get_tags(self):
145 def _get_tags(self):
146 if self.is_empty():
146 if self.is_empty():
147 return {}
147 return {}
148
148
149 def get_name(ctx):
149 def get_name(ctx):
150 return ctx[0]
150 return ctx[0]
151
151
152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 self._remote.tags().items()]
153 self._remote.tags().items()]
154
154
155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156
156
157 def tag(self, name, user, commit_id=None, message=None, date=None,
157 def tag(self, name, user, commit_id=None, message=None, date=None,
158 **kwargs):
158 **kwargs):
159 """
159 """
160 Creates and returns a tag for the given ``commit_id``.
160 Creates and returns a tag for the given ``commit_id``.
161
161
162 :param name: name for new tag
162 :param name: name for new tag
163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 :param commit_id: commit id for which new tag would be created
164 :param commit_id: commit id for which new tag would be created
165 :param message: message of the tag's commit
165 :param message: message of the tag's commit
166 :param date: date of tag's commit
166 :param date: date of tag's commit
167
167
168 :raises TagAlreadyExistError: if tag with same name already exists
168 :raises TagAlreadyExistError: if tag with same name already exists
169 """
169 """
170 if name in self.tags:
170 if name in self.tags:
171 raise TagAlreadyExistError("Tag %s already exists" % name)
171 raise TagAlreadyExistError("Tag %s already exists" % name)
172 commit = self.get_commit(commit_id=commit_id)
172 commit = self.get_commit(commit_id=commit_id)
173 local = kwargs.setdefault('local', False)
173 local = kwargs.setdefault('local', False)
174
174
175 if message is None:
175 if message is None:
176 message = "Added tag %s for commit %s" % (name, commit.short_id)
176 message = "Added tag %s for commit %s" % (name, commit.short_id)
177
177
178 date, tz = date_to_timestamp_plus_offset(date)
178 date, tz = date_to_timestamp_plus_offset(date)
179
179
180 self._remote.tag(
180 self._remote.tag(
181 name, commit.raw_id, message, local, user, date, tz)
181 name, commit.raw_id, message, local, user, date, tz)
182 self._remote.invalidate_vcs_cache()
182 self._remote.invalidate_vcs_cache()
183
183
184 # Reinitialize tags
184 # Reinitialize tags
185 self.tags = self._get_tags()
185 self.tags = self._get_tags()
186 tag_id = self.tags[name]
186 tag_id = self.tags[name]
187
187
188 return self.get_commit(commit_id=tag_id)
188 return self.get_commit(commit_id=tag_id)
189
189
190 def remove_tag(self, name, user, message=None, date=None):
190 def remove_tag(self, name, user, message=None, date=None):
191 """
191 """
192 Removes tag with the given `name`.
192 Removes tag with the given `name`.
193
193
194 :param name: name of the tag to be removed
194 :param name: name of the tag to be removed
195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 :param message: message of the tag's removal commit
196 :param message: message of the tag's removal commit
197 :param date: date of tag's removal commit
197 :param date: date of tag's removal commit
198
198
199 :raises TagDoesNotExistError: if tag with given name does not exists
199 :raises TagDoesNotExistError: if tag with given name does not exists
200 """
200 """
201 if name not in self.tags:
201 if name not in self.tags:
202 raise TagDoesNotExistError("Tag %s does not exist" % name)
202 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 if message is None:
203 if message is None:
204 message = "Removed tag %s" % name
204 message = "Removed tag %s" % name
205 local = False
205 local = False
206
206
207 date, tz = date_to_timestamp_plus_offset(date)
207 date, tz = date_to_timestamp_plus_offset(date)
208
208
209 self._remote.tag(name, nullid, message, local, user, date, tz)
209 self._remote.tag(name, nullid, message, local, user, date, tz)
210 self._remote.invalidate_vcs_cache()
210 self._remote.invalidate_vcs_cache()
211 self.tags = self._get_tags()
211 self.tags = self._get_tags()
212
212
213 @LazyProperty
213 @LazyProperty
214 def bookmarks(self):
214 def bookmarks(self):
215 """
215 """
216 Gets bookmarks for this repository
216 Gets bookmarks for this repository
217 """
217 """
218 return self._get_bookmarks()
218 return self._get_bookmarks()
219
219
220 def _get_bookmarks(self):
220 def _get_bookmarks(self):
221 if self.is_empty():
221 if self.is_empty():
222 return {}
222 return {}
223
223
224 def get_name(ctx):
224 def get_name(ctx):
225 return ctx[0]
225 return ctx[0]
226
226
227 _bookmarks = [
227 _bookmarks = [
228 (safe_unicode(n), hexlify(h)) for n, h in
228 (safe_unicode(n), hexlify(h)) for n, h in
229 self._remote.bookmarks().items()]
229 self._remote.bookmarks().items()]
230
230
231 return OrderedDict(sorted(_bookmarks, key=get_name))
231 return OrderedDict(sorted(_bookmarks, key=get_name))
232
232
233 def _get_all_commit_ids(self):
233 def _get_all_commit_ids(self):
234 return self._remote.get_all_commit_ids('visible')
234 return self._remote.get_all_commit_ids('visible')
235
235
236 def get_diff(
236 def get_diff(
237 self, commit1, commit2, path='', ignore_whitespace=False,
237 self, commit1, commit2, path='', ignore_whitespace=False,
238 context=3, path1=None):
238 context=3, path1=None):
239 """
239 """
240 Returns (git like) *diff*, as plain text. Shows changes introduced by
240 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 `commit2` since `commit1`.
241 `commit2` since `commit1`.
242
242
243 :param commit1: Entry point from which diff is shown. Can be
243 :param commit1: Entry point from which diff is shown. Can be
244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 the changes since empty state of the repository until `commit2`
245 the changes since empty state of the repository until `commit2`
246 :param commit2: Until which commit changes should be shown.
246 :param commit2: Until which commit changes should be shown.
247 :param ignore_whitespace: If set to ``True``, would not show whitespace
247 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 changes. Defaults to ``False``.
248 changes. Defaults to ``False``.
249 :param context: How many lines before/after changed lines should be
249 :param context: How many lines before/after changed lines should be
250 shown. Defaults to ``3``.
250 shown. Defaults to ``3``.
251 """
251 """
252 self._validate_diff_commits(commit1, commit2)
252 self._validate_diff_commits(commit1, commit2)
253 if path1 is not None and path1 != path:
253 if path1 is not None and path1 != path:
254 raise ValueError("Diff of two different paths not supported.")
254 raise ValueError("Diff of two different paths not supported.")
255
255
256 if path:
256 if path:
257 file_filter = [self.path, path]
257 file_filter = [self.path, path]
258 else:
258 else:
259 file_filter = None
259 file_filter = None
260
260
261 diff = self._remote.diff(
261 diff = self._remote.diff(
262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 opt_git=True, opt_ignorews=ignore_whitespace,
263 opt_git=True, opt_ignorews=ignore_whitespace,
264 context=context)
264 context=context)
265 return MercurialDiff(diff)
265 return MercurialDiff(diff)
266
266
267 def strip(self, commit_id, branch=None):
267 def strip(self, commit_id, branch=None):
268 self._remote.strip(commit_id, update=False, backup="none")
268 self._remote.strip(commit_id, update=False, backup="none")
269
269
270 self._remote.invalidate_vcs_cache()
270 self._remote.invalidate_vcs_cache()
271 self.commit_ids = self._get_all_commit_ids()
271 self.commit_ids = self._get_all_commit_ids()
272 self._rebuild_cache(self.commit_ids)
272 self._rebuild_cache(self.commit_ids)
273
273
274 def verify(self):
274 def verify(self):
275 verify = self._remote.verify()
275 verify = self._remote.verify()
276
276
277 self._remote.invalidate_vcs_cache()
277 self._remote.invalidate_vcs_cache()
278 return verify
278 return verify
279
279
280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 if commit_id1 == commit_id2:
281 if commit_id1 == commit_id2:
282 return commit_id1
282 return commit_id1
283
283
284 ancestors = self._remote.revs_from_revspec(
284 ancestors = self._remote.revs_from_revspec(
285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 other_path=repo2.path)
286 other_path=repo2.path)
287 return repo2[ancestors[0]].raw_id if ancestors else None
287 return repo2[ancestors[0]].raw_id if ancestors else None
288
288
289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 if commit_id1 == commit_id2:
290 if commit_id1 == commit_id2:
291 commits = []
291 commits = []
292 else:
292 else:
293 if merge:
293 if merge:
294 indexes = self._remote.revs_from_revspec(
294 indexes = self._remote.revs_from_revspec(
295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 else:
297 else:
298 indexes = self._remote.revs_from_revspec(
298 indexes = self._remote.revs_from_revspec(
299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 commit_id1, other_path=repo2.path)
300 commit_id1, other_path=repo2.path)
301
301
302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 for idx in indexes]
303 for idx in indexes]
304
304
305 return commits
305 return commits
306
306
307 @staticmethod
307 @staticmethod
308 def check_url(url, config):
308 def check_url(url, config):
309 """
309 """
310 Function will check given url and try to verify if it's a valid
310 Function will check given url and try to verify if it's a valid
311 link. Sometimes it may happened that mercurial will issue basic
311 link. Sometimes it may happened that mercurial will issue basic
312 auth request that can cause whole API to hang when used from python
312 auth request that can cause whole API to hang when used from python
313 or other external calls.
313 or other external calls.
314
314
315 On failures it'll raise urllib2.HTTPError, exception is also thrown
315 On failures it'll raise urllib2.HTTPError, exception is also thrown
316 when the return code is non 200
316 when the return code is non 200
317 """
317 """
318 # check first if it's not an local url
318 # check first if it's not an local url
319 if os.path.isdir(url) or url.startswith('file:'):
319 if os.path.isdir(url) or url.startswith('file:'):
320 return True
320 return True
321
321
322 # Request the _remote to verify the url
322 # Request the _remote to verify the url
323 return connection.Hg.check_url(url, config.serialize())
323 return connection.Hg.check_url(url, config.serialize())
324
324
325 @staticmethod
325 @staticmethod
326 def is_valid_repository(path):
326 def is_valid_repository(path):
327 return os.path.isdir(os.path.join(path, '.hg'))
327 return os.path.isdir(os.path.join(path, '.hg'))
328
328
329 def _init_repo(self, create, src_url=None, update_after_clone=False):
329 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 """
330 """
331 Function will check for mercurial repository in given path. If there
331 Function will check for mercurial repository in given path. If there
332 is no repository in that path it will raise an exception unless
332 is no repository in that path it will raise an exception unless
333 `create` parameter is set to True - in that case repository would
333 `create` parameter is set to True - in that case repository would
334 be created.
334 be created.
335
335
336 If `src_url` is given, would try to clone repository from the
336 If `src_url` is given, would try to clone repository from the
337 location at given clone_point. Additionally it'll make update to
337 location at given clone_point. Additionally it'll make update to
338 working copy accordingly to `update_after_clone` flag.
338 working copy accordingly to `update_after_clone` flag.
339 """
339 """
340 if create and os.path.exists(self.path):
340 if create and os.path.exists(self.path):
341 raise RepositoryError(
341 raise RepositoryError(
342 "Cannot create repository at %s, location already exist"
342 "Cannot create repository at %s, location already exist"
343 % self.path)
343 % self.path)
344
344
345 if src_url:
345 if src_url:
346 url = str(self._get_url(src_url))
346 url = str(self._get_url(src_url))
347 MercurialRepository.check_url(url, self.config)
347 MercurialRepository.check_url(url, self.config)
348
348
349 self._remote.clone(url, self.path, update_after_clone)
349 self._remote.clone(url, self.path, update_after_clone)
350
350
351 # Don't try to create if we've already cloned repo
351 # Don't try to create if we've already cloned repo
352 create = False
352 create = False
353
353
354 if create:
354 if create:
355 os.makedirs(self.path, mode=0755)
355 os.makedirs(self.path, mode=0755)
356
356
357 self._remote.localrepository(create)
357 self._remote.localrepository(create)
358
358
359 @LazyProperty
359 @LazyProperty
360 def in_memory_commit(self):
360 def in_memory_commit(self):
361 return MercurialInMemoryCommit(self)
361 return MercurialInMemoryCommit(self)
362
362
363 @LazyProperty
363 @LazyProperty
364 def description(self):
364 def description(self):
365 description = self._remote.get_config_value(
365 description = self._remote.get_config_value(
366 'web', 'description', untrusted=True)
366 'web', 'description', untrusted=True)
367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368
368
369 @LazyProperty
369 @LazyProperty
370 def contact(self):
370 def contact(self):
371 contact = (
371 contact = (
372 self._remote.get_config_value("web", "contact") or
372 self._remote.get_config_value("web", "contact") or
373 self._remote.get_config_value("ui", "username"))
373 self._remote.get_config_value("ui", "username"))
374 return safe_unicode(contact or self.DEFAULT_CONTACT)
374 return safe_unicode(contact or self.DEFAULT_CONTACT)
375
375
376 @LazyProperty
376 @LazyProperty
377 def last_change(self):
377 def last_change(self):
378 """
378 """
379 Returns last change made on this repository as
379 Returns last change made on this repository as
380 `datetime.datetime` object.
380 `datetime.datetime` object.
381 """
381 """
382 try:
382 try:
383 return self.get_commit().date
383 return self.get_commit().date
384 except RepositoryError:
384 except RepositoryError:
385 tzoffset = makedate()[1]
385 tzoffset = makedate()[1]
386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387
387
388 def _get_fs_mtime(self):
388 def _get_fs_mtime(self):
389 # fallback to filesystem
389 # fallback to filesystem
390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 st_path = os.path.join(self.path, '.hg', "store")
391 st_path = os.path.join(self.path, '.hg', "store")
392 if os.path.exists(cl_path):
392 if os.path.exists(cl_path):
393 return os.stat(cl_path).st_mtime
393 return os.stat(cl_path).st_mtime
394 else:
394 else:
395 return os.stat(st_path).st_mtime
395 return os.stat(st_path).st_mtime
396
396
397 def _get_url(self, url):
397 def _get_url(self, url):
398 """
398 """
399 Returns normalized url. If schema is not given, would fall
399 Returns normalized url. If schema is not given, would fall
400 to filesystem
400 to filesystem
401 (``file:///``) schema.
401 (``file:///``) schema.
402 """
402 """
403 url = url.encode('utf8')
403 url = url.encode('utf8')
404 if url != 'default' and '://' not in url:
404 if url != 'default' and '://' not in url:
405 url = "file:" + urllib.pathname2url(url)
405 url = "file:" + urllib.pathname2url(url)
406 return url
406 return url
407
407
408 def get_hook_location(self):
408 def get_hook_location(self):
409 """
409 """
410 returns absolute path to location where hooks are stored
410 returns absolute path to location where hooks are stored
411 """
411 """
412 return os.path.join(self.path, '.hg', '.hgrc')
412 return os.path.join(self.path, '.hg', '.hgrc')
413
413
414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 """
415 """
416 Returns ``MercurialCommit`` object representing repository's
416 Returns ``MercurialCommit`` object representing repository's
417 commit at the given `commit_id` or `commit_idx`.
417 commit at the given `commit_id` or `commit_idx`.
418 """
418 """
419 if self.is_empty():
419 if self.is_empty():
420 raise EmptyRepositoryError("There are no commits yet")
420 raise EmptyRepositoryError("There are no commits yet")
421
421
422 if commit_id is not None:
422 if commit_id is not None:
423 self._validate_commit_id(commit_id)
423 self._validate_commit_id(commit_id)
424 try:
424 try:
425 idx = self._commit_ids[commit_id]
425 idx = self._commit_ids[commit_id]
426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
427 except KeyError:
428 pass
428 pass
429 elif commit_idx is not None:
429 elif commit_idx is not None:
430 self._validate_commit_idx(commit_idx)
430 self._validate_commit_idx(commit_idx)
431 try:
431 try:
432 id_ = self.commit_ids[commit_idx]
432 id_ = self.commit_ids[commit_idx]
433 if commit_idx < 0:
433 if commit_idx < 0:
434 commit_idx += len(self.commit_ids)
434 commit_idx += len(self.commit_ids)
435 return MercurialCommit(
435 return MercurialCommit(
436 self, id_, commit_idx, pre_load=pre_load)
436 self, id_, commit_idx, pre_load=pre_load)
437 except IndexError:
437 except IndexError:
438 commit_id = commit_idx
438 commit_id = commit_idx
439 else:
439 else:
440 commit_id = "tip"
440 commit_id = "tip"
441
441
442 if isinstance(commit_id, unicode):
442 if isinstance(commit_id, unicode):
443 commit_id = safe_str(commit_id)
443 commit_id = safe_str(commit_id)
444
444
445 try:
445 try:
446 raw_id, idx = self._remote.lookup(commit_id, both=True)
446 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 except CommitDoesNotExistError:
447 except CommitDoesNotExistError:
448 msg = "Commit %s does not exist for %s" % (
448 msg = "Commit %s does not exist for %s" % (
449 commit_id, self)
449 commit_id, self)
450 raise CommitDoesNotExistError(msg)
450 raise CommitDoesNotExistError(msg)
451
451
452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453
453
454 def get_commits(
454 def get_commits(
455 self, start_id=None, end_id=None, start_date=None, end_date=None,
455 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 branch_name=None, show_hidden=False, pre_load=None):
456 branch_name=None, show_hidden=False, pre_load=None):
457 """
457 """
458 Returns generator of ``MercurialCommit`` objects from start to end
458 Returns generator of ``MercurialCommit`` objects from start to end
459 (both are inclusive)
459 (both are inclusive)
460
460
461 :param start_id: None, str(commit_id)
461 :param start_id: None, str(commit_id)
462 :param end_id: None, str(commit_id)
462 :param end_id: None, str(commit_id)
463 :param start_date: if specified, commits with commit date less than
463 :param start_date: if specified, commits with commit date less than
464 ``start_date`` would be filtered out from returned set
464 ``start_date`` would be filtered out from returned set
465 :param end_date: if specified, commits with commit date greater than
465 :param end_date: if specified, commits with commit date greater than
466 ``end_date`` would be filtered out from returned set
466 ``end_date`` would be filtered out from returned set
467 :param branch_name: if specified, commits not reachable from given
467 :param branch_name: if specified, commits not reachable from given
468 branch would be filtered out from returned set
468 branch would be filtered out from returned set
469 :param show_hidden: Show hidden commits such as obsolete or hidden from
469 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 Mercurial evolve
470 Mercurial evolve
471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
472 exist.
472 exist.
473 :raise CommitDoesNotExistError: If commit for given ``start`` or
473 :raise CommitDoesNotExistError: If commit for given ``start`` or
474 ``end`` could not be found.
474 ``end`` could not be found.
475 """
475 """
476 # actually we should check now if it's not an empty repo
476 # actually we should check now if it's not an empty repo
477 branch_ancestors = False
477 branch_ancestors = False
478 if self.is_empty():
478 if self.is_empty():
479 raise EmptyRepositoryError("There are no commits yet")
479 raise EmptyRepositoryError("There are no commits yet")
480 self._validate_branch_name(branch_name)
480 self._validate_branch_name(branch_name)
481
481
482 if start_id is not None:
482 if start_id is not None:
483 self._validate_commit_id(start_id)
483 self._validate_commit_id(start_id)
484 c_start = self.get_commit(commit_id=start_id)
484 c_start = self.get_commit(commit_id=start_id)
485 start_pos = self._commit_ids[c_start.raw_id]
485 start_pos = self._commit_ids[c_start.raw_id]
486 else:
486 else:
487 start_pos = None
487 start_pos = None
488
488
489 if end_id is not None:
489 if end_id is not None:
490 self._validate_commit_id(end_id)
490 self._validate_commit_id(end_id)
491 c_end = self.get_commit(commit_id=end_id)
491 c_end = self.get_commit(commit_id=end_id)
492 end_pos = max(0, self._commit_ids[c_end.raw_id])
492 end_pos = max(0, self._commit_ids[c_end.raw_id])
493 else:
493 else:
494 end_pos = None
494 end_pos = None
495
495
496 if None not in [start_id, end_id] and start_pos > end_pos:
496 if None not in [start_id, end_id] and start_pos > end_pos:
497 raise RepositoryError(
497 raise RepositoryError(
498 "Start commit '%s' cannot be after end commit '%s'" %
498 "Start commit '%s' cannot be after end commit '%s'" %
499 (start_id, end_id))
499 (start_id, end_id))
500
500
501 if end_pos is not None:
501 if end_pos is not None:
502 end_pos += 1
502 end_pos += 1
503
503
504 commit_filter = []
504 commit_filter = []
505
505
506 if branch_name and not branch_ancestors:
506 if branch_name and not branch_ancestors:
507 commit_filter.append('branch("%s")' % (branch_name,))
507 commit_filter.append('branch("%s")' % (branch_name,))
508 elif branch_name and branch_ancestors:
508 elif branch_name and branch_ancestors:
509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
510
510
511 if start_date and not end_date:
511 if start_date and not end_date:
512 commit_filter.append('date(">%s")' % (start_date,))
512 commit_filter.append('date(">%s")' % (start_date,))
513 if end_date and not start_date:
513 if end_date and not start_date:
514 commit_filter.append('date("<%s")' % (end_date,))
514 commit_filter.append('date("<%s")' % (end_date,))
515 if start_date and end_date:
515 if start_date and end_date:
516 commit_filter.append(
516 commit_filter.append(
517 'date(">%s") and date("<%s")' % (start_date, end_date))
517 'date(">%s") and date("<%s")' % (start_date, end_date))
518
518
519 if not show_hidden:
519 if not show_hidden:
520 commit_filter.append('not obsolete()')
520 commit_filter.append('not obsolete()')
521 commit_filter.append('not hidden()')
521 commit_filter.append('not hidden()')
522
522
523 # TODO: johbo: Figure out a simpler way for this solution
523 # TODO: johbo: Figure out a simpler way for this solution
524 collection_generator = CollectionGenerator
524 collection_generator = CollectionGenerator
525 if commit_filter:
525 if commit_filter:
526 commit_filter = ' and '.join(map(safe_str, commit_filter))
526 commit_filter = ' and '.join(map(safe_str, commit_filter))
527 revisions = self._remote.rev_range([commit_filter])
527 revisions = self._remote.rev_range([commit_filter])
528 collection_generator = MercurialIndexBasedCollectionGenerator
528 collection_generator = MercurialIndexBasedCollectionGenerator
529 else:
529 else:
530 revisions = self.commit_ids
530 revisions = self.commit_ids
531
531
532 if start_pos or end_pos:
532 if start_pos or end_pos:
533 revisions = revisions[start_pos:end_pos]
533 revisions = revisions[start_pos:end_pos]
534
534
535 return collection_generator(self, revisions, pre_load=pre_load)
535 return collection_generator(self, revisions, pre_load=pre_load)
536
536
def pull(self, url, commit_ids=None):
    """
    Tries to pull changes from external location.

    :param url: location to pull from; normalized via ``_get_url``.
    :param commit_ids: Optional. Can be set to a list of commit ids
        which shall be pulled from the other repository.
    """
    normalized_url = self._get_url(url)
    self._remote.pull(normalized_url, commit_ids=commit_ids)
    # the vcsserver caches repository state; drop it so the newly
    # pulled commits become visible
    self._remote.invalidate_vcs_cache()
547
547
def push(self, url):
    """Push local changes out to the remote location ``url``.

    The url is normalized via ``_get_url`` before being handed to the
    vcsserver sync-push call.
    """
    self._remote.sync_push(self._get_url(url))
551
551
552 def _local_clone(self, clone_path):
552 def _local_clone(self, clone_path):
553 """
553 """
554 Create a local clone of the current repo.
554 Create a local clone of the current repo.
555 """
555 """
556 self._remote.clone(self.path, clone_path, update_after_clone=True,
556 self._remote.clone(self.path, clone_path, update_after_clone=True,
557 hooks=False)
557 hooks=False)
558
558
def _update(self, revision, clean=False):
    """
    Update the working copy to the specified revision.

    :param revision: commit id / revision to check out.
    :param clean: when True, discard uncommitted local changes
        during the update.
    """
    log.debug('Doing checkout to commit: `%s` for %s', revision, self)
    self._remote.update(revision, clean=clean)
565
565
566 def _identify(self):
566 def _identify(self):
567 """
567 """
568 Return the current state of the working directory.
568 Return the current state of the working directory.
569 """
569 """
570 return self._remote.identify().strip().rstrip('+')
570 return self._remote.identify().strip().rstrip('+')
571
571
572 def _heads(self, branch=None):
572 def _heads(self, branch=None):
573 """
573 """
574 Return the commit ids of the repository heads.
574 Return the commit ids of the repository heads.
575 """
575 """
576 return self._remote.heads(branch=branch).strip().split(' ')
576 return self._remote.heads(branch=branch).strip().split(' ')
577
577
def _ancestor(self, revision1, revision2):
    """
    Return the common ancestor of the two revisions.

    Delegates entirely to the vcsserver; presumably this returns the
    commit id of the greatest common ancestor — confirm against the
    remote's ``ancestor`` implementation.
    """
    return self._remote.ancestor(revision1, revision2)
583
583
584 def _local_push(
584 def _local_push(
585 self, revision, repository_path, push_branches=False,
585 self, revision, repository_path, push_branches=False,
586 enable_hooks=False):
586 enable_hooks=False):
587 """
587 """
588 Push the given revision to the specified repository.
588 Push the given revision to the specified repository.
589
589
590 :param push_branches: allow to create branches in the target repo.
590 :param push_branches: allow to create branches in the target repo.
591 """
591 """
592 self._remote.push(
592 self._remote.push(
593 [revision], repository_path, hooks=enable_hooks,
593 [revision], repository_path, hooks=enable_hooks,
594 push_branches=push_branches)
594 push_branches=push_branches)
595
595
def _local_merge(self, target_ref, merge_message, user_name, user_email,
                 source_ref, use_rebase=False, dry_run=False):
    """
    Merge the given source_revision into the checked out revision.

    Returns the commit id of the merge and a boolean indicating if the
    commit needs to be pushed.

    :param target_ref: reference the working copy is updated to before
        merging.
    :param merge_message: commit message used for the merge commit.
    :param use_rebase: when True, rebase the source onto the target
        instead of creating a merge commit.
    :param dry_run: passed through by callers; note it is not consulted
        in this method's body — the actual merge/rebase is always
        performed here (callers run this inside a shadow repo).
    """
    self._update(target_ref.commit_id)

    ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
    is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

    if ancestor == source_ref.commit_id:
        # Nothing to do, the changes were already integrated
        return target_ref.commit_id, False

    elif ancestor == target_ref.commit_id and is_the_same_branch:
        # In this case we should force a commit message
        # (fast-forward: the source already contains the target)
        return source_ref.commit_id, True

    if use_rebase:
        try:
            # temporary bookmark marking the rebased head, so we can
            # update to it after the rebase finished
            bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                            target_ref.commit_id)
            self.bookmark(bookmark_name, revision=source_ref.commit_id)
            self._remote.rebase(
                source=source_ref.commit_id, dest=target_ref.commit_id)
            self._remote.invalidate_vcs_cache()
            self._update(bookmark_name)
            return self._identify(), True
        except RepositoryError:
            # The rebase-abort may raise another exception which 'hides'
            # the original one, therefore we log it here.
            log.exception('Error while rebasing shadow repo during merge.')

            # Cleanup any rebase leftovers
            self._remote.invalidate_vcs_cache()
            self._remote.rebase(abort=True)
            self._remote.invalidate_vcs_cache()
            self._remote.update(clean=True)
            raise
    else:
        try:
            self._remote.merge(source_ref.commit_id)
            self._remote.invalidate_vcs_cache()
            self._remote.commit(
                message=safe_str(merge_message),
                username=safe_str('%s <%s>' % (user_name, user_email)))
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any merge leftovers
            self._remote.update(clean=True)
            raise
651
651
def _local_close(self, target_ref, user_name, user_email,
                 source_ref, close_message=''):
    """
    Close the branch of the given source_revision

    Returns the commit id of the close and a boolean indicating if the
    commit needs to be pushed.
    """
    self._update(source_ref.commit_id)
    if close_message:
        message = close_message
    else:
        message = "Closing branch: `{}`".format(source_ref.name)
    try:
        self._remote.commit(
            message=safe_str(message),
            username=safe_str('%s <%s>' % (user_name, user_email)),
            close_branch=True)
        self._remote.invalidate_vcs_cache()
        return self._identify(), True
    except RepositoryError:
        # Cleanup any commit leftovers, keep the shadow checkout clean
        self._remote.update(clean=True)
        raise
673
673
674 def _is_the_same_branch(self, target_ref, source_ref):
674 def _is_the_same_branch(self, target_ref, source_ref):
675 return (
675 return (
676 self._get_branch_name(target_ref) ==
676 self._get_branch_name(target_ref) ==
677 self._get_branch_name(source_ref))
677 self._get_branch_name(source_ref))
678
678
679 def _get_branch_name(self, ref):
679 def _get_branch_name(self, ref):
680 if ref.type == 'branch':
680 if ref.type == 'branch':
681 return ref.name
681 return ref.name
682 return self._remote.ctx_branch(ref.commit_id)
682 return self._remote.ctx_branch(ref.commit_id)
683
683
def _maybe_prepare_merge_workspace(
        self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
    """
    Ensure a shadow repository exists for the given merge workspace.

    The shadow path is derived from the numeric ``repo_id`` and the
    ``workspace_id`` via ``_get_shadow_repository_path``; if no clone
    exists there yet, one is created from this repository.

    :param unused_target_ref: accepted for interface compatibility with
        other backends; not used by the Mercurial implementation.
    :param unused_source_ref: see ``unused_target_ref``.
    :return: filesystem path of the (possibly freshly cloned) shadow repo.
    """
    shadow_repository_path = self._get_shadow_repository_path(
        repo_id, workspace_id)
    if not os.path.exists(shadow_repository_path):
        self._local_clone(shadow_repository_path)
        log.debug(
            'Prepared shadow repository in %s', shadow_repository_path)

    return shadow_repository_path
699
694
def _merge_repo(self, repo_id, workspace_id, target_ref,
                source_repo, source_ref, merge_message,
                merger_name, merger_email, dry_run=False,
                use_rebase=False, close_branch=False):
    """
    Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
    repository using a shadow repository identified by ``repo_id`` and
    ``workspace_id``.

    Both references are pulled into the shadow repo, merged (or rebased)
    there, and — unless ``dry_run`` is set — pushed back into this
    repository. A :class:`MergeResponse` describing the outcome is
    returned in all cases.
    """

    log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
              'rebase' if use_rebase else 'merge', dry_run)
    # the merge target must be a current head, otherwise the result
    # would not be reachable from the branch tip
    if target_ref.commit_id not in self._heads():
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

    try:
        if (target_ref.type == 'branch' and
                len(self._heads(target_ref.name)) != 1):
            # merging into a multi-headed branch is ambiguous
            return MergeResponse(
                False, False, None,
                MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
    except CommitDoesNotExistError:
        log.exception('Failure when looking up branch heads on hg target')
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF)

    shadow_repository_path = self._maybe_prepare_merge_workspace(
        repo_id, workspace_id, target_ref, source_ref)
    shadow_repo = self._get_shadow_instance(shadow_repository_path)

    log.debug('Pulling in target reference %s', target_ref)
    self._validate_pull_reference(target_ref)
    shadow_repo._local_pull(self.path, target_ref)
    try:
        log.debug('Pulling in source reference %s', source_ref)
        source_repo._validate_pull_reference(source_ref)
        shadow_repo._local_pull(source_repo.path, source_ref)
    except CommitDoesNotExistError:
        log.exception('Failure when doing local pull on hg shadow repo')
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

    merge_ref = None
    merge_commit_id = None
    close_commit_id = None
    merge_failure_reason = MergeFailureReason.NONE

    # enforce that close branch should be used only in case we source from
    # an actual Branch
    close_branch = close_branch and source_ref.type == 'branch'

    # don't allow to close branch if source and target are the same
    close_branch = close_branch and source_ref.name != target_ref.name

    needs_push_on_close = False
    if close_branch and not use_rebase and not dry_run:
        try:
            close_commit_id, needs_push_on_close = shadow_repo._local_close(
                target_ref, merger_name, merger_email, source_ref)
            merge_possible = True
        except RepositoryError:
            log.exception(
                'Failure when doing close branch on hg shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED
    else:
        merge_possible = True

    needs_push = False
    if merge_possible:
        try:
            merge_commit_id, needs_push = shadow_repo._local_merge(
                target_ref, merge_message, merger_name, merger_email,
                source_ref, use_rebase=use_rebase, dry_run=dry_run)
            merge_possible = True

            # read the state of the close action, if it
            # maybe required a push
            needs_push = needs_push or needs_push_on_close

            # Set a bookmark pointing to the merge commit. This bookmark
            # may be used to easily identify the last successful merge
            # commit in the shadow repository.
            shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
            merge_ref = Reference('book', 'pr-merge', merge_commit_id)
        except SubrepoMergeError:
            log.exception(
                'Subrepo merge error during local merge on hg shadow repo.')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
            needs_push = False
        except RepositoryError:
            log.exception('Failure when doing local merge on hg shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED
            needs_push = False

    if merge_possible and not dry_run:
        if needs_push:
            # In case the target is a bookmark, update it, so after pushing
            # the bookmarks is also updated in the target.
            if target_ref.type == 'book':
                shadow_repo.bookmark(
                    target_ref.name, revision=merge_commit_id)
            try:
                shadow_repo_with_hooks = self._get_shadow_instance(
                    shadow_repository_path,
                    enable_hooks=True)
                # This is the actual merge action, we push from shadow
                # into origin.
                # Note: the push_branches option will push any new branch
                # defined in the source repository to the target. This may
                # be dangerous as branches are permanent in Mercurial.
                # This feature was requested in issue #441.
                shadow_repo_with_hooks._local_push(
                    merge_commit_id, self.path, push_branches=True,
                    enable_hooks=True)

                # maybe we also need to push the close_commit_id
                if close_commit_id:
                    shadow_repo_with_hooks._local_push(
                        close_commit_id, self.path, push_branches=True,
                        enable_hooks=True)
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository.')
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
        else:
            merge_succeeded = True
    else:
        merge_succeeded = False

    return MergeResponse(
        merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
831
828
def _get_shadow_instance(
        self, shadow_repository_path, enable_hooks=False):
    """
    Build a ``MercurialRepository`` for the shadow repo at the given
    path.

    Hooks are stripped from the config copy unless ``enable_hooks`` is
    set, so shadow operations stay side-effect free by default.
    """
    shadow_config = self.config.copy()
    if not enable_hooks:
        shadow_config.clear_section('hooks')
    return MercurialRepository(shadow_repository_path, shadow_config)
838
835
839 def _validate_pull_reference(self, reference):
836 def _validate_pull_reference(self, reference):
840 if not (reference.name in self.bookmarks or
837 if not (reference.name in self.bookmarks or
841 reference.name in self.branches or
838 reference.name in self.branches or
842 self.get_commit(reference.commit_id)):
839 self.get_commit(reference.commit_id)):
843 raise CommitDoesNotExistError(
840 raise CommitDoesNotExistError(
844 'Unknown branch, bookmark or commit id')
841 'Unknown branch, bookmark or commit id')
845
842
846 def _local_pull(self, repository_path, reference):
843 def _local_pull(self, repository_path, reference):
847 """
844 """
848 Fetch a branch, bookmark or commit from a local repository.
845 Fetch a branch, bookmark or commit from a local repository.
849 """
846 """
850 repository_path = os.path.abspath(repository_path)
847 repository_path = os.path.abspath(repository_path)
851 if repository_path == self.path:
848 if repository_path == self.path:
852 raise ValueError('Cannot pull from the same repository')
849 raise ValueError('Cannot pull from the same repository')
853
850
854 reference_type_to_option_name = {
851 reference_type_to_option_name = {
855 'book': 'bookmark',
852 'book': 'bookmark',
856 'branch': 'branch',
853 'branch': 'branch',
857 }
854 }
858 option_name = reference_type_to_option_name.get(
855 option_name = reference_type_to_option_name.get(
859 reference.type, 'revision')
856 reference.type, 'revision')
860
857
861 if option_name == 'revision':
858 if option_name == 'revision':
862 ref = reference.commit_id
859 ref = reference.commit_id
863 else:
860 else:
864 ref = reference.name
861 ref = reference.name
865
862
866 options = {option_name: [ref]}
863 options = {option_name: [ref]}
867 self._remote.pull_cmd(repository_path, hooks=False, **options)
864 self._remote.pull_cmd(repository_path, hooks=False, **options)
868 self._remote.invalidate_vcs_cache()
865 self._remote.invalidate_vcs_cache()
869
866
def bookmark(self, bookmark, revision=None):
    """
    Set bookmark ``bookmark`` to point at ``revision``.

    :param revision: commit id the bookmark should point at; semantics
        of ``None`` are delegated to the vcsserver ``bookmark`` call.
    """
    # vcsserver expects byte strings (Python 2 codebase), so coerce
    # unicode bookmark names before sending them over
    if isinstance(bookmark, unicode):
        bookmark = safe_str(bookmark)
    self._remote.bookmark(bookmark, revision=revision)
    self._remote.invalidate_vcs_cache()
875
872
def get_path_permissions(self, username):
    """
    Read per-user narrow ACL path patterns from the repo's ``.hg/hgacl``
    file.

    :param username: user whose ``<username>.includes`` /
        ``<username>.excludes`` options are consulted; falls back to the
        ``default.*`` options.
    :return: a path permission checker built from the patterns, or
        ``None`` when no ``hgacl`` file exists.
    :raises exceptions.RepositoryRequirementError: when the file exists
        but cannot be parsed.
    """
    hgacl_file = os.path.join(self.path, '.hg/hgacl')

    def read_patterns(suffix):
        # try the user-specific option first, then the 'default' one
        svalue = None
        for option in (username + suffix, 'default' + suffix):
            try:
                svalue = hgacl.get('narrowhgacl', option)
                break
            except configparser.NoOptionError:
                continue
        if not svalue:
            return None
        result = ['/']
        for pattern in svalue.split():
            result.append(pattern)
            # literal paths also cover everything beneath them
            if '*' not in pattern and '?' not in pattern:
                result.append(pattern + '/*')
        return result

    if not os.path.exists(hgacl_file):
        return None
    try:
        hgacl = configparser.RawConfigParser()
        hgacl.read(hgacl_file)

        includes = read_patterns('.includes')
        excludes = read_patterns('.excludes')
        return BasePathPermissionChecker.create_from_patterns(
            includes, excludes)
    except BaseException as e:
        msg = 'Cannot read ACL settings from {} on {}: {}'.format(
            hgacl_file, self.name, e)
        raise exceptions.RepositoryRequirementError(msg)
912
909
913
910
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection whose items are numeric repository indices.

    Used when commits come from a revset query, which yields indices
    rather than raw commit ids.
    """

    def _commit_factory(self, commit_id):
        # here ``commit_id`` is actually a numeric index into the repo
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,4524 +1,4530 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import re
25 import re
26 import os
26 import os
27 import time
27 import time
28 import hashlib
28 import hashlib
29 import logging
29 import logging
30 import datetime
30 import datetime
31 import warnings
31 import warnings
32 import ipaddress
32 import ipaddress
33 import functools
33 import functools
34 import traceback
34 import traceback
35 import collections
35 import collections
36
36
37 from sqlalchemy import (
37 from sqlalchemy import (
38 or_, and_, not_, func, TypeDecorator, event,
38 or_, and_, not_, func, TypeDecorator, event,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType)
41 Text, Float, PickleType)
42 from sqlalchemy.sql.expression import true, false
42 from sqlalchemy.sql.expression import true, false
43 from sqlalchemy.sql.functions import coalesce, count # noqa
43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
45 relationship, joinedload, class_mapper, validates, aliased)
45 relationship, joinedload, class_mapper, validates, aliased)
46 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # noqa
48 from sqlalchemy.exc import IntegrityError # noqa
49 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from beaker.cache import cache_region
50 from beaker.cache import cache_region
51 from zope.cachedescriptors.property import Lazy as LazyProperty
51 from zope.cachedescriptors.property import Lazy as LazyProperty
52
52
53 from pyramid.threadlocal import get_current_request
53 from pyramid.threadlocal import get_current_request
54
54
55 from rhodecode.translation import _
55 from rhodecode.translation import _
56 from rhodecode.lib.vcs import get_vcs_instance
56 from rhodecode.lib.vcs import get_vcs_instance
57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 from rhodecode.lib.utils2 import (
58 from rhodecode.lib.utils2 import (
59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 glob2re, StrictAttributeDict, cleaned_uri)
61 glob2re, StrictAttributeDict, cleaned_uri)
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 JsonRaw
63 JsonRaw
64 from rhodecode.lib.ext_json import json
64 from rhodecode.lib.ext_json import json
65 from rhodecode.lib.caching_query import FromCache
65 from rhodecode.lib.caching_query import FromCache
66 from rhodecode.lib.encrypt import AESCipher
66 from rhodecode.lib.encrypt import AESCipher
67
67
68 from rhodecode.model.meta import Base, Session
68 from rhodecode.model.meta import Base, Session
69
69
70 URL_SEP = '/'
70 URL_SEP = '/'
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73 # =============================================================================
73 # =============================================================================
74 # BASE CLASSES
74 # BASE CLASSES
75 # =============================================================================
75 # =============================================================================
76
76
77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 # beaker.session.secret if first is not set.
78 # beaker.session.secret if first is not set.
79 # and initialized at environment.py
79 # and initialized at environment.py
80 ENCRYPTION_KEY = None
80 ENCRYPTION_KEY = None
81
81
82 # used to sort permissions by types, '#' used here is not allowed to be in
82 # used to sort permissions by types, '#' used here is not allowed to be in
83 # usernames, and it's very early in sorted string.printable table.
83 # usernames, and it's very early in sorted string.printable table.
84 PERMISSION_TYPE_SORT = {
84 PERMISSION_TYPE_SORT = {
85 'admin': '####',
85 'admin': '####',
86 'write': '###',
86 'write': '###',
87 'read': '##',
87 'read': '##',
88 'none': '#',
88 'none': '#',
89 }
89 }
90
90
91
91
92 def display_user_sort(obj):
92 def display_user_sort(obj):
93 """
93 """
94 Sort function used to sort permissions in .permissions() function of
94 Sort function used to sort permissions in .permissions() function of
95 Repository, RepoGroup, UserGroup. Also it put the default user in front
95 Repository, RepoGroup, UserGroup. Also it put the default user in front
96 of all other resources
96 of all other resources
97 """
97 """
98
98
99 if obj.username == User.DEFAULT_USER:
99 if obj.username == User.DEFAULT_USER:
100 return '#####'
100 return '#####'
101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 return prefix + obj.username
102 return prefix + obj.username
103
103
104
104
105 def display_user_group_sort(obj):
105 def display_user_group_sort(obj):
106 """
106 """
107 Sort function used to sort permissions in .permissions() function of
107 Sort function used to sort permissions in .permissions() function of
108 Repository, RepoGroup, UserGroup. Also it put the default user in front
108 Repository, RepoGroup, UserGroup. Also it put the default user in front
109 of all other resources
109 of all other resources
110 """
110 """
111
111
112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 return prefix + obj.users_group_name
113 return prefix + obj.users_group_name
114
114
115
115
116 def _hash_key(k):
116 def _hash_key(k):
117 return md5_safe(k)
117 return md5_safe(k)
118
118
119
119
120 def in_filter_generator(qry, items, limit=500):
120 def in_filter_generator(qry, items, limit=500):
121 """
121 """
122 Splits IN() into multiple with OR
122 Splits IN() into multiple with OR
123 e.g.::
123 e.g.::
124 cnt = Repository.query().filter(
124 cnt = Repository.query().filter(
125 or_(
125 or_(
126 *in_filter_generator(Repository.repo_id, range(100000))
126 *in_filter_generator(Repository.repo_id, range(100000))
127 )).count()
127 )).count()
128 """
128 """
129 if not items:
129 if not items:
130 # empty list will cause empty query which might cause security issues
130 # empty list will cause empty query which might cause security issues
131 # this can lead to hidden unpleasant results
131 # this can lead to hidden unpleasant results
132 items = [-1]
132 items = [-1]
133
133
134 parts = []
134 parts = []
135 for chunk in xrange(0, len(items), limit):
135 for chunk in xrange(0, len(items), limit):
136 parts.append(
136 parts.append(
137 qry.in_(items[chunk: chunk + limit])
137 qry.in_(items[chunk: chunk + limit])
138 )
138 )
139
139
140 return parts
140 return parts
141
141
142
142
143 class EncryptedTextValue(TypeDecorator):
143 class EncryptedTextValue(TypeDecorator):
144 """
144 """
145 Special column for encrypted long text data, use like::
145 Special column for encrypted long text data, use like::
146
146
147 value = Column("encrypted_value", EncryptedValue(), nullable=False)
147 value = Column("encrypted_value", EncryptedValue(), nullable=False)
148
148
149 This column is intelligent so if value is in unencrypted form it return
149 This column is intelligent so if value is in unencrypted form it return
150 unencrypted form, but on save it always encrypts
150 unencrypted form, but on save it always encrypts
151 """
151 """
152 impl = Text
152 impl = Text
153
153
154 def process_bind_param(self, value, dialect):
154 def process_bind_param(self, value, dialect):
155 if not value:
155 if not value:
156 return value
156 return value
157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 # protect against double encrypting if someone manually starts
158 # protect against double encrypting if someone manually starts
159 # doing
159 # doing
160 raise ValueError('value needs to be in unencrypted format, ie. '
160 raise ValueError('value needs to be in unencrypted format, ie. '
161 'not starting with enc$aes')
161 'not starting with enc$aes')
162 return 'enc$aes_hmac$%s' % AESCipher(
162 return 'enc$aes_hmac$%s' % AESCipher(
163 ENCRYPTION_KEY, hmac=True).encrypt(value)
163 ENCRYPTION_KEY, hmac=True).encrypt(value)
164
164
165 def process_result_value(self, value, dialect):
165 def process_result_value(self, value, dialect):
166 import rhodecode
166 import rhodecode
167
167
168 if not value:
168 if not value:
169 return value
169 return value
170
170
171 parts = value.split('$', 3)
171 parts = value.split('$', 3)
172 if not len(parts) == 3:
172 if not len(parts) == 3:
173 # probably not encrypted values
173 # probably not encrypted values
174 return value
174 return value
175 else:
175 else:
176 if parts[0] != 'enc':
176 if parts[0] != 'enc':
177 # parts ok but without our header ?
177 # parts ok but without our header ?
178 return value
178 return value
179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 'rhodecode.encrypted_values.strict') or True)
180 'rhodecode.encrypted_values.strict') or True)
181 # at that stage we know it's our encryption
181 # at that stage we know it's our encryption
182 if parts[1] == 'aes':
182 if parts[1] == 'aes':
183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 elif parts[1] == 'aes_hmac':
184 elif parts[1] == 'aes_hmac':
185 decrypted_data = AESCipher(
185 decrypted_data = AESCipher(
186 ENCRYPTION_KEY, hmac=True,
186 ENCRYPTION_KEY, hmac=True,
187 strict_verification=enc_strict_mode).decrypt(parts[2])
187 strict_verification=enc_strict_mode).decrypt(parts[2])
188 else:
188 else:
189 raise ValueError(
189 raise ValueError(
190 'Encryption type part is wrong, must be `aes` '
190 'Encryption type part is wrong, must be `aes` '
191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 return decrypted_data
192 return decrypted_data
193
193
194
194
195 class BaseModel(object):
195 class BaseModel(object):
196 """
196 """
197 Base Model for all classes
197 Base Model for all classes
198 """
198 """
199
199
200 @classmethod
200 @classmethod
201 def _get_keys(cls):
201 def _get_keys(cls):
202 """return column names for this model """
202 """return column names for this model """
203 return class_mapper(cls).c.keys()
203 return class_mapper(cls).c.keys()
204
204
205 def get_dict(self):
205 def get_dict(self):
206 """
206 """
207 return dict with keys and values corresponding
207 return dict with keys and values corresponding
208 to this model data """
208 to this model data """
209
209
210 d = {}
210 d = {}
211 for k in self._get_keys():
211 for k in self._get_keys():
212 d[k] = getattr(self, k)
212 d[k] = getattr(self, k)
213
213
214 # also use __json__() if present to get additional fields
214 # also use __json__() if present to get additional fields
215 _json_attr = getattr(self, '__json__', None)
215 _json_attr = getattr(self, '__json__', None)
216 if _json_attr:
216 if _json_attr:
217 # update with attributes from __json__
217 # update with attributes from __json__
218 if callable(_json_attr):
218 if callable(_json_attr):
219 _json_attr = _json_attr()
219 _json_attr = _json_attr()
220 for k, val in _json_attr.iteritems():
220 for k, val in _json_attr.iteritems():
221 d[k] = val
221 d[k] = val
222 return d
222 return d
223
223
224 def get_appstruct(self):
224 def get_appstruct(self):
225 """return list with keys and values tuples corresponding
225 """return list with keys and values tuples corresponding
226 to this model data """
226 to this model data """
227
227
228 lst = []
228 lst = []
229 for k in self._get_keys():
229 for k in self._get_keys():
230 lst.append((k, getattr(self, k),))
230 lst.append((k, getattr(self, k),))
231 return lst
231 return lst
232
232
233 def populate_obj(self, populate_dict):
233 def populate_obj(self, populate_dict):
234 """populate model with data from given populate_dict"""
234 """populate model with data from given populate_dict"""
235
235
236 for k in self._get_keys():
236 for k in self._get_keys():
237 if k in populate_dict:
237 if k in populate_dict:
238 setattr(self, k, populate_dict[k])
238 setattr(self, k, populate_dict[k])
239
239
240 @classmethod
240 @classmethod
241 def query(cls):
241 def query(cls):
242 return Session().query(cls)
242 return Session().query(cls)
243
243
244 @classmethod
244 @classmethod
245 def get(cls, id_):
245 def get(cls, id_):
246 if id_:
246 if id_:
247 return cls.query().get(id_)
247 return cls.query().get(id_)
248
248
249 @classmethod
249 @classmethod
250 def get_or_404(cls, id_):
250 def get_or_404(cls, id_):
251 from pyramid.httpexceptions import HTTPNotFound
251 from pyramid.httpexceptions import HTTPNotFound
252
252
253 try:
253 try:
254 id_ = int(id_)
254 id_ = int(id_)
255 except (TypeError, ValueError):
255 except (TypeError, ValueError):
256 raise HTTPNotFound()
256 raise HTTPNotFound()
257
257
258 res = cls.query().get(id_)
258 res = cls.query().get(id_)
259 if not res:
259 if not res:
260 raise HTTPNotFound()
260 raise HTTPNotFound()
261 return res
261 return res
262
262
263 @classmethod
263 @classmethod
264 def getAll(cls):
264 def getAll(cls):
265 # deprecated and left for backward compatibility
265 # deprecated and left for backward compatibility
266 return cls.get_all()
266 return cls.get_all()
267
267
268 @classmethod
268 @classmethod
269 def get_all(cls):
269 def get_all(cls):
270 return cls.query().all()
270 return cls.query().all()
271
271
272 @classmethod
272 @classmethod
273 def delete(cls, id_):
273 def delete(cls, id_):
274 obj = cls.query().get(id_)
274 obj = cls.query().get(id_)
275 Session().delete(obj)
275 Session().delete(obj)
276
276
277 @classmethod
277 @classmethod
278 def identity_cache(cls, session, attr_name, value):
278 def identity_cache(cls, session, attr_name, value):
279 exist_in_session = []
279 exist_in_session = []
280 for (item_cls, pkey), instance in session.identity_map.items():
280 for (item_cls, pkey), instance in session.identity_map.items():
281 if cls == item_cls and getattr(instance, attr_name) == value:
281 if cls == item_cls and getattr(instance, attr_name) == value:
282 exist_in_session.append(instance)
282 exist_in_session.append(instance)
283 if exist_in_session:
283 if exist_in_session:
284 if len(exist_in_session) == 1:
284 if len(exist_in_session) == 1:
285 return exist_in_session[0]
285 return exist_in_session[0]
286 log.exception(
286 log.exception(
287 'multiple objects with attr %s and '
287 'multiple objects with attr %s and '
288 'value %s found with same name: %r',
288 'value %s found with same name: %r',
289 attr_name, value, exist_in_session)
289 attr_name, value, exist_in_session)
290
290
291 def __repr__(self):
291 def __repr__(self):
292 if hasattr(self, '__unicode__'):
292 if hasattr(self, '__unicode__'):
293 # python repr needs to return str
293 # python repr needs to return str
294 try:
294 try:
295 return safe_str(self.__unicode__())
295 return safe_str(self.__unicode__())
296 except UnicodeDecodeError:
296 except UnicodeDecodeError:
297 pass
297 pass
298 return '<DB:%s>' % (self.__class__.__name__)
298 return '<DB:%s>' % (self.__class__.__name__)
299
299
300
300
301 class RhodeCodeSetting(Base, BaseModel):
301 class RhodeCodeSetting(Base, BaseModel):
302 __tablename__ = 'rhodecode_settings'
302 __tablename__ = 'rhodecode_settings'
303 __table_args__ = (
303 __table_args__ = (
304 UniqueConstraint('app_settings_name'),
304 UniqueConstraint('app_settings_name'),
305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 )
307 )
308
308
309 SETTINGS_TYPES = {
309 SETTINGS_TYPES = {
310 'str': safe_str,
310 'str': safe_str,
311 'int': safe_int,
311 'int': safe_int,
312 'unicode': safe_unicode,
312 'unicode': safe_unicode,
313 'bool': str2bool,
313 'bool': str2bool,
314 'list': functools.partial(aslist, sep=',')
314 'list': functools.partial(aslist, sep=',')
315 }
315 }
316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 GLOBAL_CONF_KEY = 'app_settings'
317 GLOBAL_CONF_KEY = 'app_settings'
318
318
319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323
323
324 def __init__(self, key='', val='', type='unicode'):
324 def __init__(self, key='', val='', type='unicode'):
325 self.app_settings_name = key
325 self.app_settings_name = key
326 self.app_settings_type = type
326 self.app_settings_type = type
327 self.app_settings_value = val
327 self.app_settings_value = val
328
328
329 @validates('_app_settings_value')
329 @validates('_app_settings_value')
330 def validate_settings_value(self, key, val):
330 def validate_settings_value(self, key, val):
331 assert type(val) == unicode
331 assert type(val) == unicode
332 return val
332 return val
333
333
334 @hybrid_property
334 @hybrid_property
335 def app_settings_value(self):
335 def app_settings_value(self):
336 v = self._app_settings_value
336 v = self._app_settings_value
337 _type = self.app_settings_type
337 _type = self.app_settings_type
338 if _type:
338 if _type:
339 _type = self.app_settings_type.split('.')[0]
339 _type = self.app_settings_type.split('.')[0]
340 # decode the encrypted value
340 # decode the encrypted value
341 if 'encrypted' in self.app_settings_type:
341 if 'encrypted' in self.app_settings_type:
342 cipher = EncryptedTextValue()
342 cipher = EncryptedTextValue()
343 v = safe_unicode(cipher.process_result_value(v, None))
343 v = safe_unicode(cipher.process_result_value(v, None))
344
344
345 converter = self.SETTINGS_TYPES.get(_type) or \
345 converter = self.SETTINGS_TYPES.get(_type) or \
346 self.SETTINGS_TYPES['unicode']
346 self.SETTINGS_TYPES['unicode']
347 return converter(v)
347 return converter(v)
348
348
349 @app_settings_value.setter
349 @app_settings_value.setter
350 def app_settings_value(self, val):
350 def app_settings_value(self, val):
351 """
351 """
352 Setter that will always make sure we use unicode in app_settings_value
352 Setter that will always make sure we use unicode in app_settings_value
353
353
354 :param val:
354 :param val:
355 """
355 """
356 val = safe_unicode(val)
356 val = safe_unicode(val)
357 # encode the encrypted value
357 # encode the encrypted value
358 if 'encrypted' in self.app_settings_type:
358 if 'encrypted' in self.app_settings_type:
359 cipher = EncryptedTextValue()
359 cipher = EncryptedTextValue()
360 val = safe_unicode(cipher.process_bind_param(val, None))
360 val = safe_unicode(cipher.process_bind_param(val, None))
361 self._app_settings_value = val
361 self._app_settings_value = val
362
362
363 @hybrid_property
363 @hybrid_property
364 def app_settings_type(self):
364 def app_settings_type(self):
365 return self._app_settings_type
365 return self._app_settings_type
366
366
367 @app_settings_type.setter
367 @app_settings_type.setter
368 def app_settings_type(self, val):
368 def app_settings_type(self, val):
369 if val.split('.')[0] not in self.SETTINGS_TYPES:
369 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 raise Exception('type must be one of %s got %s'
370 raise Exception('type must be one of %s got %s'
371 % (self.SETTINGS_TYPES.keys(), val))
371 % (self.SETTINGS_TYPES.keys(), val))
372 self._app_settings_type = val
372 self._app_settings_type = val
373
373
374 def __unicode__(self):
374 def __unicode__(self):
375 return u"<%s('%s:%s[%s]')>" % (
375 return u"<%s('%s:%s[%s]')>" % (
376 self.__class__.__name__,
376 self.__class__.__name__,
377 self.app_settings_name, self.app_settings_value,
377 self.app_settings_name, self.app_settings_value,
378 self.app_settings_type
378 self.app_settings_type
379 )
379 )
380
380
381
381
382 class RhodeCodeUi(Base, BaseModel):
382 class RhodeCodeUi(Base, BaseModel):
383 __tablename__ = 'rhodecode_ui'
383 __tablename__ = 'rhodecode_ui'
384 __table_args__ = (
384 __table_args__ = (
385 UniqueConstraint('ui_key'),
385 UniqueConstraint('ui_key'),
386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 )
388 )
389
389
390 HOOK_REPO_SIZE = 'changegroup.repo_size'
390 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 # HG
391 # HG
392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 HOOK_PULL = 'outgoing.pull_logger'
393 HOOK_PULL = 'outgoing.pull_logger'
394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 HOOK_PUSH = 'changegroup.push_logger'
396 HOOK_PUSH = 'changegroup.push_logger'
397 HOOK_PUSH_KEY = 'pushkey.key_push'
397 HOOK_PUSH_KEY = 'pushkey.key_push'
398
398
399 # TODO: johbo: Unify way how hooks are configured for git and hg,
399 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 # git part is currently hardcoded.
400 # git part is currently hardcoded.
401
401
402 # SVN PATTERNS
402 # SVN PATTERNS
403 SVN_BRANCH_ID = 'vcs_svn_branch'
403 SVN_BRANCH_ID = 'vcs_svn_branch'
404 SVN_TAG_ID = 'vcs_svn_tag'
404 SVN_TAG_ID = 'vcs_svn_tag'
405
405
406 ui_id = Column(
406 ui_id = Column(
407 "ui_id", Integer(), nullable=False, unique=True, default=None,
407 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 primary_key=True)
408 primary_key=True)
409 ui_section = Column(
409 ui_section = Column(
410 "ui_section", String(255), nullable=True, unique=None, default=None)
410 "ui_section", String(255), nullable=True, unique=None, default=None)
411 ui_key = Column(
411 ui_key = Column(
412 "ui_key", String(255), nullable=True, unique=None, default=None)
412 "ui_key", String(255), nullable=True, unique=None, default=None)
413 ui_value = Column(
413 ui_value = Column(
414 "ui_value", String(255), nullable=True, unique=None, default=None)
414 "ui_value", String(255), nullable=True, unique=None, default=None)
415 ui_active = Column(
415 ui_active = Column(
416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417
417
418 def __repr__(self):
418 def __repr__(self):
419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 self.ui_key, self.ui_value)
420 self.ui_key, self.ui_value)
421
421
422
422
423 class RepoRhodeCodeSetting(Base, BaseModel):
423 class RepoRhodeCodeSetting(Base, BaseModel):
424 __tablename__ = 'repo_rhodecode_settings'
424 __tablename__ = 'repo_rhodecode_settings'
425 __table_args__ = (
425 __table_args__ = (
426 UniqueConstraint(
426 UniqueConstraint(
427 'app_settings_name', 'repository_id',
427 'app_settings_name', 'repository_id',
428 name='uq_repo_rhodecode_setting_name_repo_id'),
428 name='uq_repo_rhodecode_setting_name_repo_id'),
429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 )
431 )
432
432
433 repository_id = Column(
433 repository_id = Column(
434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 nullable=False)
435 nullable=False)
436 app_settings_id = Column(
436 app_settings_id = Column(
437 "app_settings_id", Integer(), nullable=False, unique=True,
437 "app_settings_id", Integer(), nullable=False, unique=True,
438 default=None, primary_key=True)
438 default=None, primary_key=True)
439 app_settings_name = Column(
439 app_settings_name = Column(
440 "app_settings_name", String(255), nullable=True, unique=None,
440 "app_settings_name", String(255), nullable=True, unique=None,
441 default=None)
441 default=None)
442 _app_settings_value = Column(
442 _app_settings_value = Column(
443 "app_settings_value", String(4096), nullable=True, unique=None,
443 "app_settings_value", String(4096), nullable=True, unique=None,
444 default=None)
444 default=None)
445 _app_settings_type = Column(
445 _app_settings_type = Column(
446 "app_settings_type", String(255), nullable=True, unique=None,
446 "app_settings_type", String(255), nullable=True, unique=None,
447 default=None)
447 default=None)
448
448
449 repository = relationship('Repository')
449 repository = relationship('Repository')
450
450
451 def __init__(self, repository_id, key='', val='', type='unicode'):
451 def __init__(self, repository_id, key='', val='', type='unicode'):
452 self.repository_id = repository_id
452 self.repository_id = repository_id
453 self.app_settings_name = key
453 self.app_settings_name = key
454 self.app_settings_type = type
454 self.app_settings_type = type
455 self.app_settings_value = val
455 self.app_settings_value = val
456
456
457 @validates('_app_settings_value')
457 @validates('_app_settings_value')
458 def validate_settings_value(self, key, val):
458 def validate_settings_value(self, key, val):
459 assert type(val) == unicode
459 assert type(val) == unicode
460 return val
460 return val
461
461
462 @hybrid_property
462 @hybrid_property
463 def app_settings_value(self):
463 def app_settings_value(self):
464 v = self._app_settings_value
464 v = self._app_settings_value
465 type_ = self.app_settings_type
465 type_ = self.app_settings_type
466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 return converter(v)
468 return converter(v)
469
469
470 @app_settings_value.setter
470 @app_settings_value.setter
471 def app_settings_value(self, val):
471 def app_settings_value(self, val):
472 """
472 """
473 Setter that will always make sure we use unicode in app_settings_value
473 Setter that will always make sure we use unicode in app_settings_value
474
474
475 :param val:
475 :param val:
476 """
476 """
477 self._app_settings_value = safe_unicode(val)
477 self._app_settings_value = safe_unicode(val)
478
478
479 @hybrid_property
479 @hybrid_property
480 def app_settings_type(self):
480 def app_settings_type(self):
481 return self._app_settings_type
481 return self._app_settings_type
482
482
483 @app_settings_type.setter
483 @app_settings_type.setter
484 def app_settings_type(self, val):
484 def app_settings_type(self, val):
485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 if val not in SETTINGS_TYPES:
486 if val not in SETTINGS_TYPES:
487 raise Exception('type must be one of %s got %s'
487 raise Exception('type must be one of %s got %s'
488 % (SETTINGS_TYPES.keys(), val))
488 % (SETTINGS_TYPES.keys(), val))
489 self._app_settings_type = val
489 self._app_settings_type = val
490
490
491 def __unicode__(self):
491 def __unicode__(self):
492 return u"<%s('%s:%s:%s[%s]')>" % (
492 return u"<%s('%s:%s:%s[%s]')>" % (
493 self.__class__.__name__, self.repository.repo_name,
493 self.__class__.__name__, self.repository.repo_name,
494 self.app_settings_name, self.app_settings_value,
494 self.app_settings_name, self.app_settings_value,
495 self.app_settings_type
495 self.app_settings_type
496 )
496 )
497
497
498
498
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a RhodeCode ui-style setting
    (section/key/value triple, unique per repository).
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # owning repository
    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        # <ClassName[repo_name:section]key=>value]>
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
530
530
531
531
class User(Base, BaseModel):
    """RhodeCode user account (local or backed by an external auth plugin)."""
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    # reserved account name used for anonymous access
    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    # NOTE: the python attribute is ``name`` but the DB column is ``firstname``
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)

    # external-auth bookkeeping; ``api_key`` column is legacy (see api_key setter)
    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    # objects owned by this user
    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    # per-object permissions granted to this user
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    # NOTE(review): attribute name carries a typo ("extenal"); kept as-is for
    # backward compatibility with existing callers
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601
601
602 def __unicode__(self):
602 def __unicode__(self):
603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 self.user_id, self.username)
604 self.user_id, self.username)
605
605
606 @hybrid_property
606 @hybrid_property
607 def email(self):
607 def email(self):
608 return self._email
608 return self._email
609
609
610 @email.setter
610 @email.setter
611 def email(self, val):
611 def email(self, val):
612 self._email = val.lower() if val else None
612 self._email = val.lower() if val else None
613
613
    @hybrid_property
    def first_name(self):
        from rhodecode.lib import helpers as h
        if self.name:
            # HTML-escape for safe rendering in templates
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        from rhodecode.lib import helpers as h
        if self.lastname:
            # HTML-escape for safe rendering in templates
            return h.escape(self.lastname)
        return self.lastname
627
627
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        # a non-expired token only (expires == -1 means "never expires")
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None
647
647
    @property
    def reviewer_pull_requests(self):
        # pull requests where this user is registered as a reviewer
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name
659
659
    @property
    def emails(self):
        # primary e-mail first, then extra mapped e-mails, oldest first
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    @property
    def auth_tokens(self):
        # plain token strings of all tokens (no expiry filtering here)
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        # all UserApiKeys rows for this user, oldest first
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
678
678
    @LazyProperty
    def feed_token(self):
        # computed once per instance via LazyProperty
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        """
        Return the first ROLE_FEED auth-token string for this user,
        or a placeholder string when no such token exists.
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("long_term", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'
695
695
696 @classmethod
696 @classmethod
697 def get(cls, user_id, cache=False):
697 def get(cls, user_id, cache=False):
698 if not user_id:
698 if not user_id:
699 return
699 return
700
700
701 user = cls.query()
701 user = cls.query()
702 if cache:
702 if cache:
703 user = user.options(
703 user = user.options(
704 FromCache("sql_cache_short", "get_users_%s" % user_id))
704 FromCache("sql_cache_short", "get_users_%s" % user_id))
705 return user.get(user_id)
705 return user.get(user_id)
706
706
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        # non-expired tokens only (expires == -1 means "never expires")
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            # a ROLE_ALL token satisfies any requested role
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
716
716
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check ``auth_token`` against this user's non-expired tokens.

        :param auth_token: plain token string supplied by the caller
        :param roles: acceptable token roles; ROLE_ALL is always accepted
        :param scope_repo_id: repo id the call is made against; tokens
            scoped to a different repo are skipped
        :return: True when the token matches, False otherwise
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        crypto_backend = auth.crypto_backend()

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # candidate tokens: owned by this user and not expired
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        plain_tokens = []
        hash_tokens = []

        for token in tokens_q.all():
            # verify scope first
            if token.repo_id:
                # token has a scope, we need to verify it
                if scope_repo_id != token.repo_id:
                    log.debug(
                        'Scope mismatch: token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        token.repo, scope_repo_id)
                    # token has a scope, and it doesn't match, skip token
                    continue

            # stored tokens are either hashed (ENC_PREF prefix) or plain text
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                hash_tokens.append(token.api_key)
            else:
                plain_tokens.append(token.api_key)

        # cheap plain-text comparison first
        is_plain_match = auth_token in plain_tokens
        if is_plain_match:
            return True

        for hashed in hash_tokens:
            # TODO(marcink): this is expensive to calculate, but most secure
            match = crypto_backend.hash_check(auth_token, hashed)
            if match:
                return True

        return False
767
767
    @property
    def ip_addresses(self):
        # IP map entries configured for this user
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        # e.g. "username (First Last)"
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
776
776
777 @property
777 @property
778 def username_or_name_or_email(self):
778 def username_or_name_or_email(self):
779 full_name = self.full_name if self.full_name is not ' ' else None
779 full_name = self.full_name if self.full_name is not ' ' else None
780 return self.username or full_name or self.email
780 return self.username or full_name or self.email
781
781
    @property
    def full_name(self):
        # "<first> <last>"; degenerates to ' ' when both parts are empty
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def full_name_or_username(self):
        # full name only when both parts are set, otherwise the username
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        # "First Last <email>"
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def is_admin(self):
        # super-admin flag (``admin`` column)
        return self.admin
802
802
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user

        :param kwargs: extra keyword arguments forwarded to AuthUser
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
809
809
    @hybrid_property
    def user_data(self):
        # JSON blob stored in the ``user_data`` column, decoded on access;
        # empty/undecodable values yield an empty dict
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            # best-effort: keep the previous value, just log the failure
            log.error(traceback.format_exc())
828
828
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Look a user up by username.

        :param case_insensitive: compare with ``lower()`` applied to both sides
        :param cache: use the short-lived SQL query cache
        :param identity_cache: use the session identity cache instead of the
            SQL cache (only consulted when ``cache`` is set)
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
851
851
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        # resolve a plain auth-token string to its owning user;
        # expired tokens never match (expires == -1 means "never expires")
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user
865
865
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Look a user up by primary e-mail, falling back to the extra
        e-mail map (UserEmailMap) when no primary match exists.
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # map row may be absent; fall back to None in that case
            ret = getattr(q.scalar(), 'user', None)

        return ret
895
895
896 @classmethod
896 @classmethod
897 def get_from_cs_author(cls, author):
897 def get_from_cs_author(cls, author):
898 """
898 """
899 Tries to get User objects out of commit author string
899 Tries to get User objects out of commit author string
900
900
901 :param author:
901 :param author:
902 """
902 """
903 from rhodecode.lib.helpers import email, author_name
903 from rhodecode.lib.helpers import email, author_name
904 # Valid email in the attribute passed, see if they're in the system
904 # Valid email in the attribute passed, see if they're in the system
905 _email = email(author)
905 _email = email(author)
906 if _email:
906 if _email:
907 user = cls.get_by_email(_email, case_insensitive=True)
907 user = cls.get_by_email(_email, case_insensitive=True)
908 if user:
908 if user:
909 return user
909 return user
910 # Maybe we can match by username?
910 # Maybe we can match by username?
911 _author = author_name(author)
911 _author = author_name(author)
912 user = cls.get_by_username(_author, case_insensitive=True)
912 user = cls.get_by_username(_author, case_insensitive=True)
913 if user:
913 if user:
914 return user
914 return user
915
915
916 def update_userdata(self, **kwargs):
916 def update_userdata(self, **kwargs):
917 usr = self
917 usr = self
918 old = usr.user_data
918 old = usr.user_data
919 old.update(**kwargs)
919 old.update(**kwargs)
920 usr.user_data = old
920 usr.user_data = old
921 Session().add(usr)
921 Session().add(usr)
922 log.debug('updated userdata with ', kwargs)
922 log.debug('updated userdata with ', kwargs)
923
923
    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_lastactivity(self):
        """Update user lastactivity"""
        self.last_activity = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user `%s` last activity', self.username)

    def update_password(self, new_password):
        """Store the crypted form of ``new_password`` on this user."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
941
941
    @classmethod
    def get_first_super_admin(cls):
        # the system requires at least one admin account to exist
        user = User.query().filter(User.admin == true()).first()
        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls):
        """
        Returns all admin accounts sorted by username
        """
        return User.query().filter(User.admin == true())\
            .order_by(User.username.asc()).all()
956
956
    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """Return the reserved DEFAULT_USER (anonymous) account."""
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user
969
969
    def _get_default_perms(self, user, suffix=''):
        # delegate default-permission computation to the PermissionModel
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        # default permissions for this very user
        return self._get_default_perms(self, suffix)
976
976
977 def get_api_data(self, include_secrets=False, details='full'):
977 def get_api_data(self, include_secrets=False, details='full'):
978 """
978 """
979 Common function for generating user related data for API
979 Common function for generating user related data for API
980
980
981 :param include_secrets: By default secrets in the API data will be replaced
981 :param include_secrets: By default secrets in the API data will be replaced
982 by a placeholder value to prevent exposing this data by accident. In case
982 by a placeholder value to prevent exposing this data by accident. In case
983 this data shall be exposed, set this flag to ``True``.
983 this data shall be exposed, set this flag to ``True``.
984
984
985 :param details: details can be 'basic|full' basic gives only a subset of
985 :param details: details can be 'basic|full' basic gives only a subset of
986 the available user information that includes user_id, name and emails.
986 the available user information that includes user_id, name and emails.
987 """
987 """
988 user = self
988 user = self
989 user_data = self.user_data
989 user_data = self.user_data
990 data = {
990 data = {
991 'user_id': user.user_id,
991 'user_id': user.user_id,
992 'username': user.username,
992 'username': user.username,
993 'firstname': user.name,
993 'firstname': user.name,
994 'lastname': user.lastname,
994 'lastname': user.lastname,
995 'email': user.email,
995 'email': user.email,
996 'emails': user.emails,
996 'emails': user.emails,
997 }
997 }
998 if details == 'basic':
998 if details == 'basic':
999 return data
999 return data
1000
1000
1001 auth_token_length = 40
1001 auth_token_length = 40
1002 auth_token_replacement = '*' * auth_token_length
1002 auth_token_replacement = '*' * auth_token_length
1003
1003
1004 extras = {
1004 extras = {
1005 'auth_tokens': [auth_token_replacement],
1005 'auth_tokens': [auth_token_replacement],
1006 'active': user.active,
1006 'active': user.active,
1007 'admin': user.admin,
1007 'admin': user.admin,
1008 'extern_type': user.extern_type,
1008 'extern_type': user.extern_type,
1009 'extern_name': user.extern_name,
1009 'extern_name': user.extern_name,
1010 'last_login': user.last_login,
1010 'last_login': user.last_login,
1011 'last_activity': user.last_activity,
1011 'last_activity': user.last_activity,
1012 'ip_addresses': user.ip_addresses,
1012 'ip_addresses': user.ip_addresses,
1013 'language': user_data.get('language')
1013 'language': user_data.get('language')
1014 }
1014 }
1015 data.update(extras)
1015 data.update(extras)
1016
1016
1017 if include_secrets:
1017 if include_secrets:
1018 data['auth_tokens'] = user.auth_tokens
1018 data['auth_tokens'] = user.auth_tokens
1019 return data
1019 return data
1020
1020
1021 def __json__(self):
1021 def __json__(self):
1022 data = {
1022 data = {
1023 'full_name': self.full_name,
1023 'full_name': self.full_name,
1024 'full_name_or_username': self.full_name_or_username,
1024 'full_name_or_username': self.full_name_or_username,
1025 'short_contact': self.short_contact,
1025 'short_contact': self.short_contact,
1026 'full_contact': self.full_contact,
1026 'full_contact': self.full_contact,
1027 }
1027 }
1028 data.update(self.get_api_data())
1028 data.update(self.get_api_data())
1029 return data
1029 return data
1030
1030
1031
1031
1032 class UserApiKeys(Base, BaseModel):
1032 class UserApiKeys(Base, BaseModel):
1033 __tablename__ = 'user_api_keys'
1033 __tablename__ = 'user_api_keys'
1034 __table_args__ = (
1034 __table_args__ = (
1035 Index('uak_api_key_idx', 'api_key', unique=True),
1035 Index('uak_api_key_idx', 'api_key', unique=True),
1036 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1036 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1039 )
1039 )
1040 __mapper_args__ = {}
1040 __mapper_args__ = {}
1041
1041
1042 # ApiKey role
1042 # ApiKey role
1043 ROLE_ALL = 'token_role_all'
1043 ROLE_ALL = 'token_role_all'
1044 ROLE_HTTP = 'token_role_http'
1044 ROLE_HTTP = 'token_role_http'
1045 ROLE_VCS = 'token_role_vcs'
1045 ROLE_VCS = 'token_role_vcs'
1046 ROLE_API = 'token_role_api'
1046 ROLE_API = 'token_role_api'
1047 ROLE_FEED = 'token_role_feed'
1047 ROLE_FEED = 'token_role_feed'
1048 ROLE_PASSWORD_RESET = 'token_password_reset'
1048 ROLE_PASSWORD_RESET = 'token_password_reset'
1049
1049
1050 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1050 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1051
1051
1052 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1052 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1053 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1053 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1054 api_key = Column("api_key", String(255), nullable=False, unique=True)
1054 api_key = Column("api_key", String(255), nullable=False, unique=True)
1055 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1055 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1056 expires = Column('expires', Float(53), nullable=False)
1056 expires = Column('expires', Float(53), nullable=False)
1057 role = Column('role', String(255), nullable=True)
1057 role = Column('role', String(255), nullable=True)
1058 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1058 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1059
1059
1060 # scope columns
1060 # scope columns
1061 repo_id = Column(
1061 repo_id = Column(
1062 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1062 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1063 nullable=True, unique=None, default=None)
1063 nullable=True, unique=None, default=None)
1064 repo = relationship('Repository', lazy='joined')
1064 repo = relationship('Repository', lazy='joined')
1065
1065
1066 repo_group_id = Column(
1066 repo_group_id = Column(
1067 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1067 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1068 nullable=True, unique=None, default=None)
1068 nullable=True, unique=None, default=None)
1069 repo_group = relationship('RepoGroup', lazy='joined')
1069 repo_group = relationship('RepoGroup', lazy='joined')
1070
1070
1071 user = relationship('User', lazy='joined')
1071 user = relationship('User', lazy='joined')
1072
1072
1073 def __unicode__(self):
1073 def __unicode__(self):
1074 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1074 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1075
1075
1076 def __json__(self):
1076 def __json__(self):
1077 data = {
1077 data = {
1078 'auth_token': self.api_key,
1078 'auth_token': self.api_key,
1079 'role': self.role,
1079 'role': self.role,
1080 'scope': self.scope_humanized,
1080 'scope': self.scope_humanized,
1081 'expired': self.expired
1081 'expired': self.expired
1082 }
1082 }
1083 return data
1083 return data
1084
1084
1085 def get_api_data(self, include_secrets=False):
1085 def get_api_data(self, include_secrets=False):
1086 data = self.__json__()
1086 data = self.__json__()
1087 if include_secrets:
1087 if include_secrets:
1088 return data
1088 return data
1089 else:
1089 else:
1090 data['auth_token'] = self.token_obfuscated
1090 data['auth_token'] = self.token_obfuscated
1091 return data
1091 return data
1092
1092
1093 @hybrid_property
1093 @hybrid_property
1094 def description_safe(self):
1094 def description_safe(self):
1095 from rhodecode.lib import helpers as h
1095 from rhodecode.lib import helpers as h
1096 return h.escape(self.description)
1096 return h.escape(self.description)
1097
1097
1098 @property
1098 @property
1099 def expired(self):
1099 def expired(self):
1100 if self.expires == -1:
1100 if self.expires == -1:
1101 return False
1101 return False
1102 return time.time() > self.expires
1102 return time.time() > self.expires
1103
1103
1104 @classmethod
1104 @classmethod
1105 def _get_role_name(cls, role):
1105 def _get_role_name(cls, role):
1106 return {
1106 return {
1107 cls.ROLE_ALL: _('all'),
1107 cls.ROLE_ALL: _('all'),
1108 cls.ROLE_HTTP: _('http/web interface'),
1108 cls.ROLE_HTTP: _('http/web interface'),
1109 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1109 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1110 cls.ROLE_API: _('api calls'),
1110 cls.ROLE_API: _('api calls'),
1111 cls.ROLE_FEED: _('feed access'),
1111 cls.ROLE_FEED: _('feed access'),
1112 }.get(role, role)
1112 }.get(role, role)
1113
1113
1114 @property
1114 @property
1115 def role_humanized(self):
1115 def role_humanized(self):
1116 return self._get_role_name(self.role)
1116 return self._get_role_name(self.role)
1117
1117
1118 def _get_scope(self):
1118 def _get_scope(self):
1119 if self.repo:
1119 if self.repo:
1120 return repr(self.repo)
1120 return repr(self.repo)
1121 if self.repo_group:
1121 if self.repo_group:
1122 return repr(self.repo_group) + ' (recursive)'
1122 return repr(self.repo_group) + ' (recursive)'
1123 return 'global'
1123 return 'global'
1124
1124
1125 @property
1125 @property
1126 def scope_humanized(self):
1126 def scope_humanized(self):
1127 return self._get_scope()
1127 return self._get_scope()
1128
1128
1129 @property
1129 @property
1130 def token_obfuscated(self):
1130 def token_obfuscated(self):
1131 if self.api_key:
1131 if self.api_key:
1132 return self.api_key[:4] + "****"
1132 return self.api_key[:4] + "****"
1133
1133
1134
1134
1135 class UserEmailMap(Base, BaseModel):
1135 class UserEmailMap(Base, BaseModel):
1136 __tablename__ = 'user_email_map'
1136 __tablename__ = 'user_email_map'
1137 __table_args__ = (
1137 __table_args__ = (
1138 Index('uem_email_idx', 'email'),
1138 Index('uem_email_idx', 'email'),
1139 UniqueConstraint('email'),
1139 UniqueConstraint('email'),
1140 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1140 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1141 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1141 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1142 )
1142 )
1143 __mapper_args__ = {}
1143 __mapper_args__ = {}
1144
1144
1145 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1145 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1146 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1146 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1147 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1147 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1148 user = relationship('User', lazy='joined')
1148 user = relationship('User', lazy='joined')
1149
1149
1150 @validates('_email')
1150 @validates('_email')
1151 def validate_email(self, key, email):
1151 def validate_email(self, key, email):
1152 # check if this email is not main one
1152 # check if this email is not main one
1153 main_email = Session().query(User).filter(User.email == email).scalar()
1153 main_email = Session().query(User).filter(User.email == email).scalar()
1154 if main_email is not None:
1154 if main_email is not None:
1155 raise AttributeError('email %s is present is user table' % email)
1155 raise AttributeError('email %s is present is user table' % email)
1156 return email
1156 return email
1157
1157
1158 @hybrid_property
1158 @hybrid_property
1159 def email(self):
1159 def email(self):
1160 return self._email
1160 return self._email
1161
1161
1162 @email.setter
1162 @email.setter
1163 def email(self, val):
1163 def email(self, val):
1164 self._email = val.lower() if val else None
1164 self._email = val.lower() if val else None
1165
1165
1166
1166
1167 class UserIpMap(Base, BaseModel):
1167 class UserIpMap(Base, BaseModel):
1168 __tablename__ = 'user_ip_map'
1168 __tablename__ = 'user_ip_map'
1169 __table_args__ = (
1169 __table_args__ = (
1170 UniqueConstraint('user_id', 'ip_addr'),
1170 UniqueConstraint('user_id', 'ip_addr'),
1171 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1171 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1172 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1172 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1173 )
1173 )
1174 __mapper_args__ = {}
1174 __mapper_args__ = {}
1175
1175
1176 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1176 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1177 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1177 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1178 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1178 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1179 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1179 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1180 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1180 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1181 user = relationship('User', lazy='joined')
1181 user = relationship('User', lazy='joined')
1182
1182
1183 @hybrid_property
1183 @hybrid_property
1184 def description_safe(self):
1184 def description_safe(self):
1185 from rhodecode.lib import helpers as h
1185 from rhodecode.lib import helpers as h
1186 return h.escape(self.description)
1186 return h.escape(self.description)
1187
1187
1188 @classmethod
1188 @classmethod
1189 def _get_ip_range(cls, ip_addr):
1189 def _get_ip_range(cls, ip_addr):
1190 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1190 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1191 return [str(net.network_address), str(net.broadcast_address)]
1191 return [str(net.network_address), str(net.broadcast_address)]
1192
1192
1193 def __json__(self):
1193 def __json__(self):
1194 return {
1194 return {
1195 'ip_addr': self.ip_addr,
1195 'ip_addr': self.ip_addr,
1196 'ip_range': self._get_ip_range(self.ip_addr),
1196 'ip_range': self._get_ip_range(self.ip_addr),
1197 }
1197 }
1198
1198
1199 def __unicode__(self):
1199 def __unicode__(self):
1200 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1200 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1201 self.user_id, self.ip_addr)
1201 self.user_id, self.ip_addr)
1202
1202
1203
1203
1204 class UserSshKeys(Base, BaseModel):
1204 class UserSshKeys(Base, BaseModel):
1205 __tablename__ = 'user_ssh_keys'
1205 __tablename__ = 'user_ssh_keys'
1206 __table_args__ = (
1206 __table_args__ = (
1207 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1207 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1208
1208
1209 UniqueConstraint('ssh_key_fingerprint'),
1209 UniqueConstraint('ssh_key_fingerprint'),
1210
1210
1211 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1211 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1212 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1212 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1213 )
1213 )
1214 __mapper_args__ = {}
1214 __mapper_args__ = {}
1215
1215
1216 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1216 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1217 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1218 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1218 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1219
1219
1220 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1220 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1221
1221
1222 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1222 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1223 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1223 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1224 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1224 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1225
1225
1226 user = relationship('User', lazy='joined')
1226 user = relationship('User', lazy='joined')
1227
1227
1228 def __json__(self):
1228 def __json__(self):
1229 data = {
1229 data = {
1230 'ssh_fingerprint': self.ssh_key_fingerprint,
1230 'ssh_fingerprint': self.ssh_key_fingerprint,
1231 'description': self.description,
1231 'description': self.description,
1232 'created_on': self.created_on
1232 'created_on': self.created_on
1233 }
1233 }
1234 return data
1234 return data
1235
1235
1236 def get_api_data(self):
1236 def get_api_data(self):
1237 data = self.__json__()
1237 data = self.__json__()
1238 return data
1238 return data
1239
1239
1240
1240
1241 class UserLog(Base, BaseModel):
1241 class UserLog(Base, BaseModel):
1242 __tablename__ = 'user_logs'
1242 __tablename__ = 'user_logs'
1243 __table_args__ = (
1243 __table_args__ = (
1244 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1244 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1245 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1245 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1246 )
1246 )
1247 VERSION_1 = 'v1'
1247 VERSION_1 = 'v1'
1248 VERSION_2 = 'v2'
1248 VERSION_2 = 'v2'
1249 VERSIONS = [VERSION_1, VERSION_2]
1249 VERSIONS = [VERSION_1, VERSION_2]
1250
1250
1251 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1251 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1252 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1252 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1253 username = Column("username", String(255), nullable=True, unique=None, default=None)
1253 username = Column("username", String(255), nullable=True, unique=None, default=None)
1254 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1254 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1255 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1255 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1256 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1256 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1257 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1257 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1258 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1258 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1259
1259
1260 version = Column("version", String(255), nullable=True, default=VERSION_1)
1260 version = Column("version", String(255), nullable=True, default=VERSION_1)
1261 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1261 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1263
1263
1264 def __unicode__(self):
1264 def __unicode__(self):
1265 return u"<%s('id:%s:%s')>" % (
1265 return u"<%s('id:%s:%s')>" % (
1266 self.__class__.__name__, self.repository_name, self.action)
1266 self.__class__.__name__, self.repository_name, self.action)
1267
1267
1268 def __json__(self):
1268 def __json__(self):
1269 return {
1269 return {
1270 'user_id': self.user_id,
1270 'user_id': self.user_id,
1271 'username': self.username,
1271 'username': self.username,
1272 'repository_id': self.repository_id,
1272 'repository_id': self.repository_id,
1273 'repository_name': self.repository_name,
1273 'repository_name': self.repository_name,
1274 'user_ip': self.user_ip,
1274 'user_ip': self.user_ip,
1275 'action_date': self.action_date,
1275 'action_date': self.action_date,
1276 'action': self.action,
1276 'action': self.action,
1277 }
1277 }
1278
1278
1279 @hybrid_property
1279 @hybrid_property
1280 def entry_id(self):
1280 def entry_id(self):
1281 return self.user_log_id
1281 return self.user_log_id
1282
1282
1283 @property
1283 @property
1284 def action_as_day(self):
1284 def action_as_day(self):
1285 return datetime.date(*self.action_date.timetuple()[:3])
1285 return datetime.date(*self.action_date.timetuple()[:3])
1286
1286
1287 user = relationship('User')
1287 user = relationship('User')
1288 repository = relationship('Repository', cascade='')
1288 repository = relationship('Repository', cascade='')
1289
1289
1290
1290
1291 class UserGroup(Base, BaseModel):
1291 class UserGroup(Base, BaseModel):
1292 __tablename__ = 'users_groups'
1292 __tablename__ = 'users_groups'
1293 __table_args__ = (
1293 __table_args__ = (
1294 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1294 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1295 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1295 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1296 )
1296 )
1297
1297
1298 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1298 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1299 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1299 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1300 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1300 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1301 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1301 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1302 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1302 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1304 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1304 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1305 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1305 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1306
1306
1307 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1307 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1308 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1308 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1309 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1309 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1310 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1310 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1311 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1311 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1312 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1312 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1313
1313
1314 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1314 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1315 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1315 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1316
1316
1317 @classmethod
1317 @classmethod
1318 def _load_group_data(cls, column):
1318 def _load_group_data(cls, column):
1319 if not column:
1319 if not column:
1320 return {}
1320 return {}
1321
1321
1322 try:
1322 try:
1323 return json.loads(column) or {}
1323 return json.loads(column) or {}
1324 except TypeError:
1324 except TypeError:
1325 return {}
1325 return {}
1326
1326
1327 @hybrid_property
1327 @hybrid_property
1328 def description_safe(self):
1328 def description_safe(self):
1329 from rhodecode.lib import helpers as h
1329 from rhodecode.lib import helpers as h
1330 return h.escape(self.user_group_description)
1330 return h.escape(self.user_group_description)
1331
1331
1332 @hybrid_property
1332 @hybrid_property
1333 def group_data(self):
1333 def group_data(self):
1334 return self._load_group_data(self._group_data)
1334 return self._load_group_data(self._group_data)
1335
1335
1336 @group_data.expression
1336 @group_data.expression
1337 def group_data(self, **kwargs):
1337 def group_data(self, **kwargs):
1338 return self._group_data
1338 return self._group_data
1339
1339
1340 @group_data.setter
1340 @group_data.setter
1341 def group_data(self, val):
1341 def group_data(self, val):
1342 try:
1342 try:
1343 self._group_data = json.dumps(val)
1343 self._group_data = json.dumps(val)
1344 except Exception:
1344 except Exception:
1345 log.error(traceback.format_exc())
1345 log.error(traceback.format_exc())
1346
1346
1347 @classmethod
1347 @classmethod
1348 def _load_sync(cls, group_data):
1348 def _load_sync(cls, group_data):
1349 if group_data:
1349 if group_data:
1350 return group_data.get('extern_type')
1350 return group_data.get('extern_type')
1351
1351
1352 @property
1352 @property
1353 def sync(self):
1353 def sync(self):
1354 return self._load_sync(self.group_data)
1354 return self._load_sync(self.group_data)
1355
1355
1356 def __unicode__(self):
1356 def __unicode__(self):
1357 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1357 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1358 self.users_group_id,
1358 self.users_group_id,
1359 self.users_group_name)
1359 self.users_group_name)
1360
1360
1361 @classmethod
1361 @classmethod
1362 def get_by_group_name(cls, group_name, cache=False,
1362 def get_by_group_name(cls, group_name, cache=False,
1363 case_insensitive=False):
1363 case_insensitive=False):
1364 if case_insensitive:
1364 if case_insensitive:
1365 q = cls.query().filter(func.lower(cls.users_group_name) ==
1365 q = cls.query().filter(func.lower(cls.users_group_name) ==
1366 func.lower(group_name))
1366 func.lower(group_name))
1367
1367
1368 else:
1368 else:
1369 q = cls.query().filter(cls.users_group_name == group_name)
1369 q = cls.query().filter(cls.users_group_name == group_name)
1370 if cache:
1370 if cache:
1371 q = q.options(
1371 q = q.options(
1372 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1372 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1373 return q.scalar()
1373 return q.scalar()
1374
1374
1375 @classmethod
1375 @classmethod
1376 def get(cls, user_group_id, cache=False):
1376 def get(cls, user_group_id, cache=False):
1377 if not user_group_id:
1377 if not user_group_id:
1378 return
1378 return
1379
1379
1380 user_group = cls.query()
1380 user_group = cls.query()
1381 if cache:
1381 if cache:
1382 user_group = user_group.options(
1382 user_group = user_group.options(
1383 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1383 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1384 return user_group.get(user_group_id)
1384 return user_group.get(user_group_id)
1385
1385
1386 def permissions(self, with_admins=True, with_owner=True):
1386 def permissions(self, with_admins=True, with_owner=True):
1387 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1387 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1388 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1388 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1389 joinedload(UserUserGroupToPerm.user),
1389 joinedload(UserUserGroupToPerm.user),
1390 joinedload(UserUserGroupToPerm.permission),)
1390 joinedload(UserUserGroupToPerm.permission),)
1391
1391
1392 # get owners and admins and permissions. We do a trick of re-writing
1392 # get owners and admins and permissions. We do a trick of re-writing
1393 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1393 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1394 # has a global reference and changing one object propagates to all
1394 # has a global reference and changing one object propagates to all
1395 # others. This means if admin is also an owner admin_row that change
1395 # others. This means if admin is also an owner admin_row that change
1396 # would propagate to both objects
1396 # would propagate to both objects
1397 perm_rows = []
1397 perm_rows = []
1398 for _usr in q.all():
1398 for _usr in q.all():
1399 usr = AttributeDict(_usr.user.get_dict())
1399 usr = AttributeDict(_usr.user.get_dict())
1400 usr.permission = _usr.permission.permission_name
1400 usr.permission = _usr.permission.permission_name
1401 perm_rows.append(usr)
1401 perm_rows.append(usr)
1402
1402
1403 # filter the perm rows by 'default' first and then sort them by
1403 # filter the perm rows by 'default' first and then sort them by
1404 # admin,write,read,none permissions sorted again alphabetically in
1404 # admin,write,read,none permissions sorted again alphabetically in
1405 # each group
1405 # each group
1406 perm_rows = sorted(perm_rows, key=display_user_sort)
1406 perm_rows = sorted(perm_rows, key=display_user_sort)
1407
1407
1408 _admin_perm = 'usergroup.admin'
1408 _admin_perm = 'usergroup.admin'
1409 owner_row = []
1409 owner_row = []
1410 if with_owner:
1410 if with_owner:
1411 usr = AttributeDict(self.user.get_dict())
1411 usr = AttributeDict(self.user.get_dict())
1412 usr.owner_row = True
1412 usr.owner_row = True
1413 usr.permission = _admin_perm
1413 usr.permission = _admin_perm
1414 owner_row.append(usr)
1414 owner_row.append(usr)
1415
1415
1416 super_admin_rows = []
1416 super_admin_rows = []
1417 if with_admins:
1417 if with_admins:
1418 for usr in User.get_all_super_admins():
1418 for usr in User.get_all_super_admins():
1419 # if this admin is also owner, don't double the record
1419 # if this admin is also owner, don't double the record
1420 if usr.user_id == owner_row[0].user_id:
1420 if usr.user_id == owner_row[0].user_id:
1421 owner_row[0].admin_row = True
1421 owner_row[0].admin_row = True
1422 else:
1422 else:
1423 usr = AttributeDict(usr.get_dict())
1423 usr = AttributeDict(usr.get_dict())
1424 usr.admin_row = True
1424 usr.admin_row = True
1425 usr.permission = _admin_perm
1425 usr.permission = _admin_perm
1426 super_admin_rows.append(usr)
1426 super_admin_rows.append(usr)
1427
1427
1428 return super_admin_rows + owner_row + perm_rows
1428 return super_admin_rows + owner_row + perm_rows
1429
1429
1430 def permission_user_groups(self):
1430 def permission_user_groups(self):
1431 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1431 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1432 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1432 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1433 joinedload(UserGroupUserGroupToPerm.target_user_group),
1433 joinedload(UserGroupUserGroupToPerm.target_user_group),
1434 joinedload(UserGroupUserGroupToPerm.permission),)
1434 joinedload(UserGroupUserGroupToPerm.permission),)
1435
1435
1436 perm_rows = []
1436 perm_rows = []
1437 for _user_group in q.all():
1437 for _user_group in q.all():
1438 usr = AttributeDict(_user_group.user_group.get_dict())
1438 usr = AttributeDict(_user_group.user_group.get_dict())
1439 usr.permission = _user_group.permission.permission_name
1439 usr.permission = _user_group.permission.permission_name
1440 perm_rows.append(usr)
1440 perm_rows.append(usr)
1441
1441
1442 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1442 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1443 return perm_rows
1443 return perm_rows
1444
1444
1445 def _get_default_perms(self, user_group, suffix=''):
1445 def _get_default_perms(self, user_group, suffix=''):
1446 from rhodecode.model.permission import PermissionModel
1446 from rhodecode.model.permission import PermissionModel
1447 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1447 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1448
1448
1449 def get_default_perms(self, suffix=''):
1449 def get_default_perms(self, suffix=''):
1450 return self._get_default_perms(self, suffix)
1450 return self._get_default_perms(self, suffix)
1451
1451
1452 def get_api_data(self, with_group_members=True, include_secrets=False):
1452 def get_api_data(self, with_group_members=True, include_secrets=False):
1453 """
1453 """
1454 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1454 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1455 basically forwarded.
1455 basically forwarded.
1456
1456
1457 """
1457 """
1458 user_group = self
1458 user_group = self
1459 data = {
1459 data = {
1460 'users_group_id': user_group.users_group_id,
1460 'users_group_id': user_group.users_group_id,
1461 'group_name': user_group.users_group_name,
1461 'group_name': user_group.users_group_name,
1462 'group_description': user_group.user_group_description,
1462 'group_description': user_group.user_group_description,
1463 'active': user_group.users_group_active,
1463 'active': user_group.users_group_active,
1464 'owner': user_group.user.username,
1464 'owner': user_group.user.username,
1465 'sync': user_group.sync,
1465 'sync': user_group.sync,
1466 'owner_email': user_group.user.email,
1466 'owner_email': user_group.user.email,
1467 }
1467 }
1468
1468
1469 if with_group_members:
1469 if with_group_members:
1470 users = []
1470 users = []
1471 for user in user_group.members:
1471 for user in user_group.members:
1472 user = user.user
1472 user = user.user
1473 users.append(user.get_api_data(include_secrets=include_secrets))
1473 users.append(user.get_api_data(include_secrets=include_secrets))
1474 data['users'] = users
1474 data['users'] = users
1475
1475
1476 return data
1476 return data
1477
1477
1478
1478
class UserGroupMember(Base, BaseModel):
    """Association row linking one user to one user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_member_id = Column(
        "users_group_member_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    users_group_id = Column(
        "users_group_id", Integer(),
        ForeignKey('users_groups.users_group_id'),
        nullable=False, unique=None, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'),
        nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
1496
1496
1497
1497
class RepositoryField(Base, BaseModel):
    """Custom extra metadata field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Return the field key with the form prefix applied."""
        # use PREFIX instead of the previously hard-coded 'ex_' so the
        # prefix is defined in exactly one place; un_prefix_key already
        # relies on PREFIX, so both directions now stay in sync
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from *key* if present, else return as-is."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for *key* on *repo*, or None if missing."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1534
1534
1535
1535
1536 class Repository(Base, BaseModel):
1536 class Repository(Base, BaseModel):
1537 __tablename__ = 'repositories'
1537 __tablename__ = 'repositories'
1538 __table_args__ = (
1538 __table_args__ = (
1539 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1539 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1540 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1540 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1541 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1541 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1542 )
1542 )
1543 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1543 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1544 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1544 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1545 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1545 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1546
1546
1547 STATE_CREATED = 'repo_state_created'
1547 STATE_CREATED = 'repo_state_created'
1548 STATE_PENDING = 'repo_state_pending'
1548 STATE_PENDING = 'repo_state_pending'
1549 STATE_ERROR = 'repo_state_error'
1549 STATE_ERROR = 'repo_state_error'
1550
1550
1551 LOCK_AUTOMATIC = 'lock_auto'
1551 LOCK_AUTOMATIC = 'lock_auto'
1552 LOCK_API = 'lock_api'
1552 LOCK_API = 'lock_api'
1553 LOCK_WEB = 'lock_web'
1553 LOCK_WEB = 'lock_web'
1554 LOCK_PULL = 'lock_pull'
1554 LOCK_PULL = 'lock_pull'
1555
1555
1556 NAME_SEP = URL_SEP
1556 NAME_SEP = URL_SEP
1557
1557
1558 repo_id = Column(
1558 repo_id = Column(
1559 "repo_id", Integer(), nullable=False, unique=True, default=None,
1559 "repo_id", Integer(), nullable=False, unique=True, default=None,
1560 primary_key=True)
1560 primary_key=True)
1561 _repo_name = Column(
1561 _repo_name = Column(
1562 "repo_name", Text(), nullable=False, default=None)
1562 "repo_name", Text(), nullable=False, default=None)
1563 _repo_name_hash = Column(
1563 _repo_name_hash = Column(
1564 "repo_name_hash", String(255), nullable=False, unique=True)
1564 "repo_name_hash", String(255), nullable=False, unique=True)
1565 repo_state = Column("repo_state", String(255), nullable=True)
1565 repo_state = Column("repo_state", String(255), nullable=True)
1566
1566
1567 clone_uri = Column(
1567 clone_uri = Column(
1568 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1568 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1569 default=None)
1569 default=None)
1570 push_uri = Column(
1570 push_uri = Column(
1571 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1571 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1572 default=None)
1572 default=None)
1573 repo_type = Column(
1573 repo_type = Column(
1574 "repo_type", String(255), nullable=False, unique=False, default=None)
1574 "repo_type", String(255), nullable=False, unique=False, default=None)
1575 user_id = Column(
1575 user_id = Column(
1576 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1576 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1577 unique=False, default=None)
1577 unique=False, default=None)
1578 private = Column(
1578 private = Column(
1579 "private", Boolean(), nullable=True, unique=None, default=None)
1579 "private", Boolean(), nullable=True, unique=None, default=None)
1580 enable_statistics = Column(
1580 enable_statistics = Column(
1581 "statistics", Boolean(), nullable=True, unique=None, default=True)
1581 "statistics", Boolean(), nullable=True, unique=None, default=True)
1582 enable_downloads = Column(
1582 enable_downloads = Column(
1583 "downloads", Boolean(), nullable=True, unique=None, default=True)
1583 "downloads", Boolean(), nullable=True, unique=None, default=True)
1584 description = Column(
1584 description = Column(
1585 "description", String(10000), nullable=True, unique=None, default=None)
1585 "description", String(10000), nullable=True, unique=None, default=None)
1586 created_on = Column(
1586 created_on = Column(
1587 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1587 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1588 default=datetime.datetime.now)
1588 default=datetime.datetime.now)
1589 updated_on = Column(
1589 updated_on = Column(
1590 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1590 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1591 default=datetime.datetime.now)
1591 default=datetime.datetime.now)
1592 _landing_revision = Column(
1592 _landing_revision = Column(
1593 "landing_revision", String(255), nullable=False, unique=False,
1593 "landing_revision", String(255), nullable=False, unique=False,
1594 default=None)
1594 default=None)
1595 enable_locking = Column(
1595 enable_locking = Column(
1596 "enable_locking", Boolean(), nullable=False, unique=None,
1596 "enable_locking", Boolean(), nullable=False, unique=None,
1597 default=False)
1597 default=False)
1598 _locked = Column(
1598 _locked = Column(
1599 "locked", String(255), nullable=True, unique=False, default=None)
1599 "locked", String(255), nullable=True, unique=False, default=None)
1600 _changeset_cache = Column(
1600 _changeset_cache = Column(
1601 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1601 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1602
1602
1603 fork_id = Column(
1603 fork_id = Column(
1604 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1604 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1605 nullable=True, unique=False, default=None)
1605 nullable=True, unique=False, default=None)
1606 group_id = Column(
1606 group_id = Column(
1607 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1607 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1608 unique=False, default=None)
1608 unique=False, default=None)
1609
1609
1610 user = relationship('User', lazy='joined')
1610 user = relationship('User', lazy='joined')
1611 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1611 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1612 group = relationship('RepoGroup', lazy='joined')
1612 group = relationship('RepoGroup', lazy='joined')
1613 repo_to_perm = relationship(
1613 repo_to_perm = relationship(
1614 'UserRepoToPerm', cascade='all',
1614 'UserRepoToPerm', cascade='all',
1615 order_by='UserRepoToPerm.repo_to_perm_id')
1615 order_by='UserRepoToPerm.repo_to_perm_id')
1616 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1616 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1617 stats = relationship('Statistics', cascade='all', uselist=False)
1617 stats = relationship('Statistics', cascade='all', uselist=False)
1618
1618
1619 followers = relationship(
1619 followers = relationship(
1620 'UserFollowing',
1620 'UserFollowing',
1621 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1621 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1622 cascade='all')
1622 cascade='all')
1623 extra_fields = relationship(
1623 extra_fields = relationship(
1624 'RepositoryField', cascade="all, delete, delete-orphan")
1624 'RepositoryField', cascade="all, delete, delete-orphan")
1625 logs = relationship('UserLog')
1625 logs = relationship('UserLog')
1626 comments = relationship(
1626 comments = relationship(
1627 'ChangesetComment', cascade="all, delete, delete-orphan")
1627 'ChangesetComment', cascade="all, delete, delete-orphan")
1628 pull_requests_source = relationship(
1628 pull_requests_source = relationship(
1629 'PullRequest',
1629 'PullRequest',
1630 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1630 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1631 cascade="all, delete, delete-orphan")
1631 cascade="all, delete, delete-orphan")
1632 pull_requests_target = relationship(
1632 pull_requests_target = relationship(
1633 'PullRequest',
1633 'PullRequest',
1634 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1634 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1635 cascade="all, delete, delete-orphan")
1635 cascade="all, delete, delete-orphan")
1636 ui = relationship('RepoRhodeCodeUi', cascade="all")
1636 ui = relationship('RepoRhodeCodeUi', cascade="all")
1637 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1637 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1638 integrations = relationship('Integration',
1638 integrations = relationship('Integration',
1639 cascade="all, delete, delete-orphan")
1639 cascade="all, delete, delete-orphan")
1640
1640
1641 scoped_tokens = relationship('UserApiKeys', cascade="all")
1641 scoped_tokens = relationship('UserApiKeys', cascade="all")
1642
1642
1643 def __unicode__(self):
1643 def __unicode__(self):
1644 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1644 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1645 safe_unicode(self.repo_name))
1645 safe_unicode(self.repo_name))
1646
1646
1647 @hybrid_property
1647 @hybrid_property
1648 def description_safe(self):
1648 def description_safe(self):
1649 from rhodecode.lib import helpers as h
1649 from rhodecode.lib import helpers as h
1650 return h.escape(self.description)
1650 return h.escape(self.description)
1651
1651
1652 @hybrid_property
1652 @hybrid_property
1653 def landing_rev(self):
1653 def landing_rev(self):
1654 # always should return [rev_type, rev]
1654 # always should return [rev_type, rev]
1655 if self._landing_revision:
1655 if self._landing_revision:
1656 _rev_info = self._landing_revision.split(':')
1656 _rev_info = self._landing_revision.split(':')
1657 if len(_rev_info) < 2:
1657 if len(_rev_info) < 2:
1658 _rev_info.insert(0, 'rev')
1658 _rev_info.insert(0, 'rev')
1659 return [_rev_info[0], _rev_info[1]]
1659 return [_rev_info[0], _rev_info[1]]
1660 return [None, None]
1660 return [None, None]
1661
1661
1662 @landing_rev.setter
1662 @landing_rev.setter
1663 def landing_rev(self, val):
1663 def landing_rev(self, val):
1664 if ':' not in val:
1664 if ':' not in val:
1665 raise ValueError('value must be delimited with `:` and consist '
1665 raise ValueError('value must be delimited with `:` and consist '
1666 'of <rev_type>:<rev>, got %s instead' % val)
1666 'of <rev_type>:<rev>, got %s instead' % val)
1667 self._landing_revision = val
1667 self._landing_revision = val
1668
1668
1669 @hybrid_property
1669 @hybrid_property
1670 def locked(self):
1670 def locked(self):
1671 if self._locked:
1671 if self._locked:
1672 user_id, timelocked, reason = self._locked.split(':')
1672 user_id, timelocked, reason = self._locked.split(':')
1673 lock_values = int(user_id), timelocked, reason
1673 lock_values = int(user_id), timelocked, reason
1674 else:
1674 else:
1675 lock_values = [None, None, None]
1675 lock_values = [None, None, None]
1676 return lock_values
1676 return lock_values
1677
1677
1678 @locked.setter
1678 @locked.setter
1679 def locked(self, val):
1679 def locked(self, val):
1680 if val and isinstance(val, (list, tuple)):
1680 if val and isinstance(val, (list, tuple)):
1681 self._locked = ':'.join(map(str, val))
1681 self._locked = ':'.join(map(str, val))
1682 else:
1682 else:
1683 self._locked = None
1683 self._locked = None
1684
1684
1685 @hybrid_property
1685 @hybrid_property
1686 def changeset_cache(self):
1686 def changeset_cache(self):
1687 from rhodecode.lib.vcs.backends.base import EmptyCommit
1687 from rhodecode.lib.vcs.backends.base import EmptyCommit
1688 dummy = EmptyCommit().__json__()
1688 dummy = EmptyCommit().__json__()
1689 if not self._changeset_cache:
1689 if not self._changeset_cache:
1690 return dummy
1690 return dummy
1691 try:
1691 try:
1692 return json.loads(self._changeset_cache)
1692 return json.loads(self._changeset_cache)
1693 except TypeError:
1693 except TypeError:
1694 return dummy
1694 return dummy
1695 except Exception:
1695 except Exception:
1696 log.error(traceback.format_exc())
1696 log.error(traceback.format_exc())
1697 return dummy
1697 return dummy
1698
1698
1699 @changeset_cache.setter
1699 @changeset_cache.setter
1700 def changeset_cache(self, val):
1700 def changeset_cache(self, val):
1701 try:
1701 try:
1702 self._changeset_cache = json.dumps(val)
1702 self._changeset_cache = json.dumps(val)
1703 except Exception:
1703 except Exception:
1704 log.error(traceback.format_exc())
1704 log.error(traceback.format_exc())
1705
1705
1706 @hybrid_property
1706 @hybrid_property
1707 def repo_name(self):
1707 def repo_name(self):
1708 return self._repo_name
1708 return self._repo_name
1709
1709
1710 @repo_name.setter
1710 @repo_name.setter
1711 def repo_name(self, value):
1711 def repo_name(self, value):
1712 self._repo_name = value
1712 self._repo_name = value
1713 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1713 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1714
1714
1715 @classmethod
1715 @classmethod
1716 def normalize_repo_name(cls, repo_name):
1716 def normalize_repo_name(cls, repo_name):
1717 """
1717 """
1718 Normalizes os specific repo_name to the format internally stored inside
1718 Normalizes os specific repo_name to the format internally stored inside
1719 database using URL_SEP
1719 database using URL_SEP
1720
1720
1721 :param cls:
1721 :param cls:
1722 :param repo_name:
1722 :param repo_name:
1723 """
1723 """
1724 return cls.NAME_SEP.join(repo_name.split(os.sep))
1724 return cls.NAME_SEP.join(repo_name.split(os.sep))
1725
1725
1726 @classmethod
1726 @classmethod
1727 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1727 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1728 session = Session()
1728 session = Session()
1729 q = session.query(cls).filter(cls.repo_name == repo_name)
1729 q = session.query(cls).filter(cls.repo_name == repo_name)
1730
1730
1731 if cache:
1731 if cache:
1732 if identity_cache:
1732 if identity_cache:
1733 val = cls.identity_cache(session, 'repo_name', repo_name)
1733 val = cls.identity_cache(session, 'repo_name', repo_name)
1734 if val:
1734 if val:
1735 return val
1735 return val
1736 else:
1736 else:
1737 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1737 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1738 q = q.options(
1738 q = q.options(
1739 FromCache("sql_cache_short", cache_key))
1739 FromCache("sql_cache_short", cache_key))
1740
1740
1741 return q.scalar()
1741 return q.scalar()
1742
1742
1743 @classmethod
1743 @classmethod
1744 def get_by_id_or_repo_name(cls, repoid):
1744 def get_by_id_or_repo_name(cls, repoid):
1745 if isinstance(repoid, (int, long)):
1745 if isinstance(repoid, (int, long)):
1746 try:
1746 try:
1747 repo = cls.get(repoid)
1747 repo = cls.get(repoid)
1748 except ValueError:
1748 except ValueError:
1749 repo = None
1749 repo = None
1750 else:
1750 else:
1751 repo = cls.get_by_repo_name(repoid)
1751 repo = cls.get_by_repo_name(repoid)
1752 return repo
1752 return repo
1753
1753
1754 @classmethod
1754 @classmethod
1755 def get_by_full_path(cls, repo_full_path):
1755 def get_by_full_path(cls, repo_full_path):
1756 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1756 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1757 repo_name = cls.normalize_repo_name(repo_name)
1757 repo_name = cls.normalize_repo_name(repo_name)
1758 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1758 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1759
1759
1760 @classmethod
1760 @classmethod
1761 def get_repo_forks(cls, repo_id):
1761 def get_repo_forks(cls, repo_id):
1762 return cls.query().filter(Repository.fork_id == repo_id)
1762 return cls.query().filter(Repository.fork_id == repo_id)
1763
1763
1764 @classmethod
1764 @classmethod
1765 def base_path(cls):
1765 def base_path(cls):
1766 """
1766 """
1767 Returns base path when all repos are stored
1767 Returns base path when all repos are stored
1768
1768
1769 :param cls:
1769 :param cls:
1770 """
1770 """
1771 q = Session().query(RhodeCodeUi)\
1771 q = Session().query(RhodeCodeUi)\
1772 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1772 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1773 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1773 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1774 return q.one().ui_value
1774 return q.one().ui_value
1775
1775
1776 @classmethod
1776 @classmethod
1777 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1777 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1778 case_insensitive=True):
1778 case_insensitive=True):
1779 q = Repository.query()
1779 q = Repository.query()
1780
1780
1781 if not isinstance(user_id, Optional):
1781 if not isinstance(user_id, Optional):
1782 q = q.filter(Repository.user_id == user_id)
1782 q = q.filter(Repository.user_id == user_id)
1783
1783
1784 if not isinstance(group_id, Optional):
1784 if not isinstance(group_id, Optional):
1785 q = q.filter(Repository.group_id == group_id)
1785 q = q.filter(Repository.group_id == group_id)
1786
1786
1787 if case_insensitive:
1787 if case_insensitive:
1788 q = q.order_by(func.lower(Repository.repo_name))
1788 q = q.order_by(func.lower(Repository.repo_name))
1789 else:
1789 else:
1790 q = q.order_by(Repository.repo_name)
1790 q = q.order_by(Repository.repo_name)
1791 return q.all()
1791 return q.all()
1792
1792
1793 @property
1793 @property
1794 def forks(self):
1794 def forks(self):
1795 """
1795 """
1796 Return forks of this repo
1796 Return forks of this repo
1797 """
1797 """
1798 return Repository.get_repo_forks(self.repo_id)
1798 return Repository.get_repo_forks(self.repo_id)
1799
1799
1800 @property
1800 @property
1801 def parent(self):
1801 def parent(self):
1802 """
1802 """
1803 Returns fork parent
1803 Returns fork parent
1804 """
1804 """
1805 return self.fork
1805 return self.fork
1806
1806
1807 @property
1807 @property
1808 def just_name(self):
1808 def just_name(self):
1809 return self.repo_name.split(self.NAME_SEP)[-1]
1809 return self.repo_name.split(self.NAME_SEP)[-1]
1810
1810
1811 @property
1811 @property
1812 def groups_with_parents(self):
1812 def groups_with_parents(self):
1813 groups = []
1813 groups = []
1814 if self.group is None:
1814 if self.group is None:
1815 return groups
1815 return groups
1816
1816
1817 cur_gr = self.group
1817 cur_gr = self.group
1818 groups.insert(0, cur_gr)
1818 groups.insert(0, cur_gr)
1819 while 1:
1819 while 1:
1820 gr = getattr(cur_gr, 'parent_group', None)
1820 gr = getattr(cur_gr, 'parent_group', None)
1821 cur_gr = cur_gr.parent_group
1821 cur_gr = cur_gr.parent_group
1822 if gr is None:
1822 if gr is None:
1823 break
1823 break
1824 groups.insert(0, gr)
1824 groups.insert(0, gr)
1825
1825
1826 return groups
1826 return groups
1827
1827
1828 @property
1828 @property
1829 def groups_and_repo(self):
1829 def groups_and_repo(self):
1830 return self.groups_with_parents, self
1830 return self.groups_with_parents, self
1831
1831
1832 @LazyProperty
1832 @LazyProperty
1833 def repo_path(self):
1833 def repo_path(self):
1834 """
1834 """
1835 Returns base full path for that repository means where it actually
1835 Returns base full path for that repository means where it actually
1836 exists on a filesystem
1836 exists on a filesystem
1837 """
1837 """
1838 q = Session().query(RhodeCodeUi).filter(
1838 q = Session().query(RhodeCodeUi).filter(
1839 RhodeCodeUi.ui_key == self.NAME_SEP)
1839 RhodeCodeUi.ui_key == self.NAME_SEP)
1840 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1840 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1841 return q.one().ui_value
1841 return q.one().ui_value
1842
1842
1843 @property
1843 @property
1844 def repo_full_path(self):
1844 def repo_full_path(self):
1845 p = [self.repo_path]
1845 p = [self.repo_path]
1846 # we need to split the name by / since this is how we store the
1846 # we need to split the name by / since this is how we store the
1847 # names in the database, but that eventually needs to be converted
1847 # names in the database, but that eventually needs to be converted
1848 # into a valid system path
1848 # into a valid system path
1849 p += self.repo_name.split(self.NAME_SEP)
1849 p += self.repo_name.split(self.NAME_SEP)
1850 return os.path.join(*map(safe_unicode, p))
1850 return os.path.join(*map(safe_unicode, p))
1851
1851
1852 @property
1852 @property
1853 def cache_keys(self):
1853 def cache_keys(self):
1854 """
1854 """
1855 Returns associated cache keys for that repo
1855 Returns associated cache keys for that repo
1856 """
1856 """
1857 return CacheKey.query()\
1857 return CacheKey.query()\
1858 .filter(CacheKey.cache_args == self.repo_name)\
1858 .filter(CacheKey.cache_args == self.repo_name)\
1859 .order_by(CacheKey.cache_key)\
1859 .order_by(CacheKey.cache_key)\
1860 .all()
1860 .all()
1861
1861
1862 @property
1862 @property
1863 def cached_diffs_relative_dir(self):
1863 def cached_diffs_relative_dir(self):
1864 """
1864 """
1865 Return a relative to the repository store path of cached diffs
1865 Return a relative to the repository store path of cached diffs
1866 used for safe display for users, who shouldn't know the absolute store
1866 used for safe display for users, who shouldn't know the absolute store
1867 path
1867 path
1868 """
1868 """
1869 return os.path.join(
1869 return os.path.join(
1870 os.path.dirname(self.repo_name),
1870 os.path.dirname(self.repo_name),
1871 self.cached_diffs_dir.split(os.path.sep)[-1])
1871 self.cached_diffs_dir.split(os.path.sep)[-1])
1872
1872
1873 @property
1873 @property
1874 def cached_diffs_dir(self):
1874 def cached_diffs_dir(self):
1875 path = self.repo_full_path
1875 path = self.repo_full_path
1876 return os.path.join(
1876 return os.path.join(
1877 os.path.dirname(path),
1877 os.path.dirname(path),
1878 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1878 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1879
1879
1880 def cached_diffs(self):
1880 def cached_diffs(self):
1881 diff_cache_dir = self.cached_diffs_dir
1881 diff_cache_dir = self.cached_diffs_dir
1882 if os.path.isdir(diff_cache_dir):
1882 if os.path.isdir(diff_cache_dir):
1883 return os.listdir(diff_cache_dir)
1883 return os.listdir(diff_cache_dir)
1884 return []
1884 return []
1885
1885
1886 def shadow_repos(self):
1887 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1888 return [
1889 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1890 if x.startswith(shadow_repos_pattern)]
1891
1886 def get_new_name(self, repo_name):
1892 def get_new_name(self, repo_name):
1887 """
1893 """
1888 returns new full repository name based on assigned group and new new
1894 returns new full repository name based on assigned group and new new
1889
1895
1890 :param group_name:
1896 :param group_name:
1891 """
1897 """
1892 path_prefix = self.group.full_path_splitted if self.group else []
1898 path_prefix = self.group.full_path_splitted if self.group else []
1893 return self.NAME_SEP.join(path_prefix + [repo_name])
1899 return self.NAME_SEP.join(path_prefix + [repo_name])
1894
1900
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        # imported here to avoid a circular import at module load time
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
1902
1908
1903 def permissions(self, with_admins=True, with_owner=True):
1909 def permissions(self, with_admins=True, with_owner=True):
1904 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1910 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1905 q = q.options(joinedload(UserRepoToPerm.repository),
1911 q = q.options(joinedload(UserRepoToPerm.repository),
1906 joinedload(UserRepoToPerm.user),
1912 joinedload(UserRepoToPerm.user),
1907 joinedload(UserRepoToPerm.permission),)
1913 joinedload(UserRepoToPerm.permission),)
1908
1914
1909 # get owners and admins and permissions. We do a trick of re-writing
1915 # get owners and admins and permissions. We do a trick of re-writing
1910 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1916 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1911 # has a global reference and changing one object propagates to all
1917 # has a global reference and changing one object propagates to all
1912 # others. This means if admin is also an owner admin_row that change
1918 # others. This means if admin is also an owner admin_row that change
1913 # would propagate to both objects
1919 # would propagate to both objects
1914 perm_rows = []
1920 perm_rows = []
1915 for _usr in q.all():
1921 for _usr in q.all():
1916 usr = AttributeDict(_usr.user.get_dict())
1922 usr = AttributeDict(_usr.user.get_dict())
1917 usr.permission = _usr.permission.permission_name
1923 usr.permission = _usr.permission.permission_name
1918 perm_rows.append(usr)
1924 perm_rows.append(usr)
1919
1925
1920 # filter the perm rows by 'default' first and then sort them by
1926 # filter the perm rows by 'default' first and then sort them by
1921 # admin,write,read,none permissions sorted again alphabetically in
1927 # admin,write,read,none permissions sorted again alphabetically in
1922 # each group
1928 # each group
1923 perm_rows = sorted(perm_rows, key=display_user_sort)
1929 perm_rows = sorted(perm_rows, key=display_user_sort)
1924
1930
1925 _admin_perm = 'repository.admin'
1931 _admin_perm = 'repository.admin'
1926 owner_row = []
1932 owner_row = []
1927 if with_owner:
1933 if with_owner:
1928 usr = AttributeDict(self.user.get_dict())
1934 usr = AttributeDict(self.user.get_dict())
1929 usr.owner_row = True
1935 usr.owner_row = True
1930 usr.permission = _admin_perm
1936 usr.permission = _admin_perm
1931 owner_row.append(usr)
1937 owner_row.append(usr)
1932
1938
1933 super_admin_rows = []
1939 super_admin_rows = []
1934 if with_admins:
1940 if with_admins:
1935 for usr in User.get_all_super_admins():
1941 for usr in User.get_all_super_admins():
1936 # if this admin is also owner, don't double the record
1942 # if this admin is also owner, don't double the record
1937 if usr.user_id == owner_row[0].user_id:
1943 if usr.user_id == owner_row[0].user_id:
1938 owner_row[0].admin_row = True
1944 owner_row[0].admin_row = True
1939 else:
1945 else:
1940 usr = AttributeDict(usr.get_dict())
1946 usr = AttributeDict(usr.get_dict())
1941 usr.admin_row = True
1947 usr.admin_row = True
1942 usr.permission = _admin_perm
1948 usr.permission = _admin_perm
1943 super_admin_rows.append(usr)
1949 super_admin_rows.append(usr)
1944
1950
1945 return super_admin_rows + owner_row + perm_rows
1951 return super_admin_rows + owner_row + perm_rows
1946
1952
1947 def permission_user_groups(self):
1953 def permission_user_groups(self):
1948 q = UserGroupRepoToPerm.query().filter(
1954 q = UserGroupRepoToPerm.query().filter(
1949 UserGroupRepoToPerm.repository == self)
1955 UserGroupRepoToPerm.repository == self)
1950 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1956 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1951 joinedload(UserGroupRepoToPerm.users_group),
1957 joinedload(UserGroupRepoToPerm.users_group),
1952 joinedload(UserGroupRepoToPerm.permission),)
1958 joinedload(UserGroupRepoToPerm.permission),)
1953
1959
1954 perm_rows = []
1960 perm_rows = []
1955 for _user_group in q.all():
1961 for _user_group in q.all():
1956 usr = AttributeDict(_user_group.users_group.get_dict())
1962 usr = AttributeDict(_user_group.users_group.get_dict())
1957 usr.permission = _user_group.permission.permission_name
1963 usr.permission = _user_group.permission.permission_name
1958 perm_rows.append(usr)
1964 perm_rows.append(usr)
1959
1965
1960 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1966 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1961 return perm_rows
1967 return perm_rows
1962
1968
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # lock info is a (user_id, time, reason) triple; all None if unlocked
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose configured extra fields as prefixed keys in the payload
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2011
2017
2012 @classmethod
2018 @classmethod
2013 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2019 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2014 if not lock_time:
2020 if not lock_time:
2015 lock_time = time.time()
2021 lock_time = time.time()
2016 if not lock_reason:
2022 if not lock_reason:
2017 lock_reason = cls.LOCK_AUTOMATIC
2023 lock_reason = cls.LOCK_AUTOMATIC
2018 repo.locked = [user_id, lock_time, lock_reason]
2024 repo.locked = [user_id, lock_time, lock_reason]
2019 Session().add(repo)
2025 Session().add(repo)
2020 Session().commit()
2026 Session().commit()
2021
2027
2022 @classmethod
2028 @classmethod
2023 def unlock(cls, repo):
2029 def unlock(cls, repo):
2024 repo.locked = None
2030 repo.locked = None
2025 Session().add(repo)
2031 Session().add(repo)
2026 Session().commit()
2032 Session().commit()
2027
2033
2028 @classmethod
2034 @classmethod
2029 def getlock(cls, repo):
2035 def getlock(cls, repo):
2030 return repo.locked
2036 return repo.locked
2031
2037
2032 def is_user_lock(self, user_id):
2038 def is_user_lock(self, user_id):
2033 if self.lock[0]:
2039 if self.lock[0]:
2034 lock_user_id = safe_int(self.lock[0])
2040 lock_user_id = safe_int(self.lock[0])
2035 user_id = safe_int(user_id)
2041 user_id = safe_int(user_id)
2036 # both are ints, and they are equal
2042 # both are ints, and they are equal
2037 return all([lock_user_id, user_id]) and lock_user_id == user_id
2043 return all([lock_user_id, user_id]) and lock_user_id == user_id
2038
2044
2039 return False
2045 return False
2040
2046
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: only evaluate when repo locking is enabled
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    # NOTE(review): message interpolates the acting user,
                    # not the lock holder - confirm intent
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            # NOTE(review): message says "read" but the check requires write
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2103
2109
2104 @property
2110 @property
2105 def last_db_change(self):
2111 def last_db_change(self):
2106 return self.updated_on
2112 return self.updated_on
2107
2113
2108 @property
2114 @property
2109 def clone_uri_hidden(self):
2115 def clone_uri_hidden(self):
2110 clone_uri = self.clone_uri
2116 clone_uri = self.clone_uri
2111 if clone_uri:
2117 if clone_uri:
2112 import urlobject
2118 import urlobject
2113 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2119 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2114 if url_obj.password:
2120 if url_obj.password:
2115 clone_uri = url_obj.with_password('*****')
2121 clone_uri = url_obj.with_password('*****')
2116 return clone_uri
2122 return clone_uri
2117
2123
2118 @property
2124 @property
2119 def push_uri_hidden(self):
2125 def push_uri_hidden(self):
2120 push_uri = self.push_uri
2126 push_uri = self.push_uri
2121 if push_uri:
2127 if push_uri:
2122 import urlobject
2128 import urlobject
2123 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2129 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2124 if url_obj.password:
2130 if url_obj.password:
2125 push_uri = url_obj.with_password('*****')
2131 push_uri = url_obj.with_password('*****')
2126 return push_uri
2132 return push_uri
2127
2133
2128 def clone_url(self, **override):
2134 def clone_url(self, **override):
2129 from rhodecode.model.settings import SettingsModel
2135 from rhodecode.model.settings import SettingsModel
2130
2136
2131 uri_tmpl = None
2137 uri_tmpl = None
2132 if 'with_id' in override:
2138 if 'with_id' in override:
2133 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2139 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2134 del override['with_id']
2140 del override['with_id']
2135
2141
2136 if 'uri_tmpl' in override:
2142 if 'uri_tmpl' in override:
2137 uri_tmpl = override['uri_tmpl']
2143 uri_tmpl = override['uri_tmpl']
2138 del override['uri_tmpl']
2144 del override['uri_tmpl']
2139
2145
2140 ssh = False
2146 ssh = False
2141 if 'ssh' in override:
2147 if 'ssh' in override:
2142 ssh = True
2148 ssh = True
2143 del override['ssh']
2149 del override['ssh']
2144
2150
2145 # we didn't override our tmpl from **overrides
2151 # we didn't override our tmpl from **overrides
2146 if not uri_tmpl:
2152 if not uri_tmpl:
2147 rc_config = SettingsModel().get_all_settings(cache=True)
2153 rc_config = SettingsModel().get_all_settings(cache=True)
2148 if ssh:
2154 if ssh:
2149 uri_tmpl = rc_config.get(
2155 uri_tmpl = rc_config.get(
2150 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2156 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2151 else:
2157 else:
2152 uri_tmpl = rc_config.get(
2158 uri_tmpl = rc_config.get(
2153 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2159 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2154
2160
2155 request = get_current_request()
2161 request = get_current_request()
2156 return get_clone_url(request=request,
2162 return get_clone_url(request=request,
2157 uri_tmpl=uri_tmpl,
2163 uri_tmpl=uri_tmpl,
2158 repo_name=self.repo_name,
2164 repo_name=self.repo_name,
2159 repo_id=self.repo_id, **override)
2165 repo_id=self.repo_id, **override)
2160
2166
2161 def set_state(self, state):
2167 def set_state(self, state):
2162 self.repo_state = state
2168 self.repo_state = state
2163 Session().add(self)
2169 Session().add(self)
2164 #==========================================================================
2170 #==========================================================================
2165 # SCM PROPERTIES
2171 # SCM PROPERTIES
2166 #==========================================================================
2172 #==========================================================================
2167
2173
2168 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2174 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2169 return get_commit_safe(
2175 return get_commit_safe(
2170 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2176 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2171
2177
2172 def get_changeset(self, rev=None, pre_load=None):
2178 def get_changeset(self, rev=None, pre_load=None):
2173 warnings.warn("Use get_commit", DeprecationWarning)
2179 warnings.warn("Use get_commit", DeprecationWarning)
2174 commit_id = None
2180 commit_id = None
2175 commit_idx = None
2181 commit_idx = None
2176 if isinstance(rev, basestring):
2182 if isinstance(rev, basestring):
2177 commit_id = rev
2183 commit_id = rev
2178 else:
2184 else:
2179 commit_idx = rev
2185 commit_idx = rev
2180 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2186 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2181 pre_load=pre_load)
2187 pre_load=pre_load)
2182
2188
2183 def get_landing_commit(self):
2189 def get_landing_commit(self):
2184 """
2190 """
2185 Returns landing commit, or if that doesn't exist returns the tip
2191 Returns landing commit, or if that doesn't exist returns the tip
2186 """
2192 """
2187 _rev_type, _rev = self.landing_rev
2193 _rev_type, _rev = self.landing_rev
2188 commit = self.get_commit(_rev)
2194 commit = self.get_commit(_rev)
2189 if isinstance(commit, EmptyCommit):
2195 if isinstance(commit, EmptyCommit):
2190 return self.get_commit()
2196 return self.get_commit()
2191 return commit
2197 return commit
2192
2198
2193 def update_commit_cache(self, cs_cache=None, config=None):
2199 def update_commit_cache(self, cs_cache=None, config=None):
2194 """
2200 """
2195 Update cache of last changeset for repository, keys should be::
2201 Update cache of last changeset for repository, keys should be::
2196
2202
2197 short_id
2203 short_id
2198 raw_id
2204 raw_id
2199 revision
2205 revision
2200 parents
2206 parents
2201 message
2207 message
2202 date
2208 date
2203 author
2209 author
2204
2210
2205 :param cs_cache:
2211 :param cs_cache:
2206 """
2212 """
2207 from rhodecode.lib.vcs.backends.base import BaseChangeset
2213 from rhodecode.lib.vcs.backends.base import BaseChangeset
2208 if cs_cache is None:
2214 if cs_cache is None:
2209 # use no-cache version here
2215 # use no-cache version here
2210 scm_repo = self.scm_instance(cache=False, config=config)
2216 scm_repo = self.scm_instance(cache=False, config=config)
2211 if scm_repo:
2217 if scm_repo:
2212 cs_cache = scm_repo.get_commit(
2218 cs_cache = scm_repo.get_commit(
2213 pre_load=["author", "date", "message", "parents"])
2219 pre_load=["author", "date", "message", "parents"])
2214 else:
2220 else:
2215 cs_cache = EmptyCommit()
2221 cs_cache = EmptyCommit()
2216
2222
2217 if isinstance(cs_cache, BaseChangeset):
2223 if isinstance(cs_cache, BaseChangeset):
2218 cs_cache = cs_cache.__json__()
2224 cs_cache = cs_cache.__json__()
2219
2225
2220 def is_outdated(new_cs_cache):
2226 def is_outdated(new_cs_cache):
2221 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2227 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2222 new_cs_cache['revision'] != self.changeset_cache['revision']):
2228 new_cs_cache['revision'] != self.changeset_cache['revision']):
2223 return True
2229 return True
2224 return False
2230 return False
2225
2231
2226 # check if we have maybe already latest cached revision
2232 # check if we have maybe already latest cached revision
2227 if is_outdated(cs_cache) or not self.changeset_cache:
2233 if is_outdated(cs_cache) or not self.changeset_cache:
2228 _default = datetime.datetime.fromtimestamp(0)
2234 _default = datetime.datetime.fromtimestamp(0)
2229 last_change = cs_cache.get('date') or _default
2235 last_change = cs_cache.get('date') or _default
2230 log.debug('updated repo %s with new cs cache %s',
2236 log.debug('updated repo %s with new cs cache %s',
2231 self.repo_name, cs_cache)
2237 self.repo_name, cs_cache)
2232 self.updated_on = last_change
2238 self.updated_on = last_change
2233 self.changeset_cache = cs_cache
2239 self.changeset_cache = cs_cache
2234 Session().add(self)
2240 Session().add(self)
2235 Session().commit()
2241 Session().commit()
2236 else:
2242 else:
2237 log.debug('Skipping update_commit_cache for repo:`%s` '
2243 log.debug('Skipping update_commit_cache for repo:`%s` '
2238 'commit already with latest changes', self.repo_name)
2244 'commit already with latest changes', self.repo_name)
2239
2245
2240 @property
2246 @property
2241 def tip(self):
2247 def tip(self):
2242 return self.get_commit('tip')
2248 return self.get_commit('tip')
2243
2249
2244 @property
2250 @property
2245 def author(self):
2251 def author(self):
2246 return self.tip.author
2252 return self.tip.author
2247
2253
2248 @property
2254 @property
2249 def last_change(self):
2255 def last_change(self):
2250 return self.scm_instance().last_change
2256 return self.scm_instance().last_change
2251
2257
2252 def get_comments(self, revisions=None):
2258 def get_comments(self, revisions=None):
2253 """
2259 """
2254 Returns comments for this repository grouped by revisions
2260 Returns comments for this repository grouped by revisions
2255
2261
2256 :param revisions: filter query by revisions only
2262 :param revisions: filter query by revisions only
2257 """
2263 """
2258 cmts = ChangesetComment.query()\
2264 cmts = ChangesetComment.query()\
2259 .filter(ChangesetComment.repo == self)
2265 .filter(ChangesetComment.repo == self)
2260 if revisions:
2266 if revisions:
2261 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2267 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2262 grouped = collections.defaultdict(list)
2268 grouped = collections.defaultdict(list)
2263 for cmt in cmts.all():
2269 for cmt in cmts.all():
2264 grouped[cmt.revision].append(cmt)
2270 grouped[cmt.revision].append(cmt)
2265 return grouped
2271 return grouped
2266
2272
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict of revision -> [status, status label, pr id, pr repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses override the implicit under-review default above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2308
2314
2309 # ==========================================================================
2315 # ==========================================================================
2310 # SCM CACHE INSTANCE
2316 # SCM CACHE INSTANCE
2311 # ==========================================================================
2317 # ==========================================================================
2312
2318
2313 def scm_instance(self, **kwargs):
2319 def scm_instance(self, **kwargs):
2314 import rhodecode
2320 import rhodecode
2315
2321
2316 # Passing a config will not hit the cache currently only used
2322 # Passing a config will not hit the cache currently only used
2317 # for repo2dbmapper
2323 # for repo2dbmapper
2318 config = kwargs.pop('config', None)
2324 config = kwargs.pop('config', None)
2319 cache = kwargs.pop('cache', None)
2325 cache = kwargs.pop('cache', None)
2320 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2326 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2321 # if cache is NOT defined use default global, else we have a full
2327 # if cache is NOT defined use default global, else we have a full
2322 # control over cache behaviour
2328 # control over cache behaviour
2323 if cache is None and full_cache and not config:
2329 if cache is None and full_cache and not config:
2324 return self._get_instance_cached()
2330 return self._get_instance_cached()
2325 return self._get_instance(cache=bool(cache), config=config)
2331 return self._get_instance(cache=bool(cache), config=config)
2326
2332
    def _get_instance_cached(self):
        """
        Return the vcs instance through the long-term cache region,
        guarded by a repo-scoped invalidation context.
        """
        @cache_region('long_term')
        def _get_repo(cache_key):
            return self._get_instance()

        # thread_scoped keeps invalidation state per-thread
        invalidator_context = CacheKey.repo_context_cache(
            _get_repo, self.repo_name, None, thread_scoped=True)

        with invalidator_context as context:
            context.invalidate()
            repo = context.compute()

        return repo
2340
2346
2341 def _get_instance(self, cache=True, config=None):
2347 def _get_instance(self, cache=True, config=None):
2342 config = config or self._config
2348 config = config or self._config
2343 custom_wire = {
2349 custom_wire = {
2344 'cache': cache # controls the vcs.remote cache
2350 'cache': cache # controls the vcs.remote cache
2345 }
2351 }
2346 repo = get_vcs_instance(
2352 repo = get_vcs_instance(
2347 repo_path=safe_str(self.repo_full_path),
2353 repo_path=safe_str(self.repo_full_path),
2348 config=config,
2354 config=config,
2349 with_wire=custom_wire,
2355 with_wire=custom_wire,
2350 create=False,
2356 create=False,
2351 _vcs_alias=self.repo_type)
2357 _vcs_alias=self.repo_type)
2352
2358
2353 return repo
2359 return repo
2354
2360
2355 def __json__(self):
2361 def __json__(self):
2356 return {'landing_rev': self.landing_rev}
2362 return {'landing_rev': self.landing_rev}
2357
2363
2358 def get_dict(self):
2364 def get_dict(self):
2359
2365
2360 # Since we transformed `repo_name` to a hybrid property, we need to
2366 # Since we transformed `repo_name` to a hybrid property, we need to
2361 # keep compatibility with the code which uses `repo_name` field.
2367 # keep compatibility with the code which uses `repo_name` field.
2362
2368
2363 result = super(Repository, self).get_dict()
2369 result = super(Repository, self).get_dict()
2364 result['repo_name'] = result.pop('_repo_name', None)
2370 result['repo_name'] = result.pop('_repo_name', None)
2365 return result
2371 return result
2366
2372
2367
2373
class RepoGroup(Base, BaseModel):
    """
    A group (folder) of repositories. Groups nest via the
    ``group_parent_id`` self-reference; the full path of a group is kept
    denormalized in ``group_name``, joined with ``URL_SEP``.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def description_safe(self):
        # description is user-provided; escape for safe HTML rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def _generate_choice(cls, repo_group):
        """Build a single (group_id, literal-path) select2 choice."""
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return sorted (group_id, label) choices for select2 widgets.

        :param groups: iterable of RepoGroup; defaults to all groups
        :param show_empty_group: prepend the `-- No parent --` entry
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        """Separator used in the denormalized ``group_name`` path."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """
        Fetch a RepoGroup by its full path name.

        :param cache: use the short SQL cache region
        :param case_insensitive: compare names using ``lower()``
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """Return the personal repo group of `user_id`, or None for the
        default (anonymous) user."""
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user).scalar()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        List repo groups, optionally filtered by owner and/or parent group.
        ``Optional`` sentinels distinguish "not given" from an explicit None.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self):
        """Chain of ancestor groups, top-most first (bounded depth)."""
        parents_recursion_limit = 10
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error(('more than %s parents found for group %s, stopping '
                           'recursive parent fetching' % (parents_recursion_limit, self)))
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        """Query of direct child groups (not recursive)."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query of repositories directly inside this group."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Count of repositories in this group and all descendants."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        # depth-first walk collecting this group, all sub-groups and
        # (optionally) all repositories
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def permissions(self, with_admins=True, with_owner=True):
        """
        Return effective user permission rows for this group:
        super-admin rows, then the owner row, then direct permissions.
        """
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record;
                # guard against empty owner_row (with_owner=False) which
                # previously raised IndexError
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return user-group permission rows for this group, sorted."""
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data
2650
2656
2651
2657
class Permission(Base, BaseModel):
    """
    Catalog of every permission name known to the system, the default set
    granted to the special DEFAULT user, and the relative weight of each
    permission used when resolving conflicting grants.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # (permission_name, translatable human-readable description)
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2756
2762
2757 def __unicode__(self):
2763 def __unicode__(self):
2758 return u"<%s('%s:%s')>" % (
2764 return u"<%s('%s:%s')>" % (
2759 self.__class__.__name__, self.permission_id, self.permission_name
2765 self.__class__.__name__, self.permission_id, self.permission_name
2760 )
2766 )
2761
2767
2762 @classmethod
2768 @classmethod
2763 def get_by_key(cls, key):
2769 def get_by_key(cls, key):
2764 return cls.query().filter(cls.permission_name == key).scalar()
2770 return cls.query().filter(cls.permission_name == key).scalar()
2765
2771
    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        Direct per-user repository permissions for `user_id`.

        Returns a list of ``(UserRepoToPerm, Repository, Permission)``
        tuples, optionally narrowed to one repository via `repo_id`.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()
2775
2781
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions `user_id` receives via membership in
        *active* user groups.

        Returns a list of ``(UserGroupRepoToPerm, Repository, Permission)``
        tuples, optionally narrowed to one repository via `repo_id`.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()
2799
2805
    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """
        Direct per-user repository-group permissions for `user_id`.

        Returns a list of ``(UserRepoGroupToPerm, RepoGroup, Permission)``
        tuples, optionally narrowed to one group via `repo_group_id`.
        """
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()
2809
2815
    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repository-group permissions `user_id` receives via membership in
        *active* user groups.

        Returns a list of ``(UserGroupRepoGroupToPerm, RepoGroup,
        Permission)`` tuples, optionally narrowed via `repo_group_id`.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()
2835
2841
2836 @classmethod
2842 @classmethod
2837 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2843 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2838 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2844 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2839 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2845 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2840 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2846 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2841 .filter(UserUserGroupToPerm.user_id == user_id)
2847 .filter(UserUserGroupToPerm.user_id == user_id)
2842 if user_group_id:
2848 if user_group_id:
2843 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2849 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2844 return q.all()
2850 return q.all()
2845
2851
2846 @classmethod
2852 @classmethod
2847 def get_default_user_group_perms_from_user_group(
2853 def get_default_user_group_perms_from_user_group(
2848 cls, user_id, user_group_id=None):
2854 cls, user_id, user_group_id=None):
2849 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2855 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2850 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2856 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2851 .join(
2857 .join(
2852 Permission,
2858 Permission,
2853 UserGroupUserGroupToPerm.permission_id ==
2859 UserGroupUserGroupToPerm.permission_id ==
2854 Permission.permission_id)\
2860 Permission.permission_id)\
2855 .join(
2861 .join(
2856 TargetUserGroup,
2862 TargetUserGroup,
2857 UserGroupUserGroupToPerm.target_user_group_id ==
2863 UserGroupUserGroupToPerm.target_user_group_id ==
2858 TargetUserGroup.users_group_id)\
2864 TargetUserGroup.users_group_id)\
2859 .join(
2865 .join(
2860 UserGroup,
2866 UserGroup,
2861 UserGroupUserGroupToPerm.user_group_id ==
2867 UserGroupUserGroupToPerm.user_group_id ==
2862 UserGroup.users_group_id)\
2868 UserGroup.users_group_id)\
2863 .join(
2869 .join(
2864 UserGroupMember,
2870 UserGroupMember,
2865 UserGroupUserGroupToPerm.user_group_id ==
2871 UserGroupUserGroupToPerm.user_group_id ==
2866 UserGroupMember.users_group_id)\
2872 UserGroupMember.users_group_id)\
2867 .filter(
2873 .filter(
2868 UserGroupMember.user_id == user_id,
2874 UserGroupMember.user_id == user_id,
2869 UserGroup.users_group_active == true())
2875 UserGroup.users_group_active == true())
2870 if user_group_id:
2876 if user_group_id:
2871 q = q.filter(
2877 q = q.filter(
2872 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2878 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2873
2879
2874 return q.all()
2880 return q.all()
2875
2881
2876
2882
2877 class UserRepoToPerm(Base, BaseModel):
2883 class UserRepoToPerm(Base, BaseModel):
2878 __tablename__ = 'repo_to_perm'
2884 __tablename__ = 'repo_to_perm'
2879 __table_args__ = (
2885 __table_args__ = (
2880 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2886 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2881 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2887 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2882 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2888 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2883 )
2889 )
2884 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2890 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2885 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2891 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2886 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2892 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2887 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2893 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2888
2894
2889 user = relationship('User')
2895 user = relationship('User')
2890 repository = relationship('Repository')
2896 repository = relationship('Repository')
2891 permission = relationship('Permission')
2897 permission = relationship('Permission')
2892
2898
2893 @classmethod
2899 @classmethod
2894 def create(cls, user, repository, permission):
2900 def create(cls, user, repository, permission):
2895 n = cls()
2901 n = cls()
2896 n.user = user
2902 n.user = user
2897 n.repository = repository
2903 n.repository = repository
2898 n.permission = permission
2904 n.permission = permission
2899 Session().add(n)
2905 Session().add(n)
2900 return n
2906 return n
2901
2907
2902 def __unicode__(self):
2908 def __unicode__(self):
2903 return u'<%s => %s >' % (self.user, self.repository)
2909 return u'<%s => %s >' % (self.user, self.repository)
2904
2910
2905
2911
2906 class UserUserGroupToPerm(Base, BaseModel):
2912 class UserUserGroupToPerm(Base, BaseModel):
2907 __tablename__ = 'user_user_group_to_perm'
2913 __tablename__ = 'user_user_group_to_perm'
2908 __table_args__ = (
2914 __table_args__ = (
2909 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2915 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2910 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2916 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2911 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2917 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2912 )
2918 )
2913 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2919 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2914 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2920 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2915 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2921 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2916 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2922 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2917
2923
2918 user = relationship('User')
2924 user = relationship('User')
2919 user_group = relationship('UserGroup')
2925 user_group = relationship('UserGroup')
2920 permission = relationship('Permission')
2926 permission = relationship('Permission')
2921
2927
2922 @classmethod
2928 @classmethod
2923 def create(cls, user, user_group, permission):
2929 def create(cls, user, user_group, permission):
2924 n = cls()
2930 n = cls()
2925 n.user = user
2931 n.user = user
2926 n.user_group = user_group
2932 n.user_group = user_group
2927 n.permission = permission
2933 n.permission = permission
2928 Session().add(n)
2934 Session().add(n)
2929 return n
2935 return n
2930
2936
2931 def __unicode__(self):
2937 def __unicode__(self):
2932 return u'<%s => %s >' % (self.user, self.user_group)
2938 return u'<%s => %s >' % (self.user, self.user_group)
2933
2939
2934
2940
2935 class UserToPerm(Base, BaseModel):
2941 class UserToPerm(Base, BaseModel):
2936 __tablename__ = 'user_to_perm'
2942 __tablename__ = 'user_to_perm'
2937 __table_args__ = (
2943 __table_args__ = (
2938 UniqueConstraint('user_id', 'permission_id'),
2944 UniqueConstraint('user_id', 'permission_id'),
2939 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2945 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2940 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2946 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2941 )
2947 )
2942 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2948 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2943 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2949 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2944 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2950 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2945
2951
2946 user = relationship('User')
2952 user = relationship('User')
2947 permission = relationship('Permission', lazy='joined')
2953 permission = relationship('Permission', lazy='joined')
2948
2954
2949 def __unicode__(self):
2955 def __unicode__(self):
2950 return u'<%s => %s >' % (self.user, self.permission)
2956 return u'<%s => %s >' % (self.user, self.permission)
2951
2957
2952
2958
2953 class UserGroupRepoToPerm(Base, BaseModel):
2959 class UserGroupRepoToPerm(Base, BaseModel):
2954 __tablename__ = 'users_group_repo_to_perm'
2960 __tablename__ = 'users_group_repo_to_perm'
2955 __table_args__ = (
2961 __table_args__ = (
2956 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2962 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2957 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2958 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2959 )
2965 )
2960 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2966 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2961 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2967 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2962 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2968 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2963 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2969 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2964
2970
2965 users_group = relationship('UserGroup')
2971 users_group = relationship('UserGroup')
2966 permission = relationship('Permission')
2972 permission = relationship('Permission')
2967 repository = relationship('Repository')
2973 repository = relationship('Repository')
2968
2974
2969 @classmethod
2975 @classmethod
2970 def create(cls, users_group, repository, permission):
2976 def create(cls, users_group, repository, permission):
2971 n = cls()
2977 n = cls()
2972 n.users_group = users_group
2978 n.users_group = users_group
2973 n.repository = repository
2979 n.repository = repository
2974 n.permission = permission
2980 n.permission = permission
2975 Session().add(n)
2981 Session().add(n)
2976 return n
2982 return n
2977
2983
2978 def __unicode__(self):
2984 def __unicode__(self):
2979 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2985 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2980
2986
2981
2987
2982 class UserGroupUserGroupToPerm(Base, BaseModel):
2988 class UserGroupUserGroupToPerm(Base, BaseModel):
2983 __tablename__ = 'user_group_user_group_to_perm'
2989 __tablename__ = 'user_group_user_group_to_perm'
2984 __table_args__ = (
2990 __table_args__ = (
2985 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2991 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2986 CheckConstraint('target_user_group_id != user_group_id'),
2992 CheckConstraint('target_user_group_id != user_group_id'),
2987 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2988 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2989 )
2995 )
2990 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2996 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2991 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2997 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2992 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2998 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2993 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2999 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2994
3000
2995 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3001 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2996 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3002 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2997 permission = relationship('Permission')
3003 permission = relationship('Permission')
2998
3004
2999 @classmethod
3005 @classmethod
3000 def create(cls, target_user_group, user_group, permission):
3006 def create(cls, target_user_group, user_group, permission):
3001 n = cls()
3007 n = cls()
3002 n.target_user_group = target_user_group
3008 n.target_user_group = target_user_group
3003 n.user_group = user_group
3009 n.user_group = user_group
3004 n.permission = permission
3010 n.permission = permission
3005 Session().add(n)
3011 Session().add(n)
3006 return n
3012 return n
3007
3013
3008 def __unicode__(self):
3014 def __unicode__(self):
3009 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3015 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3010
3016
3011
3017
3012 class UserGroupToPerm(Base, BaseModel):
3018 class UserGroupToPerm(Base, BaseModel):
3013 __tablename__ = 'users_group_to_perm'
3019 __tablename__ = 'users_group_to_perm'
3014 __table_args__ = (
3020 __table_args__ = (
3015 UniqueConstraint('users_group_id', 'permission_id',),
3021 UniqueConstraint('users_group_id', 'permission_id',),
3016 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3022 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3017 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3023 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3018 )
3024 )
3019 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3025 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3020 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3026 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3021 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3027 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3022
3028
3023 users_group = relationship('UserGroup')
3029 users_group = relationship('UserGroup')
3024 permission = relationship('Permission')
3030 permission = relationship('Permission')
3025
3031
3026
3032
3027 class UserRepoGroupToPerm(Base, BaseModel):
3033 class UserRepoGroupToPerm(Base, BaseModel):
3028 __tablename__ = 'user_repo_group_to_perm'
3034 __tablename__ = 'user_repo_group_to_perm'
3029 __table_args__ = (
3035 __table_args__ = (
3030 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3036 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3031 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3032 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3033 )
3039 )
3034
3040
3035 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3041 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3036 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3042 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3037 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3043 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3038 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3044 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3039
3045
3040 user = relationship('User')
3046 user = relationship('User')
3041 group = relationship('RepoGroup')
3047 group = relationship('RepoGroup')
3042 permission = relationship('Permission')
3048 permission = relationship('Permission')
3043
3049
3044 @classmethod
3050 @classmethod
3045 def create(cls, user, repository_group, permission):
3051 def create(cls, user, repository_group, permission):
3046 n = cls()
3052 n = cls()
3047 n.user = user
3053 n.user = user
3048 n.group = repository_group
3054 n.group = repository_group
3049 n.permission = permission
3055 n.permission = permission
3050 Session().add(n)
3056 Session().add(n)
3051 return n
3057 return n
3052
3058
3053
3059
3054 class UserGroupRepoGroupToPerm(Base, BaseModel):
3060 class UserGroupRepoGroupToPerm(Base, BaseModel):
3055 __tablename__ = 'users_group_repo_group_to_perm'
3061 __tablename__ = 'users_group_repo_group_to_perm'
3056 __table_args__ = (
3062 __table_args__ = (
3057 UniqueConstraint('users_group_id', 'group_id'),
3063 UniqueConstraint('users_group_id', 'group_id'),
3058 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3064 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3059 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3065 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3060 )
3066 )
3061
3067
3062 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3068 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3063 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3069 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3064 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3070 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3065 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3071 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3066
3072
3067 users_group = relationship('UserGroup')
3073 users_group = relationship('UserGroup')
3068 permission = relationship('Permission')
3074 permission = relationship('Permission')
3069 group = relationship('RepoGroup')
3075 group = relationship('RepoGroup')
3070
3076
3071 @classmethod
3077 @classmethod
3072 def create(cls, user_group, repository_group, permission):
3078 def create(cls, user_group, repository_group, permission):
3073 n = cls()
3079 n = cls()
3074 n.users_group = user_group
3080 n.users_group = user_group
3075 n.group = repository_group
3081 n.group = repository_group
3076 n.permission = permission
3082 n.permission = permission
3077 Session().add(n)
3083 Session().add(n)
3078 return n
3084 return n
3079
3085
3080 def __unicode__(self):
3086 def __unicode__(self):
3081 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3087 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3082
3088
3083
3089
3084 class Statistics(Base, BaseModel):
3090 class Statistics(Base, BaseModel):
3085 __tablename__ = 'statistics'
3091 __tablename__ = 'statistics'
3086 __table_args__ = (
3092 __table_args__ = (
3087 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3093 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3088 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3094 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3089 )
3095 )
3090 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3096 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3091 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3097 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3092 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3098 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3093 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3099 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3094 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3100 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3095 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3101 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3096
3102
3097 repository = relationship('Repository', single_parent=True)
3103 repository = relationship('Repository', single_parent=True)
3098
3104
3099
3105
3100 class UserFollowing(Base, BaseModel):
3106 class UserFollowing(Base, BaseModel):
3101 __tablename__ = 'user_followings'
3107 __tablename__ = 'user_followings'
3102 __table_args__ = (
3108 __table_args__ = (
3103 UniqueConstraint('user_id', 'follows_repository_id'),
3109 UniqueConstraint('user_id', 'follows_repository_id'),
3104 UniqueConstraint('user_id', 'follows_user_id'),
3110 UniqueConstraint('user_id', 'follows_user_id'),
3105 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3111 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3106 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3112 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3107 )
3113 )
3108
3114
3109 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3115 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3110 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3116 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3111 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3117 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3112 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3118 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3113 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3119 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3114
3120
3115 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3121 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3116
3122
3117 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3123 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3118 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3124 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3119
3125
3120 @classmethod
3126 @classmethod
3121 def get_repo_followers(cls, repo_id):
3127 def get_repo_followers(cls, repo_id):
3122 return cls.query().filter(cls.follows_repo_id == repo_id)
3128 return cls.query().filter(cls.follows_repo_id == repo_id)
3123
3129
3124
3130
3125 class CacheKey(Base, BaseModel):
3131 class CacheKey(Base, BaseModel):
3126 __tablename__ = 'cache_invalidation'
3132 __tablename__ = 'cache_invalidation'
3127 __table_args__ = (
3133 __table_args__ = (
3128 UniqueConstraint('cache_key'),
3134 UniqueConstraint('cache_key'),
3129 Index('key_idx', 'cache_key'),
3135 Index('key_idx', 'cache_key'),
3130 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3136 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3131 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3137 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3132 )
3138 )
3133 CACHE_TYPE_ATOM = 'ATOM'
3139 CACHE_TYPE_ATOM = 'ATOM'
3134 CACHE_TYPE_RSS = 'RSS'
3140 CACHE_TYPE_RSS = 'RSS'
3135 CACHE_TYPE_README = 'README'
3141 CACHE_TYPE_README = 'README'
3136
3142
3137 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3143 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3138 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3144 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3139 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3145 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3140 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3146 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3141
3147
3142 def __init__(self, cache_key, cache_args=''):
3148 def __init__(self, cache_key, cache_args=''):
3143 self.cache_key = cache_key
3149 self.cache_key = cache_key
3144 self.cache_args = cache_args
3150 self.cache_args = cache_args
3145 self.cache_active = False
3151 self.cache_active = False
3146
3152
3147 def __unicode__(self):
3153 def __unicode__(self):
3148 return u"<%s('%s:%s[%s]')>" % (
3154 return u"<%s('%s:%s[%s]')>" % (
3149 self.__class__.__name__,
3155 self.__class__.__name__,
3150 self.cache_id, self.cache_key, self.cache_active)
3156 self.cache_id, self.cache_key, self.cache_active)
3151
3157
3152 def _cache_key_partition(self):
3158 def _cache_key_partition(self):
3153 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3159 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3154 return prefix, repo_name, suffix
3160 return prefix, repo_name, suffix
3155
3161
3156 def get_prefix(self):
3162 def get_prefix(self):
3157 """
3163 """
3158 Try to extract prefix from existing cache key. The key could consist
3164 Try to extract prefix from existing cache key. The key could consist
3159 of prefix, repo_name, suffix
3165 of prefix, repo_name, suffix
3160 """
3166 """
3161 # this returns prefix, repo_name, suffix
3167 # this returns prefix, repo_name, suffix
3162 return self._cache_key_partition()[0]
3168 return self._cache_key_partition()[0]
3163
3169
3164 def get_suffix(self):
3170 def get_suffix(self):
3165 """
3171 """
3166 get suffix that might have been used in _get_cache_key to
3172 get suffix that might have been used in _get_cache_key to
3167 generate self.cache_key. Only used for informational purposes
3173 generate self.cache_key. Only used for informational purposes
3168 in repo_edit.mako.
3174 in repo_edit.mako.
3169 """
3175 """
3170 # prefix, repo_name, suffix
3176 # prefix, repo_name, suffix
3171 return self._cache_key_partition()[2]
3177 return self._cache_key_partition()[2]
3172
3178
3173 @classmethod
3179 @classmethod
3174 def delete_all_cache(cls):
3180 def delete_all_cache(cls):
3175 """
3181 """
3176 Delete all cache keys from database.
3182 Delete all cache keys from database.
3177 Should only be run when all instances are down and all entries
3183 Should only be run when all instances are down and all entries
3178 thus stale.
3184 thus stale.
3179 """
3185 """
3180 cls.query().delete()
3186 cls.query().delete()
3181 Session().commit()
3187 Session().commit()
3182
3188
3183 @classmethod
3189 @classmethod
3184 def get_cache_key(cls, repo_name, cache_type):
3190 def get_cache_key(cls, repo_name, cache_type):
3185 """
3191 """
3186
3192
3187 Generate a cache key for this process of RhodeCode instance.
3193 Generate a cache key for this process of RhodeCode instance.
3188 Prefix most likely will be process id or maybe explicitly set
3194 Prefix most likely will be process id or maybe explicitly set
3189 instance_id from .ini file.
3195 instance_id from .ini file.
3190 """
3196 """
3191 import rhodecode
3197 import rhodecode
3192 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3198 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3193
3199
3194 repo_as_unicode = safe_unicode(repo_name)
3200 repo_as_unicode = safe_unicode(repo_name)
3195 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3201 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3196 if cache_type else repo_as_unicode
3202 if cache_type else repo_as_unicode
3197
3203
3198 return u'{}{}'.format(prefix, key)
3204 return u'{}{}'.format(prefix, key)
3199
3205
3200 @classmethod
3206 @classmethod
3201 def set_invalidate(cls, repo_name, delete=False):
3207 def set_invalidate(cls, repo_name, delete=False):
3202 """
3208 """
3203 Mark all caches of a repo as invalid in the database.
3209 Mark all caches of a repo as invalid in the database.
3204 """
3210 """
3205
3211
3206 try:
3212 try:
3207 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3213 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3208 if delete:
3214 if delete:
3209 log.debug('cache objects deleted for repo %s',
3215 log.debug('cache objects deleted for repo %s',
3210 safe_str(repo_name))
3216 safe_str(repo_name))
3211 qry.delete()
3217 qry.delete()
3212 else:
3218 else:
3213 log.debug('cache objects marked as invalid for repo %s',
3219 log.debug('cache objects marked as invalid for repo %s',
3214 safe_str(repo_name))
3220 safe_str(repo_name))
3215 qry.update({"cache_active": False})
3221 qry.update({"cache_active": False})
3216
3222
3217 Session().commit()
3223 Session().commit()
3218 except Exception:
3224 except Exception:
3219 log.exception(
3225 log.exception(
3220 'Cache key invalidation failed for repository %s',
3226 'Cache key invalidation failed for repository %s',
3221 safe_str(repo_name))
3227 safe_str(repo_name))
3222 Session().rollback()
3228 Session().rollback()
3223
3229
3224 @classmethod
3230 @classmethod
3225 def get_active_cache(cls, cache_key):
3231 def get_active_cache(cls, cache_key):
3226 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3232 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3227 if inv_obj:
3233 if inv_obj:
3228 return inv_obj
3234 return inv_obj
3229 return None
3235 return None
3230
3236
3231 @classmethod
3237 @classmethod
3232 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3238 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3233 thread_scoped=False):
3239 thread_scoped=False):
3234 """
3240 """
3235 @cache_region('long_term')
3241 @cache_region('long_term')
3236 def _heavy_calculation(cache_key):
3242 def _heavy_calculation(cache_key):
3237 return 'result'
3243 return 'result'
3238
3244
3239 cache_context = CacheKey.repo_context_cache(
3245 cache_context = CacheKey.repo_context_cache(
3240 _heavy_calculation, repo_name, cache_type)
3246 _heavy_calculation, repo_name, cache_type)
3241
3247
3242 with cache_context as context:
3248 with cache_context as context:
3243 context.invalidate()
3249 context.invalidate()
3244 computed = context.compute()
3250 computed = context.compute()
3245
3251
3246 assert computed == 'result'
3252 assert computed == 'result'
3247 """
3253 """
3248 from rhodecode.lib import caches
3254 from rhodecode.lib import caches
3249 return caches.InvalidationContext(
3255 return caches.InvalidationContext(
3250 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3256 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3251
3257
3252
3258
3253 class ChangesetComment(Base, BaseModel):
3259 class ChangesetComment(Base, BaseModel):
3254 __tablename__ = 'changeset_comments'
3260 __tablename__ = 'changeset_comments'
3255 __table_args__ = (
3261 __table_args__ = (
3256 Index('cc_revision_idx', 'revision'),
3262 Index('cc_revision_idx', 'revision'),
3257 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3263 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3258 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3264 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3259 )
3265 )
3260
3266
3261 COMMENT_OUTDATED = u'comment_outdated'
3267 COMMENT_OUTDATED = u'comment_outdated'
3262 COMMENT_TYPE_NOTE = u'note'
3268 COMMENT_TYPE_NOTE = u'note'
3263 COMMENT_TYPE_TODO = u'todo'
3269 COMMENT_TYPE_TODO = u'todo'
3264 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3270 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3265
3271
3266 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3272 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3267 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3273 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3268 revision = Column('revision', String(40), nullable=True)
3274 revision = Column('revision', String(40), nullable=True)
3269 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3275 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3270 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3276 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3271 line_no = Column('line_no', Unicode(10), nullable=True)
3277 line_no = Column('line_no', Unicode(10), nullable=True)
3272 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3278 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3273 f_path = Column('f_path', Unicode(1000), nullable=True)
3279 f_path = Column('f_path', Unicode(1000), nullable=True)
3274 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3280 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3275 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3281 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3276 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3282 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3277 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3283 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3278 renderer = Column('renderer', Unicode(64), nullable=True)
3284 renderer = Column('renderer', Unicode(64), nullable=True)
3279 display_state = Column('display_state', Unicode(128), nullable=True)
3285 display_state = Column('display_state', Unicode(128), nullable=True)
3280
3286
3281 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3287 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3282 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3288 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3283 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3289 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3284 author = relationship('User', lazy='joined')
3290 author = relationship('User', lazy='joined')
3285 repo = relationship('Repository')
3291 repo = relationship('Repository')
3286 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3292 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3287 pull_request = relationship('PullRequest', lazy='joined')
3293 pull_request = relationship('PullRequest', lazy='joined')
3288 pull_request_version = relationship('PullRequestVersion')
3294 pull_request_version = relationship('PullRequestVersion')
3289
3295
3290 @classmethod
3296 @classmethod
3291 def get_users(cls, revision=None, pull_request_id=None):
3297 def get_users(cls, revision=None, pull_request_id=None):
3292 """
3298 """
3293 Returns user associated with this ChangesetComment. ie those
3299 Returns user associated with this ChangesetComment. ie those
3294 who actually commented
3300 who actually commented
3295
3301
3296 :param cls:
3302 :param cls:
3297 :param revision:
3303 :param revision:
3298 """
3304 """
3299 q = Session().query(User)\
3305 q = Session().query(User)\
3300 .join(ChangesetComment.author)
3306 .join(ChangesetComment.author)
3301 if revision:
3307 if revision:
3302 q = q.filter(cls.revision == revision)
3308 q = q.filter(cls.revision == revision)
3303 elif pull_request_id:
3309 elif pull_request_id:
3304 q = q.filter(cls.pull_request_id == pull_request_id)
3310 q = q.filter(cls.pull_request_id == pull_request_id)
3305 return q.all()
3311 return q.all()
3306
3312
3307 @classmethod
3313 @classmethod
3308 def get_index_from_version(cls, pr_version, versions):
3314 def get_index_from_version(cls, pr_version, versions):
3309 num_versions = [x.pull_request_version_id for x in versions]
3315 num_versions = [x.pull_request_version_id for x in versions]
3310 try:
3316 try:
3311 return num_versions.index(pr_version) +1
3317 return num_versions.index(pr_version) +1
3312 except (IndexError, ValueError):
3318 except (IndexError, ValueError):
3313 return
3319 return
3314
3320
3315 @property
3321 @property
3316 def outdated(self):
3322 def outdated(self):
3317 return self.display_state == self.COMMENT_OUTDATED
3323 return self.display_state == self.COMMENT_OUTDATED
3318
3324
3319 def outdated_at_version(self, version):
3325 def outdated_at_version(self, version):
3320 """
3326 """
3321 Checks if comment is outdated for given pull request version
3327 Checks if comment is outdated for given pull request version
3322 """
3328 """
3323 return self.outdated and self.pull_request_version_id != version
3329 return self.outdated and self.pull_request_version_id != version
3324
3330
3325 def older_than_version(self, version):
3331 def older_than_version(self, version):
3326 """
3332 """
3327 Checks if comment is made from previous version than given
3333 Checks if comment is made from previous version than given
3328 """
3334 """
3329 if version is None:
3335 if version is None:
3330 return self.pull_request_version_id is not None
3336 return self.pull_request_version_id is not None
3331
3337
3332 return self.pull_request_version_id < version
3338 return self.pull_request_version_id < version
3333
3339
3334 @property
3340 @property
3335 def resolved(self):
3341 def resolved(self):
3336 return self.resolved_by[0] if self.resolved_by else None
3342 return self.resolved_by[0] if self.resolved_by else None
3337
3343
3338 @property
3344 @property
3339 def is_todo(self):
3345 def is_todo(self):
3340 return self.comment_type == self.COMMENT_TYPE_TODO
3346 return self.comment_type == self.COMMENT_TYPE_TODO
3341
3347
3342 @property
3348 @property
3343 def is_inline(self):
3349 def is_inline(self):
3344 return self.line_no and self.f_path
3350 return self.line_no and self.f_path
3345
3351
3346 def get_index_version(self, versions):
3352 def get_index_version(self, versions):
3347 return self.get_index_from_version(
3353 return self.get_index_from_version(
3348 self.pull_request_version_id, versions)
3354 self.pull_request_version_id, versions)
3349
3355
3350 def __repr__(self):
3356 def __repr__(self):
3351 if self.comment_id:
3357 if self.comment_id:
3352 return '<DB:Comment #%s>' % self.comment_id
3358 return '<DB:Comment #%s>' % self.comment_id
3353 else:
3359 else:
3354 return '<DB:Comment at %#x>' % id(self)
3360 return '<DB:Comment at %#x>' % id(self)
3355
3361
3356 def get_api_data(self):
3362 def get_api_data(self):
3357 comment = self
3363 comment = self
3358 data = {
3364 data = {
3359 'comment_id': comment.comment_id,
3365 'comment_id': comment.comment_id,
3360 'comment_type': comment.comment_type,
3366 'comment_type': comment.comment_type,
3361 'comment_text': comment.text,
3367 'comment_text': comment.text,
3362 'comment_status': comment.status_change,
3368 'comment_status': comment.status_change,
3363 'comment_f_path': comment.f_path,
3369 'comment_f_path': comment.f_path,
3364 'comment_lineno': comment.line_no,
3370 'comment_lineno': comment.line_no,
3365 'comment_author': comment.author,
3371 'comment_author': comment.author,
3366 'comment_created_on': comment.created_on
3372 'comment_created_on': comment.created_on
3367 }
3373 }
3368 return data
3374 return data
3369
3375
3370 def __json__(self):
3376 def __json__(self):
3371 data = dict()
3377 data = dict()
3372 data.update(self.get_api_data())
3378 data.update(self.get_api_data())
3373 return data
3379 return data
3374
3380
3375
3381
3376 class ChangesetStatus(Base, BaseModel):
3382 class ChangesetStatus(Base, BaseModel):
3377 __tablename__ = 'changeset_statuses'
3383 __tablename__ = 'changeset_statuses'
3378 __table_args__ = (
3384 __table_args__ = (
3379 Index('cs_revision_idx', 'revision'),
3385 Index('cs_revision_idx', 'revision'),
3380 Index('cs_version_idx', 'version'),
3386 Index('cs_version_idx', 'version'),
3381 UniqueConstraint('repo_id', 'revision', 'version'),
3387 UniqueConstraint('repo_id', 'revision', 'version'),
3382 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3388 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3383 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3389 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3384 )
3390 )
3385 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3391 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3386 STATUS_APPROVED = 'approved'
3392 STATUS_APPROVED = 'approved'
3387 STATUS_REJECTED = 'rejected'
3393 STATUS_REJECTED = 'rejected'
3388 STATUS_UNDER_REVIEW = 'under_review'
3394 STATUS_UNDER_REVIEW = 'under_review'
3389
3395
3390 STATUSES = [
3396 STATUSES = [
3391 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3397 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3392 (STATUS_APPROVED, _("Approved")),
3398 (STATUS_APPROVED, _("Approved")),
3393 (STATUS_REJECTED, _("Rejected")),
3399 (STATUS_REJECTED, _("Rejected")),
3394 (STATUS_UNDER_REVIEW, _("Under Review")),
3400 (STATUS_UNDER_REVIEW, _("Under Review")),
3395 ]
3401 ]
3396
3402
3397 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3403 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3398 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3404 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3399 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3405 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3400 revision = Column('revision', String(40), nullable=False)
3406 revision = Column('revision', String(40), nullable=False)
3401 status = Column('status', String(128), nullable=False, default=DEFAULT)
3407 status = Column('status', String(128), nullable=False, default=DEFAULT)
3402 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3408 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3403 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3409 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3404 version = Column('version', Integer(), nullable=False, default=0)
3410 version = Column('version', Integer(), nullable=False, default=0)
3405 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3411 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3406
3412
3407 author = relationship('User', lazy='joined')
3413 author = relationship('User', lazy='joined')
3408 repo = relationship('Repository')
3414 repo = relationship('Repository')
3409 comment = relationship('ChangesetComment', lazy='joined')
3415 comment = relationship('ChangesetComment', lazy='joined')
3410 pull_request = relationship('PullRequest', lazy='joined')
3416 pull_request = relationship('PullRequest', lazy='joined')
3411
3417
3412 def __unicode__(self):
3418 def __unicode__(self):
3413 return u"<%s('%s[v%s]:%s')>" % (
3419 return u"<%s('%s[v%s]:%s')>" % (
3414 self.__class__.__name__,
3420 self.__class__.__name__,
3415 self.status, self.version, self.author
3421 self.status, self.version, self.author
3416 )
3422 )
3417
3423
3418 @classmethod
3424 @classmethod
3419 def get_status_lbl(cls, value):
3425 def get_status_lbl(cls, value):
3420 return dict(cls.STATUSES).get(value)
3426 return dict(cls.STATUSES).get(value)
3421
3427
3422 @property
3428 @property
3423 def status_lbl(self):
3429 def status_lbl(self):
3424 return ChangesetStatus.get_status_lbl(self.status)
3430 return ChangesetStatus.get_status_lbl(self.status)
3425
3431
3426 def get_api_data(self):
3432 def get_api_data(self):
3427 status = self
3433 status = self
3428 data = {
3434 data = {
3429 'status_id': status.changeset_status_id,
3435 'status_id': status.changeset_status_id,
3430 'status': status.status,
3436 'status': status.status,
3431 }
3437 }
3432 return data
3438 return data
3433
3439
3434 def __json__(self):
3440 def __json__(self):
3435 data = dict()
3441 data = dict()
3436 data.update(self.get_api_data())
3442 data.update(self.get_api_data())
3437 return data
3443 return data
3438
3444
3439
3445
3440 class _PullRequestBase(BaseModel):
3446 class _PullRequestBase(BaseModel):
3441 """
3447 """
3442 Common attributes of pull request and version entries.
3448 Common attributes of pull request and version entries.
3443 """
3449 """
3444
3450
3445 # .status values
3451 # .status values
3446 STATUS_NEW = u'new'
3452 STATUS_NEW = u'new'
3447 STATUS_OPEN = u'open'
3453 STATUS_OPEN = u'open'
3448 STATUS_CLOSED = u'closed'
3454 STATUS_CLOSED = u'closed'
3449
3455
3450 title = Column('title', Unicode(255), nullable=True)
3456 title = Column('title', Unicode(255), nullable=True)
3451 description = Column(
3457 description = Column(
3452 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3458 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3453 nullable=True)
3459 nullable=True)
3454 # new/open/closed status of pull request (not approve/reject/etc)
3460 # new/open/closed status of pull request (not approve/reject/etc)
3455 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3461 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3456 created_on = Column(
3462 created_on = Column(
3457 'created_on', DateTime(timezone=False), nullable=False,
3463 'created_on', DateTime(timezone=False), nullable=False,
3458 default=datetime.datetime.now)
3464 default=datetime.datetime.now)
3459 updated_on = Column(
3465 updated_on = Column(
3460 'updated_on', DateTime(timezone=False), nullable=False,
3466 'updated_on', DateTime(timezone=False), nullable=False,
3461 default=datetime.datetime.now)
3467 default=datetime.datetime.now)
3462
3468
3463 @declared_attr
3469 @declared_attr
3464 def user_id(cls):
3470 def user_id(cls):
3465 return Column(
3471 return Column(
3466 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3472 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3467 unique=None)
3473 unique=None)
3468
3474
3469 # 500 revisions max
3475 # 500 revisions max
3470 _revisions = Column(
3476 _revisions = Column(
3471 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3477 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3472
3478
3473 @declared_attr
3479 @declared_attr
3474 def source_repo_id(cls):
3480 def source_repo_id(cls):
3475 # TODO: dan: rename column to source_repo_id
3481 # TODO: dan: rename column to source_repo_id
3476 return Column(
3482 return Column(
3477 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3483 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3478 nullable=False)
3484 nullable=False)
3479
3485
3480 source_ref = Column('org_ref', Unicode(255), nullable=False)
3486 source_ref = Column('org_ref', Unicode(255), nullable=False)
3481
3487
3482 @declared_attr
3488 @declared_attr
3483 def target_repo_id(cls):
3489 def target_repo_id(cls):
3484 # TODO: dan: rename column to target_repo_id
3490 # TODO: dan: rename column to target_repo_id
3485 return Column(
3491 return Column(
3486 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3492 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3487 nullable=False)
3493 nullable=False)
3488
3494
3489 target_ref = Column('other_ref', Unicode(255), nullable=False)
3495 target_ref = Column('other_ref', Unicode(255), nullable=False)
3490 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3496 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3491
3497
3492 # TODO: dan: rename column to last_merge_source_rev
3498 # TODO: dan: rename column to last_merge_source_rev
3493 _last_merge_source_rev = Column(
3499 _last_merge_source_rev = Column(
3494 'last_merge_org_rev', String(40), nullable=True)
3500 'last_merge_org_rev', String(40), nullable=True)
3495 # TODO: dan: rename column to last_merge_target_rev
3501 # TODO: dan: rename column to last_merge_target_rev
3496 _last_merge_target_rev = Column(
3502 _last_merge_target_rev = Column(
3497 'last_merge_other_rev', String(40), nullable=True)
3503 'last_merge_other_rev', String(40), nullable=True)
3498 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3504 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3499 merge_rev = Column('merge_rev', String(40), nullable=True)
3505 merge_rev = Column('merge_rev', String(40), nullable=True)
3500
3506
3501 reviewer_data = Column(
3507 reviewer_data = Column(
3502 'reviewer_data_json', MutationObj.as_mutable(
3508 'reviewer_data_json', MutationObj.as_mutable(
3503 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3509 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3504
3510
3505 @property
3511 @property
3506 def reviewer_data_json(self):
3512 def reviewer_data_json(self):
3507 return json.dumps(self.reviewer_data)
3513 return json.dumps(self.reviewer_data)
3508
3514
3509 @hybrid_property
3515 @hybrid_property
3510 def description_safe(self):
3516 def description_safe(self):
3511 from rhodecode.lib import helpers as h
3517 from rhodecode.lib import helpers as h
3512 return h.escape(self.description)
3518 return h.escape(self.description)
3513
3519
3514 @hybrid_property
3520 @hybrid_property
3515 def revisions(self):
3521 def revisions(self):
3516 return self._revisions.split(':') if self._revisions else []
3522 return self._revisions.split(':') if self._revisions else []
3517
3523
3518 @revisions.setter
3524 @revisions.setter
3519 def revisions(self, val):
3525 def revisions(self, val):
3520 self._revisions = ':'.join(val)
3526 self._revisions = ':'.join(val)
3521
3527
3522 @hybrid_property
3528 @hybrid_property
3523 def last_merge_status(self):
3529 def last_merge_status(self):
3524 return safe_int(self._last_merge_status)
3530 return safe_int(self._last_merge_status)
3525
3531
3526 @last_merge_status.setter
3532 @last_merge_status.setter
3527 def last_merge_status(self, val):
3533 def last_merge_status(self, val):
3528 self._last_merge_status = val
3534 self._last_merge_status = val
3529
3535
3530 @declared_attr
3536 @declared_attr
3531 def author(cls):
3537 def author(cls):
3532 return relationship('User', lazy='joined')
3538 return relationship('User', lazy='joined')
3533
3539
3534 @declared_attr
3540 @declared_attr
3535 def source_repo(cls):
3541 def source_repo(cls):
3536 return relationship(
3542 return relationship(
3537 'Repository',
3543 'Repository',
3538 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3544 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3539
3545
3540 @property
3546 @property
3541 def source_ref_parts(self):
3547 def source_ref_parts(self):
3542 return self.unicode_to_reference(self.source_ref)
3548 return self.unicode_to_reference(self.source_ref)
3543
3549
3544 @declared_attr
3550 @declared_attr
3545 def target_repo(cls):
3551 def target_repo(cls):
3546 return relationship(
3552 return relationship(
3547 'Repository',
3553 'Repository',
3548 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3554 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3549
3555
3550 @property
3556 @property
3551 def target_ref_parts(self):
3557 def target_ref_parts(self):
3552 return self.unicode_to_reference(self.target_ref)
3558 return self.unicode_to_reference(self.target_ref)
3553
3559
3554 @property
3560 @property
3555 def shadow_merge_ref(self):
3561 def shadow_merge_ref(self):
3556 return self.unicode_to_reference(self._shadow_merge_ref)
3562 return self.unicode_to_reference(self._shadow_merge_ref)
3557
3563
3558 @shadow_merge_ref.setter
3564 @shadow_merge_ref.setter
3559 def shadow_merge_ref(self, ref):
3565 def shadow_merge_ref(self, ref):
3560 self._shadow_merge_ref = self.reference_to_unicode(ref)
3566 self._shadow_merge_ref = self.reference_to_unicode(ref)
3561
3567
3562 def unicode_to_reference(self, raw):
3568 def unicode_to_reference(self, raw):
3563 """
3569 """
3564 Convert a unicode (or string) to a reference object.
3570 Convert a unicode (or string) to a reference object.
3565 If unicode evaluates to False it returns None.
3571 If unicode evaluates to False it returns None.
3566 """
3572 """
3567 if raw:
3573 if raw:
3568 refs = raw.split(':')
3574 refs = raw.split(':')
3569 return Reference(*refs)
3575 return Reference(*refs)
3570 else:
3576 else:
3571 return None
3577 return None
3572
3578
3573 def reference_to_unicode(self, ref):
3579 def reference_to_unicode(self, ref):
3574 """
3580 """
3575 Convert a reference object to unicode.
3581 Convert a reference object to unicode.
3576 If reference is None it returns None.
3582 If reference is None it returns None.
3577 """
3583 """
3578 if ref:
3584 if ref:
3579 return u':'.join(ref)
3585 return u':'.join(ref)
3580 else:
3586 else:
3581 return None
3587 return None
3582
3588
3583 def get_api_data(self, with_merge_state=True):
3589 def get_api_data(self, with_merge_state=True):
3584 from rhodecode.model.pull_request import PullRequestModel
3590 from rhodecode.model.pull_request import PullRequestModel
3585
3591
3586 pull_request = self
3592 pull_request = self
3587 if with_merge_state:
3593 if with_merge_state:
3588 merge_status = PullRequestModel().merge_status(pull_request)
3594 merge_status = PullRequestModel().merge_status(pull_request)
3589 merge_state = {
3595 merge_state = {
3590 'status': merge_status[0],
3596 'status': merge_status[0],
3591 'message': safe_unicode(merge_status[1]),
3597 'message': safe_unicode(merge_status[1]),
3592 }
3598 }
3593 else:
3599 else:
3594 merge_state = {'status': 'not_available',
3600 merge_state = {'status': 'not_available',
3595 'message': 'not_available'}
3601 'message': 'not_available'}
3596
3602
3597 merge_data = {
3603 merge_data = {
3598 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3604 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3599 'reference': (
3605 'reference': (
3600 pull_request.shadow_merge_ref._asdict()
3606 pull_request.shadow_merge_ref._asdict()
3601 if pull_request.shadow_merge_ref else None),
3607 if pull_request.shadow_merge_ref else None),
3602 }
3608 }
3603
3609
3604 data = {
3610 data = {
3605 'pull_request_id': pull_request.pull_request_id,
3611 'pull_request_id': pull_request.pull_request_id,
3606 'url': PullRequestModel().get_url(pull_request),
3612 'url': PullRequestModel().get_url(pull_request),
3607 'title': pull_request.title,
3613 'title': pull_request.title,
3608 'description': pull_request.description,
3614 'description': pull_request.description,
3609 'status': pull_request.status,
3615 'status': pull_request.status,
3610 'created_on': pull_request.created_on,
3616 'created_on': pull_request.created_on,
3611 'updated_on': pull_request.updated_on,
3617 'updated_on': pull_request.updated_on,
3612 'commit_ids': pull_request.revisions,
3618 'commit_ids': pull_request.revisions,
3613 'review_status': pull_request.calculated_review_status(),
3619 'review_status': pull_request.calculated_review_status(),
3614 'mergeable': merge_state,
3620 'mergeable': merge_state,
3615 'source': {
3621 'source': {
3616 'clone_url': pull_request.source_repo.clone_url(),
3622 'clone_url': pull_request.source_repo.clone_url(),
3617 'repository': pull_request.source_repo.repo_name,
3623 'repository': pull_request.source_repo.repo_name,
3618 'reference': {
3624 'reference': {
3619 'name': pull_request.source_ref_parts.name,
3625 'name': pull_request.source_ref_parts.name,
3620 'type': pull_request.source_ref_parts.type,
3626 'type': pull_request.source_ref_parts.type,
3621 'commit_id': pull_request.source_ref_parts.commit_id,
3627 'commit_id': pull_request.source_ref_parts.commit_id,
3622 },
3628 },
3623 },
3629 },
3624 'target': {
3630 'target': {
3625 'clone_url': pull_request.target_repo.clone_url(),
3631 'clone_url': pull_request.target_repo.clone_url(),
3626 'repository': pull_request.target_repo.repo_name,
3632 'repository': pull_request.target_repo.repo_name,
3627 'reference': {
3633 'reference': {
3628 'name': pull_request.target_ref_parts.name,
3634 'name': pull_request.target_ref_parts.name,
3629 'type': pull_request.target_ref_parts.type,
3635 'type': pull_request.target_ref_parts.type,
3630 'commit_id': pull_request.target_ref_parts.commit_id,
3636 'commit_id': pull_request.target_ref_parts.commit_id,
3631 },
3637 },
3632 },
3638 },
3633 'merge': merge_data,
3639 'merge': merge_data,
3634 'author': pull_request.author.get_api_data(include_secrets=False,
3640 'author': pull_request.author.get_api_data(include_secrets=False,
3635 details='basic'),
3641 details='basic'),
3636 'reviewers': [
3642 'reviewers': [
3637 {
3643 {
3638 'user': reviewer.get_api_data(include_secrets=False,
3644 'user': reviewer.get_api_data(include_secrets=False,
3639 details='basic'),
3645 details='basic'),
3640 'reasons': reasons,
3646 'reasons': reasons,
3641 'review_status': st[0][1].status if st else 'not_reviewed',
3647 'review_status': st[0][1].status if st else 'not_reviewed',
3642 }
3648 }
3643 for obj, reviewer, reasons, mandatory, st in
3649 for obj, reviewer, reasons, mandatory, st in
3644 pull_request.reviewers_statuses()
3650 pull_request.reviewers_statuses()
3645 ]
3651 ]
3646 }
3652 }
3647
3653
3648 return data
3654 return data
3649
3655
3650
3656
class PullRequest(Base, _PullRequestBase):
    """
    SQLAlchemy model for a live pull request. The shared column definitions
    live in the ``_PullRequestBase`` mixin; this class adds the primary key,
    the child relationships, and the PR-only helper methods.
    """

    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # surrogate primary key of the pull request
    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        # fall back to the python object id for instances not yet flushed
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    # child rows are fully owned by the pull request: deleting the PR (or
    # detaching a child from the collection) removes them via delete-orphan
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    # lazy='dynamic' so `.versions` can be further filtered/ordered as a query
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around *pull_request_obj*.

        :param pull_request_obj: the PullRequest (or PullRequestVersion)
            whose API data gets snapshotted into the wrapper
        :param org_pull_request_obj: the original (live) pull request;
            supplies the shadow-merge-ref and reviewer-data attributes
        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the snapshotted attrs dict
            (defaults to ['versions'])
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # names listed in `internal` are served by real methods or
                # properties on this wrapper; everything else comes from the
                # snapshotted attrs dict
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only present when the wrapped object is a PullRequestVersion
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always come from the *original* pull request, not a version
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        # imported locally to avoid a circular model import
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        # imported locally to avoid a circular model import
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        # imported locally to avoid a circular model import
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return the vcs instance of this pull request's shadow repository,
        or None when the shadow repository does not exist on disk.
        """
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        # shadow repo paths are keyed by numeric repo id + workspace id
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
3768
3774
3769
3775
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request, created when the PR is updated.
    Reviewers, versions, status and review state are not duplicated here;
    they are proxied to the live pull request this version belongs to.
    """

    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    # link back to the live pull request this row is a snapshot of
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        # fall back to the python object id for instances not yet flushed
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # proxied to the live pull request
        return self.pull_request.reviewers

    @property
    def versions(self):
        # proxied to the live pull request
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
3807
3813
3808
3814
class PullRequestReviewers(Base, BaseModel):
    """
    Association row linking a reviewer (user) to a pull request, together
    with the reasons and review-rule data that made the user a reviewer.
    """

    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    @hybrid_property
    def reasons(self):
        # normalize a NULL/empty column value to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        # reasons must be a (possibly empty) list of strings
        val = val or []
        if any(not isinstance(x, basestring) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # textual reasons why the user became a reviewer, stored as a JSON list
    # and exposed through the `reasons` hybrid property above
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON blob describing the review rule that generated this entry
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer

        Returns None when no vote rule is attached; returns an empty dict
        when a vote rule exists but did not come from a user group entry.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
3867
3873
3868
3874
class Notification(Base, BaseModel):
    """
    A single notification message, fanned out to its recipients through
    ``UserNotification`` association rows.
    """

    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # known notification type values stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    # association rows are owned by the notification: deleting it removes them
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users this notification was delivered to, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and its per-user association rows.

        :param created_by: User that triggered the notification
        :param subject: short subject line
        :param body: full notification body text
        :param recipients: iterable of User objects to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new (session-added, not yet committed) Notification
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True

            u.notifications.append(assoc)
        Session().add(notification)

        return notification
3926
3932
3927
3933
class UserNotification(Base, BaseModel):
    """
    Association row tracking the delivery and read state of a
    ``Notification`` for one user (composite PK: user_id + notification_id).
    """

    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    # newest notifications first when loading through this relationship
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flag as read and stage the change; the caller commits the session
        self.read = True
        Session().add(self)
3947
3953
3948
3954
class Gist(Base, BaseModel):
    """
    A gist: a small snippet repository stored on disk under the gist store
    location and addressed by its unique ``gist_access_id``.
    """

    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    # gist visibility types stored in `gist_type`
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels stored in `acl_level`
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry timestamp as a float; semantics of "never expires" handled by
    # the gist model layer — not visible here
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Return the gist with the given access id or raise HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        # returns None when no gist matches
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        # imported locally to avoid a circular model import
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # the repository root path is stored in the RhodeCodeUi table under
        # the URL_SEP key; cached via the short-lived SQL cache region
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            # content is intentionally omitted here; filled by callers that
            # actually load the gist files
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        # gists live on disk under base_path()/<gist_access_id>
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False)
4044
4050
4045
4051
class ExternalIdentity(Base, BaseModel):
    """
    Mapping between a local user and an identity at an external
    authentication provider (composite PK: external_id + local_user_id +
    provider_name).
    """

    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'})

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    # provider-issued credentials for this identity
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optionally narrow the match to one local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        # implicit join: User rows matched via the local_user_id link
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        # returns the query object itself so callers can iterate or refine it
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query
4108
4114
4109
4115
4110 class Integration(Base, BaseModel):
4116 class Integration(Base, BaseModel):
4111 __tablename__ = 'integrations'
4117 __tablename__ = 'integrations'
4112 __table_args__ = (
4118 __table_args__ = (
4113 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4119 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4114 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
4120 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
4115 )
4121 )
4116
4122
4117 integration_id = Column('integration_id', Integer(), primary_key=True)
4123 integration_id = Column('integration_id', Integer(), primary_key=True)
4118 integration_type = Column('integration_type', String(255))
4124 integration_type = Column('integration_type', String(255))
4119 enabled = Column('enabled', Boolean(), nullable=False)
4125 enabled = Column('enabled', Boolean(), nullable=False)
4120 name = Column('name', String(255), nullable=False)
4126 name = Column('name', String(255), nullable=False)
4121 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4127 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4122 default=False)
4128 default=False)
4123
4129
4124 settings = Column(
4130 settings = Column(
4125 'settings_json', MutationObj.as_mutable(
4131 'settings_json', MutationObj.as_mutable(
4126 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4132 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4127 repo_id = Column(
4133 repo_id = Column(
4128 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4134 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4129 nullable=True, unique=None, default=None)
4135 nullable=True, unique=None, default=None)
4130 repo = relationship('Repository', lazy='joined')
4136 repo = relationship('Repository', lazy='joined')
4131
4137
4132 repo_group_id = Column(
4138 repo_group_id = Column(
4133 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4139 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4134 nullable=True, unique=None, default=None)
4140 nullable=True, unique=None, default=None)
4135 repo_group = relationship('RepoGroup', lazy='joined')
4141 repo_group = relationship('RepoGroup', lazy='joined')
4136
4142
4137 @property
4143 @property
4138 def scope(self):
4144 def scope(self):
4139 if self.repo:
4145 if self.repo:
4140 return repr(self.repo)
4146 return repr(self.repo)
4141 if self.repo_group:
4147 if self.repo_group:
4142 if self.child_repos_only:
4148 if self.child_repos_only:
4143 return repr(self.repo_group) + ' (child repos only)'
4149 return repr(self.repo_group) + ' (child repos only)'
4144 else:
4150 else:
4145 return repr(self.repo_group) + ' (recursive)'
4151 return repr(self.repo_group) + ' (recursive)'
4146 if self.child_repos_only:
4152 if self.child_repos_only:
4147 return 'root_repos'
4153 return 'root_repos'
4148 return 'global'
4154 return 'global'
4149
4155
4150 def __repr__(self):
4156 def __repr__(self):
4151 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4157 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4152
4158
4153
4159
4154 class RepoReviewRuleUser(Base, BaseModel):
4160 class RepoReviewRuleUser(Base, BaseModel):
4155 __tablename__ = 'repo_review_rules_users'
4161 __tablename__ = 'repo_review_rules_users'
4156 __table_args__ = (
4162 __table_args__ = (
4157 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4163 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4158 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4164 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4159 )
4165 )
4160
4166
4161 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4167 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4162 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4168 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4163 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4169 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4164 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4170 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4165 user = relationship('User')
4171 user = relationship('User')
4166
4172
4167 def rule_data(self):
4173 def rule_data(self):
4168 return {
4174 return {
4169 'mandatory': self.mandatory
4175 'mandatory': self.mandatory
4170 }
4176 }
4171
4177
4172
4178
4173 class RepoReviewRuleUserGroup(Base, BaseModel):
4179 class RepoReviewRuleUserGroup(Base, BaseModel):
4174 __tablename__ = 'repo_review_rules_users_groups'
4180 __tablename__ = 'repo_review_rules_users_groups'
4175 __table_args__ = (
4181 __table_args__ = (
4176 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4182 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4177 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4183 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4178 )
4184 )
4179 VOTE_RULE_ALL = -1
4185 VOTE_RULE_ALL = -1
4180
4186
4181 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4187 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4182 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4188 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4183 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4189 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4184 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4190 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4185 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4191 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4186 users_group = relationship('UserGroup')
4192 users_group = relationship('UserGroup')
4187
4193
4188 def rule_data(self):
4194 def rule_data(self):
4189 return {
4195 return {
4190 'mandatory': self.mandatory,
4196 'mandatory': self.mandatory,
4191 'vote_rule': self.vote_rule
4197 'vote_rule': self.vote_rule
4192 }
4198 }
4193
4199
4194 @property
4200 @property
4195 def vote_rule_label(self):
4201 def vote_rule_label(self):
4196 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4202 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4197 return 'all must vote'
4203 return 'all must vote'
4198 else:
4204 else:
4199 return 'min. vote {}'.format(self.vote_rule)
4205 return 'min. vote {}'.format(self.vote_rule)
4200
4206
4201
4207
4202 class RepoReviewRule(Base, BaseModel):
4208 class RepoReviewRule(Base, BaseModel):
4203 __tablename__ = 'repo_review_rules'
4209 __tablename__ = 'repo_review_rules'
4204 __table_args__ = (
4210 __table_args__ = (
4205 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4211 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4206 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4212 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4207 )
4213 )
4208
4214
4209 repo_review_rule_id = Column(
4215 repo_review_rule_id = Column(
4210 'repo_review_rule_id', Integer(), primary_key=True)
4216 'repo_review_rule_id', Integer(), primary_key=True)
4211 repo_id = Column(
4217 repo_id = Column(
4212 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4218 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4213 repo = relationship('Repository', backref='review_rules')
4219 repo = relationship('Repository', backref='review_rules')
4214
4220
4215 review_rule_name = Column('review_rule_name', String(255))
4221 review_rule_name = Column('review_rule_name', String(255))
4216 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4222 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4217 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4223 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4218 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4224 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4219
4225
4220 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4226 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4221 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4227 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4222 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4228 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4223 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4229 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4224
4230
4225 rule_users = relationship('RepoReviewRuleUser')
4231 rule_users = relationship('RepoReviewRuleUser')
4226 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4232 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4227
4233
4228 def _validate_glob(self, value):
4234 def _validate_glob(self, value):
4229 re.compile('^' + glob2re(value) + '$')
4235 re.compile('^' + glob2re(value) + '$')
4230
4236
4231 @hybrid_property
4237 @hybrid_property
4232 def source_branch_pattern(self):
4238 def source_branch_pattern(self):
4233 return self._branch_pattern or '*'
4239 return self._branch_pattern or '*'
4234
4240
4235 @source_branch_pattern.setter
4241 @source_branch_pattern.setter
4236 def source_branch_pattern(self, value):
4242 def source_branch_pattern(self, value):
4237 self._validate_glob(value)
4243 self._validate_glob(value)
4238 self._branch_pattern = value or '*'
4244 self._branch_pattern = value or '*'
4239
4245
4240 @hybrid_property
4246 @hybrid_property
4241 def target_branch_pattern(self):
4247 def target_branch_pattern(self):
4242 return self._target_branch_pattern or '*'
4248 return self._target_branch_pattern or '*'
4243
4249
4244 @target_branch_pattern.setter
4250 @target_branch_pattern.setter
4245 def target_branch_pattern(self, value):
4251 def target_branch_pattern(self, value):
4246 self._validate_glob(value)
4252 self._validate_glob(value)
4247 self._target_branch_pattern = value or '*'
4253 self._target_branch_pattern = value or '*'
4248
4254
4249 @hybrid_property
4255 @hybrid_property
4250 def file_pattern(self):
4256 def file_pattern(self):
4251 return self._file_pattern or '*'
4257 return self._file_pattern or '*'
4252
4258
4253 @file_pattern.setter
4259 @file_pattern.setter
4254 def file_pattern(self, value):
4260 def file_pattern(self, value):
4255 self._validate_glob(value)
4261 self._validate_glob(value)
4256 self._file_pattern = value or '*'
4262 self._file_pattern = value or '*'
4257
4263
4258 def matches(self, source_branch, target_branch, files_changed):
4264 def matches(self, source_branch, target_branch, files_changed):
4259 """
4265 """
4260 Check if this review rule matches a branch/files in a pull request
4266 Check if this review rule matches a branch/files in a pull request
4261
4267
4262 :param source_branch: source branch name for the commit
4268 :param source_branch: source branch name for the commit
4263 :param target_branch: target branch name for the commit
4269 :param target_branch: target branch name for the commit
4264 :param files_changed: list of file paths changed in the pull request
4270 :param files_changed: list of file paths changed in the pull request
4265 """
4271 """
4266
4272
4267 source_branch = source_branch or ''
4273 source_branch = source_branch or ''
4268 target_branch = target_branch or ''
4274 target_branch = target_branch or ''
4269 files_changed = files_changed or []
4275 files_changed = files_changed or []
4270
4276
4271 branch_matches = True
4277 branch_matches = True
4272 if source_branch or target_branch:
4278 if source_branch or target_branch:
4273 if self.source_branch_pattern == '*':
4279 if self.source_branch_pattern == '*':
4274 source_branch_match = True
4280 source_branch_match = True
4275 else:
4281 else:
4276 source_branch_regex = re.compile(
4282 source_branch_regex = re.compile(
4277 '^' + glob2re(self.source_branch_pattern) + '$')
4283 '^' + glob2re(self.source_branch_pattern) + '$')
4278 source_branch_match = bool(source_branch_regex.search(source_branch))
4284 source_branch_match = bool(source_branch_regex.search(source_branch))
4279 if self.target_branch_pattern == '*':
4285 if self.target_branch_pattern == '*':
4280 target_branch_match = True
4286 target_branch_match = True
4281 else:
4287 else:
4282 target_branch_regex = re.compile(
4288 target_branch_regex = re.compile(
4283 '^' + glob2re(self.target_branch_pattern) + '$')
4289 '^' + glob2re(self.target_branch_pattern) + '$')
4284 target_branch_match = bool(target_branch_regex.search(target_branch))
4290 target_branch_match = bool(target_branch_regex.search(target_branch))
4285
4291
4286 branch_matches = source_branch_match and target_branch_match
4292 branch_matches = source_branch_match and target_branch_match
4287
4293
4288 files_matches = True
4294 files_matches = True
4289 if self.file_pattern != '*':
4295 if self.file_pattern != '*':
4290 files_matches = False
4296 files_matches = False
4291 file_regex = re.compile(glob2re(self.file_pattern))
4297 file_regex = re.compile(glob2re(self.file_pattern))
4292 for filename in files_changed:
4298 for filename in files_changed:
4293 if file_regex.search(filename):
4299 if file_regex.search(filename):
4294 files_matches = True
4300 files_matches = True
4295 break
4301 break
4296
4302
4297 return branch_matches and files_matches
4303 return branch_matches and files_matches
4298
4304
4299 @property
4305 @property
4300 def review_users(self):
4306 def review_users(self):
4301 """ Returns the users which this rule applies to """
4307 """ Returns the users which this rule applies to """
4302
4308
4303 users = collections.OrderedDict()
4309 users = collections.OrderedDict()
4304
4310
4305 for rule_user in self.rule_users:
4311 for rule_user in self.rule_users:
4306 if rule_user.user.active:
4312 if rule_user.user.active:
4307 if rule_user.user not in users:
4313 if rule_user.user not in users:
4308 users[rule_user.user.username] = {
4314 users[rule_user.user.username] = {
4309 'user': rule_user.user,
4315 'user': rule_user.user,
4310 'source': 'user',
4316 'source': 'user',
4311 'source_data': {},
4317 'source_data': {},
4312 'data': rule_user.rule_data()
4318 'data': rule_user.rule_data()
4313 }
4319 }
4314
4320
4315 for rule_user_group in self.rule_user_groups:
4321 for rule_user_group in self.rule_user_groups:
4316 source_data = {
4322 source_data = {
4317 'user_group_id': rule_user_group.users_group.users_group_id,
4323 'user_group_id': rule_user_group.users_group.users_group_id,
4318 'name': rule_user_group.users_group.users_group_name,
4324 'name': rule_user_group.users_group.users_group_name,
4319 'members': len(rule_user_group.users_group.members)
4325 'members': len(rule_user_group.users_group.members)
4320 }
4326 }
4321 for member in rule_user_group.users_group.members:
4327 for member in rule_user_group.users_group.members:
4322 if member.user.active:
4328 if member.user.active:
4323 key = member.user.username
4329 key = member.user.username
4324 if key in users:
4330 if key in users:
4325 # skip this member as we have him already
4331 # skip this member as we have him already
4326 # this prevents from override the "first" matched
4332 # this prevents from override the "first" matched
4327 # users with duplicates in multiple groups
4333 # users with duplicates in multiple groups
4328 continue
4334 continue
4329
4335
4330 users[key] = {
4336 users[key] = {
4331 'user': member.user,
4337 'user': member.user,
4332 'source': 'user_group',
4338 'source': 'user_group',
4333 'source_data': source_data,
4339 'source_data': source_data,
4334 'data': rule_user_group.rule_data()
4340 'data': rule_user_group.rule_data()
4335 }
4341 }
4336
4342
4337 return users
4343 return users
4338
4344
4339 def user_group_vote_rule(self):
4345 def user_group_vote_rule(self):
4340 rules = []
4346 rules = []
4341 if self.rule_user_groups:
4347 if self.rule_user_groups:
4342 for user_group in self.rule_user_groups:
4348 for user_group in self.rule_user_groups:
4343 rules.append(user_group)
4349 rules.append(user_group)
4344 return rules
4350 return rules
4345
4351
4346 def __repr__(self):
4352 def __repr__(self):
4347 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4353 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4348 self.repo_review_rule_id, self.repo)
4354 self.repo_review_rule_id, self.repo)
4349
4355
4350
4356
4351 class ScheduleEntry(Base, BaseModel):
4357 class ScheduleEntry(Base, BaseModel):
4352 __tablename__ = 'schedule_entries'
4358 __tablename__ = 'schedule_entries'
4353 __table_args__ = (
4359 __table_args__ = (
4354 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4360 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4355 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4361 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4356 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4362 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4357 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4363 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4358 )
4364 )
4359 schedule_types = ['crontab', 'timedelta', 'integer']
4365 schedule_types = ['crontab', 'timedelta', 'integer']
4360 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4366 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4361
4367
4362 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4368 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4363 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4369 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4364 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4370 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4365
4371
4366 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4372 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4367 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4373 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4368
4374
4369 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4375 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4370 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4376 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4371
4377
4372 # task
4378 # task
4373 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4379 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4374 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4380 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4375 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4381 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4376 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4382 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4377
4383
4378 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4384 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4379 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4385 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4380
4386
4381 @hybrid_property
4387 @hybrid_property
4382 def schedule_type(self):
4388 def schedule_type(self):
4383 return self._schedule_type
4389 return self._schedule_type
4384
4390
4385 @schedule_type.setter
4391 @schedule_type.setter
4386 def schedule_type(self, val):
4392 def schedule_type(self, val):
4387 if val not in self.schedule_types:
4393 if val not in self.schedule_types:
4388 raise ValueError('Value must be on of `{}` and got `{}`'.format(
4394 raise ValueError('Value must be on of `{}` and got `{}`'.format(
4389 val, self.schedule_type))
4395 val, self.schedule_type))
4390
4396
4391 self._schedule_type = val
4397 self._schedule_type = val
4392
4398
4393 @classmethod
4399 @classmethod
4394 def get_uid(cls, obj):
4400 def get_uid(cls, obj):
4395 args = obj.task_args
4401 args = obj.task_args
4396 kwargs = obj.task_kwargs
4402 kwargs = obj.task_kwargs
4397 if isinstance(args, JsonRaw):
4403 if isinstance(args, JsonRaw):
4398 try:
4404 try:
4399 args = json.loads(args)
4405 args = json.loads(args)
4400 except ValueError:
4406 except ValueError:
4401 args = tuple()
4407 args = tuple()
4402
4408
4403 if isinstance(kwargs, JsonRaw):
4409 if isinstance(kwargs, JsonRaw):
4404 try:
4410 try:
4405 kwargs = json.loads(kwargs)
4411 kwargs = json.loads(kwargs)
4406 except ValueError:
4412 except ValueError:
4407 kwargs = dict()
4413 kwargs = dict()
4408
4414
4409 dot_notation = obj.task_dot_notation
4415 dot_notation = obj.task_dot_notation
4410 val = '.'.join(map(safe_str, [
4416 val = '.'.join(map(safe_str, [
4411 sorted(dot_notation), args, sorted(kwargs.items())]))
4417 sorted(dot_notation), args, sorted(kwargs.items())]))
4412 return hashlib.sha1(val).hexdigest()
4418 return hashlib.sha1(val).hexdigest()
4413
4419
4414 @classmethod
4420 @classmethod
4415 def get_by_schedule_name(cls, schedule_name):
4421 def get_by_schedule_name(cls, schedule_name):
4416 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4422 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4417
4423
4418 @classmethod
4424 @classmethod
4419 def get_by_schedule_id(cls, schedule_id):
4425 def get_by_schedule_id(cls, schedule_id):
4420 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4426 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4421
4427
4422 @property
4428 @property
4423 def task(self):
4429 def task(self):
4424 return self.task_dot_notation
4430 return self.task_dot_notation
4425
4431
4426 @property
4432 @property
4427 def schedule(self):
4433 def schedule(self):
4428 from rhodecode.lib.celerylib.utils import raw_2_schedule
4434 from rhodecode.lib.celerylib.utils import raw_2_schedule
4429 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4435 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4430 return schedule
4436 return schedule
4431
4437
4432 @property
4438 @property
4433 def args(self):
4439 def args(self):
4434 try:
4440 try:
4435 return list(self.task_args or [])
4441 return list(self.task_args or [])
4436 except ValueError:
4442 except ValueError:
4437 return list()
4443 return list()
4438
4444
4439 @property
4445 @property
4440 def kwargs(self):
4446 def kwargs(self):
4441 try:
4447 try:
4442 return dict(self.task_kwargs or {})
4448 return dict(self.task_kwargs or {})
4443 except ValueError:
4449 except ValueError:
4444 return dict()
4450 return dict()
4445
4451
4446 def _as_raw(self, val):
4452 def _as_raw(self, val):
4447 if hasattr(val, 'de_coerce'):
4453 if hasattr(val, 'de_coerce'):
4448 val = val.de_coerce()
4454 val = val.de_coerce()
4449 if val:
4455 if val:
4450 val = json.dumps(val)
4456 val = json.dumps(val)
4451
4457
4452 return val
4458 return val
4453
4459
4454 @property
4460 @property
4455 def schedule_definition_raw(self):
4461 def schedule_definition_raw(self):
4456 return self._as_raw(self.schedule_definition)
4462 return self._as_raw(self.schedule_definition)
4457
4463
4458 @property
4464 @property
4459 def args_raw(self):
4465 def args_raw(self):
4460 return self._as_raw(self.task_args)
4466 return self._as_raw(self.task_args)
4461
4467
4462 @property
4468 @property
4463 def kwargs_raw(self):
4469 def kwargs_raw(self):
4464 return self._as_raw(self.task_kwargs)
4470 return self._as_raw(self.task_kwargs)
4465
4471
4466 def __repr__(self):
4472 def __repr__(self):
4467 return '<DB:ScheduleEntry({}:{})>'.format(
4473 return '<DB:ScheduleEntry({}:{})>'.format(
4468 self.schedule_entry_id, self.schedule_name)
4474 self.schedule_entry_id, self.schedule_name)
4469
4475
4470
4476
4471 @event.listens_for(ScheduleEntry, 'before_update')
4477 @event.listens_for(ScheduleEntry, 'before_update')
4472 def update_task_uid(mapper, connection, target):
4478 def update_task_uid(mapper, connection, target):
4473 target.task_uid = ScheduleEntry.get_uid(target)
4479 target.task_uid = ScheduleEntry.get_uid(target)
4474
4480
4475
4481
4476 @event.listens_for(ScheduleEntry, 'before_insert')
4482 @event.listens_for(ScheduleEntry, 'before_insert')
4477 def set_task_uid(mapper, connection, target):
4483 def set_task_uid(mapper, connection, target):
4478 target.task_uid = ScheduleEntry.get_uid(target)
4484 target.task_uid = ScheduleEntry.get_uid(target)
4479
4485
4480
4486
4481 class DbMigrateVersion(Base, BaseModel):
4487 class DbMigrateVersion(Base, BaseModel):
4482 __tablename__ = 'db_migrate_version'
4488 __tablename__ = 'db_migrate_version'
4483 __table_args__ = (
4489 __table_args__ = (
4484 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4490 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4485 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4491 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4486 )
4492 )
4487 repository_id = Column('repository_id', String(250), primary_key=True)
4493 repository_id = Column('repository_id', String(250), primary_key=True)
4488 repository_path = Column('repository_path', Text)
4494 repository_path = Column('repository_path', Text)
4489 version = Column('version', Integer)
4495 version = Column('version', Integer)
4490
4496
4491
4497
4492 class DbSession(Base, BaseModel):
4498 class DbSession(Base, BaseModel):
4493 __tablename__ = 'db_session'
4499 __tablename__ = 'db_session'
4494 __table_args__ = (
4500 __table_args__ = (
4495 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4501 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4496 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4502 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4497 )
4503 )
4498
4504
4499 def __repr__(self):
4505 def __repr__(self):
4500 return '<DB:DbSession({})>'.format(self.id)
4506 return '<DB:DbSession({})>'.format(self.id)
4501
4507
4502 id = Column('id', Integer())
4508 id = Column('id', Integer())
4503 namespace = Column('namespace', String(255), primary_key=True)
4509 namespace = Column('namespace', String(255), primary_key=True)
4504 accessed = Column('accessed', DateTime, nullable=False)
4510 accessed = Column('accessed', DateTime, nullable=False)
4505 created = Column('created', DateTime, nullable=False)
4511 created = Column('created', DateTime, nullable=False)
4506 data = Column('data', PickleType, nullable=False)
4512 data = Column('data', PickleType, nullable=False)
4507
4513
4508
4514
4509
4515
4510 class BeakerCache(Base, BaseModel):
4516 class BeakerCache(Base, BaseModel):
4511 __tablename__ = 'beaker_cache'
4517 __tablename__ = 'beaker_cache'
4512 __table_args__ = (
4518 __table_args__ = (
4513 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4519 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4514 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4520 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4515 )
4521 )
4516
4522
4517 def __repr__(self):
4523 def __repr__(self):
4518 return '<DB:DbSession({})>'.format(self.id)
4524 return '<DB:DbSession({})>'.format(self.id)
4519
4525
4520 id = Column('id', Integer())
4526 id = Column('id', Integer())
4521 namespace = Column('namespace', String(255), primary_key=True)
4527 namespace = Column('namespace', String(255), primary_key=True)
4522 accessed = Column('accessed', DateTime, nullable=False)
4528 accessed = Column('accessed', DateTime, nullable=False)
4523 created = Column('created', DateTime, nullable=False)
4529 created = Column('created', DateTime, nullable=False)
4524 data = Column('data', PickleType, nullable=False)
4530 data = Column('data', PickleType, nullable=False)
@@ -1,1695 +1,1700 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext#, _
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
65 # Data structure to hold the response data when updating commits during a pull
65 # Data structure to hold the response data when updating commits during a pull
66 # request update.
66 # request update.
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 'executed', 'reason', 'new', 'old', 'changes',
68 'executed', 'reason', 'new', 'old', 'changes',
69 'source_changed', 'target_changed'])
69 'source_changed', 'target_changed'])
70
70
71
71
72 class PullRequestModel(BaseModel):
72 class PullRequestModel(BaseModel):
73
73
74 cls = PullRequest
74 cls = PullRequest
75
75
76 DIFF_CONTEXT = 3
76 DIFF_CONTEXT = 3
77
77
78 MERGE_STATUS_MESSAGES = {
78 MERGE_STATUS_MESSAGES = {
79 MergeFailureReason.NONE: lazy_ugettext(
79 MergeFailureReason.NONE: lazy_ugettext(
80 'This pull request can be automatically merged.'),
80 'This pull request can be automatically merged.'),
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 'This pull request cannot be merged because of an unhandled'
82 'This pull request cannot be merged because of an unhandled'
83 ' exception.'),
83 ' exception.'),
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 'This pull request cannot be merged because of merge conflicts.'),
85 'This pull request cannot be merged because of merge conflicts.'),
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 'This pull request could not be merged because push to target'
87 'This pull request could not be merged because push to target'
88 ' failed.'),
88 ' failed.'),
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 'This pull request cannot be merged because the target is not a'
90 'This pull request cannot be merged because the target is not a'
91 ' head.'),
91 ' head.'),
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 'This pull request cannot be merged because the source contains'
93 'This pull request cannot be merged because the source contains'
94 ' more branches than the target.'),
94 ' more branches than the target.'),
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 'This pull request cannot be merged because the target has'
96 'This pull request cannot be merged because the target has'
97 ' multiple heads.'),
97 ' multiple heads.'),
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 'This pull request cannot be merged because the target repository'
99 'This pull request cannot be merged because the target repository'
100 ' is locked.'),
100 ' is locked.'),
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 'This pull request cannot be merged because the target or the '
102 'This pull request cannot be merged because the target or the '
103 'source reference is missing.'),
103 'source reference is missing.'),
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 'This pull request cannot be merged because the target '
105 'This pull request cannot be merged because the target '
106 'reference is missing.'),
106 'reference is missing.'),
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 'This pull request cannot be merged because the source '
108 'This pull request cannot be merged because the source '
109 'reference is missing.'),
109 'reference is missing.'),
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 'This pull request cannot be merged because of conflicts related '
111 'This pull request cannot be merged because of conflicts related '
112 'to sub repositories.'),
112 'to sub repositories.'),
113 }
113 }
114
114
115 UPDATE_STATUS_MESSAGES = {
115 UPDATE_STATUS_MESSAGES = {
116 UpdateFailureReason.NONE: lazy_ugettext(
116 UpdateFailureReason.NONE: lazy_ugettext(
117 'Pull request update successful.'),
117 'Pull request update successful.'),
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 'Pull request update failed because of an unknown error.'),
119 'Pull request update failed because of an unknown error.'),
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 'No update needed because the source and target have not changed.'),
121 'No update needed because the source and target have not changed.'),
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 'Pull request cannot be updated because the reference type is '
123 'Pull request cannot be updated because the reference type is '
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 'This pull request cannot be updated because the target '
126 'This pull request cannot be updated because the target '
127 'reference is missing.'),
127 'reference is missing.'),
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 'This pull request cannot be updated because the source '
129 'This pull request cannot be updated because the source '
130 'reference is missing.'),
130 'reference is missing.'),
131 }
131 }
132
132
133 def __get_pull_request(self, pull_request):
133 def __get_pull_request(self, pull_request):
134 return self._get_instance((
134 return self._get_instance((
135 PullRequest, PullRequestVersion), pull_request)
135 PullRequest, PullRequestVersion), pull_request)
136
136
137 def _check_perms(self, perms, pull_request, user, api=False):
137 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
138 if not api:
139 return h.HasRepoPermissionAny(*perms)(
139 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
140 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
141 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
142 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
143 user=user, repo_name=pull_request.target_repo.repo_name)
144
144
145 def check_user_read(self, pull_request, user, api=False):
145 def check_user_read(self, pull_request, user, api=False):
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 return self._check_perms(_perms, pull_request, user, api)
147 return self._check_perms(_perms, pull_request, user, api)
148
148
149 def check_user_merge(self, pull_request, user, api=False):
149 def check_user_merge(self, pull_request, user, api=False):
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 return self._check_perms(_perms, pull_request, user, api)
151 return self._check_perms(_perms, pull_request, user, api)
152
152
153 def check_user_update(self, pull_request, user, api=False):
153 def check_user_update(self, pull_request, user, api=False):
154 owner = user.user_id == pull_request.user_id
154 owner = user.user_id == pull_request.user_id
155 return self.check_user_merge(pull_request, user, api) or owner
155 return self.check_user_merge(pull_request, user, api) or owner
156
156
157 def check_user_delete(self, pull_request, user):
157 def check_user_delete(self, pull_request, user):
158 owner = user.user_id == pull_request.user_id
158 owner = user.user_id == pull_request.user_id
159 _perms = ('repository.admin',)
159 _perms = ('repository.admin',)
160 return self._check_perms(_perms, pull_request, user) or owner
160 return self._check_perms(_perms, pull_request, user) or owner
161
161
162 def check_user_change_status(self, pull_request, user, api=False):
162 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
163 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
164 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
165 return self.check_user_update(pull_request, user, api) or reviewer
166
166
167 def check_user_comment(self, pull_request, user):
167 def check_user_comment(self, pull_request, user):
168 owner = user.user_id == pull_request.user_id
168 owner = user.user_id == pull_request.user_id
169 return self.check_user_read(pull_request, user) or owner
169 return self.check_user_read(pull_request, user) or owner
170
170
171 def get(self, pull_request):
171 def get(self, pull_request):
172 return self.__get_pull_request(pull_request)
172 return self.__get_pull_request(pull_request)
173
173
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
175 opened_by=None, order_by=None,
176 order_dir='desc'):
176 order_dir='desc'):
177 repo = None
177 repo = None
178 if repo_name:
178 if repo_name:
179 repo = self._get_repo(repo_name)
179 repo = self._get_repo(repo_name)
180
180
181 q = PullRequest.query()
181 q = PullRequest.query()
182
182
183 # source or target
183 # source or target
184 if repo and source:
184 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
185 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
186 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
187 q = q.filter(PullRequest.target_repo == repo)
188
188
189 # closed,opened
189 # closed,opened
190 if statuses:
190 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
191 q = q.filter(PullRequest.status.in_(statuses))
192
192
193 # opened by filter
193 # opened by filter
194 if opened_by:
194 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
195 q = q.filter(PullRequest.user_id.in_(opened_by))
196
196
197 if order_by:
197 if order_by:
198 order_map = {
198 order_map = {
199 'name_raw': PullRequest.pull_request_id,
199 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
200 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
201 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
202 'target_repo': PullRequest.target_repo_id
203 }
203 }
204 if order_dir == 'asc':
204 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
205 q = q.order_by(order_map[order_by].asc())
206 else:
206 else:
207 q = q.order_by(order_map[order_by].desc())
207 q = q.order_by(order_map[order_by].desc())
208
208
209 return q
209 return q
210
210
211 def count_all(self, repo_name, source=False, statuses=None,
211 def count_all(self, repo_name, source=False, statuses=None,
212 opened_by=None):
212 opened_by=None):
213 """
213 """
214 Count the number of pull requests for a specific repository.
214 Count the number of pull requests for a specific repository.
215
215
216 :param repo_name: target or source repo
216 :param repo_name: target or source repo
217 :param source: boolean flag to specify if repo_name refers to source
217 :param source: boolean flag to specify if repo_name refers to source
218 :param statuses: list of pull request statuses
218 :param statuses: list of pull request statuses
219 :param opened_by: author user of the pull request
219 :param opened_by: author user of the pull request
220 :returns: int number of pull requests
220 :returns: int number of pull requests
221 """
221 """
222 q = self._prepare_get_all_query(
222 q = self._prepare_get_all_query(
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224
224
225 return q.count()
225 return q.count()
226
226
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
228 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
229 """
230 Get all pull requests for a specific repository.
230 Get all pull requests for a specific repository.
231
231
232 :param repo_name: target or source repo
232 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
233 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
234 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
235 :param opened_by: author user of the pull request
236 :param offset: pagination offset
236 :param offset: pagination offset
237 :param length: length of returned list
237 :param length: length of returned list
238 :param order_by: order of the returned list
238 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
239 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
240 :returns: list of pull requests
241 """
241 """
242 q = self._prepare_get_all_query(
242 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
244 order_by=order_by, order_dir=order_dir)
245
245
246 if length:
246 if length:
247 pull_requests = q.limit(length).offset(offset).all()
247 pull_requests = q.limit(length).offset(offset).all()
248 else:
248 else:
249 pull_requests = q.all()
249 pull_requests = q.all()
250
250
251 return pull_requests
251 return pull_requests
252
252
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
254 opened_by=None):
255 """
255 """
256 Count the number of pull requests for a specific repository that are
256 Count the number of pull requests for a specific repository that are
257 awaiting review.
257 awaiting review.
258
258
259 :param repo_name: target or source repo
259 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
260 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
261 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
262 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
263 :returns: int number of pull requests
264 """
264 """
265 pull_requests = self.get_awaiting_review(
265 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
267
268 return len(pull_requests)
268 return len(pull_requests)
269
269
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
271 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
272 order_by=None, order_dir='desc'):
273 """
273 """
274 Get all pull requests for a specific repository that are awaiting
274 Get all pull requests for a specific repository that are awaiting
275 review.
275 review.
276
276
277 :param repo_name: target or source repo
277 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
278 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
279 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
280 :param opened_by: author user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _filtered_pull_requests = []
291 _filtered_pull_requests = []
292 for pr in pull_requests:
292 for pr in pull_requests:
293 status = pr.calculated_review_status()
293 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
296 _filtered_pull_requests.append(pr)
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
303 opened_by=None, user_id=None):
304 """
304 """
305 Count the number of pull requests for a specific repository that are
305 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
306 awaiting review from a specific user.
307
307
308 :param repo_name: target or source repo
308 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
309 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
310 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
311 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
312 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
313 :returns: int number of pull requests
314 """
314 """
315 pull_requests = self.get_awaiting_my_review(
315 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
317 user_id=user_id)
318
318
319 return len(pull_requests)
319 return len(pull_requests)
320
320
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
322 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
323 length=None, order_by=None, order_dir='desc'):
324 """
324 """
325 Get all pull requests for a specific repository that are awaiting
325 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
326 review from a specific user.
327
327
328 :param repo_name: target or source repo
328 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
329 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
330 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
331 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
332 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
333 :param offset: pagination offset
334 :param length: length of returned list
334 :param length: length of returned list
335 :param order_by: order of the returned list
335 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
336 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
337 :returns: list of pull requests
338 """
338 """
339 pull_requests = self.get_all(
339 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
341 order_by=order_by, order_dir=order_dir)
342
342
343 _my = PullRequestModel().get_not_reviewed(user_id)
343 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
344 my_participation = []
345 for pr in pull_requests:
345 for pr in pull_requests:
346 if pr in _my:
346 if pr in _my:
347 my_participation.append(pr)
347 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
348 _filtered_pull_requests = my_participation
349 if length:
349 if length:
350 return _filtered_pull_requests[offset:offset+length]
350 return _filtered_pull_requests[offset:offset+length]
351 else:
351 else:
352 return _filtered_pull_requests
352 return _filtered_pull_requests
353
353
354 def get_not_reviewed(self, user_id):
354 def get_not_reviewed(self, user_id):
355 return [
355 return [
356 x.pull_request for x in PullRequestReviewers.query().filter(
356 x.pull_request for x in PullRequestReviewers.query().filter(
357 PullRequestReviewers.user_id == user_id).all()
357 PullRequestReviewers.user_id == user_id).all()
358 ]
358 ]
359
359
360 def _prepare_participating_query(self, user_id=None, statuses=None,
360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
361 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
362 q = PullRequest.query()
363 if user_id:
363 if user_id:
364 reviewers_subquery = Session().query(
364 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
365 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
366 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
367 user_filter = or_(
368 PullRequest.user_id == user_id,
368 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
370 )
371 q = PullRequest.query().filter(user_filter)
371 q = PullRequest.query().filter(user_filter)
372
372
373 # closed,opened
373 # closed,opened
374 if statuses:
374 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
375 q = q.filter(PullRequest.status.in_(statuses))
376
376
377 if order_by:
377 if order_by:
378 order_map = {
378 order_map = {
379 'name_raw': PullRequest.pull_request_id,
379 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
380 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
381 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
382 'target_repo': PullRequest.target_repo_id
383 }
383 }
384 if order_dir == 'asc':
384 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
385 q = q.order_by(order_map[order_by].asc())
386 else:
386 else:
387 q = q.order_by(order_map[order_by].desc())
387 q = q.order_by(order_map[order_by].desc())
388
388
389 return q
389 return q
390
390
391 def count_im_participating_in(self, user_id=None, statuses=None):
391 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
393 return q.count()
394
394
395 def get_im_participating_in(
395 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
396 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
397 length=None, order_by=None, order_dir='desc'):
398 """
398 """
399 Get all Pull requests that i'm participating in, or i have opened
399 Get all Pull requests that i'm participating in, or i have opened
400 """
400 """
401
401
402 q = self._prepare_participating_query(
402 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
403 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
404 order_dir=order_dir)
405
405
406 if length:
406 if length:
407 pull_requests = q.limit(length).offset(offset).all()
407 pull_requests = q.limit(length).offset(offset).all()
408 else:
408 else:
409 pull_requests = q.all()
409 pull_requests = q.all()
410
410
411 return pull_requests
411 return pull_requests
412
412
413 def get_versions(self, pull_request):
413 def get_versions(self, pull_request):
414 """
414 """
415 returns version of pull request sorted by ID descending
415 returns version of pull request sorted by ID descending
416 """
416 """
417 return PullRequestVersion.query()\
417 return PullRequestVersion.query()\
418 .filter(PullRequestVersion.pull_request == pull_request)\
418 .filter(PullRequestVersion.pull_request == pull_request)\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 .all()
420 .all()
421
421
def get_pr_version(self, pull_request_id, version=None):
    """
    Resolve a pull request, optionally at a specific stored version.

    :param pull_request_id: id of the live pull request
    :param version: ``None`` for the live pull request, the string
        ``'latest'`` to get the live object flagged as latest, or a
        `PullRequestVersion` id for a historical snapshot
    :return: 4-tuple of (live pull request object, object to operate on
        (version snapshot or live PR), display wrapper object, at_version
        marker — ``None``, ``'latest'`` or the version id)
    """
    at_version = None

    if version and version == 'latest':
        # 'latest' is just the live PR, but callers get the marker back
        pull_request_ver = PullRequest.get(pull_request_id)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_obj
        at_version = 'latest'
    elif version:
        # a concrete historical snapshot; 404s if the version is unknown
        pull_request_ver = PullRequestVersion.get_or_404(version)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_ver.pull_request
        at_version = pull_request_ver.pull_request_version_id
    else:
        _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
            pull_request_id)

    pull_request_display_obj = PullRequest.get_pr_display_object(
        pull_request_obj, _org_pull_request_obj)

    return _org_pull_request_obj, pull_request_obj, \
        pull_request_display_obj, at_version
444
444
def create(self, created_by, source_repo, source_ref, target_repo,
           target_ref, revisions, reviewers, title, description=None,
           reviewer_data=None, translator=None, auth_user=None):
    """
    Create a new pull request together with its reviewer entries.

    :param created_by: creating user, resolved via ``self._get_user``
    :param source_repo: source repository, resolved via ``self._get_repo``
    :param source_ref: source ref string stored on the pull request
    :param target_repo: target repository, resolved via ``self._get_repo``
    :param target_ref: target ref string stored on the pull request
    :param revisions: list of commit ids contained in the pull request
    :param reviewers: iterable of ``(user_id, reasons, mandatory, rules)``
        tuples; duplicate users are skipped
    :param title: pull request title
    :param description: optional description text
    :param reviewer_data: optional reviewer-rule metadata stored verbatim
    :param translator: translation callable; defaults to the one from the
        current request
    :param auth_user: user recorded in the audit log; defaults to the
        creating user
    :return: the created (and committed) `PullRequest`
    """
    translator = translator or get_current_request().translate

    created_by_user = self._get_user(created_by)
    auth_user = auth_user or created_by_user
    source_repo = self._get_repo(source_repo)
    target_repo = self._get_repo(target_repo)

    pull_request = PullRequest()
    pull_request.source_repo = source_repo
    pull_request.source_ref = source_ref
    pull_request.target_repo = target_repo
    pull_request.target_ref = target_ref
    pull_request.revisions = revisions
    pull_request.title = title
    pull_request.description = description
    pull_request.author = created_by_user
    pull_request.reviewer_data = reviewer_data

    # flush so the pull request gets its id before reviewers link to it
    Session().add(pull_request)
    Session().flush()

    reviewer_ids = set()
    # members / reviewers
    for reviewer_object in reviewers:
        user_id, reasons, mandatory, rules = reviewer_object
        user = self._get_user(user_id)

        # skip duplicates
        if user.user_id in reviewer_ids:
            continue

        reviewer_ids.add(user.user_id)

        reviewer = PullRequestReviewers()
        reviewer.user = user
        reviewer.pull_request = pull_request
        reviewer.reasons = reasons
        reviewer.mandatory = mandatory

        # NOTE(marcink): pick only first rule for now
        rule_id = rules[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            review_group = rule.user_group_vote_rule()
            if review_group:
                # NOTE(marcink):
                # again, can be that user is member of more,
                # but we pick the first same, as default reviewers algo
                review_group = review_group[0]

                rule_data = {
                    'rule_name':
                        rule.review_rule_name,
                    'rule_user_group_entry_id':
                        review_group.repo_review_rule_users_group_id,
                    'rule_user_group_name':
                        review_group.users_group.users_group_name,
                    'rule_user_group_members':
                        [x.user.username for x in review_group.users_group.members],
                }
                # e.g {'vote_rule': -1, 'mandatory': True}
                rule_data.update(review_group.rule_data())

                reviewer.rule_data = rule_data

        Session().add(reviewer)
        Session().flush()

    # Set approval status to "Under Review" for all commits which are
    # part of this pull request.
    ChangesetStatusModel().set_status(
        repo=target_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=pull_request
    )
    # we commit early at this point. This has to do with a fact
    # that before queries do some row-locking. And because of that
    # we need to commit and finish transation before below validate call
    # that for large repos could be long resulting in long row locks
    Session().commit()

    # prepare workspace, and run initial merge simulation
    MergeCheck.validate(
        pull_request, user=created_by_user, translator=translator)

    self.notify_reviewers(pull_request, reviewer_ids)
    self._trigger_pull_request_hook(
        pull_request, created_by_user, 'create')

    creation_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.create', {'data': creation_data},
        auth_user, pull_request)

    return pull_request
544
544
def _trigger_pull_request_hook(self, pull_request, user, action):
    """
    Fire the log/notification hook matching ``action`` for a pull request.

    Unknown actions are silently ignored.
    """
    pull_request = self.__get_pull_request(pull_request)
    target_scm = pull_request.target_repo.scm_instance()

    # dispatch table instead of an if/elif ladder; one hook per action
    hook_for_action = {
        'create': hooks_utils.trigger_log_create_pull_request_hook,
        'merge': hooks_utils.trigger_log_merge_pull_request_hook,
        'close': hooks_utils.trigger_log_close_pull_request_hook,
        'review_status_change':
            hooks_utils.trigger_log_review_pull_request_hook,
        'update': hooks_utils.trigger_log_update_pull_request_hook,
    }
    trigger_hook = hook_for_action.get(action)
    if trigger_hook is None:
        # not an action we notify about
        return

    trigger_hook(
        username=user.username,
        repo_name=pull_request.target_repo.repo_name,
        repo_alias=target_scm.alias,
        pull_request=pull_request)
566
566
567 def _get_commit_ids(self, pull_request):
567 def _get_commit_ids(self, pull_request):
568 """
568 """
569 Return the commit ids of the merged pull request.
569 Return the commit ids of the merged pull request.
570
570
571 This method is not dealing correctly yet with the lack of autoupdates
571 This method is not dealing correctly yet with the lack of autoupdates
572 nor with the implicit target updates.
572 nor with the implicit target updates.
573 For example: if a commit in the source repo is already in the target it
573 For example: if a commit in the source repo is already in the target it
574 will be reported anyways.
574 will be reported anyways.
575 """
575 """
576 merge_rev = pull_request.merge_rev
576 merge_rev = pull_request.merge_rev
577 if merge_rev is None:
577 if merge_rev is None:
578 raise ValueError('This pull request was not merged yet')
578 raise ValueError('This pull request was not merged yet')
579
579
580 commit_ids = list(pull_request.revisions)
580 commit_ids = list(pull_request.revisions)
581 if merge_rev not in commit_ids:
581 if merge_rev not in commit_ids:
582 commit_ids.append(merge_rev)
582 commit_ids.append(merge_rev)
583
583
584 return commit_ids
584 return commit_ids
585
585
def merge_repo(self, pull_request, user, extras):
    """
    Merge ``pull_request`` into its target repository.

    On a successful merge the pull request is commented on and closed,
    and an audit-log entry is written; on failure the pull request is
    left untouched.

    :param pull_request: the `PullRequest` to merge
    :param user: user performing the merge
    :param extras: hook-callback extras passed down to the merge
    :return: the merge state object returned by ``_merge_pull_request``
    """
    log.debug("Merging pull request %s", pull_request.pull_request_id)
    merge_state = self._merge_pull_request(pull_request, user, extras)
    if merge_state.executed:
        log.debug(
            "Merge was successful, updating the pull request comments.")
        self._comment_and_close_pr(pull_request, user, merge_state)

        self._log_audit_action(
            'repo.pull_request.merge',
            {'merge_state': merge_state.__dict__},
            user, pull_request)

    else:
        # logging.warn is a deprecated alias; use warning()
        log.warning("Merge failed, not updating the pull request.")
    return merge_state
602
602
def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
    """
    Perform the actual VCS merge of ``pull_request`` into its target.

    :param pull_request: the `PullRequest` to merge
    :param user: user whose name/email are recorded on the merge commit
    :param extras: hook-callback extras; injected into the VCS config as
        RC_SCM_DATA for the duration of the merge
    :param merge_msg: optional commit message; a default one referencing
        the pull request id and title is built otherwise
    :return: merge state object returned by ``target_vcs.merge``
    """
    target_vcs = pull_request.target_repo.scm_instance()
    source_vcs = pull_request.source_repo.scm_instance()
    # make sure the target ref points at a fresh commit before merging
    target_ref = self._refresh_reference(
        pull_request.target_ref_parts, target_vcs)

    message = merge_msg or (
        'Merge pull request #%(pr_id)s from '
        '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
        'pr_id': pull_request.pull_request_id,
        'source_repo': source_vcs.name,
        'source_ref_name': pull_request.source_ref_parts.name,
        'pr_title': pull_request.title
    }

    workspace_id = self._workspace_id(pull_request)
    # numeric repo id of the target; used together with workspace_id to
    # address the shadow repository for this merge
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)

    callback_daemon, extras = prepare_callback_daemon(
        extras, protocol=vcs_settings.HOOKS_PROTOCOL,
        use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

    with callback_daemon:
        # TODO: johbo: Implement a clean way to run a config_override
        # for a single call.
        target_vcs.config.set(
            'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
        merge_state = target_vcs.merge(
            repo_id, workspace_id, target_ref, source_vcs,
            pull_request.source_ref_parts,
            user_name=user.username, user_email=user.email,
            message=message, use_rebase=use_rebase,
            close_branch=close_branch)
    return merge_state
637
639
def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
    """
    After a successful merge: record the merge revision on the pull
    request, add a closing comment, invalidate the target repo caches
    and fire the 'merge' hook.

    :param pull_request: the merged `PullRequest`
    :param user: user who performed the merge; authors the comment
    :param merge_state: merge result carrying ``merge_ref.commit_id``
    :param close_msg: optional closing comment text
    """
    pull_request.merge_rev = merge_state.merge_ref.commit_id
    pull_request.updated_on = datetime.datetime.now()
    close_msg = close_msg or 'Pull request merged and closed'

    # closing_pr=True makes this comment also close the pull request
    CommentsModel().create(
        text=safe_unicode(close_msg),
        repo=pull_request.target_repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        closing_pr=True
    )

    Session().add(pull_request)
    Session().flush()
    # TODO: paris: replace invalidation with less radical solution
    ScmModel().mark_for_invalidation(
        pull_request.target_repo.repo_name)
    self._trigger_pull_request_hook(pull_request, user, 'merge')
659
661
def has_valid_update_type(self, pull_request):
    """
    Tell whether the pull request's source ref can be updated — only
    named refs (bookmarks, branches, tags) can be re-resolved.
    """
    # commit-id refs cannot move, so they are not updatable
    return pull_request.source_ref_parts.type in ('book', 'branch', 'tag')
663
665
def update_commits(self, pull_request):
    """
    Get the updated list of commits for the pull request
    and return the new pull request version and the list
    of commits processed by this update action

    :param pull_request: pull request (object or id) to update
    :return: `UpdateResponse` — ``executed`` is False with a
        `UpdateFailureReason` when nothing was done, otherwise carries
        the old PR, the new version and the computed commit changes
    """
    pull_request = self.__get_pull_request(pull_request)
    source_ref_type = pull_request.source_ref_parts.type
    source_ref_name = pull_request.source_ref_parts.name
    source_ref_id = pull_request.source_ref_parts.commit_id

    target_ref_type = pull_request.target_ref_parts.type
    target_ref_name = pull_request.target_ref_parts.name
    target_ref_id = pull_request.target_ref_parts.commit_id

    # only named refs (book/branch/tag) can be updated
    if not self.has_valid_update_type(pull_request):
        log.debug(
            "Skipping update of pull request %s due to ref type: %s",
            pull_request, source_ref_type)
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.WRONG_REF_TYPE,
            old=pull_request, new=None, changes=None,
            source_changed=False, target_changed=False)

    # source repo
    source_repo = pull_request.source_repo.scm_instance()
    try:
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_SOURCE_REF,
            old=pull_request, new=None, changes=None,
            source_changed=False, target_changed=False)

    # the named ref now resolves to a different commit => source moved
    source_changed = source_ref_id != source_commit.raw_id

    # target repo
    target_repo = pull_request.target_repo.scm_instance()
    try:
        target_commit = target_repo.get_commit(commit_id=target_ref_name)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_TARGET_REF,
            old=pull_request, new=None, changes=None,
            source_changed=False, target_changed=False)
    target_changed = target_ref_id != target_commit.raw_id

    if not (source_changed or target_changed):
        log.debug("Nothing changed in pull request %s", pull_request)
        # NOTE(review): source_changed/target_changed look swapped below,
        # but both are False on this path so the result is identical
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.NO_CHANGE,
            old=pull_request, new=None, changes=None,
            source_changed=target_changed, target_changed=source_changed)

    change_in_found = 'target repo' if target_changed else 'source repo'
    log.debug('Updating pull request because of change in %s detected',
              change_in_found)

    # Finally there is a need for an update, in case of source change
    # we create a new version, else just an update
    if source_changed:
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)
    else:
        # no snapshot; re-point the live PR at its latest version (if any)
        try:
            ver = pull_request.versions[-1]
        except IndexError:
            ver = None

        pull_request.pull_request_version_id = \
            ver.pull_request_version_id if ver else None
        pull_request_version = pull_request

    try:
        if target_ref_type in ('tag', 'branch', 'book'):
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_TARGET_REF,
            old=pull_request, new=None, changes=None,
            source_changed=source_changed, target_changed=target_changed)

    # re-compute commit ids
    old_commit_ids = pull_request.revisions
    pre_load = ["author", "branch", "date", "message"]
    commit_ranges = target_repo.compare(
        target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
        pre_load=pre_load)

    ancestor = target_repo.get_common_ancestor(
        target_commit.raw_id, source_commit.raw_id, source_repo)

    pull_request.source_ref = '%s:%s:%s' % (
        source_ref_type, source_ref_name, source_commit.raw_id)
    pull_request.target_ref = '%s:%s:%s' % (
        target_ref_type, target_ref_name, ancestor)

    pull_request.revisions = [
        commit.raw_id for commit in reversed(commit_ranges)]
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    new_commit_ids = pull_request.revisions

    old_diff_data, new_diff_data = self._generate_update_diffs(
        pull_request, pull_request_version)

    # calculate commit and file changes
    changes = self._calculate_commit_id_changes(
        old_commit_ids, new_commit_ids)
    file_changes = self._calculate_file_changes(
        old_diff_data, new_diff_data)

    # set comments as outdated if DIFFS changed
    CommentsModel().outdate_comments(
        pull_request, old_diff_data=old_diff_data,
        new_diff_data=new_diff_data)

    commit_changes = (changes.added or changes.removed)
    file_node_changes = (
        file_changes.added or file_changes.modified or file_changes.removed)
    pr_has_changes = commit_changes or file_node_changes

    # Add an automatic comment to the pull request, in case
    # anything has changed
    if pr_has_changes:
        update_comment = CommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

    log.debug(
        'Updated pull request %s, added_ids: %s, common_ids: %s, '
        'removed_ids: %s', pull_request.pull_request_id,
        changes.added, changes.common, changes.removed)
    log.debug(
        'Updated pull request with the following file changes: %s',
        file_changes)

    log.info(
        "Updated pull request %s from commit %s to commit %s, "
        "stored new version %s of this pull request.",
        pull_request.pull_request_id, source_ref_id,
        pull_request.source_ref_parts.commit_id,
        pull_request_version.pull_request_version_id)
    Session().commit()
    self._trigger_pull_request_hook(
        pull_request, pull_request.author, 'update')

    return UpdateResponse(
        executed=True, reason=UpdateFailureReason.NONE,
        old=pull_request, new=pull_request_version, changes=changes,
        source_changed=source_changed, target_changed=target_changed)
835
837
def _create_version_from_snapshot(self, pull_request):
    """
    Persist a `PullRequestVersion` snapshot copying the current state of
    ``pull_request`` (refs, revisions, merge state, reviewer data).

    :param pull_request: live `PullRequest` to snapshot
    :return: the flushed `PullRequestVersion` instance
    """
    version = PullRequestVersion()
    version.title = pull_request.title
    version.description = pull_request.description
    version.status = pull_request.status
    version.created_on = datetime.datetime.now()
    version.updated_on = pull_request.updated_on
    version.user_id = pull_request.user_id
    version.source_repo = pull_request.source_repo
    version.source_ref = pull_request.source_ref
    version.target_repo = pull_request.target_repo
    version.target_ref = pull_request.target_ref

    # carry over the cached merge-check state as well
    version._last_merge_source_rev = pull_request._last_merge_source_rev
    version._last_merge_target_rev = pull_request._last_merge_target_rev
    version.last_merge_status = pull_request.last_merge_status
    version.shadow_merge_ref = pull_request.shadow_merge_ref
    version.merge_rev = pull_request.merge_rev
    version.reviewer_data = pull_request.reviewer_data

    version.revisions = pull_request.revisions
    version.pull_request = pull_request
    Session().add(version)
    Session().flush()

    return version
862
864
def _generate_update_diffs(self, pull_request, pull_request_version):
    """
    Build prepared diff data for both the previous version of the pull
    request and its current state, so changes between them can be
    calculated and comments outdated.

    :param pull_request: current (updated) pull request
    :param pull_request_version: previous state (version snapshot, or the
        pull request itself when no new version was created)
    :return: tuple of (old `DiffProcessor`, new `DiffProcessor`), both
        already ``prepare()``-d
    """
    # widen context so inline comments keep enough surrounding lines
    diff_context = (
        self.DIFF_CONTEXT +
        CommentsModel.needed_extra_diff_context())

    # diff of the previous version
    source_repo = pull_request_version.source_repo
    source_ref_id = pull_request_version.source_ref_parts.commit_id
    target_ref_id = pull_request_version.target_ref_parts.commit_id
    old_diff = self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id, context=diff_context)

    # diff of the current state
    source_repo = pull_request.source_repo
    source_ref_id = pull_request.source_ref_parts.commit_id
    target_ref_id = pull_request.target_ref_parts.commit_id

    new_diff = self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id, context=diff_context)

    old_diff_data = diffs.DiffProcessor(old_diff)
    old_diff_data.prepare()
    new_diff_data = diffs.DiffProcessor(new_diff)
    new_diff_data.prepare()

    return old_diff_data, new_diff_data
888
890
def _link_comments_to_version(self, pull_request_version):
    """
    Attach every not-yet-versioned comment of the pull request to the
    given version.

    :param pull_request_version: The `PullRequestVersion` to which
        the comments shall be linked.
    """
    pull_request = pull_request_version.pull_request
    unlinked_comments = ChangesetComment.query()\
        .filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            ChangesetComment.pull_request_version == None)\
        .order_by(ChangesetComment.comment_id.asc())

    # TODO: johbo: Find out why this breaks if it is done in a bulk
    # operation.
    for unlinked in unlinked_comments:
        unlinked.pull_request_version_id = (
            pull_request_version.pull_request_version_id)
        Session().add(unlinked)
914
916
def _calculate_commit_id_changes(self, old_ids, new_ids):
    """
    Classify the difference between two commit-id sequences.

    :param old_ids: commit ids of the previous PR state
    :param new_ids: commit ids of the current PR state
    :return: ``ChangeTuple(added, common, removed, total)``; each list
        preserves the ordering of its input sequence.
    """
    # use set lookups instead of repeated list scans (was O(n*m));
    # the comprehensions keep the stable input ordering
    old_lookup = set(old_ids)
    new_lookup = set(new_ids)
    added = [x for x in new_ids if x not in old_lookup]
    common = [x for x in new_ids if x in old_lookup]
    removed = [x for x in old_ids if x not in new_lookup]
    total = new_ids
    return ChangeTuple(added, common, removed, total)
921
923
def _calculate_file_changes(self, old_diff_data, new_diff_data):
    """
    Compare two prepared diffs per file and report which files were
    added, modified or removed between the old and the new diff.
    """
    # filename -> md5 of its raw diff, for the old version
    old_hashes = OrderedDict(
        (entry['filename'], md5_safe(entry['raw_diff']))
        for entry in old_diff_data.parsed_diff)

    added_files = []
    modified_files = []
    removed_files = []
    for entry in new_diff_data.parsed_diff:
        filename = entry['filename']

        previous_hash = old_hashes.get(filename)
        if not previous_hash:
            # file is not present in old diff, means it's added
            added_files.append(filename)
            continue

        if md5_safe(entry['raw_diff']) != previous_hash:
            modified_files.append(filename)
        # drop the file from old, since we have seen it already
        del old_hashes[filename]

    # removed files are those present in old, but not in NEW; since we
    # dropped every old file that showed up in the new diff, whatever is
    # left over must have been removed
    removed_files.extend(old_hashes.keys())

    return FileChangeTuple(added_files, modified_files, removed_files)
951
953
def _render_update_message(self, changes, file_changes):
    """
    Render the "pull request updated" message using the
    DEFAULT_COMMENTS_RENDERER (RST renderer), so it always looks the
    same regardless of which default renderer the system is using.

    :param changes: changes named tuple
    :param file_changes: file changes named tuple
    """
    under_review = ChangesetStatus.get_status_lbl(
        ChangesetStatus.STATUS_UNDER_REVIEW)

    all_changed_files = (
        file_changes.added + file_changes.modified + file_changes.removed)

    template_vars = {
        'under_review_label': under_review,
        'added_commits': changes.added,
        'removed_commits': changes.removed,
        'changed_files': all_changed_files,
        'added_files': file_changes.added,
        'modified_files': file_changes.modified,
        'removed_files': file_changes.removed,
    }
    return RstTemplateRenderer().render(
        'pull_request_update.mako', **template_vars)
979
981
def edit(self, pull_request, title, description, user):
    """
    Update title/description of an open pull request and audit-log the
    change. A falsy title keeps the current one; the description is
    always overwritten.

    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    # snapshot the state before mutation for the audit entry
    old_data = pull_request.get_api_data(with_merge_state=False)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')
    if title:
        pull_request.title = title
    pull_request.description = description
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    self._log_audit_action(
        'repo.pull_request.edit', {'old_data': old_data},
        user, pull_request)
993
995
def update_reviewers(self, pull_request, reviewer_data, user):
    """
    Update the reviewers in the pull request

    :param pull_request: the pr to update
    :param reviewer_data: list of tuples
        [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
    :param user: acting user, recorded in the audit log
    :return: tuple of (ids_to_add, ids_to_remove) user-id sets
    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')

    # normalize incoming data into {user_id: {'reasons': .., 'mandatory': ..}}
    reviewers = {}
    for user_id, reasons, mandatory, rules in reviewer_data:
        if isinstance(user_id, (int, basestring)):
            user_id = self._get_user(user_id).user_id
        reviewers[user_id] = {
            'reasons': reasons, 'mandatory': mandatory}

    reviewers_ids = set(reviewers.keys())
    current_reviewers = PullRequestReviewers.query()\
        .filter(PullRequestReviewers.pull_request ==
                pull_request).all()
    current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

    ids_to_add = reviewers_ids.difference(current_reviewers_ids)
    ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

    log.debug("Adding %s reviewers", ids_to_add)
    log.debug("Removing %s reviewers", ids_to_remove)
    changed = False
    for uid in ids_to_add:
        changed = True
        _usr = self._get_user(uid)
        reviewer = PullRequestReviewers()
        reviewer.user = _usr
        reviewer.pull_request = pull_request
        reviewer.reasons = reviewers[uid]['reasons']
        # NOTE(marcink): mandatory shouldn't be changed now
        # reviewer.mandatory = reviewers[uid]['reasons']
        Session().add(reviewer)
        self._log_audit_action(
            'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
            user, pull_request)

    for uid in ids_to_remove:
        changed = True
        # FIX: this query result used to rebind the name `reviewers`,
        # shadowing the reviewer-data dict built above; use a distinct name
        reviewers_to_delete = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.user_id == uid,
                    PullRequestReviewers.pull_request == pull_request)\
            .all()
        # use .all() in case we accidentally added the same person twice
        # this CAN happen due to the lack of DB checks
        for obj in reviewers_to_delete:
            old_data = obj.get_dict()
            Session().delete(obj)
            self._log_audit_action(
                'repo.pull_request.reviewer.delete',
                {'old_data': old_data}, user, pull_request)

    if changed:
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)

    self.notify_reviewers(pull_request, ids_to_add)
    return ids_to_add, ids_to_remove
1060
1062
def get_url(self, pull_request, request=None, permalink=False):
    """
    Return the URL of a pull request. A permalink uses the global
    pull-request route, otherwise the repo-scoped show route is used.
    """
    request = request or get_current_request()

    if permalink:
        return request.route_url(
            'pull_requests_global',
            pull_request_id=pull_request.pull_request_id,)

    return request.route_url(
        'pullrequest_show',
        repo_name=safe_str(pull_request.target_repo.repo_name),
        pull_request_id=pull_request.pull_request_id,)
1073
1075
def get_shadow_clone_url(self, pull_request, request=None):
    """
    Returns qualified url pointing to the shadow repository. If this pull
    request is closed there is no shadow repository and ``None`` will be
    returned.
    """
    if pull_request.is_closed():
        return None

    pr_url = urllib.unquote(self.get_url(pull_request, request=request))
    return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1085
1087
def notify_reviewers(self, pull_request, reviewers_ids):
    """
    Create in-app notifications and emails for the given reviewer user
    ids. A no-op when `reviewers_ids` is empty.
    """
    if not reviewers_ids:
        return

    pull_request_obj = pull_request
    # the recipients are exactly the newly added reviewers
    recipients = reviewers_ids
    notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

    pr_source_repo = pull_request_obj.source_repo
    pr_target_repo = pull_request_obj.target_repo

    pr_url = h.route_url(
        'pullrequest_show',
        repo_name=pr_target_repo.repo_name,
        pull_request_id=pull_request_obj.pull_request_id,)

    # set some variables for email notification
    pr_target_repo_url = h.route_url(
        'repo_summary', repo_name=pr_target_repo.repo_name)
    pr_source_repo_url = h.route_url(
        'repo_summary', repo_name=pr_source_repo.repo_name)

    # (commit_id, message) pairs for every commit in the pull request
    pull_request_commits = [
        (commit.raw_id, commit.message)
        for commit in map(pr_source_repo.get_commit, pull_request.revisions)]

    kwargs = {
        'user': pull_request.author,
        'pull_request': pull_request_obj,
        'pull_request_commits': pull_request_commits,

        'pull_request_target_repo': pr_target_repo,
        'pull_request_target_repo_url': pr_target_repo_url,

        'pull_request_source_repo': pr_source_repo,
        'pull_request_source_repo_url': pr_source_repo_url,

        'pull_request_url': pr_url,
    }

    # pre-generate the subject for notification itself
    (subject,
     _h, _e,  # we don't care about those
     body_plaintext) = EmailNotificationModel().render_email(
        notification_type, **kwargs)

    # create notification objects, and emails
    NotificationModel().create(
        created_by=pull_request.author,
        notification_subject=subject,
        notification_body=body_plaintext,
        notification_type=notification_type,
        recipients=recipients,
        email_kwargs=kwargs,
    )
1144
1146
def delete(self, pull_request, user):
    """
    Permanently remove a pull request (merge workspace included) and
    audit-log the deletion with the data it had before removal.
    """
    pull_request = self.__get_pull_request(pull_request)
    # capture state before deletion, for the audit trail
    old_data = pull_request.get_api_data(with_merge_state=False)
    self._cleanup_merge_workspace(pull_request)
    self._log_audit_action(
        'repo.pull_request.delete', {'old_data': old_data},
        user, pull_request)
    Session().delete(pull_request)
1153
1155
def close_pull_request(self, pull_request, user):
    """
    Mark a pull request as closed, drop its merge workspace, fire the
    'close' hook and write an audit entry.
    """
    pull_request = self.__get_pull_request(pull_request)
    self._cleanup_merge_workspace(pull_request)
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    self._trigger_pull_request_hook(
        pull_request, pull_request.author, 'close')

    pr_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1166
1168
def close_pull_request_with_comment(
        self, pull_request, user, repo, message=None):
    """
    Close a pull request while leaving a status-changing comment.

    The final status is approved only when the calculated review status
    is approved (voting consent); otherwise the PR is closed as
    rejected.

    :return: tuple of (created comment, applied status)
    """
    pull_request_review_status = pull_request.calculated_review_status()

    if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
        # approved only if we have voting consent
        status = ChangesetStatus.STATUS_APPROVED
    else:
        status = ChangesetStatus.STATUS_REJECTED
    status_lbl = ChangesetStatus.get_status_lbl(status)

    default_message = (
        'Closing with status change {transition_icon} {status}.'
    ).format(transition_icon='>', status=status_lbl)
    text = message or default_message

    # create a comment, and link it to new status
    comment = CommentsModel().create(
        text=text,
        repo=repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        status_change=status_lbl,
        status_change_type=status,
        closing_pr=True
    )

    # calculate old status before we change it
    old_calculated_status = pull_request.calculated_review_status()
    ChangesetStatusModel().set_status(
        repo.repo_id,
        status,
        user.user_id,
        comment=comment,
        pull_request=pull_request.pull_request_id
    )

    Session().flush()
    events.trigger(events.PullRequestCommentEvent(pull_request, comment))
    # we now calculate the status of pull request again, and based on that
    # calculation trigger status change. This might happen in cases
    # that non-reviewer admin closes a pr, which means his vote doesn't
    # change the status, while if he's a reviewer this might change it.
    calculated_status = pull_request.calculated_review_status()
    if old_calculated_status != calculated_status:
        self._trigger_pull_request_hook(
            pull_request, user, 'review_status_change')

    # finally close the PR
    PullRequestModel().close_pull_request(
        pull_request.pull_request_id, user)

    return comment, status
1221
1223
def merge_status(self, pull_request, translator=None,
                 force_shadow_repo_refresh=False):
    """
    Return a ``(possible, message)`` tuple describing whether the pull
    request can currently be merged server-side.
    """
    _ = translator or get_current_request().translate

    # cheap checks first: feature flag and PR state
    if not self._is_merge_enabled(pull_request):
        return False, _('Server-side pull request merging is disabled.')
    if pull_request.is_closed():
        return False, _('This pull request is closed.')

    merge_possible, msg = self._check_repo_requirements(
        target=pull_request.target_repo, source=pull_request.source_repo,
        translator=_)
    if not merge_possible:
        return merge_possible, msg

    try:
        resp = self._try_merge(
            pull_request,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        log.debug("Merge response: %s", resp)
        return resp.possible, self.merge_status_message(
            resp.failure_reason)
    except NotImplementedError:
        return False, _('Pull request merging is not supported.')
1247
1249
1248 def _check_repo_requirements(self, target, source, translator):
1250 def _check_repo_requirements(self, target, source, translator):
1249 """
1251 """
1250 Check if `target` and `source` have compatible requirements.
1252 Check if `target` and `source` have compatible requirements.
1251
1253
1252 Currently this is just checking for largefiles.
1254 Currently this is just checking for largefiles.
1253 """
1255 """
1254 _ = translator
1256 _ = translator
1255 target_has_largefiles = self._has_largefiles(target)
1257 target_has_largefiles = self._has_largefiles(target)
1256 source_has_largefiles = self._has_largefiles(source)
1258 source_has_largefiles = self._has_largefiles(source)
1257 merge_possible = True
1259 merge_possible = True
1258 message = u''
1260 message = u''
1259
1261
1260 if target_has_largefiles != source_has_largefiles:
1262 if target_has_largefiles != source_has_largefiles:
1261 merge_possible = False
1263 merge_possible = False
1262 if source_has_largefiles:
1264 if source_has_largefiles:
1263 message = _(
1265 message = _(
1264 'Target repository large files support is disabled.')
1266 'Target repository large files support is disabled.')
1265 else:
1267 else:
1266 message = _(
1268 message = _(
1267 'Source repository large files support is disabled.')
1269 'Source repository large files support is disabled.')
1268
1270
1269 return merge_possible, message
1271 return merge_possible, message
1270
1272
def _has_largefiles(self, repo):
    """Truthy when the repo has the largefiles extension active."""
    settings = VcsSettingsModel(repo=repo).get_ui_settings(
        'extensions', 'largefiles')
    return settings and settings[0].active
1275
1277
def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
    """
    Try to merge the pull request and return the merge status.
    """
    log.debug(
        "Trying out if the pull request %s can be merged. Force_refresh=%s",
        pull_request.pull_request_id, force_shadow_repo_refresh)
    target_vcs = pull_request.target_repo.scm_instance()

    # Refresh the target reference.
    try:
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)
    except CommitDoesNotExistError:
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF)

    target_locked = pull_request.target_repo.locked
    if target_locked and target_locked[0]:
        log.debug("The target repository is locked.")
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_LOCKED)

    if force_shadow_repo_refresh or self._needs_merge_state_refresh(
            pull_request, target_ref):
        log.debug("Refreshing the merge status of the repository.")
        return self._refresh_merge_state(
            pull_request, target_vcs, target_ref)

    # cached merge state is still valid, answer from the stored status
    possible = pull_request.last_merge_status == MergeFailureReason.NONE
    return MergeResponse(
        possible, False, None, pull_request.last_merge_status)
1311
1313
1312 def _refresh_reference(self, reference, vcs_repository):
1314 def _refresh_reference(self, reference, vcs_repository):
1313 if reference.type in ('branch', 'book'):
1315 if reference.type in ('branch', 'book'):
1314 name_or_id = reference.name
1316 name_or_id = reference.name
1315 else:
1317 else:
1316 name_or_id = reference.commit_id
1318 name_or_id = reference.commit_id
1317 refreshed_commit = vcs_repository.get_commit(name_or_id)
1319 refreshed_commit = vcs_repository.get_commit(name_or_id)
1318 refreshed_reference = Reference(
1320 refreshed_reference = Reference(
1319 reference.type, reference.name, refreshed_commit.raw_id)
1321 reference.type, reference.name, refreshed_commit.raw_id)
1320 return refreshed_reference
1322 return refreshed_reference
1321
1323
1322 def _needs_merge_state_refresh(self, pull_request, target_reference):
1324 def _needs_merge_state_refresh(self, pull_request, target_reference):
1323 return not(
1325 return not(
1324 pull_request.revisions and
1326 pull_request.revisions and
1325 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1327 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1326 target_reference.commit_id == pull_request._last_merge_target_rev)
1328 target_reference.commit_id == pull_request._last_merge_target_rev)
1327
1329
1328 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1330 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1329 workspace_id = self._workspace_id(pull_request)
1331 workspace_id = self._workspace_id(pull_request)
1330 source_vcs = pull_request.source_repo.scm_instance()
1332 source_vcs = pull_request.source_repo.scm_instance()
1333 repo_id = pull_request.target_repo.repo_id
1331 use_rebase = self._use_rebase_for_merging(pull_request)
1334 use_rebase = self._use_rebase_for_merging(pull_request)
1332 close_branch = self._close_branch_before_merging(pull_request)
1335 close_branch = self._close_branch_before_merging(pull_request)
1333 merge_state = target_vcs.merge(
1336 merge_state = target_vcs.merge(
1337 repo_id, workspace_id,
1334 target_reference, source_vcs, pull_request.source_ref_parts,
1338 target_reference, source_vcs, pull_request.source_ref_parts,
1335 workspace_id, dry_run=True, use_rebase=use_rebase,
1339 dry_run=True, use_rebase=use_rebase,
1336 close_branch=close_branch)
1340 close_branch=close_branch)
1337
1341
1338 # Do not store the response if there was an unknown error.
1342 # Do not store the response if there was an unknown error.
1339 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1343 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1340 pull_request._last_merge_source_rev = \
1344 pull_request._last_merge_source_rev = \
1341 pull_request.source_ref_parts.commit_id
1345 pull_request.source_ref_parts.commit_id
1342 pull_request._last_merge_target_rev = target_reference.commit_id
1346 pull_request._last_merge_target_rev = target_reference.commit_id
1343 pull_request.last_merge_status = merge_state.failure_reason
1347 pull_request.last_merge_status = merge_state.failure_reason
1344 pull_request.shadow_merge_ref = merge_state.merge_ref
1348 pull_request.shadow_merge_ref = merge_state.merge_ref
1345 Session().add(pull_request)
1349 Session().add(pull_request)
1346 Session().commit()
1350 Session().commit()
1347
1351
1348 return merge_state
1352 return merge_state
1349
1353
1350 def _workspace_id(self, pull_request):
1354 def _workspace_id(self, pull_request):
1351 workspace_id = 'pr-%s' % pull_request.pull_request_id
1355 workspace_id = 'pr-%s' % pull_request.pull_request_id
1352 return workspace_id
1356 return workspace_id
1353
1357
1354 def merge_status_message(self, status_code):
1358 def merge_status_message(self, status_code):
1355 """
1359 """
1356 Return a human friendly error message for the given merge status code.
1360 Return a human friendly error message for the given merge status code.
1357 """
1361 """
1358 return self.MERGE_STATUS_MESSAGES[status_code]
1362 return self.MERGE_STATUS_MESSAGES[status_code]
1359
1363
1360 def generate_repo_data(self, repo, commit_id=None, branch=None,
1364 def generate_repo_data(self, repo, commit_id=None, branch=None,
1361 bookmark=None, translator=None):
1365 bookmark=None, translator=None):
1362 from rhodecode.model.repo import RepoModel
1366 from rhodecode.model.repo import RepoModel
1363
1367
1364 all_refs, selected_ref = \
1368 all_refs, selected_ref = \
1365 self._get_repo_pullrequest_sources(
1369 self._get_repo_pullrequest_sources(
1366 repo.scm_instance(), commit_id=commit_id,
1370 repo.scm_instance(), commit_id=commit_id,
1367 branch=branch, bookmark=bookmark, translator=translator)
1371 branch=branch, bookmark=bookmark, translator=translator)
1368
1372
1369 refs_select2 = []
1373 refs_select2 = []
1370 for element in all_refs:
1374 for element in all_refs:
1371 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1375 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1372 refs_select2.append({'text': element[1], 'children': children})
1376 refs_select2.append({'text': element[1], 'children': children})
1373
1377
1374 return {
1378 return {
1375 'user': {
1379 'user': {
1376 'user_id': repo.user.user_id,
1380 'user_id': repo.user.user_id,
1377 'username': repo.user.username,
1381 'username': repo.user.username,
1378 'firstname': repo.user.first_name,
1382 'firstname': repo.user.first_name,
1379 'lastname': repo.user.last_name,
1383 'lastname': repo.user.last_name,
1380 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1384 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1381 },
1385 },
1382 'name': repo.repo_name,
1386 'name': repo.repo_name,
1383 'link': RepoModel().get_url(repo),
1387 'link': RepoModel().get_url(repo),
1384 'description': h.chop_at_smart(repo.description_safe, '\n'),
1388 'description': h.chop_at_smart(repo.description_safe, '\n'),
1385 'refs': {
1389 'refs': {
1386 'all_refs': all_refs,
1390 'all_refs': all_refs,
1387 'selected_ref': selected_ref,
1391 'selected_ref': selected_ref,
1388 'select2_refs': refs_select2
1392 'select2_refs': refs_select2
1389 }
1393 }
1390 }
1394 }
1391
1395
1392 def generate_pullrequest_title(self, source, source_ref, target):
1396 def generate_pullrequest_title(self, source, source_ref, target):
1393 return u'{source}#{at_ref} to {target}'.format(
1397 return u'{source}#{at_ref} to {target}'.format(
1394 source=source,
1398 source=source,
1395 at_ref=source_ref,
1399 at_ref=source_ref,
1396 target=target,
1400 target=target,
1397 )
1401 )
1398
1402
1399 def _cleanup_merge_workspace(self, pull_request):
1403 def _cleanup_merge_workspace(self, pull_request):
1400 # Merging related cleanup
1404 # Merging related cleanup
1405 repo_id = pull_request.target_repo.repo_id
1401 target_scm = pull_request.target_repo.scm_instance()
1406 target_scm = pull_request.target_repo.scm_instance()
1402 workspace_id = 'pr-%s' % pull_request.pull_request_id
1407 workspace_id = self._workspace_id(pull_request)
1403
1408
1404 try:
1409 try:
1405 target_scm.cleanup_merge_workspace(workspace_id)
1410 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1406 except NotImplementedError:
1411 except NotImplementedError:
1407 pass
1412 pass
1408
1413
1409 def _get_repo_pullrequest_sources(
1414 def _get_repo_pullrequest_sources(
1410 self, repo, commit_id=None, branch=None, bookmark=None,
1415 self, repo, commit_id=None, branch=None, bookmark=None,
1411 translator=None):
1416 translator=None):
1412 """
1417 """
1413 Return a structure with repo's interesting commits, suitable for
1418 Return a structure with repo's interesting commits, suitable for
1414 the selectors in pullrequest controller
1419 the selectors in pullrequest controller
1415
1420
1416 :param commit_id: a commit that must be in the list somehow
1421 :param commit_id: a commit that must be in the list somehow
1417 and selected by default
1422 and selected by default
1418 :param branch: a branch that must be in the list and selected
1423 :param branch: a branch that must be in the list and selected
1419 by default - even if closed
1424 by default - even if closed
1420 :param bookmark: a bookmark that must be in the list and selected
1425 :param bookmark: a bookmark that must be in the list and selected
1421 """
1426 """
1422 _ = translator or get_current_request().translate
1427 _ = translator or get_current_request().translate
1423
1428
1424 commit_id = safe_str(commit_id) if commit_id else None
1429 commit_id = safe_str(commit_id) if commit_id else None
1425 branch = safe_str(branch) if branch else None
1430 branch = safe_str(branch) if branch else None
1426 bookmark = safe_str(bookmark) if bookmark else None
1431 bookmark = safe_str(bookmark) if bookmark else None
1427
1432
1428 selected = None
1433 selected = None
1429
1434
1430 # order matters: first source that has commit_id in it will be selected
1435 # order matters: first source that has commit_id in it will be selected
1431 sources = []
1436 sources = []
1432 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1437 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1433 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1438 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1434
1439
1435 if commit_id:
1440 if commit_id:
1436 ref_commit = (h.short_id(commit_id), commit_id)
1441 ref_commit = (h.short_id(commit_id), commit_id)
1437 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1442 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1438
1443
1439 sources.append(
1444 sources.append(
1440 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1445 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1441 )
1446 )
1442
1447
1443 groups = []
1448 groups = []
1444 for group_key, ref_list, group_name, match in sources:
1449 for group_key, ref_list, group_name, match in sources:
1445 group_refs = []
1450 group_refs = []
1446 for ref_name, ref_id in ref_list:
1451 for ref_name, ref_id in ref_list:
1447 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1452 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1448 group_refs.append((ref_key, ref_name))
1453 group_refs.append((ref_key, ref_name))
1449
1454
1450 if not selected:
1455 if not selected:
1451 if set([commit_id, match]) & set([ref_id, ref_name]):
1456 if set([commit_id, match]) & set([ref_id, ref_name]):
1452 selected = ref_key
1457 selected = ref_key
1453
1458
1454 if group_refs:
1459 if group_refs:
1455 groups.append((group_refs, group_name))
1460 groups.append((group_refs, group_name))
1456
1461
1457 if not selected:
1462 if not selected:
1458 ref = commit_id or branch or bookmark
1463 ref = commit_id or branch or bookmark
1459 if ref:
1464 if ref:
1460 raise CommitDoesNotExistError(
1465 raise CommitDoesNotExistError(
1461 'No commit refs could be found matching: %s' % ref)
1466 'No commit refs could be found matching: %s' % ref)
1462 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1467 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1463 selected = 'branch:%s:%s' % (
1468 selected = 'branch:%s:%s' % (
1464 repo.DEFAULT_BRANCH_NAME,
1469 repo.DEFAULT_BRANCH_NAME,
1465 repo.branches[repo.DEFAULT_BRANCH_NAME]
1470 repo.branches[repo.DEFAULT_BRANCH_NAME]
1466 )
1471 )
1467 elif repo.commit_ids:
1472 elif repo.commit_ids:
1468 # make the user select in this case
1473 # make the user select in this case
1469 selected = None
1474 selected = None
1470 else:
1475 else:
1471 raise EmptyRepositoryError()
1476 raise EmptyRepositoryError()
1472 return groups, selected
1477 return groups, selected
1473
1478
1474 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1479 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1475 return self._get_diff_from_pr_or_version(
1480 return self._get_diff_from_pr_or_version(
1476 source_repo, source_ref_id, target_ref_id, context=context)
1481 source_repo, source_ref_id, target_ref_id, context=context)
1477
1482
1478 def _get_diff_from_pr_or_version(
1483 def _get_diff_from_pr_or_version(
1479 self, source_repo, source_ref_id, target_ref_id, context):
1484 self, source_repo, source_ref_id, target_ref_id, context):
1480 target_commit = source_repo.get_commit(
1485 target_commit = source_repo.get_commit(
1481 commit_id=safe_str(target_ref_id))
1486 commit_id=safe_str(target_ref_id))
1482 source_commit = source_repo.get_commit(
1487 source_commit = source_repo.get_commit(
1483 commit_id=safe_str(source_ref_id))
1488 commit_id=safe_str(source_ref_id))
1484 if isinstance(source_repo, Repository):
1489 if isinstance(source_repo, Repository):
1485 vcs_repo = source_repo.scm_instance()
1490 vcs_repo = source_repo.scm_instance()
1486 else:
1491 else:
1487 vcs_repo = source_repo
1492 vcs_repo = source_repo
1488
1493
1489 # TODO: johbo: In the context of an update, we cannot reach
1494 # TODO: johbo: In the context of an update, we cannot reach
1490 # the old commit anymore with our normal mechanisms. It needs
1495 # the old commit anymore with our normal mechanisms. It needs
1491 # some sort of special support in the vcs layer to avoid this
1496 # some sort of special support in the vcs layer to avoid this
1492 # workaround.
1497 # workaround.
1493 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1498 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1494 vcs_repo.alias == 'git'):
1499 vcs_repo.alias == 'git'):
1495 source_commit.raw_id = safe_str(source_ref_id)
1500 source_commit.raw_id = safe_str(source_ref_id)
1496
1501
1497 log.debug('calculating diff between '
1502 log.debug('calculating diff between '
1498 'source_ref:%s and target_ref:%s for repo `%s`',
1503 'source_ref:%s and target_ref:%s for repo `%s`',
1499 target_ref_id, source_ref_id,
1504 target_ref_id, source_ref_id,
1500 safe_unicode(vcs_repo.path))
1505 safe_unicode(vcs_repo.path))
1501
1506
1502 vcs_diff = vcs_repo.get_diff(
1507 vcs_diff = vcs_repo.get_diff(
1503 commit1=target_commit, commit2=source_commit, context=context)
1508 commit1=target_commit, commit2=source_commit, context=context)
1504 return vcs_diff
1509 return vcs_diff
1505
1510
1506 def _is_merge_enabled(self, pull_request):
1511 def _is_merge_enabled(self, pull_request):
1507 return self._get_general_setting(
1512 return self._get_general_setting(
1508 pull_request, 'rhodecode_pr_merge_enabled')
1513 pull_request, 'rhodecode_pr_merge_enabled')
1509
1514
1510 def _use_rebase_for_merging(self, pull_request):
1515 def _use_rebase_for_merging(self, pull_request):
1511 repo_type = pull_request.target_repo.repo_type
1516 repo_type = pull_request.target_repo.repo_type
1512 if repo_type == 'hg':
1517 if repo_type == 'hg':
1513 return self._get_general_setting(
1518 return self._get_general_setting(
1514 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1519 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1515 elif repo_type == 'git':
1520 elif repo_type == 'git':
1516 return self._get_general_setting(
1521 return self._get_general_setting(
1517 pull_request, 'rhodecode_git_use_rebase_for_merging')
1522 pull_request, 'rhodecode_git_use_rebase_for_merging')
1518
1523
1519 return False
1524 return False
1520
1525
1521 def _close_branch_before_merging(self, pull_request):
1526 def _close_branch_before_merging(self, pull_request):
1522 repo_type = pull_request.target_repo.repo_type
1527 repo_type = pull_request.target_repo.repo_type
1523 if repo_type == 'hg':
1528 if repo_type == 'hg':
1524 return self._get_general_setting(
1529 return self._get_general_setting(
1525 pull_request, 'rhodecode_hg_close_branch_before_merging')
1530 pull_request, 'rhodecode_hg_close_branch_before_merging')
1526 elif repo_type == 'git':
1531 elif repo_type == 'git':
1527 return self._get_general_setting(
1532 return self._get_general_setting(
1528 pull_request, 'rhodecode_git_close_branch_before_merging')
1533 pull_request, 'rhodecode_git_close_branch_before_merging')
1529
1534
1530 return False
1535 return False
1531
1536
1532 def _get_general_setting(self, pull_request, settings_key, default=False):
1537 def _get_general_setting(self, pull_request, settings_key, default=False):
1533 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1538 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1534 settings = settings_model.get_general_settings()
1539 settings = settings_model.get_general_settings()
1535 return settings.get(settings_key, default)
1540 return settings.get(settings_key, default)
1536
1541
1537 def _log_audit_action(self, action, action_data, user, pull_request):
1542 def _log_audit_action(self, action, action_data, user, pull_request):
1538 audit_logger.store(
1543 audit_logger.store(
1539 action=action,
1544 action=action,
1540 action_data=action_data,
1545 action_data=action_data,
1541 user=user,
1546 user=user,
1542 repo=pull_request.target_repo)
1547 repo=pull_request.target_repo)
1543
1548
1544 def get_reviewer_functions(self):
1549 def get_reviewer_functions(self):
1545 """
1550 """
1546 Fetches functions for validation and fetching default reviewers.
1551 Fetches functions for validation and fetching default reviewers.
1547 If available we use the EE package, else we fallback to CE
1552 If available we use the EE package, else we fallback to CE
1548 package functions
1553 package functions
1549 """
1554 """
1550 try:
1555 try:
1551 from rc_reviewers.utils import get_default_reviewers_data
1556 from rc_reviewers.utils import get_default_reviewers_data
1552 from rc_reviewers.utils import validate_default_reviewers
1557 from rc_reviewers.utils import validate_default_reviewers
1553 except ImportError:
1558 except ImportError:
1554 from rhodecode.apps.repository.utils import \
1559 from rhodecode.apps.repository.utils import \
1555 get_default_reviewers_data
1560 get_default_reviewers_data
1556 from rhodecode.apps.repository.utils import \
1561 from rhodecode.apps.repository.utils import \
1557 validate_default_reviewers
1562 validate_default_reviewers
1558
1563
1559 return get_default_reviewers_data, validate_default_reviewers
1564 return get_default_reviewers_data, validate_default_reviewers
1560
1565
1561
1566
1562 class MergeCheck(object):
1567 class MergeCheck(object):
1563 """
1568 """
1564 Perform Merge Checks and returns a check object which stores information
1569 Perform Merge Checks and returns a check object which stores information
1565 about merge errors, and merge conditions
1570 about merge errors, and merge conditions
1566 """
1571 """
1567 TODO_CHECK = 'todo'
1572 TODO_CHECK = 'todo'
1568 PERM_CHECK = 'perm'
1573 PERM_CHECK = 'perm'
1569 REVIEW_CHECK = 'review'
1574 REVIEW_CHECK = 'review'
1570 MERGE_CHECK = 'merge'
1575 MERGE_CHECK = 'merge'
1571
1576
1572 def __init__(self):
1577 def __init__(self):
1573 self.review_status = None
1578 self.review_status = None
1574 self.merge_possible = None
1579 self.merge_possible = None
1575 self.merge_msg = ''
1580 self.merge_msg = ''
1576 self.failed = None
1581 self.failed = None
1577 self.errors = []
1582 self.errors = []
1578 self.error_details = OrderedDict()
1583 self.error_details = OrderedDict()
1579
1584
1580 def push_error(self, error_type, message, error_key, details):
1585 def push_error(self, error_type, message, error_key, details):
1581 self.failed = True
1586 self.failed = True
1582 self.errors.append([error_type, message])
1587 self.errors.append([error_type, message])
1583 self.error_details[error_key] = dict(
1588 self.error_details[error_key] = dict(
1584 details=details,
1589 details=details,
1585 error_type=error_type,
1590 error_type=error_type,
1586 message=message
1591 message=message
1587 )
1592 )
1588
1593
1589 @classmethod
1594 @classmethod
1590 def validate(cls, pull_request, user, translator, fail_early=False,
1595 def validate(cls, pull_request, user, translator, fail_early=False,
1591 force_shadow_repo_refresh=False):
1596 force_shadow_repo_refresh=False):
1592 _ = translator
1597 _ = translator
1593 merge_check = cls()
1598 merge_check = cls()
1594
1599
1595 # permissions to merge
1600 # permissions to merge
1596 user_allowed_to_merge = PullRequestModel().check_user_merge(
1601 user_allowed_to_merge = PullRequestModel().check_user_merge(
1597 pull_request, user)
1602 pull_request, user)
1598 if not user_allowed_to_merge:
1603 if not user_allowed_to_merge:
1599 log.debug("MergeCheck: cannot merge, approval is pending.")
1604 log.debug("MergeCheck: cannot merge, approval is pending.")
1600
1605
1601 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1606 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1602 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1607 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1603 if fail_early:
1608 if fail_early:
1604 return merge_check
1609 return merge_check
1605
1610
1606 # review status, must be always present
1611 # review status, must be always present
1607 review_status = pull_request.calculated_review_status()
1612 review_status = pull_request.calculated_review_status()
1608 merge_check.review_status = review_status
1613 merge_check.review_status = review_status
1609
1614
1610 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1615 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1611 if not status_approved:
1616 if not status_approved:
1612 log.debug("MergeCheck: cannot merge, approval is pending.")
1617 log.debug("MergeCheck: cannot merge, approval is pending.")
1613
1618
1614 msg = _('Pull request reviewer approval is pending.')
1619 msg = _('Pull request reviewer approval is pending.')
1615
1620
1616 merge_check.push_error(
1621 merge_check.push_error(
1617 'warning', msg, cls.REVIEW_CHECK, review_status)
1622 'warning', msg, cls.REVIEW_CHECK, review_status)
1618
1623
1619 if fail_early:
1624 if fail_early:
1620 return merge_check
1625 return merge_check
1621
1626
1622 # left over TODOs
1627 # left over TODOs
1623 todos = CommentsModel().get_unresolved_todos(pull_request)
1628 todos = CommentsModel().get_unresolved_todos(pull_request)
1624 if todos:
1629 if todos:
1625 log.debug("MergeCheck: cannot merge, {} "
1630 log.debug("MergeCheck: cannot merge, {} "
1626 "unresolved todos left.".format(len(todos)))
1631 "unresolved todos left.".format(len(todos)))
1627
1632
1628 if len(todos) == 1:
1633 if len(todos) == 1:
1629 msg = _('Cannot merge, {} TODO still not resolved.').format(
1634 msg = _('Cannot merge, {} TODO still not resolved.').format(
1630 len(todos))
1635 len(todos))
1631 else:
1636 else:
1632 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1637 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1633 len(todos))
1638 len(todos))
1634
1639
1635 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1640 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1636
1641
1637 if fail_early:
1642 if fail_early:
1638 return merge_check
1643 return merge_check
1639
1644
1640 # merge possible, here is the filesystem simulation + shadow repo
1645 # merge possible, here is the filesystem simulation + shadow repo
1641 merge_status, msg = PullRequestModel().merge_status(
1646 merge_status, msg = PullRequestModel().merge_status(
1642 pull_request, translator=translator,
1647 pull_request, translator=translator,
1643 force_shadow_repo_refresh=force_shadow_repo_refresh)
1648 force_shadow_repo_refresh=force_shadow_repo_refresh)
1644 merge_check.merge_possible = merge_status
1649 merge_check.merge_possible = merge_status
1645 merge_check.merge_msg = msg
1650 merge_check.merge_msg = msg
1646 if not merge_status:
1651 if not merge_status:
1647 log.debug(
1652 log.debug(
1648 "MergeCheck: cannot merge, pull request merge not possible.")
1653 "MergeCheck: cannot merge, pull request merge not possible.")
1649 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1654 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1650
1655
1651 if fail_early:
1656 if fail_early:
1652 return merge_check
1657 return merge_check
1653
1658
1654 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1659 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1655 return merge_check
1660 return merge_check
1656
1661
1657 @classmethod
1662 @classmethod
1658 def get_merge_conditions(cls, pull_request, translator):
1663 def get_merge_conditions(cls, pull_request, translator):
1659 _ = translator
1664 _ = translator
1660 merge_details = {}
1665 merge_details = {}
1661
1666
1662 model = PullRequestModel()
1667 model = PullRequestModel()
1663 use_rebase = model._use_rebase_for_merging(pull_request)
1668 use_rebase = model._use_rebase_for_merging(pull_request)
1664
1669
1665 if use_rebase:
1670 if use_rebase:
1666 merge_details['merge_strategy'] = dict(
1671 merge_details['merge_strategy'] = dict(
1667 details={},
1672 details={},
1668 message=_('Merge strategy: rebase')
1673 message=_('Merge strategy: rebase')
1669 )
1674 )
1670 else:
1675 else:
1671 merge_details['merge_strategy'] = dict(
1676 merge_details['merge_strategy'] = dict(
1672 details={},
1677 details={},
1673 message=_('Merge strategy: explicit merge commit')
1678 message=_('Merge strategy: explicit merge commit')
1674 )
1679 )
1675
1680
1676 close_branch = model._close_branch_before_merging(pull_request)
1681 close_branch = model._close_branch_before_merging(pull_request)
1677 if close_branch:
1682 if close_branch:
1678 repo_type = pull_request.target_repo.repo_type
1683 repo_type = pull_request.target_repo.repo_type
1679 if repo_type == 'hg':
1684 if repo_type == 'hg':
1680 close_msg = _('Source branch will be closed after merge.')
1685 close_msg = _('Source branch will be closed after merge.')
1681 elif repo_type == 'git':
1686 elif repo_type == 'git':
1682 close_msg = _('Source branch will be deleted after merge.')
1687 close_msg = _('Source branch will be deleted after merge.')
1683
1688
1684 merge_details['close_branch'] = dict(
1689 merge_details['close_branch'] = dict(
1685 details={},
1690 details={},
1686 message=close_msg
1691 message=close_msg
1687 )
1692 )
1688
1693
1689 return merge_details
1694 return merge_details
1690
1695
1691 ChangeTuple = collections.namedtuple(
1696 ChangeTuple = collections.namedtuple(
1692 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1697 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1693
1698
1694 FileChangeTuple = collections.namedtuple(
1699 FileChangeTuple = collections.namedtuple(
1695 'FileChangeTuple', ['added', 'modified', 'removed'])
1700 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,236 +1,243 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import logging
23 import logging
24 import datetime
24 import datetime
25 import hashlib
25 import hashlib
26 import tempfile
26 import tempfile
27 from os.path import join as jn
27 from os.path import join as jn
28
28
29 from tempfile import _RandomNameSequence
29 from tempfile import _RandomNameSequence
30
30
31 import pytest
31 import pytest
32
32
33 from rhodecode.model.db import User
33 from rhodecode.model.db import User
34 from rhodecode.lib import auth
34 from rhodecode.lib import auth
35 from rhodecode.lib import helpers as h
35 from rhodecode.lib import helpers as h
36 from rhodecode.lib.helpers import flash, link_to
36 from rhodecode.lib.helpers import flash, link_to
37 from rhodecode.lib.utils2 import safe_str
37 from rhodecode.lib.utils2 import safe_str
38
38
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42 __all__ = [
42 __all__ = [
43 'get_new_dir', 'TestController',
43 'get_new_dir', 'TestController',
44 'link_to', 'clear_all_caches',
44 'link_to', 'clear_all_caches',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 ]
54 ]
55
55
56
56
57 # SOME GLOBALS FOR TESTS
57 # SOME GLOBALS FOR TESTS
58 TEST_DIR = tempfile.gettempdir()
58 TEST_DIR = tempfile.gettempdir()
59
59
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 TEST_USER_ADMIN_PASS = 'test12'
62 TEST_USER_ADMIN_PASS = 'test12'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64
64
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 TEST_USER_REGULAR_PASS = 'test12'
66 TEST_USER_REGULAR_PASS = 'test12'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68
68
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 TEST_USER_REGULAR2_PASS = 'test12'
70 TEST_USER_REGULAR2_PASS = 'test12'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72
72
73 HG_REPO = 'vcs_test_hg'
73 HG_REPO = 'vcs_test_hg'
74 GIT_REPO = 'vcs_test_git'
74 GIT_REPO = 'vcs_test_git'
75 SVN_REPO = 'vcs_test_svn'
75 SVN_REPO = 'vcs_test_svn'
76
76
77 NEW_HG_REPO = 'vcs_test_hg_new'
77 NEW_HG_REPO = 'vcs_test_hg_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
79
79
80 HG_FORK = 'vcs_test_hg_fork'
80 HG_FORK = 'vcs_test_hg_fork'
81 GIT_FORK = 'vcs_test_git_fork'
81 GIT_FORK = 'vcs_test_git_fork'
82
82
83 ## VCS
83 ## VCS
84 SCM_TESTS = ['hg', 'git']
84 SCM_TESTS = ['hg', 'git']
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86
86
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
90
90
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
94
94
95 TEST_REPO_PREFIX = 'vcs-test'
95 TEST_REPO_PREFIX = 'vcs-test'
96
96
97
97
98 def clear_all_caches():
98 def clear_all_caches():
99 from beaker.cache import cache_managers
99 from beaker.cache import cache_managers
100 for _cache in cache_managers.values():
100 for _cache in cache_managers.values():
101 _cache.clear()
101 _cache.clear()
102
102
103
103
104 def get_new_dir(title):
104 def get_new_dir(title):
105 """
105 """
106 Returns always new directory path.
106 Returns always new directory path.
107 """
107 """
108 from rhodecode.tests.vcs.utils import get_normalized_path
108 from rhodecode.tests.vcs.utils import get_normalized_path
109 name_parts = [TEST_REPO_PREFIX]
109 name_parts = [TEST_REPO_PREFIX]
110 if title:
110 if title:
111 name_parts.append(title)
111 name_parts.append(title)
112 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
112 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
113 name_parts.append(hex_str)
113 name_parts.append(hex_str)
114 name = '-'.join(name_parts)
114 name = '-'.join(name_parts)
115 path = os.path.join(TEST_DIR, name)
115 path = os.path.join(TEST_DIR, name)
116 return get_normalized_path(path)
116 return get_normalized_path(path)
117
117
118
118
119 def repo_id_generator(name):
120 numeric_hash = 0
121 for char in name:
122 numeric_hash += (ord(char))
123 return numeric_hash
124
125
119 @pytest.mark.usefixtures('app', 'index_location')
126 @pytest.mark.usefixtures('app', 'index_location')
120 class TestController(object):
127 class TestController(object):
121
128
122 maxDiff = None
129 maxDiff = None
123
130
124 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
131 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
125 password=TEST_USER_ADMIN_PASS):
132 password=TEST_USER_ADMIN_PASS):
126 self._logged_username = username
133 self._logged_username = username
127 self._session = login_user_session(self.app, username, password)
134 self._session = login_user_session(self.app, username, password)
128 self.csrf_token = auth.get_csrf_token(self._session)
135 self.csrf_token = auth.get_csrf_token(self._session)
129
136
130 return self._session['rhodecode_user']
137 return self._session['rhodecode_user']
131
138
132 def logout_user(self):
139 def logout_user(self):
133 logout_user_session(self.app, auth.get_csrf_token(self._session))
140 logout_user_session(self.app, auth.get_csrf_token(self._session))
134 self.csrf_token = None
141 self.csrf_token = None
135 self._logged_username = None
142 self._logged_username = None
136 self._session = None
143 self._session = None
137
144
138 def _get_logged_user(self):
145 def _get_logged_user(self):
139 return User.get_by_username(self._logged_username)
146 return User.get_by_username(self._logged_username)
140
147
141
148
142 def login_user_session(
149 def login_user_session(
143 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
150 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
144
151
145 response = app.post(
152 response = app.post(
146 h.route_path('login'),
153 h.route_path('login'),
147 {'username': username, 'password': password})
154 {'username': username, 'password': password})
148 if 'invalid user name' in response.body:
155 if 'invalid user name' in response.body:
149 pytest.fail('could not login using %s %s' % (username, password))
156 pytest.fail('could not login using %s %s' % (username, password))
150
157
151 assert response.status == '302 Found'
158 assert response.status == '302 Found'
152 response = response.follow()
159 response = response.follow()
153 assert response.status == '200 OK'
160 assert response.status == '200 OK'
154
161
155 session = response.get_session_from_response()
162 session = response.get_session_from_response()
156 assert 'rhodecode_user' in session
163 assert 'rhodecode_user' in session
157 rc_user = session['rhodecode_user']
164 rc_user = session['rhodecode_user']
158 assert rc_user.get('username') == username
165 assert rc_user.get('username') == username
159 assert rc_user.get('is_authenticated')
166 assert rc_user.get('is_authenticated')
160
167
161 return session
168 return session
162
169
163
170
164 def logout_user_session(app, csrf_token):
171 def logout_user_session(app, csrf_token):
165 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
172 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
166
173
167
174
168 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
175 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
169 password=TEST_USER_ADMIN_PASS):
176 password=TEST_USER_ADMIN_PASS):
170 return login_user_session(app, username, password)['rhodecode_user']
177 return login_user_session(app, username, password)['rhodecode_user']
171
178
172
179
173 def assert_session_flash(response, msg=None, category=None, no_=None):
180 def assert_session_flash(response, msg=None, category=None, no_=None):
174 """
181 """
175 Assert on a flash message in the current session.
182 Assert on a flash message in the current session.
176
183
177 :param response: Response from give calll, it will contain flash
184 :param response: Response from give calll, it will contain flash
178 messages or bound session with them.
185 messages or bound session with them.
179 :param msg: The expected message. Will be evaluated if a
186 :param msg: The expected message. Will be evaluated if a
180 :class:`LazyString` is passed in.
187 :class:`LazyString` is passed in.
181 :param category: Optional. If passed, the message category will be
188 :param category: Optional. If passed, the message category will be
182 checked as well.
189 checked as well.
183 :param no_: Optional. If passed, the message will be checked to NOT
190 :param no_: Optional. If passed, the message will be checked to NOT
184 be in the flash session
191 be in the flash session
185 """
192 """
186 if msg is None and no_ is None:
193 if msg is None and no_ is None:
187 raise ValueError("Parameter msg or no_ is required.")
194 raise ValueError("Parameter msg or no_ is required.")
188
195
189 if msg and no_:
196 if msg and no_:
190 raise ValueError("Please specify either msg or no_, but not both")
197 raise ValueError("Please specify either msg or no_, but not both")
191
198
192 session = response.get_session_from_response()
199 session = response.get_session_from_response()
193 messages = flash.pop_messages(session=session)
200 messages = flash.pop_messages(session=session)
194 msg = _eval_if_lazy(msg)
201 msg = _eval_if_lazy(msg)
195
202
196 if no_:
203 if no_:
197 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
204 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
198 else:
205 else:
199 error_msg = 'unable to find message `%s` in empty flash list' % msg
206 error_msg = 'unable to find message `%s` in empty flash list' % msg
200 assert messages, error_msg
207 assert messages, error_msg
201 message = messages[0]
208 message = messages[0]
202
209
203 message_text = _eval_if_lazy(message.message) or ''
210 message_text = _eval_if_lazy(message.message) or ''
204
211
205 if no_:
212 if no_:
206 if no_ in message_text:
213 if no_ in message_text:
207 msg = u'msg `%s` found in session flash.' % (no_,)
214 msg = u'msg `%s` found in session flash.' % (no_,)
208 pytest.fail(safe_str(msg))
215 pytest.fail(safe_str(msg))
209 else:
216 else:
210 if msg not in message_text:
217 if msg not in message_text:
211 fail_msg = u'msg `%s` not found in session ' \
218 fail_msg = u'msg `%s` not found in session ' \
212 u'flash: got `%s` (type:%s) instead' % (
219 u'flash: got `%s` (type:%s) instead' % (
213 msg, message_text, type(message_text))
220 msg, message_text, type(message_text))
214
221
215 pytest.fail(safe_str(fail_msg))
222 pytest.fail(safe_str(fail_msg))
216 if category:
223 if category:
217 assert category == message.category
224 assert category == message.category
218
225
219
226
220 def _eval_if_lazy(value):
227 def _eval_if_lazy(value):
221 return value.eval() if hasattr(value, 'eval') else value
228 return value.eval() if hasattr(value, 'eval') else value
222
229
223
230
224 def no_newline_id_generator(test_name):
231 def no_newline_id_generator(test_name):
225 """
232 """
226 Generates a test name without spaces or newlines characters. Used for
233 Generates a test name without spaces or newlines characters. Used for
227 nicer output of progress of test
234 nicer output of progress of test
228 """
235 """
229 org_name = test_name
236 org_name = test_name
230 test_name = test_name\
237 test_name = test_name\
231 .replace('\n', '_N') \
238 .replace('\n', '_N') \
232 .replace('\r', '_N') \
239 .replace('\r', '_N') \
233 .replace('\t', '_T') \
240 .replace('\t', '_T') \
234 .replace(' ', '_S')
241 .replace(' ', '_S')
235
242
236 return test_name or 'test-with-empty-name'
243 return test_name or 'test-with-empty-name'
@@ -1,472 +1,472 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27 from rhodecode.tests.utils import CustomTestApp
27 from rhodecode.tests.utils import CustomTestApp
28
28
29 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.caching_query import FromCache
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware import simplevcs
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.lib.middleware.utils import scm_app_http
34 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.db import User, _hash_key
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36 from rhodecode.tests import (
36 from rhodecode.tests import (
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 from rhodecode.tests.lib.middleware import mock_scm_app
38 from rhodecode.tests.lib.middleware import mock_scm_app
39
39
40
40
41 class StubVCSController(simplevcs.SimpleVCS):
41 class StubVCSController(simplevcs.SimpleVCS):
42
42
43 SCM = 'hg'
43 SCM = 'hg'
44 stub_response_body = tuple()
44 stub_response_body = tuple()
45
45
46 def __init__(self, *args, **kwargs):
46 def __init__(self, *args, **kwargs):
47 super(StubVCSController, self).__init__(*args, **kwargs)
47 super(StubVCSController, self).__init__(*args, **kwargs)
48 self._action = 'pull'
48 self._action = 'pull'
49 self._is_shadow_repo_dir = True
49 self._is_shadow_repo_dir = True
50 self._name = HG_REPO
50 self._name = HG_REPO
51 self.set_repo_names(None)
51 self.set_repo_names(None)
52
52
53 @property
53 @property
54 def is_shadow_repo_dir(self):
54 def is_shadow_repo_dir(self):
55 return self._is_shadow_repo_dir
55 return self._is_shadow_repo_dir
56
56
57 def _get_repository_name(self, environ):
57 def _get_repository_name(self, environ):
58 return self._name
58 return self._name
59
59
60 def _get_action(self, environ):
60 def _get_action(self, environ):
61 return self._action
61 return self._action
62
62
63 def _create_wsgi_app(self, repo_path, repo_name, config):
63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 def fake_app(environ, start_response):
64 def fake_app(environ, start_response):
65 headers = [
65 headers = [
66 ('Http-Accept', 'application/mercurial')
66 ('Http-Accept', 'application/mercurial')
67 ]
67 ]
68 start_response('200 OK', headers)
68 start_response('200 OK', headers)
69 return self.stub_response_body
69 return self.stub_response_body
70 return fake_app
70 return fake_app
71
71
72 def _create_config(self, extras, repo_name):
72 def _create_config(self, extras, repo_name):
73 return None
73 return None
74
74
75
75
76 @pytest.fixture
76 @pytest.fixture
77 def vcscontroller(baseapp, config_stub, request_stub):
77 def vcscontroller(baseapp, config_stub, request_stub):
78 config_stub.testing_securitypolicy()
78 config_stub.testing_securitypolicy()
79 config_stub.include('rhodecode.authentication')
79 config_stub.include('rhodecode.authentication')
80
80
81 controller = StubVCSController(
81 controller = StubVCSController(
82 baseapp.config.get_settings(), request_stub.registry)
82 baseapp.config.get_settings(), request_stub.registry)
83 app = HttpsFixup(controller, baseapp.config.get_settings())
83 app = HttpsFixup(controller, baseapp.config.get_settings())
84 app = CustomTestApp(app)
84 app = CustomTestApp(app)
85
85
86 _remove_default_user_from_query_cache()
86 _remove_default_user_from_query_cache()
87
87
88 # Sanity checks that things are set up correctly
88 # Sanity checks that things are set up correctly
89 app.get('/' + HG_REPO, status=200)
89 app.get('/' + HG_REPO, status=200)
90
90
91 app.controller = controller
91 app.controller = controller
92 return app
92 return app
93
93
94
94
95 def _remove_default_user_from_query_cache():
95 def _remove_default_user_from_query_cache():
96 user = User.get_default_user(cache=True)
96 user = User.get_default_user(cache=True)
97 query = Session().query(User).filter(User.username == user.username)
97 query = Session().query(User).filter(User.username == user.username)
98 query = query.options(
98 query = query.options(
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
100 query.invalidate()
100 query.invalidate()
101 Session().expire(user)
101 Session().expire(user)
102
102
103
103
104 def test_handles_exceptions_during_permissions_checks(
104 def test_handles_exceptions_during_permissions_checks(
105 vcscontroller, disable_anonymous_user):
105 vcscontroller, disable_anonymous_user):
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 auth_password = base64.encodestring(user_and_pass).strip()
107 auth_password = base64.encodestring(user_and_pass).strip()
108 extra_environ = {
108 extra_environ = {
109 'AUTH_TYPE': 'Basic',
109 'AUTH_TYPE': 'Basic',
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 }
112 }
113
113
114 # Verify that things are hooked up correctly
114 # Verify that things are hooked up correctly
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116
116
117 # Simulate trouble during permission checks
117 # Simulate trouble during permission checks
118 with mock.patch('rhodecode.model.db.User.get_by_username',
118 with mock.patch('rhodecode.model.db.User.get_by_username',
119 side_effect=Exception) as get_user:
119 side_effect=Exception) as get_user:
120 # Verify that a correct 500 is returned and check that the expected
120 # Verify that a correct 500 is returned and check that the expected
121 # code path was hit.
121 # code path was hit.
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 assert get_user.called
123 assert get_user.called
124
124
125
125
126 def test_returns_forbidden_if_no_anonymous_access(
126 def test_returns_forbidden_if_no_anonymous_access(
127 vcscontroller, disable_anonymous_user):
127 vcscontroller, disable_anonymous_user):
128 vcscontroller.get('/', status=401)
128 vcscontroller.get('/', status=401)
129
129
130
130
131 class StubFailVCSController(simplevcs.SimpleVCS):
131 class StubFailVCSController(simplevcs.SimpleVCS):
132 def _handle_request(self, environ, start_response):
132 def _handle_request(self, environ, start_response):
133 raise Exception("BOOM")
133 raise Exception("BOOM")
134
134
135
135
136 @pytest.fixture(scope='module')
136 @pytest.fixture(scope='module')
137 def fail_controller(baseapp):
137 def fail_controller(baseapp):
138 controller = StubFailVCSController(
138 controller = StubFailVCSController(
139 baseapp.config.get_settings(), baseapp.config)
139 baseapp.config.get_settings(), baseapp.config)
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
141 controller = CustomTestApp(controller)
141 controller = CustomTestApp(controller)
142 return controller
142 return controller
143
143
144
144
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 fail_controller.get('/', status=500)
146 fail_controller.get('/', status=500)
147
147
148
148
149 def test_provides_traceback_for_appenlight(fail_controller):
149 def test_provides_traceback_for_appenlight(fail_controller):
150 response = fail_controller.get(
150 response = fail_controller.get(
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 assert 'appenlight.__traceback' in response.request.environ
152 assert 'appenlight.__traceback' in response.request.environ
153
153
154
154
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
157 assert controller.scm_app is scm_app_http
157 assert controller.scm_app is scm_app_http
158
158
159
159
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
161 config = baseapp.config.get_settings().copy()
161 config = baseapp.config.get_settings().copy()
162 config['vcs.scm_app_implementation'] = (
162 config['vcs.scm_app_implementation'] = (
163 'rhodecode.tests.lib.middleware.mock_scm_app')
163 'rhodecode.tests.lib.middleware.mock_scm_app')
164 controller = StubVCSController(config, request_stub.registry)
164 controller = StubVCSController(config, request_stub.registry)
165 assert controller.scm_app is mock_scm_app
165 assert controller.scm_app is mock_scm_app
166
166
167
167
168 @pytest.mark.parametrize('query_string, expected', [
168 @pytest.mark.parametrize('query_string, expected', [
169 ('cmd=stub_command', True),
169 ('cmd=stub_command', True),
170 ('cmd=listkeys', False),
170 ('cmd=listkeys', False),
171 ])
171 ])
172 def test_should_check_locking(query_string, expected):
172 def test_should_check_locking(query_string, expected):
173 result = simplevcs._should_check_locking(query_string)
173 result = simplevcs._should_check_locking(query_string)
174 assert result == expected
174 assert result == expected
175
175
176
176
177 class TestShadowRepoRegularExpression(object):
177 class TestShadowRepoRegularExpression(object):
178 pr_segment = 'pull-request'
178 pr_segment = 'pull-request'
179 shadow_segment = 'repository'
179 shadow_segment = 'repository'
180
180
181 @pytest.mark.parametrize('url, expected', [
181 @pytest.mark.parametrize('url, expected', [
182 # repo with/without groups
182 # repo with/without groups
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187
187
188 # pull request ID
188 # pull request ID
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
193
193
194 # unicode
194 # unicode
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197
197
198 # trailing/leading slash
198 # trailing/leading slash
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202
202
203 # misc
203 # misc
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
206 ])
206 ])
207 def test_shadow_repo_regular_expression(self, url, expected):
207 def test_shadow_repo_regular_expression(self, url, expected):
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
209 url = url.format(
209 url = url.format(
210 pr_segment=self.pr_segment,
210 pr_segment=self.pr_segment,
211 shadow_segment=self.shadow_segment)
211 shadow_segment=self.shadow_segment)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
213 assert (match_obj is not None) == expected
213 assert (match_obj is not None) == expected
214
214
215
215
216 @pytest.mark.backends('git', 'hg')
216 @pytest.mark.backends('git', 'hg')
217 class TestShadowRepoExposure(object):
217 class TestShadowRepoExposure(object):
218
218
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
220 self, baseapp, request_stub):
220 self, baseapp, request_stub):
221 """
221 """
222 Check that a pull action to a shadow repo is propagated to the
222 Check that a pull action to a shadow repo is propagated to the
223 underlying wsgi app.
223 underlying wsgi app.
224 """
224 """
225 controller = StubVCSController(
225 controller = StubVCSController(
226 baseapp.config.get_settings(), request_stub.registry)
226 baseapp.config.get_settings(), request_stub.registry)
227 controller._check_ssl = mock.Mock()
227 controller._check_ssl = mock.Mock()
228 controller.is_shadow_repo = True
228 controller.is_shadow_repo = True
229 controller._action = 'pull'
229 controller._action = 'pull'
230 controller._is_shadow_repo_dir = True
230 controller._is_shadow_repo_dir = True
231 controller.stub_response_body = 'dummy body value'
231 controller.stub_response_body = 'dummy body value'
232 controller._get_default_cache_ttl = mock.Mock(
232 controller._get_default_cache_ttl = mock.Mock(
233 return_value=(False, 0))
233 return_value=(False, 0))
234
234
235 environ_stub = {
235 environ_stub = {
236 'HTTP_HOST': 'test.example.com',
236 'HTTP_HOST': 'test.example.com',
237 'HTTP_ACCEPT': 'application/mercurial',
237 'HTTP_ACCEPT': 'application/mercurial',
238 'REQUEST_METHOD': 'GET',
238 'REQUEST_METHOD': 'GET',
239 'wsgi.url_scheme': 'http',
239 'wsgi.url_scheme': 'http',
240 }
240 }
241
241
242 response = controller(environ_stub, mock.Mock())
242 response = controller(environ_stub, mock.Mock())
243 response_body = ''.join(response)
243 response_body = ''.join(response)
244
244
245 # Assert that we got the response from the wsgi app.
245 # Assert that we got the response from the wsgi app.
246 assert response_body == controller.stub_response_body
246 assert response_body == controller.stub_response_body
247
247
248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
249 """
249 """
250 Check that a pull action to a shadow repo is propagated to the
250 Check that a pull action to a shadow repo is propagated to the
251 underlying wsgi app.
251 underlying wsgi app.
252 """
252 """
253 controller = StubVCSController(
253 controller = StubVCSController(
254 baseapp.config.get_settings(), request_stub.registry)
254 baseapp.config.get_settings(), request_stub.registry)
255 controller._check_ssl = mock.Mock()
255 controller._check_ssl = mock.Mock()
256 controller.is_shadow_repo = True
256 controller.is_shadow_repo = True
257 controller._action = 'pull'
257 controller._action = 'pull'
258 controller._is_shadow_repo_dir = False
258 controller._is_shadow_repo_dir = False
259 controller.stub_response_body = 'dummy body value'
259 controller.stub_response_body = 'dummy body value'
260 environ_stub = {
260 environ_stub = {
261 'HTTP_HOST': 'test.example.com',
261 'HTTP_HOST': 'test.example.com',
262 'HTTP_ACCEPT': 'application/mercurial',
262 'HTTP_ACCEPT': 'application/mercurial',
263 'REQUEST_METHOD': 'GET',
263 'REQUEST_METHOD': 'GET',
264 'wsgi.url_scheme': 'http',
264 'wsgi.url_scheme': 'http',
265 }
265 }
266
266
267 response = controller(environ_stub, mock.Mock())
267 response = controller(environ_stub, mock.Mock())
268 response_body = ''.join(response)
268 response_body = ''.join(response)
269
269
270 # Assert that we got the response from the wsgi app.
270 # Assert that we got the response from the wsgi app.
271 assert '404 Not Found' in response_body
271 assert '404 Not Found' in response_body
272
272
273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
274 """
274 """
275 Check that a push action to a shadow repo is aborted.
275 Check that a push action to a shadow repo is aborted.
276 """
276 """
277 controller = StubVCSController(
277 controller = StubVCSController(
278 baseapp.config.get_settings(), request_stub.registry)
278 baseapp.config.get_settings(), request_stub.registry)
279 controller._check_ssl = mock.Mock()
279 controller._check_ssl = mock.Mock()
280 controller.is_shadow_repo = True
280 controller.is_shadow_repo = True
281 controller._action = 'push'
281 controller._action = 'push'
282 controller.stub_response_body = 'dummy body value'
282 controller.stub_response_body = 'dummy body value'
283 environ_stub = {
283 environ_stub = {
284 'HTTP_HOST': 'test.example.com',
284 'HTTP_HOST': 'test.example.com',
285 'HTTP_ACCEPT': 'application/mercurial',
285 'HTTP_ACCEPT': 'application/mercurial',
286 'REQUEST_METHOD': 'GET',
286 'REQUEST_METHOD': 'GET',
287 'wsgi.url_scheme': 'http',
287 'wsgi.url_scheme': 'http',
288 }
288 }
289
289
290 response = controller(environ_stub, mock.Mock())
290 response = controller(environ_stub, mock.Mock())
291 response_body = ''.join(response)
291 response_body = ''.join(response)
292
292
293 assert response_body != controller.stub_response_body
293 assert response_body != controller.stub_response_body
294 # Assert that a 406 error is returned.
294 # Assert that a 406 error is returned.
295 assert '406 Not Acceptable' in response_body
295 assert '406 Not Acceptable' in response_body
296
296
297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
298 """
298 """
299 Check that the set_repo_names method sets all names to the one returned
299 Check that the set_repo_names method sets all names to the one returned
300 by the _get_repository_name method on a request to a non shadow repo.
300 by the _get_repository_name method on a request to a non shadow repo.
301 """
301 """
302 environ_stub = {}
302 environ_stub = {}
303 controller = StubVCSController(
303 controller = StubVCSController(
304 baseapp.config.get_settings(), request_stub.registry)
304 baseapp.config.get_settings(), request_stub.registry)
305 controller._name = 'RepoGroup/MyRepo'
305 controller._name = 'RepoGroup/MyRepo'
306 controller.set_repo_names(environ_stub)
306 controller.set_repo_names(environ_stub)
307 assert not controller.is_shadow_repo
307 assert not controller.is_shadow_repo
308 assert (controller.url_repo_name ==
308 assert (controller.url_repo_name ==
309 controller.acl_repo_name ==
309 controller.acl_repo_name ==
310 controller.vcs_repo_name ==
310 controller.vcs_repo_name ==
311 controller._get_repository_name(environ_stub))
311 controller._get_repository_name(environ_stub))
312
312
313 def test_set_repo_names_with_shadow(
313 def test_set_repo_names_with_shadow(
314 self, baseapp, pr_util, config_stub, request_stub):
314 self, baseapp, pr_util, config_stub, request_stub):
315 """
315 """
316 Check that the set_repo_names method sets correct names on a request
316 Check that the set_repo_names method sets correct names on a request
317 to a shadow repo.
317 to a shadow repo.
318 """
318 """
319 from rhodecode.model.pull_request import PullRequestModel
319 from rhodecode.model.pull_request import PullRequestModel
320
320
321 pull_request = pr_util.create_pull_request()
321 pull_request = pr_util.create_pull_request()
322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
323 target=pull_request.target_repo.repo_name,
323 target=pull_request.target_repo.repo_name,
324 pr_id=pull_request.pull_request_id,
324 pr_id=pull_request.pull_request_id,
325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
327 controller = StubVCSController(
327 controller = StubVCSController(
328 baseapp.config.get_settings(), request_stub.registry)
328 baseapp.config.get_settings(), request_stub.registry)
329 controller._name = shadow_url
329 controller._name = shadow_url
330 controller.set_repo_names({})
330 controller.set_repo_names({})
331
331
332 # Get file system path to shadow repo for assertions.
332 # Get file system path to shadow repo for assertions.
333 workspace_id = PullRequestModel()._workspace_id(pull_request)
333 workspace_id = PullRequestModel()._workspace_id(pull_request)
334 target_vcs = pull_request.target_repo.scm_instance()
334 target_vcs = pull_request.target_repo.scm_instance()
335 vcs_repo_name = target_vcs._get_shadow_repository_path(
335 vcs_repo_name = target_vcs._get_shadow_repository_path(
336 workspace_id)
336 pull_request.target_repo.repo_id, workspace_id)
337
337
338 assert controller.vcs_repo_name == vcs_repo_name
338 assert controller.vcs_repo_name == vcs_repo_name
339 assert controller.url_repo_name == shadow_url
339 assert controller.url_repo_name == shadow_url
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
341 assert controller.is_shadow_repo
341 assert controller.is_shadow_repo
342
342
343 def test_set_repo_names_with_shadow_but_missing_pr(
343 def test_set_repo_names_with_shadow_but_missing_pr(
344 self, baseapp, pr_util, config_stub, request_stub):
344 self, baseapp, pr_util, config_stub, request_stub):
345 """
345 """
346 Checks that the set_repo_names method enforces matching target repos
346 Checks that the set_repo_names method enforces matching target repos
347 and pull request IDs.
347 and pull request IDs.
348 """
348 """
349 pull_request = pr_util.create_pull_request()
349 pull_request = pr_util.create_pull_request()
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
351 target=pull_request.target_repo.repo_name,
351 target=pull_request.target_repo.repo_name,
352 pr_id=999999999,
352 pr_id=999999999,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
355 controller = StubVCSController(
355 controller = StubVCSController(
356 baseapp.config.get_settings(), request_stub.registry)
356 baseapp.config.get_settings(), request_stub.registry)
357 controller._name = shadow_url
357 controller._name = shadow_url
358 controller.set_repo_names({})
358 controller.set_repo_names({})
359
359
360 assert not controller.is_shadow_repo
360 assert not controller.is_shadow_repo
361 assert (controller.url_repo_name ==
361 assert (controller.url_repo_name ==
362 controller.acl_repo_name ==
362 controller.acl_repo_name ==
363 controller.vcs_repo_name)
363 controller.vcs_repo_name)
364
364
365
365
366 @pytest.mark.usefixtures('baseapp')
366 @pytest.mark.usefixtures('baseapp')
367 class TestGenerateVcsResponse(object):
367 class TestGenerateVcsResponse(object):
368
368
369 def test_ensures_that_start_response_is_called_early_enough(self):
369 def test_ensures_that_start_response_is_called_early_enough(self):
370 self.call_controller_with_response_body(iter(['a', 'b']))
370 self.call_controller_with_response_body(iter(['a', 'b']))
371 assert self.start_response.called
371 assert self.start_response.called
372
372
373 def test_invalidates_cache_after_body_is_consumed(self):
373 def test_invalidates_cache_after_body_is_consumed(self):
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
375 assert not self.was_cache_invalidated()
375 assert not self.was_cache_invalidated()
376 # Consume the result
376 # Consume the result
377 list(result)
377 list(result)
378 assert self.was_cache_invalidated()
378 assert self.was_cache_invalidated()
379
379
380 def test_raises_unknown_exceptions(self):
380 def test_raises_unknown_exceptions(self):
381 result = self.call_controller_with_response_body(
381 result = self.call_controller_with_response_body(
382 self.raise_result_iter(vcs_kind='unknown'))
382 self.raise_result_iter(vcs_kind='unknown'))
383 with pytest.raises(Exception):
383 with pytest.raises(Exception):
384 list(result)
384 list(result)
385
385
386 def test_prepare_callback_daemon_is_called(self):
386 def test_prepare_callback_daemon_is_called(self):
387 def side_effect(extras, environ, action, txn_id=None):
387 def side_effect(extras, environ, action, txn_id=None):
388 return DummyHooksCallbackDaemon(), extras
388 return DummyHooksCallbackDaemon(), extras
389
389
390 prepare_patcher = mock.patch.object(
390 prepare_patcher = mock.patch.object(
391 StubVCSController, '_prepare_callback_daemon')
391 StubVCSController, '_prepare_callback_daemon')
392 with prepare_patcher as prepare_mock:
392 with prepare_patcher as prepare_mock:
393 prepare_mock.side_effect = side_effect
393 prepare_mock.side_effect = side_effect
394 self.call_controller_with_response_body(iter(['a', 'b']))
394 self.call_controller_with_response_body(iter(['a', 'b']))
395 assert prepare_mock.called
395 assert prepare_mock.called
396 assert prepare_mock.call_count == 1
396 assert prepare_mock.call_count == 1
397
397
398 def call_controller_with_response_body(self, response_body):
398 def call_controller_with_response_body(self, response_body):
399 settings = {
399 settings = {
400 'base_path': 'fake_base_path',
400 'base_path': 'fake_base_path',
401 'vcs.hooks.protocol': 'http',
401 'vcs.hooks.protocol': 'http',
402 'vcs.hooks.direct_calls': False,
402 'vcs.hooks.direct_calls': False,
403 }
403 }
404 registry = AttributeDict()
404 registry = AttributeDict()
405 controller = StubVCSController(settings, registry)
405 controller = StubVCSController(settings, registry)
406 controller._invalidate_cache = mock.Mock()
406 controller._invalidate_cache = mock.Mock()
407 controller.stub_response_body = response_body
407 controller.stub_response_body = response_body
408 self.start_response = mock.Mock()
408 self.start_response = mock.Mock()
409 result = controller._generate_vcs_response(
409 result = controller._generate_vcs_response(
410 environ={}, start_response=self.start_response,
410 environ={}, start_response=self.start_response,
411 repo_path='fake_repo_path',
411 repo_path='fake_repo_path',
412 extras={}, action='push')
412 extras={}, action='push')
413 self.controller = controller
413 self.controller = controller
414 return result
414 return result
415
415
416 def raise_result_iter(self, vcs_kind='repo_locked'):
416 def raise_result_iter(self, vcs_kind='repo_locked'):
417 """
417 """
418 Simulates an exception due to a vcs raised exception if kind vcs_kind
418 Simulates an exception due to a vcs raised exception if kind vcs_kind
419 """
419 """
420 raise self.vcs_exception(vcs_kind=vcs_kind)
420 raise self.vcs_exception(vcs_kind=vcs_kind)
421 yield "never_reached"
421 yield "never_reached"
422
422
423 def vcs_exception(self, vcs_kind='repo_locked'):
423 def vcs_exception(self, vcs_kind='repo_locked'):
424 locked_exception = Exception('TEST_MESSAGE')
424 locked_exception = Exception('TEST_MESSAGE')
425 locked_exception._vcs_kind = vcs_kind
425 locked_exception._vcs_kind = vcs_kind
426 return locked_exception
426 return locked_exception
427
427
428 def was_cache_invalidated(self):
428 def was_cache_invalidated(self):
429 return self.controller._invalidate_cache.called
429 return self.controller._invalidate_cache.called
430
430
431
431
432 class TestInitializeGenerator(object):
432 class TestInitializeGenerator(object):
433
433
434 def test_drains_first_element(self):
434 def test_drains_first_element(self):
435 gen = self.factory(['__init__', 1, 2])
435 gen = self.factory(['__init__', 1, 2])
436 result = list(gen)
436 result = list(gen)
437 assert result == [1, 2]
437 assert result == [1, 2]
438
438
439 @pytest.mark.parametrize('values', [
439 @pytest.mark.parametrize('values', [
440 [],
440 [],
441 [1, 2],
441 [1, 2],
442 ])
442 ])
443 def test_raises_value_error(self, values):
443 def test_raises_value_error(self, values):
444 with pytest.raises(ValueError):
444 with pytest.raises(ValueError):
445 self.factory(values)
445 self.factory(values)
446
446
447 @simplevcs.initialize_generator
447 @simplevcs.initialize_generator
448 def factory(self, iterable):
448 def factory(self, iterable):
449 for elem in iterable:
449 for elem in iterable:
450 yield elem
450 yield elem
451
451
452
452
453 class TestPrepareHooksDaemon(object):
453 class TestPrepareHooksDaemon(object):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
455 expected_extras = {'extra1': 'value1'}
455 expected_extras = {'extra1': 'value1'}
456 daemon = DummyHooksCallbackDaemon()
456 daemon = DummyHooksCallbackDaemon()
457
457
458 controller = StubVCSController(app_settings, request_stub.registry)
458 controller = StubVCSController(app_settings, request_stub.registry)
459 prepare_patcher = mock.patch.object(
459 prepare_patcher = mock.patch.object(
460 simplevcs, 'prepare_callback_daemon',
460 simplevcs, 'prepare_callback_daemon',
461 return_value=(daemon, expected_extras))
461 return_value=(daemon, expected_extras))
462 with prepare_patcher as prepare_mock:
462 with prepare_patcher as prepare_mock:
463 callback_daemon, extras = controller._prepare_callback_daemon(
463 callback_daemon, extras = controller._prepare_callback_daemon(
464 expected_extras.copy(), {}, 'push')
464 expected_extras.copy(), {}, 'push')
465 prepare_mock.assert_called_once_with(
465 prepare_mock.assert_called_once_with(
466 expected_extras,
466 expected_extras,
467 protocol=app_settings['vcs.hooks.protocol'],
467 protocol=app_settings['vcs.hooks.protocol'],
468 txn_id=None,
468 txn_id=None,
469 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
469 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
470
470
471 assert callback_daemon == daemon
471 assert callback_daemon == daemon
472 assert extras == extras
472 assert extras == extras
@@ -1,860 +1,868 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 self.merge_patcher = mock.patch.object(
53 self.merge_patcher = mock.patch.object(
54 BackendClass, 'merge', return_value=MergeResponse(
54 BackendClass, 'merge', return_value=MergeResponse(
55 False, False, None, MergeFailureReason.UNKNOWN))
55 False, False, None, MergeFailureReason.UNKNOWN))
56 self.workspace_remove_patcher = mock.patch.object(
56 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
57 BackendClass, 'cleanup_merge_workspace')
58
58
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
61 self.comment_patcher = mock.patch(
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
63 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
64 self.notification_patcher = mock.patch(
65 'rhodecode.model.notification.NotificationModel.create')
65 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
66 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
67 self.helper_patcher = mock.patch(
68 'rhodecode.lib.helpers.route_path')
68 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
69 self.helper_patcher.start()
70
70
71 self.hook_patcher = mock.patch.object(PullRequestModel,
71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 '_trigger_pull_request_hook')
72 '_trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
73 self.hook_mock = self.hook_patcher.start()
74
74
75 self.invalidation_patcher = mock.patch(
75 self.invalidation_patcher = mock.patch(
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
77 self.invalidation_mock = self.invalidation_patcher.start()
78
78
79 self.pull_request = pr_util.create_pull_request(
79 self.pull_request = pr_util.create_pull_request(
80 mergeable=True, name_suffix=u'ąć')
80 mergeable=True, name_suffix=u'ąć')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 self.repo_id = self.pull_request.target_repo.repo_id
84
85
85 @request.addfinalizer
86 @request.addfinalizer
86 def cleanup_pull_request():
87 def cleanup_pull_request():
87 calls = [mock.call(
88 calls = [mock.call(
88 self.pull_request, self.pull_request.author, 'create')]
89 self.pull_request, self.pull_request.author, 'create')]
89 self.hook_mock.assert_has_calls(calls)
90 self.hook_mock.assert_has_calls(calls)
90
91
91 self.workspace_remove_patcher.stop()
92 self.workspace_remove_patcher.stop()
92 self.merge_patcher.stop()
93 self.merge_patcher.stop()
93 self.comment_patcher.stop()
94 self.comment_patcher.stop()
94 self.notification_patcher.stop()
95 self.notification_patcher.stop()
95 self.helper_patcher.stop()
96 self.helper_patcher.stop()
96 self.hook_patcher.stop()
97 self.hook_patcher.stop()
97 self.invalidation_patcher.stop()
98 self.invalidation_patcher.stop()
98
99
99 return self.pull_request
100 return self.pull_request
100
101
101 def test_get_all(self, pull_request):
102 def test_get_all(self, pull_request):
102 prs = PullRequestModel().get_all(pull_request.target_repo)
103 prs = PullRequestModel().get_all(pull_request.target_repo)
103 assert isinstance(prs, list)
104 assert isinstance(prs, list)
104 assert len(prs) == 1
105 assert len(prs) == 1
105
106
106 def test_count_all(self, pull_request):
107 def test_count_all(self, pull_request):
107 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 assert pr_count == 1
109 assert pr_count == 1
109
110
110 def test_get_awaiting_review(self, pull_request):
111 def test_get_awaiting_review(self, pull_request):
111 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 assert isinstance(prs, list)
113 assert isinstance(prs, list)
113 assert len(prs) == 1
114 assert len(prs) == 1
114
115
115 def test_count_awaiting_review(self, pull_request):
116 def test_count_awaiting_review(self, pull_request):
116 pr_count = PullRequestModel().count_awaiting_review(
117 pr_count = PullRequestModel().count_awaiting_review(
117 pull_request.target_repo)
118 pull_request.target_repo)
118 assert pr_count == 1
119 assert pr_count == 1
119
120
120 def test_get_awaiting_my_review(self, pull_request):
121 def test_get_awaiting_my_review(self, pull_request):
121 PullRequestModel().update_reviewers(
122 PullRequestModel().update_reviewers(
122 pull_request, [(pull_request.author, ['author'], False, [])],
123 pull_request, [(pull_request.author, ['author'], False, [])],
123 pull_request.author)
124 pull_request.author)
124 prs = PullRequestModel().get_awaiting_my_review(
125 prs = PullRequestModel().get_awaiting_my_review(
125 pull_request.target_repo, user_id=pull_request.author.user_id)
126 pull_request.target_repo, user_id=pull_request.author.user_id)
126 assert isinstance(prs, list)
127 assert isinstance(prs, list)
127 assert len(prs) == 1
128 assert len(prs) == 1
128
129
129 def test_count_awaiting_my_review(self, pull_request):
130 def test_count_awaiting_my_review(self, pull_request):
130 PullRequestModel().update_reviewers(
131 PullRequestModel().update_reviewers(
131 pull_request, [(pull_request.author, ['author'], False, [])],
132 pull_request, [(pull_request.author, ['author'], False, [])],
132 pull_request.author)
133 pull_request.author)
133 pr_count = PullRequestModel().count_awaiting_my_review(
134 pr_count = PullRequestModel().count_awaiting_my_review(
134 pull_request.target_repo, user_id=pull_request.author.user_id)
135 pull_request.target_repo, user_id=pull_request.author.user_id)
135 assert pr_count == 1
136 assert pr_count == 1
136
137
137 def test_delete_calls_cleanup_merge(self, pull_request):
138 def test_delete_calls_cleanup_merge(self, pull_request):
139 repo_id = pull_request.target_repo.repo_id
138 PullRequestModel().delete(pull_request, pull_request.author)
140 PullRequestModel().delete(pull_request, pull_request.author)
139
141
140 self.workspace_remove_mock.assert_called_once_with(
142 self.workspace_remove_mock.assert_called_once_with(
141 self.workspace_id)
143 repo_id, self.workspace_id)
142
144
143 def test_close_calls_cleanup_and_hook(self, pull_request):
145 def test_close_calls_cleanup_and_hook(self, pull_request):
144 PullRequestModel().close_pull_request(
146 PullRequestModel().close_pull_request(
145 pull_request, pull_request.author)
147 pull_request, pull_request.author)
148 repo_id = pull_request.target_repo.repo_id
146
149
147 self.workspace_remove_mock.assert_called_once_with(
150 self.workspace_remove_mock.assert_called_once_with(
148 self.workspace_id)
151 repo_id, self.workspace_id)
149 self.hook_mock.assert_called_with(
152 self.hook_mock.assert_called_with(
150 self.pull_request, self.pull_request.author, 'close')
153 self.pull_request, self.pull_request.author, 'close')
151
154
152 def test_merge_status(self, pull_request):
155 def test_merge_status(self, pull_request):
153 self.merge_mock.return_value = MergeResponse(
156 self.merge_mock.return_value = MergeResponse(
154 True, False, None, MergeFailureReason.NONE)
157 True, False, None, MergeFailureReason.NONE)
155
158
156 assert pull_request._last_merge_source_rev is None
159 assert pull_request._last_merge_source_rev is None
157 assert pull_request._last_merge_target_rev is None
160 assert pull_request._last_merge_target_rev is None
158 assert pull_request.last_merge_status is None
161 assert pull_request.last_merge_status is None
159
162
160 status, msg = PullRequestModel().merge_status(pull_request)
163 status, msg = PullRequestModel().merge_status(pull_request)
161 assert status is True
164 assert status is True
162 assert msg.eval() == 'This pull request can be automatically merged.'
165 assert msg.eval() == 'This pull request can be automatically merged.'
163 self.merge_mock.assert_called_with(
166 self.merge_mock.assert_called_with(
167 self.repo_id, self.workspace_id,
164 pull_request.target_ref_parts,
168 pull_request.target_ref_parts,
165 pull_request.source_repo.scm_instance(),
169 pull_request.source_repo.scm_instance(),
166 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
170 pull_request.source_ref_parts, dry_run=True,
167 use_rebase=False, close_branch=False)
171 use_rebase=False, close_branch=False)
168
172
169 assert pull_request._last_merge_source_rev == self.source_commit
173 assert pull_request._last_merge_source_rev == self.source_commit
170 assert pull_request._last_merge_target_rev == self.target_commit
174 assert pull_request._last_merge_target_rev == self.target_commit
171 assert pull_request.last_merge_status is MergeFailureReason.NONE
175 assert pull_request.last_merge_status is MergeFailureReason.NONE
172
176
173 self.merge_mock.reset_mock()
177 self.merge_mock.reset_mock()
174 status, msg = PullRequestModel().merge_status(pull_request)
178 status, msg = PullRequestModel().merge_status(pull_request)
175 assert status is True
179 assert status is True
176 assert msg.eval() == 'This pull request can be automatically merged.'
180 assert msg.eval() == 'This pull request can be automatically merged.'
177 assert self.merge_mock.called is False
181 assert self.merge_mock.called is False
178
182
179 def test_merge_status_known_failure(self, pull_request):
183 def test_merge_status_known_failure(self, pull_request):
180 self.merge_mock.return_value = MergeResponse(
184 self.merge_mock.return_value = MergeResponse(
181 False, False, None, MergeFailureReason.MERGE_FAILED)
185 False, False, None, MergeFailureReason.MERGE_FAILED)
182
186
183 assert pull_request._last_merge_source_rev is None
187 assert pull_request._last_merge_source_rev is None
184 assert pull_request._last_merge_target_rev is None
188 assert pull_request._last_merge_target_rev is None
185 assert pull_request.last_merge_status is None
189 assert pull_request.last_merge_status is None
186
190
187 status, msg = PullRequestModel().merge_status(pull_request)
191 status, msg = PullRequestModel().merge_status(pull_request)
188 assert status is False
192 assert status is False
189 assert (
193 assert (
190 msg.eval() ==
194 msg.eval() ==
191 'This pull request cannot be merged because of merge conflicts.')
195 'This pull request cannot be merged because of merge conflicts.')
192 self.merge_mock.assert_called_with(
196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
193 pull_request.target_ref_parts,
198 pull_request.target_ref_parts,
194 pull_request.source_repo.scm_instance(),
199 pull_request.source_repo.scm_instance(),
195 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
196 use_rebase=False, close_branch=False)
201 use_rebase=False, close_branch=False)
197
202
198 assert pull_request._last_merge_source_rev == self.source_commit
203 assert pull_request._last_merge_source_rev == self.source_commit
199 assert pull_request._last_merge_target_rev == self.target_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
200 assert (
205 assert (
201 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
202
207
203 self.merge_mock.reset_mock()
208 self.merge_mock.reset_mock()
204 status, msg = PullRequestModel().merge_status(pull_request)
209 status, msg = PullRequestModel().merge_status(pull_request)
205 assert status is False
210 assert status is False
206 assert (
211 assert (
207 msg.eval() ==
212 msg.eval() ==
208 'This pull request cannot be merged because of merge conflicts.')
213 'This pull request cannot be merged because of merge conflicts.')
209 assert self.merge_mock.called is False
214 assert self.merge_mock.called is False
210
215
211 def test_merge_status_unknown_failure(self, pull_request):
216 def test_merge_status_unknown_failure(self, pull_request):
212 self.merge_mock.return_value = MergeResponse(
217 self.merge_mock.return_value = MergeResponse(
213 False, False, None, MergeFailureReason.UNKNOWN)
218 False, False, None, MergeFailureReason.UNKNOWN)
214
219
215 assert pull_request._last_merge_source_rev is None
220 assert pull_request._last_merge_source_rev is None
216 assert pull_request._last_merge_target_rev is None
221 assert pull_request._last_merge_target_rev is None
217 assert pull_request.last_merge_status is None
222 assert pull_request.last_merge_status is None
218
223
219 status, msg = PullRequestModel().merge_status(pull_request)
224 status, msg = PullRequestModel().merge_status(pull_request)
220 assert status is False
225 assert status is False
221 assert msg.eval() == (
226 assert msg.eval() == (
222 'This pull request cannot be merged because of an unhandled'
227 'This pull request cannot be merged because of an unhandled'
223 ' exception.')
228 ' exception.')
224 self.merge_mock.assert_called_with(
229 self.merge_mock.assert_called_with(
230 self.repo_id, self.workspace_id,
225 pull_request.target_ref_parts,
231 pull_request.target_ref_parts,
226 pull_request.source_repo.scm_instance(),
232 pull_request.source_repo.scm_instance(),
227 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
233 pull_request.source_ref_parts, dry_run=True,
228 use_rebase=False, close_branch=False)
234 use_rebase=False, close_branch=False)
229
235
230 assert pull_request._last_merge_source_rev is None
236 assert pull_request._last_merge_source_rev is None
231 assert pull_request._last_merge_target_rev is None
237 assert pull_request._last_merge_target_rev is None
232 assert pull_request.last_merge_status is None
238 assert pull_request.last_merge_status is None
233
239
234 self.merge_mock.reset_mock()
240 self.merge_mock.reset_mock()
235 status, msg = PullRequestModel().merge_status(pull_request)
241 status, msg = PullRequestModel().merge_status(pull_request)
236 assert status is False
242 assert status is False
237 assert msg.eval() == (
243 assert msg.eval() == (
238 'This pull request cannot be merged because of an unhandled'
244 'This pull request cannot be merged because of an unhandled'
239 ' exception.')
245 ' exception.')
240 assert self.merge_mock.called is True
246 assert self.merge_mock.called is True
241
247
242 def test_merge_status_when_target_is_locked(self, pull_request):
248 def test_merge_status_when_target_is_locked(self, pull_request):
243 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
244 status, msg = PullRequestModel().merge_status(pull_request)
250 status, msg = PullRequestModel().merge_status(pull_request)
245 assert status is False
251 assert status is False
246 assert msg.eval() == (
252 assert msg.eval() == (
247 'This pull request cannot be merged because the target repository'
253 'This pull request cannot be merged because the target repository'
248 ' is locked.')
254 ' is locked.')
249
255
250 def test_merge_status_requirements_check_target(self, pull_request):
256 def test_merge_status_requirements_check_target(self, pull_request):
251
257
252 def has_largefiles(self, repo):
258 def has_largefiles(self, repo):
253 return repo == pull_request.source_repo
259 return repo == pull_request.source_repo
254
260
255 patcher = mock.patch.object(
261 patcher = mock.patch.object(
256 PullRequestModel, '_has_largefiles', has_largefiles)
262 PullRequestModel, '_has_largefiles', has_largefiles)
257 with patcher:
263 with patcher:
258 status, msg = PullRequestModel().merge_status(pull_request)
264 status, msg = PullRequestModel().merge_status(pull_request)
259
265
260 assert status is False
266 assert status is False
261 assert msg == 'Target repository large files support is disabled.'
267 assert msg == 'Target repository large files support is disabled.'
262
268
263 def test_merge_status_requirements_check_source(self, pull_request):
269 def test_merge_status_requirements_check_source(self, pull_request):
264
270
265 def has_largefiles(self, repo):
271 def has_largefiles(self, repo):
266 return repo == pull_request.target_repo
272 return repo == pull_request.target_repo
267
273
268 patcher = mock.patch.object(
274 patcher = mock.patch.object(
269 PullRequestModel, '_has_largefiles', has_largefiles)
275 PullRequestModel, '_has_largefiles', has_largefiles)
270 with patcher:
276 with patcher:
271 status, msg = PullRequestModel().merge_status(pull_request)
277 status, msg = PullRequestModel().merge_status(pull_request)
272
278
273 assert status is False
279 assert status is False
274 assert msg == 'Source repository large files support is disabled.'
280 assert msg == 'Source repository large files support is disabled.'
275
281
276 def test_merge(self, pull_request, merge_extras):
282 def test_merge(self, pull_request, merge_extras):
277 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
278 merge_ref = Reference(
284 merge_ref = Reference(
279 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
280 self.merge_mock.return_value = MergeResponse(
286 self.merge_mock.return_value = MergeResponse(
281 True, True, merge_ref, MergeFailureReason.NONE)
287 True, True, merge_ref, MergeFailureReason.NONE)
282
288
283 merge_extras['repository'] = pull_request.target_repo.repo_name
289 merge_extras['repository'] = pull_request.target_repo.repo_name
284 PullRequestModel().merge(
290 PullRequestModel().merge_repo(
285 pull_request, pull_request.author, extras=merge_extras)
291 pull_request, pull_request.author, extras=merge_extras)
286
292
287 message = (
293 message = (
288 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
289 u'\n\n {pr_title}'.format(
295 u'\n\n {pr_title}'.format(
290 pr_id=pull_request.pull_request_id,
296 pr_id=pull_request.pull_request_id,
291 source_repo=safe_unicode(
297 source_repo=safe_unicode(
292 pull_request.source_repo.scm_instance().name),
298 pull_request.source_repo.scm_instance().name),
293 source_ref_name=pull_request.source_ref_parts.name,
299 source_ref_name=pull_request.source_ref_parts.name,
294 pr_title=safe_unicode(pull_request.title)
300 pr_title=safe_unicode(pull_request.title)
295 )
301 )
296 )
302 )
297 self.merge_mock.assert_called_with(
303 self.merge_mock.assert_called_with(
304 self.repo_id, self.workspace_id,
298 pull_request.target_ref_parts,
305 pull_request.target_ref_parts,
299 pull_request.source_repo.scm_instance(),
306 pull_request.source_repo.scm_instance(),
300 pull_request.source_ref_parts, self.workspace_id,
307 pull_request.source_ref_parts,
301 user_name=user.username, user_email=user.email, message=message,
308 user_name=user.username, user_email=user.email, message=message,
302 use_rebase=False, close_branch=False
309 use_rebase=False, close_branch=False
303 )
310 )
304 self.invalidation_mock.assert_called_once_with(
311 self.invalidation_mock.assert_called_once_with(
305 pull_request.target_repo.repo_name)
312 pull_request.target_repo.repo_name)
306
313
307 self.hook_mock.assert_called_with(
314 self.hook_mock.assert_called_with(
308 self.pull_request, self.pull_request.author, 'merge')
315 self.pull_request, self.pull_request.author, 'merge')
309
316
310 pull_request = PullRequest.get(pull_request.pull_request_id)
317 pull_request = PullRequest.get(pull_request.pull_request_id)
311 assert (
318 assert (
312 pull_request.merge_rev ==
319 pull_request.merge_rev ==
313 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
314
321
315 def test_merge_failed(self, pull_request, merge_extras):
322 def test_merge_failed(self, pull_request, merge_extras):
316 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
317 merge_ref = Reference(
324 merge_ref = Reference(
318 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
319 self.merge_mock.return_value = MergeResponse(
326 self.merge_mock.return_value = MergeResponse(
320 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
321
328
322 merge_extras['repository'] = pull_request.target_repo.repo_name
329 merge_extras['repository'] = pull_request.target_repo.repo_name
323 PullRequestModel().merge(
330 PullRequestModel().merge_repo(
324 pull_request, pull_request.author, extras=merge_extras)
331 pull_request, pull_request.author, extras=merge_extras)
325
332
326 message = (
333 message = (
327 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
328 u'\n\n {pr_title}'.format(
335 u'\n\n {pr_title}'.format(
329 pr_id=pull_request.pull_request_id,
336 pr_id=pull_request.pull_request_id,
330 source_repo=safe_unicode(
337 source_repo=safe_unicode(
331 pull_request.source_repo.scm_instance().name),
338 pull_request.source_repo.scm_instance().name),
332 source_ref_name=pull_request.source_ref_parts.name,
339 source_ref_name=pull_request.source_ref_parts.name,
333 pr_title=safe_unicode(pull_request.title)
340 pr_title=safe_unicode(pull_request.title)
334 )
341 )
335 )
342 )
336 self.merge_mock.assert_called_with(
343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
337 pull_request.target_ref_parts,
345 pull_request.target_ref_parts,
338 pull_request.source_repo.scm_instance(),
346 pull_request.source_repo.scm_instance(),
339 pull_request.source_ref_parts, self.workspace_id,
347 pull_request.source_ref_parts,
340 user_name=user.username, user_email=user.email, message=message,
348 user_name=user.username, user_email=user.email, message=message,
341 use_rebase=False, close_branch=False
349 use_rebase=False, close_branch=False
342 )
350 )
343
351
344 pull_request = PullRequest.get(pull_request.pull_request_id)
352 pull_request = PullRequest.get(pull_request.pull_request_id)
345 assert self.invalidation_mock.called is False
353 assert self.invalidation_mock.called is False
346 assert pull_request.merge_rev is None
354 assert pull_request.merge_rev is None
347
355
348 def test_get_commit_ids(self, pull_request):
356 def test_get_commit_ids(self, pull_request):
349 # The PR has been not merget yet, so expect an exception
357 # The PR has been not merget yet, so expect an exception
350 with pytest.raises(ValueError):
358 with pytest.raises(ValueError):
351 PullRequestModel()._get_commit_ids(pull_request)
359 PullRequestModel()._get_commit_ids(pull_request)
352
360
353 # Merge revision is in the revisions list
361 # Merge revision is in the revisions list
354 pull_request.merge_rev = pull_request.revisions[0]
362 pull_request.merge_rev = pull_request.revisions[0]
355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
356 assert commit_ids == pull_request.revisions
364 assert commit_ids == pull_request.revisions
357
365
358 # Merge revision is not in the revisions list
366 # Merge revision is not in the revisions list
359 pull_request.merge_rev = 'f000' * 10
367 pull_request.merge_rev = 'f000' * 10
360 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
361 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
362
370
363 def test_get_diff_from_pr_version(self, pull_request):
371 def test_get_diff_from_pr_version(self, pull_request):
364 source_repo = pull_request.source_repo
372 source_repo = pull_request.source_repo
365 source_ref_id = pull_request.source_ref_parts.commit_id
373 source_ref_id = pull_request.source_ref_parts.commit_id
366 target_ref_id = pull_request.target_ref_parts.commit_id
374 target_ref_id = pull_request.target_ref_parts.commit_id
367 diff = PullRequestModel()._get_diff_from_pr_or_version(
375 diff = PullRequestModel()._get_diff_from_pr_or_version(
368 source_repo, source_ref_id, target_ref_id, context=6)
376 source_repo, source_ref_id, target_ref_id, context=6)
369 assert 'file_1' in diff.raw
377 assert 'file_1' in diff.raw
370
378
371 def test_generate_title_returns_unicode(self):
379 def test_generate_title_returns_unicode(self):
372 title = PullRequestModel().generate_pullrequest_title(
380 title = PullRequestModel().generate_pullrequest_title(
373 source='source-dummy',
381 source='source-dummy',
374 source_ref='source-ref-dummy',
382 source_ref='source-ref-dummy',
375 target='target-dummy',
383 target='target-dummy',
376 )
384 )
377 assert type(title) == unicode
385 assert type(title) == unicode
378
386
379
387
380 @pytest.mark.usefixtures('config_stub')
388 @pytest.mark.usefixtures('config_stub')
381 class TestIntegrationMerge(object):
389 class TestIntegrationMerge(object):
382 @pytest.mark.parametrize('extra_config', (
390 @pytest.mark.parametrize('extra_config', (
383 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
384 ))
392 ))
385 def test_merge_triggers_push_hooks(
393 def test_merge_triggers_push_hooks(
386 self, pr_util, user_admin, capture_rcextensions, merge_extras,
394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
387 extra_config):
395 extra_config):
388 pull_request = pr_util.create_pull_request(
396 pull_request = pr_util.create_pull_request(
389 approved=True, mergeable=True)
397 approved=True, mergeable=True)
390 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
391 merge_extras['repository'] = pull_request.target_repo.repo_name
399 merge_extras['repository'] = pull_request.target_repo.repo_name
392 Session().commit()
400 Session().commit()
393
401
394 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
395 merge_state = PullRequestModel().merge(
403 merge_state = PullRequestModel().merge_repo(
396 pull_request, user_admin, extras=merge_extras)
404 pull_request, user_admin, extras=merge_extras)
397
405
398 assert merge_state.executed
406 assert merge_state.executed
399 assert 'pre_push' in capture_rcextensions
407 assert 'pre_push' in capture_rcextensions
400 assert 'post_push' in capture_rcextensions
408 assert 'post_push' in capture_rcextensions
401
409
402 def test_merge_can_be_rejected_by_pre_push_hook(
410 def test_merge_can_be_rejected_by_pre_push_hook(
403 self, pr_util, user_admin, capture_rcextensions, merge_extras):
411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
404 pull_request = pr_util.create_pull_request(
412 pull_request = pr_util.create_pull_request(
405 approved=True, mergeable=True)
413 approved=True, mergeable=True)
406 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
407 merge_extras['repository'] = pull_request.target_repo.repo_name
415 merge_extras['repository'] = pull_request.target_repo.repo_name
408 Session().commit()
416 Session().commit()
409
417
410 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
411 pre_pull.side_effect = RepositoryError("Disallow push!")
419 pre_pull.side_effect = RepositoryError("Disallow push!")
412 merge_status = PullRequestModel().merge(
420 merge_status = PullRequestModel().merge_repo(
413 pull_request, user_admin, extras=merge_extras)
421 pull_request, user_admin, extras=merge_extras)
414
422
415 assert not merge_status.executed
423 assert not merge_status.executed
416 assert 'pre_push' not in capture_rcextensions
424 assert 'pre_push' not in capture_rcextensions
417 assert 'post_push' not in capture_rcextensions
425 assert 'post_push' not in capture_rcextensions
418
426
419 def test_merge_fails_if_target_is_locked(
427 def test_merge_fails_if_target_is_locked(
420 self, pr_util, user_regular, merge_extras):
428 self, pr_util, user_regular, merge_extras):
421 pull_request = pr_util.create_pull_request(
429 pull_request = pr_util.create_pull_request(
422 approved=True, mergeable=True)
430 approved=True, mergeable=True)
423 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
424 pull_request.target_repo.locked = locked_by
432 pull_request.target_repo.locked = locked_by
425 # TODO: johbo: Check if this can work based on the database, currently
433 # TODO: johbo: Check if this can work based on the database, currently
426 # all data is pre-computed, that's why just updating the DB is not
434 # all data is pre-computed, that's why just updating the DB is not
427 # enough.
435 # enough.
428 merge_extras['locked_by'] = locked_by
436 merge_extras['locked_by'] = locked_by
429 merge_extras['repository'] = pull_request.target_repo.repo_name
437 merge_extras['repository'] = pull_request.target_repo.repo_name
430 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
431 Session().commit()
439 Session().commit()
432 merge_status = PullRequestModel().merge(
440 merge_status = PullRequestModel().merge_repo(
433 pull_request, user_regular, extras=merge_extras)
441 pull_request, user_regular, extras=merge_extras)
434 assert not merge_status.executed
442 assert not merge_status.executed
435
443
436
444
437 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
438 (False, 1, 0),
446 (False, 1, 0),
439 (True, 0, 1),
447 (True, 0, 1),
440 ])
448 ])
441 def test_outdated_comments(
449 def test_outdated_comments(
442 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
443 pull_request = pr_util.create_pull_request()
451 pull_request = pr_util.create_pull_request()
444 pr_util.create_inline_comment(file_path='not_in_updated_diff')
452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
445
453
446 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
447 pr_util.add_one_commit()
455 pr_util.add_one_commit()
448 assert_inline_comments(
456 assert_inline_comments(
449 pull_request, visible=inlines_count, outdated=outdated_count)
457 pull_request, visible=inlines_count, outdated=outdated_count)
450 outdated_comment_mock.assert_called_with(pull_request)
458 outdated_comment_mock.assert_called_with(pull_request)
451
459
452
460
453 @pytest.fixture
461 @pytest.fixture
454 def merge_extras(user_regular):
462 def merge_extras(user_regular):
455 """
463 """
456 Context for the vcs operation when running a merge.
464 Context for the vcs operation when running a merge.
457 """
465 """
458 extras = {
466 extras = {
459 'ip': '127.0.0.1',
467 'ip': '127.0.0.1',
460 'username': user_regular.username,
468 'username': user_regular.username,
461 'user_id': user_regular.user_id,
469 'user_id': user_regular.user_id,
462 'action': 'push',
470 'action': 'push',
463 'repository': 'fake_target_repo_name',
471 'repository': 'fake_target_repo_name',
464 'scm': 'git',
472 'scm': 'git',
465 'config': 'fake_config_ini_path',
473 'config': 'fake_config_ini_path',
466 'make_lock': None,
474 'make_lock': None,
467 'locked_by': [None, None, None],
475 'locked_by': [None, None, None],
468 'server_url': 'http://test.example.com:5000',
476 'server_url': 'http://test.example.com:5000',
469 'hooks': ['push', 'pull'],
477 'hooks': ['push', 'pull'],
470 'is_shadow_repo': False,
478 'is_shadow_repo': False,
471 }
479 }
472 return extras
480 return extras
473
481
474
482
475 @pytest.mark.usefixtures('config_stub')
483 @pytest.mark.usefixtures('config_stub')
476 class TestUpdateCommentHandling(object):
484 class TestUpdateCommentHandling(object):
477
485
478 @pytest.fixture(autouse=True, scope='class')
486 @pytest.fixture(autouse=True, scope='class')
479 def enable_outdated_comments(self, request, baseapp):
487 def enable_outdated_comments(self, request, baseapp):
480 config_patch = mock.patch.dict(
488 config_patch = mock.patch.dict(
481 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
489 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
482 config_patch.start()
490 config_patch.start()
483
491
484 @request.addfinalizer
492 @request.addfinalizer
485 def cleanup():
493 def cleanup():
486 config_patch.stop()
494 config_patch.stop()
487
495
488 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
496 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
489 commits = [
497 commits = [
490 {'message': 'a'},
498 {'message': 'a'},
491 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
499 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
492 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
500 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
493 ]
501 ]
494 pull_request = pr_util.create_pull_request(
502 pull_request = pr_util.create_pull_request(
495 commits=commits, target_head='a', source_head='b', revisions=['b'])
503 commits=commits, target_head='a', source_head='b', revisions=['b'])
496 pr_util.create_inline_comment(file_path='file_b')
504 pr_util.create_inline_comment(file_path='file_b')
497 pr_util.add_one_commit(head='c')
505 pr_util.add_one_commit(head='c')
498
506
499 assert_inline_comments(pull_request, visible=1, outdated=0)
507 assert_inline_comments(pull_request, visible=1, outdated=0)
500
508
501 def test_comment_stays_unflagged_on_change_above(self, pr_util):
509 def test_comment_stays_unflagged_on_change_above(self, pr_util):
502 original_content = ''.join(
510 original_content = ''.join(
503 ['line {}\n'.format(x) for x in range(1, 11)])
511 ['line {}\n'.format(x) for x in range(1, 11)])
504 updated_content = 'new_line_at_top\n' + original_content
512 updated_content = 'new_line_at_top\n' + original_content
505 commits = [
513 commits = [
506 {'message': 'a'},
514 {'message': 'a'},
507 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
515 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
508 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
516 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
509 ]
517 ]
510 pull_request = pr_util.create_pull_request(
518 pull_request = pr_util.create_pull_request(
511 commits=commits, target_head='a', source_head='b', revisions=['b'])
519 commits=commits, target_head='a', source_head='b', revisions=['b'])
512
520
513 with outdated_comments_patcher():
521 with outdated_comments_patcher():
514 comment = pr_util.create_inline_comment(
522 comment = pr_util.create_inline_comment(
515 line_no=u'n8', file_path='file_b')
523 line_no=u'n8', file_path='file_b')
516 pr_util.add_one_commit(head='c')
524 pr_util.add_one_commit(head='c')
517
525
518 assert_inline_comments(pull_request, visible=1, outdated=0)
526 assert_inline_comments(pull_request, visible=1, outdated=0)
519 assert comment.line_no == u'n9'
527 assert comment.line_no == u'n9'
520
528
521 def test_comment_stays_unflagged_on_change_below(self, pr_util):
529 def test_comment_stays_unflagged_on_change_below(self, pr_util):
522 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
530 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
523 updated_content = original_content + 'new_line_at_end\n'
531 updated_content = original_content + 'new_line_at_end\n'
524 commits = [
532 commits = [
525 {'message': 'a'},
533 {'message': 'a'},
526 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
534 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
527 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
535 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
528 ]
536 ]
529 pull_request = pr_util.create_pull_request(
537 pull_request = pr_util.create_pull_request(
530 commits=commits, target_head='a', source_head='b', revisions=['b'])
538 commits=commits, target_head='a', source_head='b', revisions=['b'])
531 pr_util.create_inline_comment(file_path='file_b')
539 pr_util.create_inline_comment(file_path='file_b')
532 pr_util.add_one_commit(head='c')
540 pr_util.add_one_commit(head='c')
533
541
534 assert_inline_comments(pull_request, visible=1, outdated=0)
542 assert_inline_comments(pull_request, visible=1, outdated=0)
535
543
536 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
544 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
537 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
545 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
538 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
546 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
539 change_lines = list(base_lines)
547 change_lines = list(base_lines)
540 change_lines.insert(6, 'line 6a added\n')
548 change_lines.insert(6, 'line 6a added\n')
541
549
542 # Changes on the last line of sight
550 # Changes on the last line of sight
543 update_lines = list(change_lines)
551 update_lines = list(change_lines)
544 update_lines[0] = 'line 1 changed\n'
552 update_lines[0] = 'line 1 changed\n'
545 update_lines[-1] = 'line 12 changed\n'
553 update_lines[-1] = 'line 12 changed\n'
546
554
547 def file_b(lines):
555 def file_b(lines):
548 return FileNode('file_b', ''.join(lines))
556 return FileNode('file_b', ''.join(lines))
549
557
550 commits = [
558 commits = [
551 {'message': 'a', 'added': [file_b(base_lines)]},
559 {'message': 'a', 'added': [file_b(base_lines)]},
552 {'message': 'b', 'changed': [file_b(change_lines)]},
560 {'message': 'b', 'changed': [file_b(change_lines)]},
553 {'message': 'c', 'changed': [file_b(update_lines)]},
561 {'message': 'c', 'changed': [file_b(update_lines)]},
554 ]
562 ]
555
563
556 pull_request = pr_util.create_pull_request(
564 pull_request = pr_util.create_pull_request(
557 commits=commits, target_head='a', source_head='b', revisions=['b'])
565 commits=commits, target_head='a', source_head='b', revisions=['b'])
558 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
566 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
559
567
560 with outdated_comments_patcher():
568 with outdated_comments_patcher():
561 pr_util.add_one_commit(head='c')
569 pr_util.add_one_commit(head='c')
562 assert_inline_comments(pull_request, visible=0, outdated=1)
570 assert_inline_comments(pull_request, visible=0, outdated=1)
563
571
564 @pytest.mark.parametrize("change, content", [
572 @pytest.mark.parametrize("change, content", [
565 ('changed', 'changed\n'),
573 ('changed', 'changed\n'),
566 ('removed', ''),
574 ('removed', ''),
567 ], ids=['changed', 'removed'])
575 ], ids=['changed', 'removed'])
568 def test_comment_flagged_on_change(self, pr_util, change, content):
576 def test_comment_flagged_on_change(self, pr_util, change, content):
569 commits = [
577 commits = [
570 {'message': 'a'},
578 {'message': 'a'},
571 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
579 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
572 {'message': 'c', change: [FileNode('file_b', content)]},
580 {'message': 'c', change: [FileNode('file_b', content)]},
573 ]
581 ]
574 pull_request = pr_util.create_pull_request(
582 pull_request = pr_util.create_pull_request(
575 commits=commits, target_head='a', source_head='b', revisions=['b'])
583 commits=commits, target_head='a', source_head='b', revisions=['b'])
576 pr_util.create_inline_comment(file_path='file_b')
584 pr_util.create_inline_comment(file_path='file_b')
577
585
578 with outdated_comments_patcher():
586 with outdated_comments_patcher():
579 pr_util.add_one_commit(head='c')
587 pr_util.add_one_commit(head='c')
580 assert_inline_comments(pull_request, visible=0, outdated=1)
588 assert_inline_comments(pull_request, visible=0, outdated=1)
581
589
582
590
583 @pytest.mark.usefixtures('config_stub')
591 @pytest.mark.usefixtures('config_stub')
584 class TestUpdateChangedFiles(object):
592 class TestUpdateChangedFiles(object):
585
593
586 def test_no_changes_on_unchanged_diff(self, pr_util):
594 def test_no_changes_on_unchanged_diff(self, pr_util):
587 commits = [
595 commits = [
588 {'message': 'a'},
596 {'message': 'a'},
589 {'message': 'b',
597 {'message': 'b',
590 'added': [FileNode('file_b', 'test_content b\n')]},
598 'added': [FileNode('file_b', 'test_content b\n')]},
591 {'message': 'c',
599 {'message': 'c',
592 'added': [FileNode('file_c', 'test_content c\n')]},
600 'added': [FileNode('file_c', 'test_content c\n')]},
593 ]
601 ]
594 # open a PR from a to b, adding file_b
602 # open a PR from a to b, adding file_b
595 pull_request = pr_util.create_pull_request(
603 pull_request = pr_util.create_pull_request(
596 commits=commits, target_head='a', source_head='b', revisions=['b'],
604 commits=commits, target_head='a', source_head='b', revisions=['b'],
597 name_suffix='per-file-review')
605 name_suffix='per-file-review')
598
606
599 # modify PR adding new file file_c
607 # modify PR adding new file file_c
600 pr_util.add_one_commit(head='c')
608 pr_util.add_one_commit(head='c')
601
609
602 assert_pr_file_changes(
610 assert_pr_file_changes(
603 pull_request,
611 pull_request,
604 added=['file_c'],
612 added=['file_c'],
605 modified=[],
613 modified=[],
606 removed=[])
614 removed=[])
607
615
608 def test_modify_and_undo_modification_diff(self, pr_util):
616 def test_modify_and_undo_modification_diff(self, pr_util):
609 commits = [
617 commits = [
610 {'message': 'a'},
618 {'message': 'a'},
611 {'message': 'b',
619 {'message': 'b',
612 'added': [FileNode('file_b', 'test_content b\n')]},
620 'added': [FileNode('file_b', 'test_content b\n')]},
613 {'message': 'c',
621 {'message': 'c',
614 'changed': [FileNode('file_b', 'test_content b modified\n')]},
622 'changed': [FileNode('file_b', 'test_content b modified\n')]},
615 {'message': 'd',
623 {'message': 'd',
616 'changed': [FileNode('file_b', 'test_content b\n')]},
624 'changed': [FileNode('file_b', 'test_content b\n')]},
617 ]
625 ]
618 # open a PR from a to b, adding file_b
626 # open a PR from a to b, adding file_b
619 pull_request = pr_util.create_pull_request(
627 pull_request = pr_util.create_pull_request(
620 commits=commits, target_head='a', source_head='b', revisions=['b'],
628 commits=commits, target_head='a', source_head='b', revisions=['b'],
621 name_suffix='per-file-review')
629 name_suffix='per-file-review')
622
630
623 # modify PR modifying file file_b
631 # modify PR modifying file file_b
624 pr_util.add_one_commit(head='c')
632 pr_util.add_one_commit(head='c')
625
633
626 assert_pr_file_changes(
634 assert_pr_file_changes(
627 pull_request,
635 pull_request,
628 added=[],
636 added=[],
629 modified=['file_b'],
637 modified=['file_b'],
630 removed=[])
638 removed=[])
631
639
632 # move the head again to d, which rollbacks change,
640 # move the head again to d, which rollbacks change,
633 # meaning we should indicate no changes
641 # meaning we should indicate no changes
634 pr_util.add_one_commit(head='d')
642 pr_util.add_one_commit(head='d')
635
643
636 assert_pr_file_changes(
644 assert_pr_file_changes(
637 pull_request,
645 pull_request,
638 added=[],
646 added=[],
639 modified=[],
647 modified=[],
640 removed=[])
648 removed=[])
641
649
642 def test_updated_all_files_in_pr(self, pr_util):
650 def test_updated_all_files_in_pr(self, pr_util):
643 commits = [
651 commits = [
644 {'message': 'a'},
652 {'message': 'a'},
645 {'message': 'b', 'added': [
653 {'message': 'b', 'added': [
646 FileNode('file_a', 'test_content a\n'),
654 FileNode('file_a', 'test_content a\n'),
647 FileNode('file_b', 'test_content b\n'),
655 FileNode('file_b', 'test_content b\n'),
648 FileNode('file_c', 'test_content c\n')]},
656 FileNode('file_c', 'test_content c\n')]},
649 {'message': 'c', 'changed': [
657 {'message': 'c', 'changed': [
650 FileNode('file_a', 'test_content a changed\n'),
658 FileNode('file_a', 'test_content a changed\n'),
651 FileNode('file_b', 'test_content b changed\n'),
659 FileNode('file_b', 'test_content b changed\n'),
652 FileNode('file_c', 'test_content c changed\n')]},
660 FileNode('file_c', 'test_content c changed\n')]},
653 ]
661 ]
654 # open a PR from a to b, changing 3 files
662 # open a PR from a to b, changing 3 files
655 pull_request = pr_util.create_pull_request(
663 pull_request = pr_util.create_pull_request(
656 commits=commits, target_head='a', source_head='b', revisions=['b'],
664 commits=commits, target_head='a', source_head='b', revisions=['b'],
657 name_suffix='per-file-review')
665 name_suffix='per-file-review')
658
666
659 pr_util.add_one_commit(head='c')
667 pr_util.add_one_commit(head='c')
660
668
661 assert_pr_file_changes(
669 assert_pr_file_changes(
662 pull_request,
670 pull_request,
663 added=[],
671 added=[],
664 modified=['file_a', 'file_b', 'file_c'],
672 modified=['file_a', 'file_b', 'file_c'],
665 removed=[])
673 removed=[])
666
674
667 def test_updated_and_removed_all_files_in_pr(self, pr_util):
675 def test_updated_and_removed_all_files_in_pr(self, pr_util):
668 commits = [
676 commits = [
669 {'message': 'a'},
677 {'message': 'a'},
670 {'message': 'b', 'added': [
678 {'message': 'b', 'added': [
671 FileNode('file_a', 'test_content a\n'),
679 FileNode('file_a', 'test_content a\n'),
672 FileNode('file_b', 'test_content b\n'),
680 FileNode('file_b', 'test_content b\n'),
673 FileNode('file_c', 'test_content c\n')]},
681 FileNode('file_c', 'test_content c\n')]},
674 {'message': 'c', 'removed': [
682 {'message': 'c', 'removed': [
675 FileNode('file_a', 'test_content a changed\n'),
683 FileNode('file_a', 'test_content a changed\n'),
676 FileNode('file_b', 'test_content b changed\n'),
684 FileNode('file_b', 'test_content b changed\n'),
677 FileNode('file_c', 'test_content c changed\n')]},
685 FileNode('file_c', 'test_content c changed\n')]},
678 ]
686 ]
679 # open a PR from a to b, removing 3 files
687 # open a PR from a to b, removing 3 files
680 pull_request = pr_util.create_pull_request(
688 pull_request = pr_util.create_pull_request(
681 commits=commits, target_head='a', source_head='b', revisions=['b'],
689 commits=commits, target_head='a', source_head='b', revisions=['b'],
682 name_suffix='per-file-review')
690 name_suffix='per-file-review')
683
691
684 pr_util.add_one_commit(head='c')
692 pr_util.add_one_commit(head='c')
685
693
686 assert_pr_file_changes(
694 assert_pr_file_changes(
687 pull_request,
695 pull_request,
688 added=[],
696 added=[],
689 modified=[],
697 modified=[],
690 removed=['file_a', 'file_b', 'file_c'])
698 removed=['file_a', 'file_b', 'file_c'])
691
699
692
700
693 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
701 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
694 model = PullRequestModel()
702 model = PullRequestModel()
695 pull_request = pr_util.create_pull_request()
703 pull_request = pr_util.create_pull_request()
696 pr_util.update_source_repository()
704 pr_util.update_source_repository()
697
705
698 model.update_commits(pull_request)
706 model.update_commits(pull_request)
699
707
700 # Expect that it has a version entry now
708 # Expect that it has a version entry now
701 assert len(model.get_versions(pull_request)) == 1
709 assert len(model.get_versions(pull_request)) == 1
702
710
703
711
704 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
712 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
705 pull_request = pr_util.create_pull_request()
713 pull_request = pr_util.create_pull_request()
706 model = PullRequestModel()
714 model = PullRequestModel()
707 model.update_commits(pull_request)
715 model.update_commits(pull_request)
708
716
709 # Expect that it still has no versions
717 # Expect that it still has no versions
710 assert len(model.get_versions(pull_request)) == 0
718 assert len(model.get_versions(pull_request)) == 0
711
719
712
720
713 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
721 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
714 model = PullRequestModel()
722 model = PullRequestModel()
715 pull_request = pr_util.create_pull_request()
723 pull_request = pr_util.create_pull_request()
716 comment = pr_util.create_comment()
724 comment = pr_util.create_comment()
717 pr_util.update_source_repository()
725 pr_util.update_source_repository()
718
726
719 model.update_commits(pull_request)
727 model.update_commits(pull_request)
720
728
721 # Expect that the comment is linked to the pr version now
729 # Expect that the comment is linked to the pr version now
722 assert comment.pull_request_version == model.get_versions(pull_request)[0]
730 assert comment.pull_request_version == model.get_versions(pull_request)[0]
723
731
724
732
725 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
733 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
726 model = PullRequestModel()
734 model = PullRequestModel()
727 pull_request = pr_util.create_pull_request()
735 pull_request = pr_util.create_pull_request()
728 pr_util.update_source_repository()
736 pr_util.update_source_repository()
729 pr_util.update_source_repository()
737 pr_util.update_source_repository()
730
738
731 model.update_commits(pull_request)
739 model.update_commits(pull_request)
732
740
733 # Expect to find a new comment about the change
741 # Expect to find a new comment about the change
734 expected_message = textwrap.dedent(
742 expected_message = textwrap.dedent(
735 """\
743 """\
736 Pull request updated. Auto status change to |under_review|
744 Pull request updated. Auto status change to |under_review|
737
745
738 .. role:: added
746 .. role:: added
739 .. role:: removed
747 .. role:: removed
740 .. parsed-literal::
748 .. parsed-literal::
741
749
742 Changed commits:
750 Changed commits:
743 * :added:`1 added`
751 * :added:`1 added`
744 * :removed:`0 removed`
752 * :removed:`0 removed`
745
753
746 Changed files:
754 Changed files:
747 * `A file_2 <#a_c--92ed3b5f07b4>`_
755 * `A file_2 <#a_c--92ed3b5f07b4>`_
748
756
749 .. |under_review| replace:: *"Under Review"*"""
757 .. |under_review| replace:: *"Under Review"*"""
750 )
758 )
751 pull_request_comments = sorted(
759 pull_request_comments = sorted(
752 pull_request.comments, key=lambda c: c.modified_at)
760 pull_request.comments, key=lambda c: c.modified_at)
753 update_comment = pull_request_comments[-1]
761 update_comment = pull_request_comments[-1]
754 assert update_comment.text == expected_message
762 assert update_comment.text == expected_message
755
763
756
764
757 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
765 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
758 pull_request = pr_util.create_pull_request()
766 pull_request = pr_util.create_pull_request()
759
767
760 # Avoiding default values
768 # Avoiding default values
761 pull_request.status = PullRequest.STATUS_CLOSED
769 pull_request.status = PullRequest.STATUS_CLOSED
762 pull_request._last_merge_source_rev = "0" * 40
770 pull_request._last_merge_source_rev = "0" * 40
763 pull_request._last_merge_target_rev = "1" * 40
771 pull_request._last_merge_target_rev = "1" * 40
764 pull_request.last_merge_status = 1
772 pull_request.last_merge_status = 1
765 pull_request.merge_rev = "2" * 40
773 pull_request.merge_rev = "2" * 40
766
774
767 # Remember automatic values
775 # Remember automatic values
768 created_on = pull_request.created_on
776 created_on = pull_request.created_on
769 updated_on = pull_request.updated_on
777 updated_on = pull_request.updated_on
770
778
771 # Create a new version of the pull request
779 # Create a new version of the pull request
772 version = PullRequestModel()._create_version_from_snapshot(pull_request)
780 version = PullRequestModel()._create_version_from_snapshot(pull_request)
773
781
774 # Check attributes
782 # Check attributes
775 assert version.title == pr_util.create_parameters['title']
783 assert version.title == pr_util.create_parameters['title']
776 assert version.description == pr_util.create_parameters['description']
784 assert version.description == pr_util.create_parameters['description']
777 assert version.status == PullRequest.STATUS_CLOSED
785 assert version.status == PullRequest.STATUS_CLOSED
778
786
779 # versions get updated created_on
787 # versions get updated created_on
780 assert version.created_on != created_on
788 assert version.created_on != created_on
781
789
782 assert version.updated_on == updated_on
790 assert version.updated_on == updated_on
783 assert version.user_id == pull_request.user_id
791 assert version.user_id == pull_request.user_id
784 assert version.revisions == pr_util.create_parameters['revisions']
792 assert version.revisions == pr_util.create_parameters['revisions']
785 assert version.source_repo == pr_util.source_repository
793 assert version.source_repo == pr_util.source_repository
786 assert version.source_ref == pr_util.create_parameters['source_ref']
794 assert version.source_ref == pr_util.create_parameters['source_ref']
787 assert version.target_repo == pr_util.target_repository
795 assert version.target_repo == pr_util.target_repository
788 assert version.target_ref == pr_util.create_parameters['target_ref']
796 assert version.target_ref == pr_util.create_parameters['target_ref']
789 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
797 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
790 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
798 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
791 assert version.last_merge_status == pull_request.last_merge_status
799 assert version.last_merge_status == pull_request.last_merge_status
792 assert version.merge_rev == pull_request.merge_rev
800 assert version.merge_rev == pull_request.merge_rev
793 assert version.pull_request == pull_request
801 assert version.pull_request == pull_request
794
802
795
803
796 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
804 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
797 version1 = pr_util.create_version_of_pull_request()
805 version1 = pr_util.create_version_of_pull_request()
798 comment_linked = pr_util.create_comment(linked_to=version1)
806 comment_linked = pr_util.create_comment(linked_to=version1)
799 comment_unlinked = pr_util.create_comment()
807 comment_unlinked = pr_util.create_comment()
800 version2 = pr_util.create_version_of_pull_request()
808 version2 = pr_util.create_version_of_pull_request()
801
809
802 PullRequestModel()._link_comments_to_version(version2)
810 PullRequestModel()._link_comments_to_version(version2)
803
811
804 # Expect that only the new comment is linked to version2
812 # Expect that only the new comment is linked to version2
805 assert (
813 assert (
806 comment_unlinked.pull_request_version_id ==
814 comment_unlinked.pull_request_version_id ==
807 version2.pull_request_version_id)
815 version2.pull_request_version_id)
808 assert (
816 assert (
809 comment_linked.pull_request_version_id ==
817 comment_linked.pull_request_version_id ==
810 version1.pull_request_version_id)
818 version1.pull_request_version_id)
811 assert (
819 assert (
812 comment_unlinked.pull_request_version_id !=
820 comment_unlinked.pull_request_version_id !=
813 comment_linked.pull_request_version_id)
821 comment_linked.pull_request_version_id)
814
822
815
823
816 def test_calculate_commits():
824 def test_calculate_commits():
817 old_ids = [1, 2, 3]
825 old_ids = [1, 2, 3]
818 new_ids = [1, 3, 4, 5]
826 new_ids = [1, 3, 4, 5]
819 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
827 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
820 assert change.added == [4, 5]
828 assert change.added == [4, 5]
821 assert change.common == [1, 3]
829 assert change.common == [1, 3]
822 assert change.removed == [2]
830 assert change.removed == [2]
823 assert change.total == [1, 3, 4, 5]
831 assert change.total == [1, 3, 4, 5]
824
832
825
833
826 def assert_inline_comments(pull_request, visible=None, outdated=None):
834 def assert_inline_comments(pull_request, visible=None, outdated=None):
827 if visible is not None:
835 if visible is not None:
828 inline_comments = CommentsModel().get_inline_comments(
836 inline_comments = CommentsModel().get_inline_comments(
829 pull_request.target_repo.repo_id, pull_request=pull_request)
837 pull_request.target_repo.repo_id, pull_request=pull_request)
830 inline_cnt = CommentsModel().get_inline_comments_count(
838 inline_cnt = CommentsModel().get_inline_comments_count(
831 inline_comments)
839 inline_comments)
832 assert inline_cnt == visible
840 assert inline_cnt == visible
833 if outdated is not None:
841 if outdated is not None:
834 outdated_comments = CommentsModel().get_outdated_comments(
842 outdated_comments = CommentsModel().get_outdated_comments(
835 pull_request.target_repo.repo_id, pull_request)
843 pull_request.target_repo.repo_id, pull_request)
836 assert len(outdated_comments) == outdated
844 assert len(outdated_comments) == outdated
837
845
838
846
839 def assert_pr_file_changes(
847 def assert_pr_file_changes(
840 pull_request, added=None, modified=None, removed=None):
848 pull_request, added=None, modified=None, removed=None):
841 pr_versions = PullRequestModel().get_versions(pull_request)
849 pr_versions = PullRequestModel().get_versions(pull_request)
842 # always use first version, ie original PR to calculate changes
850 # always use first version, ie original PR to calculate changes
843 pull_request_version = pr_versions[0]
851 pull_request_version = pr_versions[0]
844 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
852 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
845 pull_request, pull_request_version)
853 pull_request, pull_request_version)
846 file_changes = PullRequestModel()._calculate_file_changes(
854 file_changes = PullRequestModel()._calculate_file_changes(
847 old_diff_data, new_diff_data)
855 old_diff_data, new_diff_data)
848
856
849 assert added == file_changes.added, \
857 assert added == file_changes.added, \
850 'expected added:%s vs value:%s' % (added, file_changes.added)
858 'expected added:%s vs value:%s' % (added, file_changes.added)
851 assert modified == file_changes.modified, \
859 assert modified == file_changes.modified, \
852 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
860 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
853 assert removed == file_changes.removed, \
861 assert removed == file_changes.removed, \
854 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
862 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
855
863
856
864
857 def outdated_comments_patcher(use_outdated=True):
865 def outdated_comments_patcher(use_outdated=True):
858 return mock.patch.object(
866 return mock.patch.object(
859 CommentsModel, 'use_outdated_comments',
867 CommentsModel, 'use_outdated_comments',
860 return_value=use_outdated)
868 return_value=use_outdated)
@@ -1,1289 +1,1289 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
41 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
42
42
43
43
44 def repo_path_generator():
44 def repo_path_generator():
45 """
45 """
46 Return a different path to be used for cloning repos.
46 Return a different path to be used for cloning repos.
47 """
47 """
48 i = 0
48 i = 0
49 while True:
49 while True:
50 i += 1
50 i += 1
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52
52
53
53
54 REPO_PATH_GENERATOR = repo_path_generator()
54 REPO_PATH_GENERATOR = repo_path_generator()
55
55
56
56
57 class TestGitRepository:
57 class TestGitRepository:
58
58
59 # pylint: disable=protected-access
59 # pylint: disable=protected-access
60
60
61 def __check_for_existing_repo(self):
61 def __check_for_existing_repo(self):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 self.fail('Cannot test git clone repo as location %s already '
63 self.fail('Cannot test git clone repo as location %s already '
64 'exists. You should manually remove it first.'
64 'exists. You should manually remove it first.'
65 % TEST_GIT_REPO_CLONE)
65 % TEST_GIT_REPO_CLONE)
66
66
67 @pytest.fixture(autouse=True)
67 @pytest.fixture(autouse=True)
68 def prepare(self, request, baseapp):
68 def prepare(self, request, baseapp):
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70
70
71 def get_clone_repo(self):
71 def get_clone_repo(self):
72 """
72 """
73 Return a non bare clone of the base repo.
73 Return a non bare clone of the base repo.
74 """
74 """
75 clone_path = next(REPO_PATH_GENERATOR)
75 clone_path = next(REPO_PATH_GENERATOR)
76 repo_clone = GitRepository(
76 repo_clone = GitRepository(
77 clone_path, create=True, src_url=self.repo.path, bare=False)
77 clone_path, create=True, src_url=self.repo.path, bare=False)
78
78
79 return repo_clone
79 return repo_clone
80
80
81 def get_empty_repo(self, bare=False):
81 def get_empty_repo(self, bare=False):
82 """
82 """
83 Return a non bare empty repo.
83 Return a non bare empty repo.
84 """
84 """
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86
86
87 def test_wrong_repo_path(self):
87 def test_wrong_repo_path(self):
88 wrong_repo_path = '/tmp/errorrepo_git'
88 wrong_repo_path = '/tmp/errorrepo_git'
89 with pytest.raises(RepositoryError):
89 with pytest.raises(RepositoryError):
90 GitRepository(wrong_repo_path)
90 GitRepository(wrong_repo_path)
91
91
92 def test_repo_clone(self):
92 def test_repo_clone(self):
93 self.__check_for_existing_repo()
93 self.__check_for_existing_repo()
94 repo = GitRepository(TEST_GIT_REPO)
94 repo = GitRepository(TEST_GIT_REPO)
95 repo_clone = GitRepository(
95 repo_clone = GitRepository(
96 TEST_GIT_REPO_CLONE,
96 TEST_GIT_REPO_CLONE,
97 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
97 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 # Checking hashes of commits should be enough
99 # Checking hashes of commits should be enough
100 for commit in repo.get_commits():
100 for commit in repo.get_commits():
101 raw_id = commit.raw_id
101 raw_id = commit.raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103
103
104 def test_repo_clone_without_create(self):
104 def test_repo_clone_without_create(self):
105 with pytest.raises(RepositoryError):
105 with pytest.raises(RepositoryError):
106 GitRepository(
106 GitRepository(
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108
108
109 def test_repo_clone_with_update(self):
109 def test_repo_clone_with_update(self):
110 repo = GitRepository(TEST_GIT_REPO)
110 repo = GitRepository(TEST_GIT_REPO)
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 repo_clone = GitRepository(
112 repo_clone = GitRepository(
113 clone_path,
113 clone_path,
114 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
114 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116
116
117 # check if current workdir was updated
117 # check if current workdir was updated
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 assert os.path.isfile(fpath)
119 assert os.path.isfile(fpath)
120
120
121 def test_repo_clone_without_update(self):
121 def test_repo_clone_without_update(self):
122 repo = GitRepository(TEST_GIT_REPO)
122 repo = GitRepository(TEST_GIT_REPO)
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 repo_clone = GitRepository(
124 repo_clone = GitRepository(
125 clone_path,
125 clone_path,
126 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
126 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 # check if current workdir was *NOT* updated
128 # check if current workdir was *NOT* updated
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 # Make sure it's not bare repo
130 # Make sure it's not bare repo
131 assert not repo_clone.bare
131 assert not repo_clone.bare
132 assert not os.path.isfile(fpath)
132 assert not os.path.isfile(fpath)
133
133
134 def test_repo_clone_into_bare_repo(self):
134 def test_repo_clone_into_bare_repo(self):
135 repo = GitRepository(TEST_GIT_REPO)
135 repo = GitRepository(TEST_GIT_REPO)
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 repo_clone = GitRepository(
137 repo_clone = GitRepository(
138 clone_path, create=True, src_url=repo.path, bare=True)
138 clone_path, create=True, src_url=repo.path, bare=True)
139 assert repo_clone.bare
139 assert repo_clone.bare
140
140
141 def test_create_repo_is_not_bare_by_default(self):
141 def test_create_repo_is_not_bare_by_default(self):
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 assert not repo.bare
143 assert not repo.bare
144
144
145 def test_create_bare_repo(self):
145 def test_create_bare_repo(self):
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 assert repo.bare
147 assert repo.bare
148
148
149 def test_update_server_info(self):
149 def test_update_server_info(self):
150 self.repo._update_server_info()
150 self.repo._update_server_info()
151
151
152 def test_fetch(self, vcsbackend_git):
152 def test_fetch(self, vcsbackend_git):
153 # Note: This is a git specific part of the API, it's only implemented
153 # Note: This is a git specific part of the API, it's only implemented
154 # by the git backend.
154 # by the git backend.
155 source_repo = vcsbackend_git.repo
155 source_repo = vcsbackend_git.repo
156 target_repo = vcsbackend_git.create_repo()
156 target_repo = vcsbackend_git.create_repo()
157 target_repo.fetch(source_repo.path)
157 target_repo.fetch(source_repo.path)
158 # Note: Get a fresh instance, avoids caching trouble
158 # Note: Get a fresh instance, avoids caching trouble
159 target_repo = vcsbackend_git.backend(target_repo.path)
159 target_repo = vcsbackend_git.backend(target_repo.path)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161
161
162 def test_commit_ids(self):
162 def test_commit_ids(self):
163 # there are 112 commits (by now)
163 # there are 112 commits (by now)
164 # so we can assume they would be available from now on
164 # so we can assume they would be available from now on
165 subset = set([
165 subset = set([
166 'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 'c1214f7e79e02fc37156ff215cd71275450cffc3',
167 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
168 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 'fa6600f6848800641328adbf7811fd2372c02ab2',
169 '102607b09cdd60e2793929c4f90478be29f85a17',
169 '102607b09cdd60e2793929c4f90478be29f85a17',
170 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
171 '2d1028c054665b962fa3d307adfc923ddd528038',
171 '2d1028c054665b962fa3d307adfc923ddd528038',
172 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
173 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
174 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
175 '8430a588b43b5d6da365400117c89400326e7992',
175 '8430a588b43b5d6da365400117c89400326e7992',
176 'd955cd312c17b02143c04fa1099a352b04368118',
176 'd955cd312c17b02143c04fa1099a352b04368118',
177 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
178 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
179 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 'f298fe1189f1b69779a4423f40b48edf92a703fc',
180 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
181 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
182 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
183 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
184 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
185 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 '45223f8f114c64bf4d6f853e3c35a369a6305520',
186 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
187 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
188 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 '27d48942240f5b91dfda77accd2caac94708cc7d',
189 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
190 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
190 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
191 assert subset.issubset(set(self.repo.commit_ids))
191 assert subset.issubset(set(self.repo.commit_ids))
192
192
193 def test_slicing(self):
193 def test_slicing(self):
194 # 4 1 5 10 95
194 # 4 1 5 10 95
195 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
196 (10, 20, 10), (5, 100, 95)]:
196 (10, 20, 10), (5, 100, 95)]:
197 commit_ids = list(self.repo[sfrom:sto])
197 commit_ids = list(self.repo[sfrom:sto])
198 assert len(commit_ids) == size
198 assert len(commit_ids) == size
199 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
200 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
201
201
202 def test_branches(self):
202 def test_branches(self):
203 # TODO: Need more tests here
203 # TODO: Need more tests here
204 # Removed (those are 'remotes' branches for cloned repo)
204 # Removed (those are 'remotes' branches for cloned repo)
205 # assert 'master' in self.repo.branches
205 # assert 'master' in self.repo.branches
206 # assert 'gittree' in self.repo.branches
206 # assert 'gittree' in self.repo.branches
207 # assert 'web-branch' in self.repo.branches
207 # assert 'web-branch' in self.repo.branches
208 for __, commit_id in self.repo.branches.items():
208 for __, commit_id in self.repo.branches.items():
209 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
210
210
211 def test_tags(self):
211 def test_tags(self):
212 # TODO: Need more tests here
212 # TODO: Need more tests here
213 assert 'v0.1.1' in self.repo.tags
213 assert 'v0.1.1' in self.repo.tags
214 assert 'v0.1.2' in self.repo.tags
214 assert 'v0.1.2' in self.repo.tags
215 for __, commit_id in self.repo.tags.items():
215 for __, commit_id in self.repo.tags.items():
216 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
217
217
218 def _test_single_commit_cache(self, commit_id):
218 def _test_single_commit_cache(self, commit_id):
219 commit = self.repo.get_commit(commit_id)
219 commit = self.repo.get_commit(commit_id)
220 assert commit_id in self.repo.commits
220 assert commit_id in self.repo.commits
221 assert commit is self.repo.commits[commit_id]
221 assert commit is self.repo.commits[commit_id]
222
222
223 def test_initial_commit(self):
223 def test_initial_commit(self):
224 commit_id = self.repo.commit_ids[0]
224 commit_id = self.repo.commit_ids[0]
225 init_commit = self.repo.get_commit(commit_id)
225 init_commit = self.repo.get_commit(commit_id)
226 init_author = init_commit.author
226 init_author = init_commit.author
227
227
228 assert init_commit.message == 'initial import\n'
228 assert init_commit.message == 'initial import\n'
229 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
230 assert init_author == init_commit.committer
230 assert init_author == init_commit.committer
231 for path in ('vcs/__init__.py',
231 for path in ('vcs/__init__.py',
232 'vcs/backends/BaseRepository.py',
232 'vcs/backends/BaseRepository.py',
233 'vcs/backends/__init__.py'):
233 'vcs/backends/__init__.py'):
234 assert isinstance(init_commit.get_node(path), FileNode)
234 assert isinstance(init_commit.get_node(path), FileNode)
235 for path in ('', 'vcs', 'vcs/backends'):
235 for path in ('', 'vcs', 'vcs/backends'):
236 assert isinstance(init_commit.get_node(path), DirNode)
236 assert isinstance(init_commit.get_node(path), DirNode)
237
237
238 with pytest.raises(NodeDoesNotExistError):
238 with pytest.raises(NodeDoesNotExistError):
239 init_commit.get_node(path='foobar')
239 init_commit.get_node(path='foobar')
240
240
241 node = init_commit.get_node('vcs/')
241 node = init_commit.get_node('vcs/')
242 assert hasattr(node, 'kind')
242 assert hasattr(node, 'kind')
243 assert node.kind == NodeKind.DIR
243 assert node.kind == NodeKind.DIR
244
244
245 node = init_commit.get_node('vcs')
245 node = init_commit.get_node('vcs')
246 assert hasattr(node, 'kind')
246 assert hasattr(node, 'kind')
247 assert node.kind == NodeKind.DIR
247 assert node.kind == NodeKind.DIR
248
248
249 node = init_commit.get_node('vcs/__init__.py')
249 node = init_commit.get_node('vcs/__init__.py')
250 assert hasattr(node, 'kind')
250 assert hasattr(node, 'kind')
251 assert node.kind == NodeKind.FILE
251 assert node.kind == NodeKind.FILE
252
252
253 def test_not_existing_commit(self):
253 def test_not_existing_commit(self):
254 with pytest.raises(RepositoryError):
254 with pytest.raises(RepositoryError):
255 self.repo.get_commit('f' * 40)
255 self.repo.get_commit('f' * 40)
256
256
257 def test_commit10(self):
257 def test_commit10(self):
258
258
259 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
260 README = """===
260 README = """===
261 VCS
261 VCS
262 ===
262 ===
263
263
264 Various Version Control System management abstraction layer for Python.
264 Various Version Control System management abstraction layer for Python.
265
265
266 Introduction
266 Introduction
267 ------------
267 ------------
268
268
269 TODO: To be written...
269 TODO: To be written...
270
270
271 """
271 """
272 node = commit10.get_node('README.rst')
272 node = commit10.get_node('README.rst')
273 assert node.kind == NodeKind.FILE
273 assert node.kind == NodeKind.FILE
274 assert node.content == README
274 assert node.content == README
275
275
276 def test_head(self):
276 def test_head(self):
277 assert self.repo.head == self.repo.get_commit().raw_id
277 assert self.repo.head == self.repo.get_commit().raw_id
278
278
279 def test_checkout_with_create(self):
279 def test_checkout_with_create(self):
280 repo_clone = self.get_clone_repo()
280 repo_clone = self.get_clone_repo()
281
281
282 new_branch = 'new_branch'
282 new_branch = 'new_branch'
283 assert repo_clone._current_branch() == 'master'
283 assert repo_clone._current_branch() == 'master'
284 assert set(repo_clone.branches) == set(('master',))
284 assert set(repo_clone.branches) == set(('master',))
285 repo_clone._checkout(new_branch, create=True)
285 repo_clone._checkout(new_branch, create=True)
286
286
287 # Branches is a lazy property so we need to recrete the Repo object.
287 # Branches is a lazy property so we need to recrete the Repo object.
288 repo_clone = GitRepository(repo_clone.path)
288 repo_clone = GitRepository(repo_clone.path)
289 assert set(repo_clone.branches) == set(('master', new_branch))
289 assert set(repo_clone.branches) == set(('master', new_branch))
290 assert repo_clone._current_branch() == new_branch
290 assert repo_clone._current_branch() == new_branch
291
291
292 def test_checkout(self):
292 def test_checkout(self):
293 repo_clone = self.get_clone_repo()
293 repo_clone = self.get_clone_repo()
294
294
295 repo_clone._checkout('new_branch', create=True)
295 repo_clone._checkout('new_branch', create=True)
296 repo_clone._checkout('master')
296 repo_clone._checkout('master')
297
297
298 assert repo_clone._current_branch() == 'master'
298 assert repo_clone._current_branch() == 'master'
299
299
300 def test_checkout_same_branch(self):
300 def test_checkout_same_branch(self):
301 repo_clone = self.get_clone_repo()
301 repo_clone = self.get_clone_repo()
302
302
303 repo_clone._checkout('master')
303 repo_clone._checkout('master')
304 assert repo_clone._current_branch() == 'master'
304 assert repo_clone._current_branch() == 'master'
305
305
306 def test_checkout_branch_already_exists(self):
306 def test_checkout_branch_already_exists(self):
307 repo_clone = self.get_clone_repo()
307 repo_clone = self.get_clone_repo()
308
308
309 with pytest.raises(RepositoryError):
309 with pytest.raises(RepositoryError):
310 repo_clone._checkout('master', create=True)
310 repo_clone._checkout('master', create=True)
311
311
312 def test_checkout_bare_repo(self):
312 def test_checkout_bare_repo(self):
313 with pytest.raises(RepositoryError):
313 with pytest.raises(RepositoryError):
314 self.repo._checkout('master')
314 self.repo._checkout('master')
315
315
316 def test_current_branch_bare_repo(self):
316 def test_current_branch_bare_repo(self):
317 with pytest.raises(RepositoryError):
317 with pytest.raises(RepositoryError):
318 self.repo._current_branch()
318 self.repo._current_branch()
319
319
320 def test_current_branch_empty_repo(self):
320 def test_current_branch_empty_repo(self):
321 repo = self.get_empty_repo()
321 repo = self.get_empty_repo()
322 assert repo._current_branch() is None
322 assert repo._current_branch() is None
323
323
324 def test_local_clone(self):
324 def test_local_clone(self):
325 clone_path = next(REPO_PATH_GENERATOR)
325 clone_path = next(REPO_PATH_GENERATOR)
326 self.repo._local_clone(clone_path, 'master')
326 self.repo._local_clone(clone_path, 'master')
327 repo_clone = GitRepository(clone_path)
327 repo_clone = GitRepository(clone_path)
328
328
329 assert self.repo.commit_ids == repo_clone.commit_ids
329 assert self.repo.commit_ids == repo_clone.commit_ids
330
330
331 def test_local_clone_with_specific_branch(self):
331 def test_local_clone_with_specific_branch(self):
332 source_repo = self.get_clone_repo()
332 source_repo = self.get_clone_repo()
333
333
334 # Create a new branch in source repo
334 # Create a new branch in source repo
335 new_branch_commit = source_repo.commit_ids[-3]
335 new_branch_commit = source_repo.commit_ids[-3]
336 source_repo._checkout(new_branch_commit)
336 source_repo._checkout(new_branch_commit)
337 source_repo._checkout('new_branch', create=True)
337 source_repo._checkout('new_branch', create=True)
338
338
339 clone_path = next(REPO_PATH_GENERATOR)
339 clone_path = next(REPO_PATH_GENERATOR)
340 source_repo._local_clone(clone_path, 'new_branch')
340 source_repo._local_clone(clone_path, 'new_branch')
341 repo_clone = GitRepository(clone_path)
341 repo_clone = GitRepository(clone_path)
342
342
343 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
343 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
344
344
345 clone_path = next(REPO_PATH_GENERATOR)
345 clone_path = next(REPO_PATH_GENERATOR)
346 source_repo._local_clone(clone_path, 'master')
346 source_repo._local_clone(clone_path, 'master')
347 repo_clone = GitRepository(clone_path)
347 repo_clone = GitRepository(clone_path)
348
348
349 assert source_repo.commit_ids == repo_clone.commit_ids
349 assert source_repo.commit_ids == repo_clone.commit_ids
350
350
351 def test_local_clone_fails_if_target_exists(self):
351 def test_local_clone_fails_if_target_exists(self):
352 with pytest.raises(RepositoryError):
352 with pytest.raises(RepositoryError):
353 self.repo._local_clone(self.repo.path, 'master')
353 self.repo._local_clone(self.repo.path, 'master')
354
354
355 def test_local_fetch(self):
355 def test_local_fetch(self):
356 target_repo = self.get_empty_repo()
356 target_repo = self.get_empty_repo()
357 source_repo = self.get_clone_repo()
357 source_repo = self.get_clone_repo()
358
358
359 # Create a new branch in source repo
359 # Create a new branch in source repo
360 master_commit = source_repo.commit_ids[-1]
360 master_commit = source_repo.commit_ids[-1]
361 new_branch_commit = source_repo.commit_ids[-3]
361 new_branch_commit = source_repo.commit_ids[-3]
362 source_repo._checkout(new_branch_commit)
362 source_repo._checkout(new_branch_commit)
363 source_repo._checkout('new_branch', create=True)
363 source_repo._checkout('new_branch', create=True)
364
364
365 target_repo._local_fetch(source_repo.path, 'new_branch')
365 target_repo._local_fetch(source_repo.path, 'new_branch')
366 assert target_repo._last_fetch_heads() == [new_branch_commit]
366 assert target_repo._last_fetch_heads() == [new_branch_commit]
367
367
368 target_repo._local_fetch(source_repo.path, 'master')
368 target_repo._local_fetch(source_repo.path, 'master')
369 assert target_repo._last_fetch_heads() == [master_commit]
369 assert target_repo._last_fetch_heads() == [master_commit]
370
370
371 def test_local_fetch_from_bare_repo(self):
371 def test_local_fetch_from_bare_repo(self):
372 target_repo = self.get_empty_repo()
372 target_repo = self.get_empty_repo()
373 target_repo._local_fetch(self.repo.path, 'master')
373 target_repo._local_fetch(self.repo.path, 'master')
374
374
375 master_commit = self.repo.commit_ids[-1]
375 master_commit = self.repo.commit_ids[-1]
376 assert target_repo._last_fetch_heads() == [master_commit]
376 assert target_repo._last_fetch_heads() == [master_commit]
377
377
378 def test_local_fetch_from_same_repo(self):
378 def test_local_fetch_from_same_repo(self):
379 with pytest.raises(ValueError):
379 with pytest.raises(ValueError):
380 self.repo._local_fetch(self.repo.path, 'master')
380 self.repo._local_fetch(self.repo.path, 'master')
381
381
382 def test_local_fetch_branch_does_not_exist(self):
382 def test_local_fetch_branch_does_not_exist(self):
383 target_repo = self.get_empty_repo()
383 target_repo = self.get_empty_repo()
384
384
385 with pytest.raises(RepositoryError):
385 with pytest.raises(RepositoryError):
386 target_repo._local_fetch(self.repo.path, 'new_branch')
386 target_repo._local_fetch(self.repo.path, 'new_branch')
387
387
388 def test_local_pull(self):
388 def test_local_pull(self):
389 target_repo = self.get_empty_repo()
389 target_repo = self.get_empty_repo()
390 source_repo = self.get_clone_repo()
390 source_repo = self.get_clone_repo()
391
391
392 # Create a new branch in source repo
392 # Create a new branch in source repo
393 master_commit = source_repo.commit_ids[-1]
393 master_commit = source_repo.commit_ids[-1]
394 new_branch_commit = source_repo.commit_ids[-3]
394 new_branch_commit = source_repo.commit_ids[-3]
395 source_repo._checkout(new_branch_commit)
395 source_repo._checkout(new_branch_commit)
396 source_repo._checkout('new_branch', create=True)
396 source_repo._checkout('new_branch', create=True)
397
397
398 target_repo._local_pull(source_repo.path, 'new_branch')
398 target_repo._local_pull(source_repo.path, 'new_branch')
399 target_repo = GitRepository(target_repo.path)
399 target_repo = GitRepository(target_repo.path)
400 assert target_repo.head == new_branch_commit
400 assert target_repo.head == new_branch_commit
401
401
402 target_repo._local_pull(source_repo.path, 'master')
402 target_repo._local_pull(source_repo.path, 'master')
403 target_repo = GitRepository(target_repo.path)
403 target_repo = GitRepository(target_repo.path)
404 assert target_repo.head == master_commit
404 assert target_repo.head == master_commit
405
405
406 def test_local_pull_in_bare_repo(self):
406 def test_local_pull_in_bare_repo(self):
407 with pytest.raises(RepositoryError):
407 with pytest.raises(RepositoryError):
408 self.repo._local_pull(self.repo.path, 'master')
408 self.repo._local_pull(self.repo.path, 'master')
409
409
410 def test_local_merge(self):
410 def test_local_merge(self):
411 target_repo = self.get_empty_repo()
411 target_repo = self.get_empty_repo()
412 source_repo = self.get_clone_repo()
412 source_repo = self.get_clone_repo()
413
413
414 # Create a new branch in source repo
414 # Create a new branch in source repo
415 master_commit = source_repo.commit_ids[-1]
415 master_commit = source_repo.commit_ids[-1]
416 new_branch_commit = source_repo.commit_ids[-3]
416 new_branch_commit = source_repo.commit_ids[-3]
417 source_repo._checkout(new_branch_commit)
417 source_repo._checkout(new_branch_commit)
418 source_repo._checkout('new_branch', create=True)
418 source_repo._checkout('new_branch', create=True)
419
419
420 # This is required as one cannot do a -ff-only merge in an empty repo.
420 # This is required as one cannot do a -ff-only merge in an empty repo.
421 target_repo._local_pull(source_repo.path, 'new_branch')
421 target_repo._local_pull(source_repo.path, 'new_branch')
422
422
423 target_repo._local_fetch(source_repo.path, 'master')
423 target_repo._local_fetch(source_repo.path, 'master')
424 merge_message = 'Merge message\n\nDescription:...'
424 merge_message = 'Merge message\n\nDescription:...'
425 user_name = 'Albert Einstein'
425 user_name = 'Albert Einstein'
426 user_email = 'albert@einstein.com'
426 user_email = 'albert@einstein.com'
427 target_repo._local_merge(merge_message, user_name, user_email,
427 target_repo._local_merge(merge_message, user_name, user_email,
428 target_repo._last_fetch_heads())
428 target_repo._last_fetch_heads())
429
429
430 target_repo = GitRepository(target_repo.path)
430 target_repo = GitRepository(target_repo.path)
431 assert target_repo.commit_ids[-2] == master_commit
431 assert target_repo.commit_ids[-2] == master_commit
432 last_commit = target_repo.get_commit(target_repo.head)
432 last_commit = target_repo.get_commit(target_repo.head)
433 assert last_commit.message.strip() == merge_message
433 assert last_commit.message.strip() == merge_message
434 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434 assert last_commit.author == '%s <%s>' % (user_name, user_email)
435
435
436 assert not os.path.exists(
436 assert not os.path.exists(
437 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
438
438
439 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
440 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
441 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
442
442
443 target_repo._local_fetch(self.repo.path, 'master')
443 target_repo._local_fetch(self.repo.path, 'master')
444 with pytest.raises(RepositoryError):
444 with pytest.raises(RepositoryError):
445 target_repo._local_merge(
445 target_repo._local_merge(
446 'merge_message', 'user name', 'user@name.com',
446 'merge_message', 'user name', 'user@name.com',
447 target_repo._last_fetch_heads())
447 target_repo._last_fetch_heads())
448
448
449 # Check we are not left in an intermediate merge state
449 # Check we are not left in an intermediate merge state
450 assert not os.path.exists(
450 assert not os.path.exists(
451 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
452
452
453 def test_local_merge_into_empty_repo(self):
453 def test_local_merge_into_empty_repo(self):
454 target_repo = self.get_empty_repo()
454 target_repo = self.get_empty_repo()
455
455
456 # This is required as one cannot do a -ff-only merge in an empty repo.
456 # This is required as one cannot do a -ff-only merge in an empty repo.
457 target_repo._local_fetch(self.repo.path, 'master')
457 target_repo._local_fetch(self.repo.path, 'master')
458 with pytest.raises(RepositoryError):
458 with pytest.raises(RepositoryError):
459 target_repo._local_merge(
459 target_repo._local_merge(
460 'merge_message', 'user name', 'user@name.com',
460 'merge_message', 'user name', 'user@name.com',
461 target_repo._last_fetch_heads())
461 target_repo._last_fetch_heads())
462
462
463 def test_local_merge_in_bare_repo(self):
463 def test_local_merge_in_bare_repo(self):
464 with pytest.raises(RepositoryError):
464 with pytest.raises(RepositoryError):
465 self.repo._local_merge(
465 self.repo._local_merge(
466 'merge_message', 'user name', 'user@name.com', None)
466 'merge_message', 'user name', 'user@name.com', None)
467
467
468 def test_local_push_non_bare(self):
468 def test_local_push_non_bare(self):
469 target_repo = self.get_empty_repo()
469 target_repo = self.get_empty_repo()
470
470
471 pushed_branch = 'pushed_branch'
471 pushed_branch = 'pushed_branch'
472 self.repo._local_push('master', target_repo.path, pushed_branch)
472 self.repo._local_push('master', target_repo.path, pushed_branch)
473 # Fix the HEAD of the target repo, or otherwise GitRepository won't
473 # Fix the HEAD of the target repo, or otherwise GitRepository won't
474 # report any branches.
474 # report any branches.
475 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
476 f.write('ref: refs/heads/%s' % pushed_branch)
476 f.write('ref: refs/heads/%s' % pushed_branch)
477
477
478 target_repo = GitRepository(target_repo.path)
478 target_repo = GitRepository(target_repo.path)
479
479
480 assert (target_repo.branches[pushed_branch] ==
480 assert (target_repo.branches[pushed_branch] ==
481 self.repo.branches['master'])
481 self.repo.branches['master'])
482
482
483 def test_local_push_bare(self):
483 def test_local_push_bare(self):
484 target_repo = self.get_empty_repo(bare=True)
484 target_repo = self.get_empty_repo(bare=True)
485
485
486 pushed_branch = 'pushed_branch'
486 pushed_branch = 'pushed_branch'
487 self.repo._local_push('master', target_repo.path, pushed_branch)
487 self.repo._local_push('master', target_repo.path, pushed_branch)
488 # Fix the HEAD of the target repo, or otherwise GitRepository won't
488 # Fix the HEAD of the target repo, or otherwise GitRepository won't
489 # report any branches.
489 # report any branches.
490 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
491 f.write('ref: refs/heads/%s' % pushed_branch)
491 f.write('ref: refs/heads/%s' % pushed_branch)
492
492
493 target_repo = GitRepository(target_repo.path)
493 target_repo = GitRepository(target_repo.path)
494
494
495 assert (target_repo.branches[pushed_branch] ==
495 assert (target_repo.branches[pushed_branch] ==
496 self.repo.branches['master'])
496 self.repo.branches['master'])
497
497
498 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 def test_local_push_non_bare_target_branch_is_checked_out(self):
499 target_repo = self.get_clone_repo()
499 target_repo = self.get_clone_repo()
500
500
501 pushed_branch = 'pushed_branch'
501 pushed_branch = 'pushed_branch'
502 # Create a new branch in source repo
502 # Create a new branch in source repo
503 new_branch_commit = target_repo.commit_ids[-3]
503 new_branch_commit = target_repo.commit_ids[-3]
504 target_repo._checkout(new_branch_commit)
504 target_repo._checkout(new_branch_commit)
505 target_repo._checkout(pushed_branch, create=True)
505 target_repo._checkout(pushed_branch, create=True)
506
506
507 self.repo._local_push('master', target_repo.path, pushed_branch)
507 self.repo._local_push('master', target_repo.path, pushed_branch)
508
508
509 target_repo = GitRepository(target_repo.path)
509 target_repo = GitRepository(target_repo.path)
510
510
511 assert (target_repo.branches[pushed_branch] ==
511 assert (target_repo.branches[pushed_branch] ==
512 self.repo.branches['master'])
512 self.repo.branches['master'])
513
513
514 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
515 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
516 with pytest.raises(RepositoryError):
516 with pytest.raises(RepositoryError):
517 self.repo._local_push('master', target_repo.path, 'master')
517 self.repo._local_push('master', target_repo.path, 'master')
518
518
519 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
520 target_repo = self.get_empty_repo(bare=True)
520 target_repo = self.get_empty_repo(bare=True)
521
521
522 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
523 self.repo._local_push(
523 self.repo._local_push(
524 'master', target_repo.path, 'master', enable_hooks=True)
524 'master', target_repo.path, 'master', enable_hooks=True)
525 env = run_mock.call_args[1]['extra_env']
525 env = run_mock.call_args[1]['extra_env']
526 assert 'RC_SKIP_HOOKS' not in env
526 assert 'RC_SKIP_HOOKS' not in env
527
527
528 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 def _add_failing_hook(self, repo_path, hook_name, bare=False):
529 path_components = (
529 path_components = (
530 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
531 hook_path = os.path.join(repo_path, *path_components)
531 hook_path = os.path.join(repo_path, *path_components)
532 with open(hook_path, 'w') as f:
532 with open(hook_path, 'w') as f:
533 script_lines = [
533 script_lines = [
534 '#!%s' % sys.executable,
534 '#!%s' % sys.executable,
535 'import os',
535 'import os',
536 'import sys',
536 'import sys',
537 'if os.environ.get("RC_SKIP_HOOKS"):',
537 'if os.environ.get("RC_SKIP_HOOKS"):',
538 ' sys.exit(0)',
538 ' sys.exit(0)',
539 'sys.exit(1)',
539 'sys.exit(1)',
540 ]
540 ]
541 f.write('\n'.join(script_lines))
541 f.write('\n'.join(script_lines))
542 os.chmod(hook_path, 0755)
542 os.chmod(hook_path, 0755)
543
543
544 def test_local_push_does_not_execute_hook(self):
544 def test_local_push_does_not_execute_hook(self):
545 target_repo = self.get_empty_repo()
545 target_repo = self.get_empty_repo()
546
546
547 pushed_branch = 'pushed_branch'
547 pushed_branch = 'pushed_branch'
548 self._add_failing_hook(target_repo.path, 'pre-receive')
548 self._add_failing_hook(target_repo.path, 'pre-receive')
549 self.repo._local_push('master', target_repo.path, pushed_branch)
549 self.repo._local_push('master', target_repo.path, pushed_branch)
550 # Fix the HEAD of the target repo, or otherwise GitRepository won't
550 # Fix the HEAD of the target repo, or otherwise GitRepository won't
551 # report any branches.
551 # report any branches.
552 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
553 f.write('ref: refs/heads/%s' % pushed_branch)
553 f.write('ref: refs/heads/%s' % pushed_branch)
554
554
555 target_repo = GitRepository(target_repo.path)
555 target_repo = GitRepository(target_repo.path)
556
556
557 assert (target_repo.branches[pushed_branch] ==
557 assert (target_repo.branches[pushed_branch] ==
558 self.repo.branches['master'])
558 self.repo.branches['master'])
559
559
560 def test_local_push_executes_hook(self):
560 def test_local_push_executes_hook(self):
561 target_repo = self.get_empty_repo(bare=True)
561 target_repo = self.get_empty_repo(bare=True)
562 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
563 with pytest.raises(RepositoryError):
563 with pytest.raises(RepositoryError):
564 self.repo._local_push(
564 self.repo._local_push(
565 'master', target_repo.path, 'master', enable_hooks=True)
565 'master', target_repo.path, 'master', enable_hooks=True)
566
566
567 def test_maybe_prepare_merge_workspace(self):
567 def test_maybe_prepare_merge_workspace(self):
568 workspace = self.repo._maybe_prepare_merge_workspace(
568 workspace = self.repo._maybe_prepare_merge_workspace(
569 'pr2', Reference('branch', 'master', 'unused'),
569 2, 'pr2', Reference('branch', 'master', 'unused'),
570 Reference('branch', 'master', 'unused'))
570 Reference('branch', 'master', 'unused'))
571
571
572 assert os.path.isdir(workspace)
572 assert os.path.isdir(workspace)
573 workspace_repo = GitRepository(workspace)
573 workspace_repo = GitRepository(workspace)
574 assert workspace_repo.branches == self.repo.branches
574 assert workspace_repo.branches == self.repo.branches
575
575
576 # Calling it a second time should also succeed
576 # Calling it a second time should also succeed
577 workspace = self.repo._maybe_prepare_merge_workspace(
577 workspace = self.repo._maybe_prepare_merge_workspace(
578 'pr2', Reference('branch', 'master', 'unused'),
578 2, 'pr2', Reference('branch', 'master', 'unused'),
579 Reference('branch', 'master', 'unused'))
579 Reference('branch', 'master', 'unused'))
580 assert os.path.isdir(workspace)
580 assert os.path.isdir(workspace)
581
581
582 def test_maybe_prepare_merge_workspace_different_refs(self):
582 def test_maybe_prepare_merge_workspace_different_refs(self):
583 workspace = self.repo._maybe_prepare_merge_workspace(
583 workspace = self.repo._maybe_prepare_merge_workspace(
584 'pr2', Reference('branch', 'master', 'unused'),
584 2, 'pr2', Reference('branch', 'master', 'unused'),
585 Reference('branch', 'develop', 'unused'))
585 Reference('branch', 'develop', 'unused'))
586
586
587 assert os.path.isdir(workspace)
587 assert os.path.isdir(workspace)
588 workspace_repo = GitRepository(workspace)
588 workspace_repo = GitRepository(workspace)
589 assert workspace_repo.branches == self.repo.branches
589 assert workspace_repo.branches == self.repo.branches
590
590
591 # Calling it a second time should also succeed
591 # Calling it a second time should also succeed
592 workspace = self.repo._maybe_prepare_merge_workspace(
592 workspace = self.repo._maybe_prepare_merge_workspace(
593 'pr2', Reference('branch', 'master', 'unused'),
593 2, 'pr2', Reference('branch', 'master', 'unused'),
594 Reference('branch', 'develop', 'unused'))
594 Reference('branch', 'develop', 'unused'))
595 assert os.path.isdir(workspace)
595 assert os.path.isdir(workspace)
596
596
597 def test_cleanup_merge_workspace(self):
597 def test_cleanup_merge_workspace(self):
598 workspace = self.repo._maybe_prepare_merge_workspace(
598 workspace = self.repo._maybe_prepare_merge_workspace(
599 'pr3', Reference('branch', 'master', 'unused'),
599 2, 'pr3', Reference('branch', 'master', 'unused'),
600 Reference('branch', 'master', 'unused'))
600 Reference('branch', 'master', 'unused'))
601 self.repo.cleanup_merge_workspace('pr3')
601 self.repo.cleanup_merge_workspace(2, 'pr3')
602
602
603 assert not os.path.exists(workspace)
603 assert not os.path.exists(workspace)
604
604
605 def test_cleanup_merge_workspace_invalid_workspace_id(self):
605 def test_cleanup_merge_workspace_invalid_workspace_id(self):
606 # No assert: because in case of an inexistent workspace this function
606 # No assert: because in case of an inexistent workspace this function
607 # should still succeed.
607 # should still succeed.
608 self.repo.cleanup_merge_workspace('pr4')
608 self.repo.cleanup_merge_workspace(1, 'pr4')
609
609
610 def test_set_refs(self):
610 def test_set_refs(self):
611 test_ref = 'refs/test-refs/abcde'
611 test_ref = 'refs/test-refs/abcde'
612 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
613
613
614 self.repo.set_refs(test_ref, test_commit_id)
614 self.repo.set_refs(test_ref, test_commit_id)
615 stdout, _ = self.repo.run_git_command(['show-ref'])
615 stdout, _ = self.repo.run_git_command(['show-ref'])
616 assert test_ref in stdout
616 assert test_ref in stdout
617 assert test_commit_id in stdout
617 assert test_commit_id in stdout
618
618
619 def test_remove_ref(self):
619 def test_remove_ref(self):
620 test_ref = 'refs/test-refs/abcde'
620 test_ref = 'refs/test-refs/abcde'
621 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
622 self.repo.set_refs(test_ref, test_commit_id)
622 self.repo.set_refs(test_ref, test_commit_id)
623 stdout, _ = self.repo.run_git_command(['show-ref'])
623 stdout, _ = self.repo.run_git_command(['show-ref'])
624 assert test_ref in stdout
624 assert test_ref in stdout
625 assert test_commit_id in stdout
625 assert test_commit_id in stdout
626
626
627 self.repo.remove_ref(test_ref)
627 self.repo.remove_ref(test_ref)
628 stdout, _ = self.repo.run_git_command(['show-ref'])
628 stdout, _ = self.repo.run_git_command(['show-ref'])
629 assert test_ref not in stdout
629 assert test_ref not in stdout
630 assert test_commit_id not in stdout
630 assert test_commit_id not in stdout
631
631
632
632
633 class TestGitCommit(object):
633 class TestGitCommit(object):
634
634
635 @pytest.fixture(autouse=True)
635 @pytest.fixture(autouse=True)
636 def prepare(self):
636 def prepare(self):
637 self.repo = GitRepository(TEST_GIT_REPO)
637 self.repo = GitRepository(TEST_GIT_REPO)
638
638
639 def test_default_commit(self):
639 def test_default_commit(self):
640 tip = self.repo.get_commit()
640 tip = self.repo.get_commit()
641 assert tip == self.repo.get_commit(None)
641 assert tip == self.repo.get_commit(None)
642 assert tip == self.repo.get_commit('tip')
642 assert tip == self.repo.get_commit('tip')
643
643
644 def test_root_node(self):
644 def test_root_node(self):
645 tip = self.repo.get_commit()
645 tip = self.repo.get_commit()
646 assert tip.root is tip.get_node('')
646 assert tip.root is tip.get_node('')
647
647
648 def test_lazy_fetch(self):
648 def test_lazy_fetch(self):
649 """
649 """
650 Test if commit's nodes expands and are cached as we walk through
650 Test if commit's nodes expands and are cached as we walk through
651 the commit. This test is somewhat hard to write as order of tests
651 the commit. This test is somewhat hard to write as order of tests
652 is a key here. Written by running command after command in a shell.
652 is a key here. Written by running command after command in a shell.
653 """
653 """
654 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
654 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
655 assert commit_id in self.repo.commit_ids
655 assert commit_id in self.repo.commit_ids
656 commit = self.repo.get_commit(commit_id)
656 commit = self.repo.get_commit(commit_id)
657 assert len(commit.nodes) == 0
657 assert len(commit.nodes) == 0
658 root = commit.root
658 root = commit.root
659 assert len(commit.nodes) == 1
659 assert len(commit.nodes) == 1
660 assert len(root.nodes) == 8
660 assert len(root.nodes) == 8
661 # accessing root.nodes updates commit.nodes
661 # accessing root.nodes updates commit.nodes
662 assert len(commit.nodes) == 9
662 assert len(commit.nodes) == 9
663
663
664 docs = root.get_node('docs')
664 docs = root.get_node('docs')
665 # we haven't yet accessed anything new as docs dir was already cached
665 # we haven't yet accessed anything new as docs dir was already cached
666 assert len(commit.nodes) == 9
666 assert len(commit.nodes) == 9
667 assert len(docs.nodes) == 8
667 assert len(docs.nodes) == 8
668 # accessing docs.nodes updates commit.nodes
668 # accessing docs.nodes updates commit.nodes
669 assert len(commit.nodes) == 17
669 assert len(commit.nodes) == 17
670
670
671 assert docs is commit.get_node('docs')
671 assert docs is commit.get_node('docs')
672 assert docs is root.nodes[0]
672 assert docs is root.nodes[0]
673 assert docs is root.dirs[0]
673 assert docs is root.dirs[0]
674 assert docs is commit.get_node('docs')
674 assert docs is commit.get_node('docs')
675
675
676 def test_nodes_with_commit(self):
676 def test_nodes_with_commit(self):
677 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
678 commit = self.repo.get_commit(commit_id)
678 commit = self.repo.get_commit(commit_id)
679 root = commit.root
679 root = commit.root
680 docs = root.get_node('docs')
680 docs = root.get_node('docs')
681 assert docs is commit.get_node('docs')
681 assert docs is commit.get_node('docs')
682 api = docs.get_node('api')
682 api = docs.get_node('api')
683 assert api is commit.get_node('docs/api')
683 assert api is commit.get_node('docs/api')
684 index = api.get_node('index.rst')
684 index = api.get_node('index.rst')
685 assert index is commit.get_node('docs/api/index.rst')
685 assert index is commit.get_node('docs/api/index.rst')
686 assert index is commit.get_node('docs')\
686 assert index is commit.get_node('docs')\
687 .get_node('api')\
687 .get_node('api')\
688 .get_node('index.rst')
688 .get_node('index.rst')
689
689
690 def test_branch_and_tags(self):
690 def test_branch_and_tags(self):
691 """
691 """
692 rev0 = self.repo.commit_ids[0]
692 rev0 = self.repo.commit_ids[0]
693 commit0 = self.repo.get_commit(rev0)
693 commit0 = self.repo.get_commit(rev0)
694 assert commit0.branch == 'master'
694 assert commit0.branch == 'master'
695 assert commit0.tags == []
695 assert commit0.tags == []
696
696
697 rev10 = self.repo.commit_ids[10]
697 rev10 = self.repo.commit_ids[10]
698 commit10 = self.repo.get_commit(rev10)
698 commit10 = self.repo.get_commit(rev10)
699 assert commit10.branch == 'master'
699 assert commit10.branch == 'master'
700 assert commit10.tags == []
700 assert commit10.tags == []
701
701
702 rev44 = self.repo.commit_ids[44]
702 rev44 = self.repo.commit_ids[44]
703 commit44 = self.repo.get_commit(rev44)
703 commit44 = self.repo.get_commit(rev44)
704 assert commit44.branch == 'web-branch'
704 assert commit44.branch == 'web-branch'
705
705
706 tip = self.repo.get_commit('tip')
706 tip = self.repo.get_commit('tip')
707 assert 'tip' in tip.tags
707 assert 'tip' in tip.tags
708 """
708 """
709 # Those tests would fail - branches are now going
709 # Those tests would fail - branches are now going
710 # to be changed at main API in order to support git backend
710 # to be changed at main API in order to support git backend
711 pass
711 pass
712
712
713 def test_file_size(self):
713 def test_file_size(self):
714 to_check = (
714 to_check = (
715 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
716 'vcs/backends/BaseRepository.py', 502),
716 'vcs/backends/BaseRepository.py', 502),
717 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
718 'vcs/backends/hg.py', 854),
718 'vcs/backends/hg.py', 854),
719 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
720 'setup.py', 1068),
720 'setup.py', 1068),
721
721
722 ('d955cd312c17b02143c04fa1099a352b04368118',
722 ('d955cd312c17b02143c04fa1099a352b04368118',
723 'vcs/backends/base.py', 2921),
723 'vcs/backends/base.py', 2921),
724 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
725 'vcs/backends/base.py', 3936),
725 'vcs/backends/base.py', 3936),
726 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
727 'vcs/backends/base.py', 6189),
727 'vcs/backends/base.py', 6189),
728 )
728 )
729 for commit_id, path, size in to_check:
729 for commit_id, path, size in to_check:
730 node = self.repo.get_commit(commit_id).get_node(path)
730 node = self.repo.get_commit(commit_id).get_node(path)
731 assert node.is_file()
731 assert node.is_file()
732 assert node.size == size
732 assert node.size == size
733
733
734 def test_file_history_from_commits(self):
734 def test_file_history_from_commits(self):
735 node = self.repo[10].get_node('setup.py')
735 node = self.repo[10].get_node('setup.py')
736 commit_ids = [commit.raw_id for commit in node.history]
736 commit_ids = [commit.raw_id for commit in node.history]
737 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
738
738
739 node = self.repo[20].get_node('setup.py')
739 node = self.repo[20].get_node('setup.py')
740 node_ids = [commit.raw_id for commit in node.history]
740 node_ids = [commit.raw_id for commit in node.history]
741 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
742 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
743
743
744 # special case we check history from commit that has this particular
744 # special case we check history from commit that has this particular
745 # file changed this means we check if it's included as well
745 # file changed this means we check if it's included as well
746 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
747 .get_node('setup.py')
747 .get_node('setup.py')
748 node_ids = [commit.raw_id for commit in node.history]
748 node_ids = [commit.raw_id for commit in node.history]
749 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
750 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
751
751
752 def test_file_history(self):
752 def test_file_history(self):
753 # we can only check if those commits are present in the history
753 # we can only check if those commits are present in the history
754 # as we cannot update this test every time file is changed
754 # as we cannot update this test every time file is changed
755 files = {
755 files = {
756 'setup.py': [
756 'setup.py': [
757 '54386793436c938cff89326944d4c2702340037d',
757 '54386793436c938cff89326944d4c2702340037d',
758 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
758 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
759 '998ed409c795fec2012b1c0ca054d99888b22090',
759 '998ed409c795fec2012b1c0ca054d99888b22090',
760 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
760 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
761 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
761 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
762 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
762 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
763 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
763 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
764 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
764 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
765 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
765 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
766 ],
766 ],
767 'vcs/nodes.py': [
767 'vcs/nodes.py': [
768 '33fa3223355104431402a888fa77a4e9956feb3e',
768 '33fa3223355104431402a888fa77a4e9956feb3e',
769 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
769 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
770 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
770 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
771 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
771 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
772 'c877b68d18e792a66b7f4c529ea02c8f80801542',
772 'c877b68d18e792a66b7f4c529ea02c8f80801542',
773 '4313566d2e417cb382948f8d9d7c765330356054',
773 '4313566d2e417cb382948f8d9d7c765330356054',
774 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
774 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
775 '54386793436c938cff89326944d4c2702340037d',
775 '54386793436c938cff89326944d4c2702340037d',
776 '54000345d2e78b03a99d561399e8e548de3f3203',
776 '54000345d2e78b03a99d561399e8e548de3f3203',
777 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
777 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
778 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
778 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
779 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
779 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
780 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
780 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
781 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
781 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
782 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
782 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
783 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
783 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
784 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
784 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
785 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
785 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
786 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
786 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
787 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
787 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
788 'f15c21f97864b4f071cddfbf2750ec2e23859414',
788 'f15c21f97864b4f071cddfbf2750ec2e23859414',
789 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
789 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
790 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
790 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
791 '84dec09632a4458f79f50ddbbd155506c460b4f9',
791 '84dec09632a4458f79f50ddbbd155506c460b4f9',
792 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
792 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
793 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
793 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
794 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
794 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
795 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
795 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
796 '6970b057cffe4aab0a792aa634c89f4bebf01441',
796 '6970b057cffe4aab0a792aa634c89f4bebf01441',
797 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
797 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
798 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
798 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
799 ],
799 ],
800 'vcs/backends/git.py': [
800 'vcs/backends/git.py': [
801 '4cf116ad5a457530381135e2f4c453e68a1b0105',
801 '4cf116ad5a457530381135e2f4c453e68a1b0105',
802 '9a751d84d8e9408e736329767387f41b36935153',
802 '9a751d84d8e9408e736329767387f41b36935153',
803 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
803 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
804 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
804 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
805 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
805 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
806 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
806 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
807 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
807 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
808 '54000345d2e78b03a99d561399e8e548de3f3203',
808 '54000345d2e78b03a99d561399e8e548de3f3203',
809 ],
809 ],
810 }
810 }
811 for path, commit_ids in files.items():
811 for path, commit_ids in files.items():
812 node = self.repo.get_commit(commit_ids[0]).get_node(path)
812 node = self.repo.get_commit(commit_ids[0]).get_node(path)
813 node_ids = [commit.raw_id for commit in node.history]
813 node_ids = [commit.raw_id for commit in node.history]
814 assert set(commit_ids).issubset(set(node_ids)), (
814 assert set(commit_ids).issubset(set(node_ids)), (
815 "We assumed that %s is subset of commit_ids for which file %s "
815 "We assumed that %s is subset of commit_ids for which file %s "
816 "has been changed, and history of that node returned: %s"
816 "has been changed, and history of that node returned: %s"
817 % (commit_ids, path, node_ids))
817 % (commit_ids, path, node_ids))
818
818
819 def test_file_annotate(self):
819 def test_file_annotate(self):
820 files = {
820 files = {
821 'vcs/backends/__init__.py': {
821 'vcs/backends/__init__.py': {
822 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
822 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
823 'lines_no': 1,
823 'lines_no': 1,
824 'commits': [
824 'commits': [
825 'c1214f7e79e02fc37156ff215cd71275450cffc3',
825 'c1214f7e79e02fc37156ff215cd71275450cffc3',
826 ],
826 ],
827 },
827 },
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
829 'lines_no': 21,
829 'lines_no': 21,
830 'commits': [
830 'commits': [
831 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
831 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
852 ],
852 ],
853 },
853 },
854 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
854 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
855 'lines_no': 32,
855 'lines_no': 32,
856 'commits': [
856 'commits': [
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
859 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
859 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
862 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 '54000345d2e78b03a99d561399e8e548de3f3203',
864 '54000345d2e78b03a99d561399e8e548de3f3203',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
867 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
867 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
872 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
872 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
873 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
873 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
874 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
874 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
882 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
882 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
883 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
883 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
889 ],
889 ],
890 },
890 },
891 },
891 },
892 }
892 }
893
893
894 for fname, commit_dict in files.items():
894 for fname, commit_dict in files.items():
895 for commit_id, __ in commit_dict.items():
895 for commit_id, __ in commit_dict.items():
896 commit = self.repo.get_commit(commit_id)
896 commit = self.repo.get_commit(commit_id)
897
897
898 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
898 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
899 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
899 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
900 assert l1_1 == l1_2
900 assert l1_1 == l1_2
901 l1 = l1_1
901 l1 = l1_1
902 l2 = files[fname][commit_id]['commits']
902 l2 = files[fname][commit_id]['commits']
903 assert l1 == l2, (
903 assert l1 == l2, (
904 "The lists of commit_ids for %s@commit_id %s"
904 "The lists of commit_ids for %s@commit_id %s"
905 "from annotation list should match each other, "
905 "from annotation list should match each other, "
906 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
907
907
908 def test_files_state(self):
908 def test_files_state(self):
909 """
909 """
910 Tests state of FileNodes.
910 Tests state of FileNodes.
911 """
911 """
912 node = self.repo\
912 node = self.repo\
913 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
914 .get_node('vcs/utils/diffs.py')
914 .get_node('vcs/utils/diffs.py')
915 assert node.state, NodeState.ADDED
915 assert node.state, NodeState.ADDED
916 assert node.added
916 assert node.added
917 assert not node.changed
917 assert not node.changed
918 assert not node.not_changed
918 assert not node.not_changed
919 assert not node.removed
919 assert not node.removed
920
920
921 node = self.repo\
921 node = self.repo\
922 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
923 .get_node('.hgignore')
923 .get_node('.hgignore')
924 assert node.state, NodeState.CHANGED
924 assert node.state, NodeState.CHANGED
925 assert not node.added
925 assert not node.added
926 assert node.changed
926 assert node.changed
927 assert not node.not_changed
927 assert not node.not_changed
928 assert not node.removed
928 assert not node.removed
929
929
930 node = self.repo\
930 node = self.repo\
931 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
932 .get_node('setup.py')
932 .get_node('setup.py')
933 assert node.state, NodeState.NOT_CHANGED
933 assert node.state, NodeState.NOT_CHANGED
934 assert not node.added
934 assert not node.added
935 assert not node.changed
935 assert not node.changed
936 assert node.not_changed
936 assert node.not_changed
937 assert not node.removed
937 assert not node.removed
938
938
939 # If node has REMOVED state then trying to fetch it would raise
939 # If node has REMOVED state then trying to fetch it would raise
940 # CommitError exception
940 # CommitError exception
941 commit = self.repo.get_commit(
941 commit = self.repo.get_commit(
942 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 'fa6600f6848800641328adbf7811fd2372c02ab2')
943 path = 'vcs/backends/BaseRepository.py'
943 path = 'vcs/backends/BaseRepository.py'
944 with pytest.raises(NodeDoesNotExistError):
944 with pytest.raises(NodeDoesNotExistError):
945 commit.get_node(path)
945 commit.get_node(path)
946 # but it would be one of ``removed`` (commit's attribute)
946 # but it would be one of ``removed`` (commit's attribute)
947 assert path in [rf.path for rf in commit.removed]
947 assert path in [rf.path for rf in commit.removed]
948
948
949 commit = self.repo.get_commit(
949 commit = self.repo.get_commit(
950 '54386793436c938cff89326944d4c2702340037d')
950 '54386793436c938cff89326944d4c2702340037d')
951 changed = [
951 changed = [
952 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
953 'vcs/nodes.py']
953 'vcs/nodes.py']
954 assert set(changed) == set([f.path for f in commit.changed])
954 assert set(changed) == set([f.path for f in commit.changed])
955
955
956 def test_unicode_branch_refs(self):
956 def test_unicode_branch_refs(self):
957 unicode_branches = {
957 unicode_branches = {
958 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
959 u'refs/heads/uniçö∂e': 'ürl',
959 u'refs/heads/uniçö∂e': 'ürl',
960 }
960 }
961 with mock.patch(
961 with mock.patch(
962 ("rhodecode.lib.vcs.backends.git.repository"
962 ("rhodecode.lib.vcs.backends.git.repository"
963 ".GitRepository._refs"),
963 ".GitRepository._refs"),
964 unicode_branches):
964 unicode_branches):
965 branches = self.repo.branches
965 branches = self.repo.branches
966
966
967 assert 'unicode' in branches
967 assert 'unicode' in branches
968 assert u'uniçö∂e' in branches
968 assert u'uniçö∂e' in branches
969
969
970 def test_unicode_tag_refs(self):
970 def test_unicode_tag_refs(self):
971 unicode_tags = {
971 unicode_tags = {
972 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
974 }
974 }
975 with mock.patch(
975 with mock.patch(
976 ("rhodecode.lib.vcs.backends.git.repository"
976 ("rhodecode.lib.vcs.backends.git.repository"
977 ".GitRepository._refs"),
977 ".GitRepository._refs"),
978 unicode_tags):
978 unicode_tags):
979 tags = self.repo.tags
979 tags = self.repo.tags
980
980
981 assert 'unicode' in tags
981 assert 'unicode' in tags
982 assert u'uniçö∂e' in tags
982 assert u'uniçö∂e' in tags
983
983
984 def test_commit_message_is_unicode(self):
984 def test_commit_message_is_unicode(self):
985 for commit in self.repo:
985 for commit in self.repo:
986 assert type(commit.message) == unicode
986 assert type(commit.message) == unicode
987
987
988 def test_commit_author_is_unicode(self):
988 def test_commit_author_is_unicode(self):
989 for commit in self.repo:
989 for commit in self.repo:
990 assert type(commit.author) == unicode
990 assert type(commit.author) == unicode
991
991
992 def test_repo_files_content_is_unicode(self):
992 def test_repo_files_content_is_unicode(self):
993 commit = self.repo.get_commit()
993 commit = self.repo.get_commit()
994 for node in commit.get_node('/'):
994 for node in commit.get_node('/'):
995 if node.is_file():
995 if node.is_file():
996 assert type(node.content) == unicode
996 assert type(node.content) == unicode
997
997
998 def test_wrong_path(self):
998 def test_wrong_path(self):
999 # There is 'setup.py' in the root dir but not there:
999 # There is 'setup.py' in the root dir but not there:
1000 path = 'foo/bar/setup.py'
1000 path = 'foo/bar/setup.py'
1001 tip = self.repo.get_commit()
1001 tip = self.repo.get_commit()
1002 with pytest.raises(VCSError):
1002 with pytest.raises(VCSError):
1003 tip.get_node(path)
1003 tip.get_node(path)
1004
1004
1005 @pytest.mark.parametrize("author_email, commit_id", [
1005 @pytest.mark.parametrize("author_email, commit_id", [
1006 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1007 ('lukasz.balcerzak@python-center.pl',
1007 ('lukasz.balcerzak@python-center.pl',
1008 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1009 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1010 ])
1010 ])
1011 def test_author_email(self, author_email, commit_id):
1011 def test_author_email(self, author_email, commit_id):
1012 commit = self.repo.get_commit(commit_id)
1012 commit = self.repo.get_commit(commit_id)
1013 assert author_email == commit.author_email
1013 assert author_email == commit.author_email
1014
1014
1015 @pytest.mark.parametrize("author, commit_id", [
1015 @pytest.mark.parametrize("author, commit_id", [
1016 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1017 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1018 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1019 ])
1019 ])
1020 def test_author_username(self, author, commit_id):
1020 def test_author_username(self, author, commit_id):
1021 commit = self.repo.get_commit(commit_id)
1021 commit = self.repo.get_commit(commit_id)
1022 assert author == commit.author_name
1022 assert author == commit.author_name
1023
1023
1024
1024
1025 class TestLargeFileRepo(object):
1025 class TestLargeFileRepo(object):
1026
1026
1027 def test_large_file(self, backend_git):
1027 def test_large_file(self, backend_git):
1028 conf = make_db_config()
1028 conf = make_db_config()
1029 repo = backend_git.create_test_repo('largefiles', conf)
1029 repo = backend_git.create_test_repo('largefiles', conf)
1030
1030
1031 tip = repo.scm_instance().get_commit()
1031 tip = repo.scm_instance().get_commit()
1032
1032
1033 # extract stored LF node into the origin cache
1033 # extract stored LF node into the origin cache
1034 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1035
1035
1036 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1037 oid_path = os.path.join(lfs_store, oid)
1037 oid_path = os.path.join(lfs_store, oid)
1038 oid_destination = os.path.join(
1038 oid_destination = os.path.join(
1039 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 conf.get('vcs_git_lfs', 'store_location'), oid)
1040 shutil.copy(oid_path, oid_destination)
1040 shutil.copy(oid_path, oid_destination)
1041
1041
1042 node = tip.get_node('1MB.zip')
1042 node = tip.get_node('1MB.zip')
1043
1043
1044 lf_node = node.get_largefile_node()
1044 lf_node = node.get_largefile_node()
1045
1045
1046 assert lf_node.is_largefile() is True
1046 assert lf_node.is_largefile() is True
1047 assert lf_node.size == 1024000
1047 assert lf_node.size == 1024000
1048 assert lf_node.name == '1MB.zip'
1048 assert lf_node.name == '1MB.zip'
1049
1049
1050
1050
1051 @pytest.mark.usefixtures("vcs_repository_support")
1051 @pytest.mark.usefixtures("vcs_repository_support")
1052 class TestGitSpecificWithRepo(BackendTestMixin):
1052 class TestGitSpecificWithRepo(BackendTestMixin):
1053
1053
1054 @classmethod
1054 @classmethod
1055 def _get_commits(cls):
1055 def _get_commits(cls):
1056 return [
1056 return [
1057 {
1057 {
1058 'message': 'Initial',
1058 'message': 'Initial',
1059 'author': 'Joe Doe <joe.doe@example.com>',
1059 'author': 'Joe Doe <joe.doe@example.com>',
1060 'date': datetime.datetime(2010, 1, 1, 20),
1060 'date': datetime.datetime(2010, 1, 1, 20),
1061 'added': [
1061 'added': [
1062 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 FileNode('foobar/static/js/admin/base.js', content='base'),
1063 FileNode(
1063 FileNode(
1064 'foobar/static/admin', content='admin',
1064 'foobar/static/admin', content='admin',
1065 mode=0120000), # this is a link
1065 mode=0120000), # this is a link
1066 FileNode('foo', content='foo'),
1066 FileNode('foo', content='foo'),
1067 ],
1067 ],
1068 },
1068 },
1069 {
1069 {
1070 'message': 'Second',
1070 'message': 'Second',
1071 'author': 'Joe Doe <joe.doe@example.com>',
1071 'author': 'Joe Doe <joe.doe@example.com>',
1072 'date': datetime.datetime(2010, 1, 1, 22),
1072 'date': datetime.datetime(2010, 1, 1, 22),
1073 'added': [
1073 'added': [
1074 FileNode('foo2', content='foo2'),
1074 FileNode('foo2', content='foo2'),
1075 ],
1075 ],
1076 },
1076 },
1077 ]
1077 ]
1078
1078
1079 def test_paths_slow_traversing(self):
1079 def test_paths_slow_traversing(self):
1080 commit = self.repo.get_commit()
1080 commit = self.repo.get_commit()
1081 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 assert commit.get_node('foobar').get_node('static').get_node('js')\
1082 .get_node('admin').get_node('base.js').content == 'base'
1082 .get_node('admin').get_node('base.js').content == 'base'
1083
1083
1084 def test_paths_fast_traversing(self):
1084 def test_paths_fast_traversing(self):
1085 commit = self.repo.get_commit()
1085 commit = self.repo.get_commit()
1086 assert (
1086 assert (
1087 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 commit.get_node('foobar/static/js/admin/base.js').content ==
1088 'base')
1088 'base')
1089
1089
1090 def test_get_diff_runs_git_command_with_hashes(self):
1090 def test_get_diff_runs_git_command_with_hashes(self):
1091 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1092 self.repo.get_diff(self.repo[0], self.repo[1])
1092 self.repo.get_diff(self.repo[0], self.repo[1])
1093 self.repo.run_git_command.assert_called_once_with(
1093 self.repo.run_git_command.assert_called_once_with(
1094 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1095 '--abbrev=40', self.repo._get_commit_id(0),
1095 '--abbrev=40', self.repo._get_commit_id(0),
1096 self.repo._get_commit_id(1)])
1096 self.repo._get_commit_id(1)])
1097
1097
1098 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 def test_get_diff_runs_git_command_with_str_hashes(self):
1099 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1100 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1101 self.repo.run_git_command.assert_called_once_with(
1101 self.repo.run_git_command.assert_called_once_with(
1102 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1103 '--abbrev=40', self.repo._get_commit_id(1)])
1103 '--abbrev=40', self.repo._get_commit_id(1)])
1104
1104
1105 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1106 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1107 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1108 self.repo.run_git_command.assert_called_once_with(
1108 self.repo.run_git_command.assert_called_once_with(
1109 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1110 '--abbrev=40', self.repo._get_commit_id(0),
1110 '--abbrev=40', self.repo._get_commit_id(0),
1111 self.repo._get_commit_id(1), '--', 'foo'])
1111 self.repo._get_commit_id(1), '--', 'foo'])
1112
1112
1113
1113
1114 @pytest.mark.usefixtures("vcs_repository_support")
1114 @pytest.mark.usefixtures("vcs_repository_support")
1115 class TestGitRegression(BackendTestMixin):
1115 class TestGitRegression(BackendTestMixin):
1116
1116
1117 @classmethod
1117 @classmethod
1118 def _get_commits(cls):
1118 def _get_commits(cls):
1119 return [
1119 return [
1120 {
1120 {
1121 'message': 'Initial',
1121 'message': 'Initial',
1122 'author': 'Joe Doe <joe.doe@example.com>',
1122 'author': 'Joe Doe <joe.doe@example.com>',
1123 'date': datetime.datetime(2010, 1, 1, 20),
1123 'date': datetime.datetime(2010, 1, 1, 20),
1124 'added': [
1124 'added': [
1125 FileNode('bot/__init__.py', content='base'),
1125 FileNode('bot/__init__.py', content='base'),
1126 FileNode('bot/templates/404.html', content='base'),
1126 FileNode('bot/templates/404.html', content='base'),
1127 FileNode('bot/templates/500.html', content='base'),
1127 FileNode('bot/templates/500.html', content='base'),
1128 ],
1128 ],
1129 },
1129 },
1130 {
1130 {
1131 'message': 'Second',
1131 'message': 'Second',
1132 'author': 'Joe Doe <joe.doe@example.com>',
1132 'author': 'Joe Doe <joe.doe@example.com>',
1133 'date': datetime.datetime(2010, 1, 1, 22),
1133 'date': datetime.datetime(2010, 1, 1, 22),
1134 'added': [
1134 'added': [
1135 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 FileNode('bot/build/migrations/1.py', content='foo2'),
1136 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 FileNode('bot/build/migrations/2.py', content='foo2'),
1137 FileNode(
1137 FileNode(
1138 'bot/build/static/templates/f.html', content='foo2'),
1138 'bot/build/static/templates/f.html', content='foo2'),
1139 FileNode(
1139 FileNode(
1140 'bot/build/static/templates/f1.html', content='foo2'),
1140 'bot/build/static/templates/f1.html', content='foo2'),
1141 FileNode('bot/build/templates/err.html', content='foo2'),
1141 FileNode('bot/build/templates/err.html', content='foo2'),
1142 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 FileNode('bot/build/templates/err2.html', content='foo2'),
1143 ],
1143 ],
1144 },
1144 },
1145 ]
1145 ]
1146
1146
1147 @pytest.mark.parametrize("path, expected_paths", [
1147 @pytest.mark.parametrize("path, expected_paths", [
1148 ('bot', [
1148 ('bot', [
1149 'bot/build',
1149 'bot/build',
1150 'bot/templates',
1150 'bot/templates',
1151 'bot/__init__.py']),
1151 'bot/__init__.py']),
1152 ('bot/build', [
1152 ('bot/build', [
1153 'bot/build/migrations',
1153 'bot/build/migrations',
1154 'bot/build/static',
1154 'bot/build/static',
1155 'bot/build/templates']),
1155 'bot/build/templates']),
1156 ('bot/build/static', [
1156 ('bot/build/static', [
1157 'bot/build/static/templates']),
1157 'bot/build/static/templates']),
1158 ('bot/build/static/templates', [
1158 ('bot/build/static/templates', [
1159 'bot/build/static/templates/f.html',
1159 'bot/build/static/templates/f.html',
1160 'bot/build/static/templates/f1.html']),
1160 'bot/build/static/templates/f1.html']),
1161 ('bot/build/templates', [
1161 ('bot/build/templates', [
1162 'bot/build/templates/err.html',
1162 'bot/build/templates/err.html',
1163 'bot/build/templates/err2.html']),
1163 'bot/build/templates/err2.html']),
1164 ('bot/templates/', [
1164 ('bot/templates/', [
1165 'bot/templates/404.html',
1165 'bot/templates/404.html',
1166 'bot/templates/500.html']),
1166 'bot/templates/500.html']),
1167 ])
1167 ])
1168 def test_similar_paths(self, path, expected_paths):
1168 def test_similar_paths(self, path, expected_paths):
1169 commit = self.repo.get_commit()
1169 commit = self.repo.get_commit()
1170 paths = [n.path for n in commit.get_nodes(path)]
1170 paths = [n.path for n in commit.get_nodes(path)]
1171 assert paths == expected_paths
1171 assert paths == expected_paths
1172
1172
1173
1173
1174 class TestDiscoverGitVersion:
1174 class TestDiscoverGitVersion:
1175
1175
1176 def test_returns_git_version(self, baseapp):
1176 def test_returns_git_version(self, baseapp):
1177 version = discover_git_version()
1177 version = discover_git_version()
1178 assert version
1178 assert version
1179
1179
1180 def test_returns_empty_string_without_vcsserver(self):
1180 def test_returns_empty_string_without_vcsserver(self):
1181 mock_connection = mock.Mock()
1181 mock_connection = mock.Mock()
1182 mock_connection.discover_git_version = mock.Mock(
1182 mock_connection.discover_git_version = mock.Mock(
1183 side_effect=Exception)
1183 side_effect=Exception)
1184 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1185 version = discover_git_version()
1185 version = discover_git_version()
1186 assert version == ''
1186 assert version == ''
1187
1187
1188
1188
1189 class TestGetSubmoduleUrl(object):
1189 class TestGetSubmoduleUrl(object):
1190 def test_submodules_file_found(self):
1190 def test_submodules_file_found(self):
1191 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1192 node = mock.Mock()
1192 node = mock.Mock()
1193 with mock.patch.object(
1193 with mock.patch.object(
1194 commit, 'get_node', return_value=node) as get_node_mock:
1194 commit, 'get_node', return_value=node) as get_node_mock:
1195 node.content = (
1195 node.content = (
1196 '[submodule "subrepo1"]\n'
1196 '[submodule "subrepo1"]\n'
1197 '\tpath = subrepo1\n'
1197 '\tpath = subrepo1\n'
1198 '\turl = https://code.rhodecode.com/dulwich\n'
1198 '\turl = https://code.rhodecode.com/dulwich\n'
1199 )
1199 )
1200 result = commit._get_submodule_url('subrepo1')
1200 result = commit._get_submodule_url('subrepo1')
1201 get_node_mock.assert_called_once_with('.gitmodules')
1201 get_node_mock.assert_called_once_with('.gitmodules')
1202 assert result == 'https://code.rhodecode.com/dulwich'
1202 assert result == 'https://code.rhodecode.com/dulwich'
1203
1203
1204 def test_complex_submodule_path(self):
1204 def test_complex_submodule_path(self):
1205 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1206 node = mock.Mock()
1206 node = mock.Mock()
1207 with mock.patch.object(
1207 with mock.patch.object(
1208 commit, 'get_node', return_value=node) as get_node_mock:
1208 commit, 'get_node', return_value=node) as get_node_mock:
1209 node.content = (
1209 node.content = (
1210 '[submodule "complex/subrepo/path"]\n'
1210 '[submodule "complex/subrepo/path"]\n'
1211 '\tpath = complex/subrepo/path\n'
1211 '\tpath = complex/subrepo/path\n'
1212 '\turl = https://code.rhodecode.com/dulwich\n'
1212 '\turl = https://code.rhodecode.com/dulwich\n'
1213 )
1213 )
1214 result = commit._get_submodule_url('complex/subrepo/path')
1214 result = commit._get_submodule_url('complex/subrepo/path')
1215 get_node_mock.assert_called_once_with('.gitmodules')
1215 get_node_mock.assert_called_once_with('.gitmodules')
1216 assert result == 'https://code.rhodecode.com/dulwich'
1216 assert result == 'https://code.rhodecode.com/dulwich'
1217
1217
1218 def test_submodules_file_not_found(self):
1218 def test_submodules_file_not_found(self):
1219 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1220 with mock.patch.object(
1220 with mock.patch.object(
1221 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 commit, 'get_node', side_effect=NodeDoesNotExistError):
1222 result = commit._get_submodule_url('complex/subrepo/path')
1222 result = commit._get_submodule_url('complex/subrepo/path')
1223 assert result is None
1223 assert result is None
1224
1224
1225 def test_path_not_found(self):
1225 def test_path_not_found(self):
1226 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1227 node = mock.Mock()
1227 node = mock.Mock()
1228 with mock.patch.object(
1228 with mock.patch.object(
1229 commit, 'get_node', return_value=node) as get_node_mock:
1229 commit, 'get_node', return_value=node) as get_node_mock:
1230 node.content = (
1230 node.content = (
1231 '[submodule "subrepo1"]\n'
1231 '[submodule "subrepo1"]\n'
1232 '\tpath = subrepo1\n'
1232 '\tpath = subrepo1\n'
1233 '\turl = https://code.rhodecode.com/dulwich\n'
1233 '\turl = https://code.rhodecode.com/dulwich\n'
1234 )
1234 )
1235 result = commit._get_submodule_url('subrepo2')
1235 result = commit._get_submodule_url('subrepo2')
1236 get_node_mock.assert_called_once_with('.gitmodules')
1236 get_node_mock.assert_called_once_with('.gitmodules')
1237 assert result is None
1237 assert result is None
1238
1238
1239 def test_returns_cached_values(self):
1239 def test_returns_cached_values(self):
1240 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1241 node = mock.Mock()
1241 node = mock.Mock()
1242 with mock.patch.object(
1242 with mock.patch.object(
1243 commit, 'get_node', return_value=node) as get_node_mock:
1243 commit, 'get_node', return_value=node) as get_node_mock:
1244 node.content = (
1244 node.content = (
1245 '[submodule "subrepo1"]\n'
1245 '[submodule "subrepo1"]\n'
1246 '\tpath = subrepo1\n'
1246 '\tpath = subrepo1\n'
1247 '\turl = https://code.rhodecode.com/dulwich\n'
1247 '\turl = https://code.rhodecode.com/dulwich\n'
1248 )
1248 )
1249 for _ in range(3):
1249 for _ in range(3):
1250 commit._get_submodule_url('subrepo1')
1250 commit._get_submodule_url('subrepo1')
1251 get_node_mock.assert_called_once_with('.gitmodules')
1251 get_node_mock.assert_called_once_with('.gitmodules')
1252
1252
1253 def test_get_node_returns_a_link(self):
1253 def test_get_node_returns_a_link(self):
1254 repository = mock.Mock()
1254 repository = mock.Mock()
1255 repository.alias = 'git'
1255 repository.alias = 'git'
1256 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1257 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 submodule_url = 'https://code.rhodecode.com/dulwich'
1258 get_id_patch = mock.patch.object(
1258 get_id_patch = mock.patch.object(
1259 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 commit, '_get_id_for_path', return_value=(1, 'link'))
1260 get_submodule_patch = mock.patch.object(
1260 get_submodule_patch = mock.patch.object(
1261 commit, '_get_submodule_url', return_value=submodule_url)
1261 commit, '_get_submodule_url', return_value=submodule_url)
1262
1262
1263 with get_id_patch, get_submodule_patch as submodule_mock:
1263 with get_id_patch, get_submodule_patch as submodule_mock:
1264 node = commit.get_node('/abcde')
1264 node = commit.get_node('/abcde')
1265
1265
1266 submodule_mock.assert_called_once_with('/abcde')
1266 submodule_mock.assert_called_once_with('/abcde')
1267 assert type(node) == SubModuleNode
1267 assert type(node) == SubModuleNode
1268 assert node.url == submodule_url
1268 assert node.url == submodule_url
1269
1269
1270 def test_get_nodes_returns_links(self):
1270 def test_get_nodes_returns_links(self):
1271 repository = mock.MagicMock()
1271 repository = mock.MagicMock()
1272 repository.alias = 'git'
1272 repository.alias = 'git'
1273 repository._remote.tree_items.return_value = [
1273 repository._remote.tree_items.return_value = [
1274 ('subrepo', 'stat', 1, 'link')
1274 ('subrepo', 'stat', 1, 'link')
1275 ]
1275 ]
1276 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1277 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 submodule_url = 'https://code.rhodecode.com/dulwich'
1278 get_id_patch = mock.patch.object(
1278 get_id_patch = mock.patch.object(
1279 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 commit, '_get_id_for_path', return_value=(1, 'tree'))
1280 get_submodule_patch = mock.patch.object(
1280 get_submodule_patch = mock.patch.object(
1281 commit, '_get_submodule_url', return_value=submodule_url)
1281 commit, '_get_submodule_url', return_value=submodule_url)
1282
1282
1283 with get_id_patch, get_submodule_patch as submodule_mock:
1283 with get_id_patch, get_submodule_patch as submodule_mock:
1284 nodes = commit.get_nodes('/abcde')
1284 nodes = commit.get_nodes('/abcde')
1285
1285
1286 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 submodule_mock.assert_called_once_with('/abcde/subrepo')
1287 assert len(nodes) == 1
1287 assert len(nodes) == 1
1288 assert type(nodes[0]) == SubModuleNode
1288 assert type(nodes[0]) == SubModuleNode
1289 assert nodes[0].url == submodule_url
1289 assert nodes[0].url == submodule_url
@@ -1,1183 +1,1186 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils import make_db_config
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
35
36
36
37 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
38
38
39
39
40 def repo_path_generator():
40 def repo_path_generator():
41 """
41 """
42 Return a different path to be used for cloning repos.
42 Return a different path to be used for cloning repos.
43 """
43 """
44 i = 0
44 i = 0
45 while True:
45 while True:
46 i += 1
46 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48
48
49
50 REPO_PATH_GENERATOR = repo_path_generator()
49 REPO_PATH_GENERATOR = repo_path_generator()
51
50
52
51
53 @pytest.fixture(scope='class', autouse=True)
52 @pytest.fixture(scope='class', autouse=True)
54 def repo(request, baseapp):
53 def repo(request, baseapp):
55 repo = MercurialRepository(TEST_HG_REPO)
54 repo = MercurialRepository(TEST_HG_REPO)
56 if request.cls:
55 if request.cls:
57 request.cls.repo = repo
56 request.cls.repo = repo
58 return repo
57 return repo
59
58
60
59
61 class TestMercurialRepository:
60 class TestMercurialRepository:
62
61
63 # pylint: disable=protected-access
62 # pylint: disable=protected-access
64
63
65 def get_clone_repo(self):
64 def get_clone_repo(self):
66 """
65 """
67 Return a clone of the base repo.
66 Return a clone of the base repo.
68 """
67 """
69 clone_path = next(REPO_PATH_GENERATOR)
68 clone_path = next(REPO_PATH_GENERATOR)
70 repo_clone = MercurialRepository(
69 repo_clone = MercurialRepository(
71 clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
72
71
73 return repo_clone
72 return repo_clone
74
73
75 def get_empty_repo(self):
74 def get_empty_repo(self):
76 """
75 """
77 Return an empty repo.
76 Return an empty repo.
78 """
77 """
79 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
80
79
81 def test_wrong_repo_path(self):
80 def test_wrong_repo_path(self):
82 wrong_repo_path = '/tmp/errorrepo_hg'
81 wrong_repo_path = '/tmp/errorrepo_hg'
83 with pytest.raises(RepositoryError):
82 with pytest.raises(RepositoryError):
84 MercurialRepository(wrong_repo_path)
83 MercurialRepository(wrong_repo_path)
85
84
86 def test_unicode_path_repo(self):
85 def test_unicode_path_repo(self):
87 with pytest.raises(VCSError):
86 with pytest.raises(VCSError):
88 MercurialRepository(u'iShouldFail')
87 MercurialRepository(u'iShouldFail')
89
88
90 def test_unicode_commit_id(self):
89 def test_unicode_commit_id(self):
91 with pytest.raises(CommitDoesNotExistError):
90 with pytest.raises(CommitDoesNotExistError):
92 self.repo.get_commit(u'unicode-commit-id')
91 self.repo.get_commit(u'unicode-commit-id')
93 with pytest.raises(CommitDoesNotExistError):
92 with pytest.raises(CommitDoesNotExistError):
94 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
93 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
95
94
96 def test_unicode_bookmark(self):
95 def test_unicode_bookmark(self):
97 self.repo.bookmark(u'unicode-bookmark')
96 self.repo.bookmark(u'unicode-bookmark')
98 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
97 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
99
98
100 def test_unicode_branch(self):
99 def test_unicode_branch(self):
101 with pytest.raises(KeyError):
100 with pytest.raises(KeyError):
102 self.repo.branches[u'unicode-branch']
101 self.repo.branches[u'unicode-branch']
103 with pytest.raises(KeyError):
102 with pytest.raises(KeyError):
104 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
103 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
105
104
106 def test_repo_clone(self):
105 def test_repo_clone(self):
107 if os.path.exists(TEST_HG_REPO_CLONE):
106 if os.path.exists(TEST_HG_REPO_CLONE):
108 self.fail(
107 self.fail(
109 'Cannot test mercurial clone repo as location %s already '
108 'Cannot test mercurial clone repo as location %s already '
110 'exists. You should manually remove it first.'
109 'exists. You should manually remove it first.'
111 % TEST_HG_REPO_CLONE)
110 % TEST_HG_REPO_CLONE)
112
111
113 repo = MercurialRepository(TEST_HG_REPO)
112 repo = MercurialRepository(TEST_HG_REPO)
114 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
115 src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
116 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
117 # Checking hashes of commits should be enough
116 # Checking hashes of commits should be enough
118 for commit in repo.get_commits():
117 for commit in repo.get_commits():
119 raw_id = commit.raw_id
118 raw_id = commit.raw_id
120 assert raw_id == repo_clone.get_commit(raw_id).raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
121
120
122 def test_repo_clone_with_update(self):
121 def test_repo_clone_with_update(self):
123 repo = MercurialRepository(TEST_HG_REPO)
122 repo = MercurialRepository(TEST_HG_REPO)
124 repo_clone = MercurialRepository(
123 repo_clone = MercurialRepository(
125 TEST_HG_REPO_CLONE + '_w_update',
124 TEST_HG_REPO_CLONE + '_w_update',
126 src_url=TEST_HG_REPO, update_after_clone=True)
125 src_url=TEST_HG_REPO, update_after_clone=True)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128
127
129 # check if current workdir was updated
128 # check if current workdir was updated
130 assert os.path.isfile(
129 assert os.path.isfile(
131 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
132
131
133 def test_repo_clone_without_update(self):
132 def test_repo_clone_without_update(self):
134 repo = MercurialRepository(TEST_HG_REPO)
133 repo = MercurialRepository(TEST_HG_REPO)
135 repo_clone = MercurialRepository(
134 repo_clone = MercurialRepository(
136 TEST_HG_REPO_CLONE + '_wo_update',
135 TEST_HG_REPO_CLONE + '_wo_update',
137 src_url=TEST_HG_REPO, update_after_clone=False)
136 src_url=TEST_HG_REPO, update_after_clone=False)
138 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
139 assert not os.path.isfile(
138 assert not os.path.isfile(
140 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
141
140
142 def test_commit_ids(self):
141 def test_commit_ids(self):
143 # there are 21 commits at bitbucket now
142 # there are 21 commits at bitbucket now
144 # so we can assume they would be available from now on
143 # so we can assume they would be available from now on
145 subset = set([
144 subset = set([
146 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
147 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
148 '6cba7170863a2411822803fa77a0a264f1310b35',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
149 '56349e29c2af3ac913b28bde9a2c6154436e615b',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
150 '2dda4e345facb0ccff1a191052dd1606dba6781d',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
151 '6fff84722075f1607a30f436523403845f84cd9e',
150 '6fff84722075f1607a30f436523403845f84cd9e',
152 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
153 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
154 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
155 'be90031137367893f1c406e0a8683010fd115b79',
154 'be90031137367893f1c406e0a8683010fd115b79',
156 'db8e58be770518cbb2b1cdfa69146e47cd481481',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
157 '84478366594b424af694a6c784cb991a16b87c21',
156 '84478366594b424af694a6c784cb991a16b87c21',
158 '17f8e105dddb9f339600389c6dc7175d395a535c',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
159 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
160 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
161 '786facd2c61deb9cf91e9534735124fb8fc11842',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
162 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
163 'aa6a0de05b7612707db567078e130a6cd114a9a7',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
164 'eada5a770da98ab0dd7325e29d00e0714f228d09'
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
165 ])
164 ])
166 assert subset.issubset(set(self.repo.commit_ids))
165 assert subset.issubset(set(self.repo.commit_ids))
167
166
168 # check if we have the proper order of commits
167 # check if we have the proper order of commits
169 org = [
168 org = [
170 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
171 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
172 '6cba7170863a2411822803fa77a0a264f1310b35',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
173 '56349e29c2af3ac913b28bde9a2c6154436e615b',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
174 '2dda4e345facb0ccff1a191052dd1606dba6781d',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
175 '6fff84722075f1607a30f436523403845f84cd9e',
174 '6fff84722075f1607a30f436523403845f84cd9e',
176 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
177 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
178 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
179 'be90031137367893f1c406e0a8683010fd115b79',
178 'be90031137367893f1c406e0a8683010fd115b79',
180 'db8e58be770518cbb2b1cdfa69146e47cd481481',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
181 '84478366594b424af694a6c784cb991a16b87c21',
180 '84478366594b424af694a6c784cb991a16b87c21',
182 '17f8e105dddb9f339600389c6dc7175d395a535c',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
183 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
184 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
185 '786facd2c61deb9cf91e9534735124fb8fc11842',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
186 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
187 'aa6a0de05b7612707db567078e130a6cd114a9a7',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
188 'eada5a770da98ab0dd7325e29d00e0714f228d09',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
189 '2c1885c735575ca478bf9e17b0029dca68824458',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
190 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
191 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
192 '4fb8326d78e5120da2c7468dcf7098997be385da',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
193 '62b4a097164940bd66030c4db51687f3ec035eed',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
194 '536c1a19428381cfea92ac44985304f6a8049569',
193 '536c1a19428381cfea92ac44985304f6a8049569',
195 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
196 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
197 'f8940bcb890a98c4702319fbe36db75ea309b475',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
198 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
199 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
200 'ee87846a61c12153b51543bf860e1026c6d3dcba',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
201 ]
200 ]
202 assert org == self.repo.commit_ids[:31]
201 assert org == self.repo.commit_ids[:31]
203
202
204 def test_iter_slice(self):
203 def test_iter_slice(self):
205 sliced = list(self.repo[:10])
204 sliced = list(self.repo[:10])
206 itered = list(self.repo)[:10]
205 itered = list(self.repo)[:10]
207 assert sliced == itered
206 assert sliced == itered
208
207
209 def test_slicing(self):
208 def test_slicing(self):
210 # 4 1 5 10 95
209 # 4 1 5 10 95
211 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
212 (10, 20, 10), (5, 100, 95)]:
211 (10, 20, 10), (5, 100, 95)]:
213 indexes = list(self.repo[sfrom:sto])
212 indexes = list(self.repo[sfrom:sto])
214 assert len(indexes) == size
213 assert len(indexes) == size
215 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
216 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
217
216
218 def test_branches(self):
217 def test_branches(self):
219 # TODO: Need more tests here
218 # TODO: Need more tests here
220
219
221 # active branches
220 # active branches
222 assert 'default' in self.repo.branches
221 assert 'default' in self.repo.branches
223 assert 'stable' in self.repo.branches
222 assert 'stable' in self.repo.branches
224
223
225 # closed
224 # closed
226 assert 'git' in self.repo._get_branches(closed=True)
225 assert 'git' in self.repo._get_branches(closed=True)
227 assert 'web' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
228
227
229 for name, id in self.repo.branches.items():
228 for name, id in self.repo.branches.items():
230 assert isinstance(self.repo.get_commit(id), MercurialCommit)
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
231
230
232 def test_tip_in_tags(self):
231 def test_tip_in_tags(self):
233 # tip is always a tag
232 # tip is always a tag
234 assert 'tip' in self.repo.tags
233 assert 'tip' in self.repo.tags
235
234
236 def test_tip_commit_in_tags(self):
235 def test_tip_commit_in_tags(self):
237 tip = self.repo.get_commit()
236 tip = self.repo.get_commit()
238 assert self.repo.tags['tip'] == tip.raw_id
237 assert self.repo.tags['tip'] == tip.raw_id
239
238
240 def test_initial_commit(self):
239 def test_initial_commit(self):
241 init_commit = self.repo.get_commit(commit_idx=0)
240 init_commit = self.repo.get_commit(commit_idx=0)
242 init_author = init_commit.author
241 init_author = init_commit.author
243
242
244 assert init_commit.message == 'initial import'
243 assert init_commit.message == 'initial import'
245 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
246 assert init_author == init_commit.committer
245 assert init_author == init_commit.committer
247 assert sorted(init_commit._file_paths) == sorted([
246 assert sorted(init_commit._file_paths) == sorted([
248 'vcs/__init__.py',
247 'vcs/__init__.py',
249 'vcs/backends/BaseRepository.py',
248 'vcs/backends/BaseRepository.py',
250 'vcs/backends/__init__.py',
249 'vcs/backends/__init__.py',
251 ])
250 ])
252 assert sorted(init_commit._dir_paths) == sorted(
251 assert sorted(init_commit._dir_paths) == sorted(
253 ['', 'vcs', 'vcs/backends'])
252 ['', 'vcs', 'vcs/backends'])
254
253
255 assert init_commit._dir_paths + init_commit._file_paths == \
254 assert init_commit._dir_paths + init_commit._file_paths == \
256 init_commit._paths
255 init_commit._paths
257
256
258 with pytest.raises(NodeDoesNotExistError):
257 with pytest.raises(NodeDoesNotExistError):
259 init_commit.get_node(path='foobar')
258 init_commit.get_node(path='foobar')
260
259
261 node = init_commit.get_node('vcs/')
260 node = init_commit.get_node('vcs/')
262 assert hasattr(node, 'kind')
261 assert hasattr(node, 'kind')
263 assert node.kind == NodeKind.DIR
262 assert node.kind == NodeKind.DIR
264
263
265 node = init_commit.get_node('vcs')
264 node = init_commit.get_node('vcs')
266 assert hasattr(node, 'kind')
265 assert hasattr(node, 'kind')
267 assert node.kind == NodeKind.DIR
266 assert node.kind == NodeKind.DIR
268
267
269 node = init_commit.get_node('vcs/__init__.py')
268 node = init_commit.get_node('vcs/__init__.py')
270 assert hasattr(node, 'kind')
269 assert hasattr(node, 'kind')
271 assert node.kind == NodeKind.FILE
270 assert node.kind == NodeKind.FILE
272
271
273 def test_not_existing_commit(self):
272 def test_not_existing_commit(self):
274 # rawid
273 # rawid
275 with pytest.raises(RepositoryError):
274 with pytest.raises(RepositoryError):
276 self.repo.get_commit('abcd' * 10)
275 self.repo.get_commit('abcd' * 10)
277 # shortid
276 # shortid
278 with pytest.raises(RepositoryError):
277 with pytest.raises(RepositoryError):
279 self.repo.get_commit('erro' * 4)
278 self.repo.get_commit('erro' * 4)
280 # numeric
279 # numeric
281 with pytest.raises(RepositoryError):
280 with pytest.raises(RepositoryError):
282 self.repo.get_commit(commit_idx=self.repo.count() + 1)
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
283
282
284 # Small chance we ever get to this one
283 # Small chance we ever get to this one
285 idx = pow(2, 30)
284 idx = pow(2, 30)
286 with pytest.raises(RepositoryError):
285 with pytest.raises(RepositoryError):
287 self.repo.get_commit(commit_idx=idx)
286 self.repo.get_commit(commit_idx=idx)
288
287
289 def test_commit10(self):
288 def test_commit10(self):
290 commit10 = self.repo.get_commit(commit_idx=10)
289 commit10 = self.repo.get_commit(commit_idx=10)
291 README = """===
290 README = """===
292 VCS
291 VCS
293 ===
292 ===
294
293
295 Various Version Control System management abstraction layer for Python.
294 Various Version Control System management abstraction layer for Python.
296
295
297 Introduction
296 Introduction
298 ------------
297 ------------
299
298
300 TODO: To be written...
299 TODO: To be written...
301
300
302 """
301 """
303 node = commit10.get_node('README.rst')
302 node = commit10.get_node('README.rst')
304 assert node.kind == NodeKind.FILE
303 assert node.kind == NodeKind.FILE
305 assert node.content == README
304 assert node.content == README
306
305
307 def test_local_clone(self):
306 def test_local_clone(self):
308 clone_path = next(REPO_PATH_GENERATOR)
307 clone_path = next(REPO_PATH_GENERATOR)
309 self.repo._local_clone(clone_path)
308 self.repo._local_clone(clone_path)
310 repo_clone = MercurialRepository(clone_path)
309 repo_clone = MercurialRepository(clone_path)
311
310
312 assert self.repo.commit_ids == repo_clone.commit_ids
311 assert self.repo.commit_ids == repo_clone.commit_ids
313
312
314 def test_local_clone_fails_if_target_exists(self):
313 def test_local_clone_fails_if_target_exists(self):
315 with pytest.raises(RepositoryError):
314 with pytest.raises(RepositoryError):
316 self.repo._local_clone(self.repo.path)
315 self.repo._local_clone(self.repo.path)
317
316
318 def test_update(self):
317 def test_update(self):
319 repo_clone = self.get_clone_repo()
318 repo_clone = self.get_clone_repo()
320 branches = repo_clone.branches
319 branches = repo_clone.branches
321
320
322 repo_clone._update('default')
321 repo_clone._update('default')
323 assert branches['default'] == repo_clone._identify()
322 assert branches['default'] == repo_clone._identify()
324 repo_clone._update('stable')
323 repo_clone._update('stable')
325 assert branches['stable'] == repo_clone._identify()
324 assert branches['stable'] == repo_clone._identify()
326
325
327 def test_local_pull_branch(self):
326 def test_local_pull_branch(self):
328 target_repo = self.get_empty_repo()
327 target_repo = self.get_empty_repo()
329 source_repo = self.get_clone_repo()
328 source_repo = self.get_clone_repo()
330
329
331 default = Reference(
330 default = Reference(
332 'branch', 'default', source_repo.branches['default'])
331 'branch', 'default', source_repo.branches['default'])
333 target_repo._local_pull(source_repo.path, default)
332 target_repo._local_pull(source_repo.path, default)
334 target_repo = MercurialRepository(target_repo.path)
333 target_repo = MercurialRepository(target_repo.path)
335 assert (target_repo.branches['default'] ==
334 assert (target_repo.branches['default'] ==
336 source_repo.branches['default'])
335 source_repo.branches['default'])
337
336
338 stable = Reference('branch', 'stable', source_repo.branches['stable'])
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
339 target_repo._local_pull(source_repo.path, stable)
338 target_repo._local_pull(source_repo.path, stable)
340 target_repo = MercurialRepository(target_repo.path)
339 target_repo = MercurialRepository(target_repo.path)
341 assert target_repo.branches['stable'] == source_repo.branches['stable']
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
342
341
343 def test_local_pull_bookmark(self):
342 def test_local_pull_bookmark(self):
344 target_repo = self.get_empty_repo()
343 target_repo = self.get_empty_repo()
345 source_repo = self.get_clone_repo()
344 source_repo = self.get_clone_repo()
346
345
347 commits = list(source_repo.get_commits(branch_name='default'))
346 commits = list(source_repo.get_commits(branch_name='default'))
348 foo1_id = commits[-5].raw_id
347 foo1_id = commits[-5].raw_id
349 foo1 = Reference('book', 'foo1', foo1_id)
348 foo1 = Reference('book', 'foo1', foo1_id)
350 source_repo._update(foo1_id)
349 source_repo._update(foo1_id)
351 source_repo.bookmark('foo1')
350 source_repo.bookmark('foo1')
352
351
353 foo2_id = commits[-3].raw_id
352 foo2_id = commits[-3].raw_id
354 foo2 = Reference('book', 'foo2', foo2_id)
353 foo2 = Reference('book', 'foo2', foo2_id)
355 source_repo._update(foo2_id)
354 source_repo._update(foo2_id)
356 source_repo.bookmark('foo2')
355 source_repo.bookmark('foo2')
357
356
358 target_repo._local_pull(source_repo.path, foo1)
357 target_repo._local_pull(source_repo.path, foo1)
359 target_repo = MercurialRepository(target_repo.path)
358 target_repo = MercurialRepository(target_repo.path)
360 assert target_repo.branches['default'] == commits[-5].raw_id
359 assert target_repo.branches['default'] == commits[-5].raw_id
361
360
362 target_repo._local_pull(source_repo.path, foo2)
361 target_repo._local_pull(source_repo.path, foo2)
363 target_repo = MercurialRepository(target_repo.path)
362 target_repo = MercurialRepository(target_repo.path)
364 assert target_repo.branches['default'] == commits[-3].raw_id
363 assert target_repo.branches['default'] == commits[-3].raw_id
365
364
366 def test_local_pull_commit(self):
365 def test_local_pull_commit(self):
367 target_repo = self.get_empty_repo()
366 target_repo = self.get_empty_repo()
368 source_repo = self.get_clone_repo()
367 source_repo = self.get_clone_repo()
369
368
370 commits = list(source_repo.get_commits(branch_name='default'))
369 commits = list(source_repo.get_commits(branch_name='default'))
371 commit_id = commits[-5].raw_id
370 commit_id = commits[-5].raw_id
372 commit = Reference('rev', commit_id, commit_id)
371 commit = Reference('rev', commit_id, commit_id)
373 target_repo._local_pull(source_repo.path, commit)
372 target_repo._local_pull(source_repo.path, commit)
374 target_repo = MercurialRepository(target_repo.path)
373 target_repo = MercurialRepository(target_repo.path)
375 assert target_repo.branches['default'] == commit_id
374 assert target_repo.branches['default'] == commit_id
376
375
377 commit_id = commits[-3].raw_id
376 commit_id = commits[-3].raw_id
378 commit = Reference('rev', commit_id, commit_id)
377 commit = Reference('rev', commit_id, commit_id)
379 target_repo._local_pull(source_repo.path, commit)
378 target_repo._local_pull(source_repo.path, commit)
380 target_repo = MercurialRepository(target_repo.path)
379 target_repo = MercurialRepository(target_repo.path)
381 assert target_repo.branches['default'] == commit_id
380 assert target_repo.branches['default'] == commit_id
382
381
383 def test_local_pull_from_same_repo(self):
382 def test_local_pull_from_same_repo(self):
384 reference = Reference('branch', 'default', None)
383 reference = Reference('branch', 'default', None)
385 with pytest.raises(ValueError):
384 with pytest.raises(ValueError):
386 self.repo._local_pull(self.repo.path, reference)
385 self.repo._local_pull(self.repo.path, reference)
387
386
388 def test_validate_pull_reference_raises_on_missing_reference(
387 def test_validate_pull_reference_raises_on_missing_reference(
389 self, vcsbackend_hg):
388 self, vcsbackend_hg):
390 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
391 reference = Reference(
390 reference = Reference(
392 'book', 'invalid_reference', 'a' * 40)
391 'book', 'invalid_reference', 'a' * 40)
393
392
394 with pytest.raises(CommitDoesNotExistError):
393 with pytest.raises(CommitDoesNotExistError):
395 target_repo._validate_pull_reference(reference)
394 target_repo._validate_pull_reference(reference)
396
395
397 def test_heads(self):
396 def test_heads(self):
398 assert set(self.repo._heads()) == set(self.repo.branches.values())
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
399
398
400 def test_ancestor(self):
399 def test_ancestor(self):
401 commits = [
400 commits = [
402 c.raw_id for c in self.repo.get_commits(branch_name='default')]
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
403 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
404 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
405
404
406 def test_local_push(self):
405 def test_local_push(self):
407 target_repo = self.get_empty_repo()
406 target_repo = self.get_empty_repo()
408
407
409 revisions = list(self.repo.get_commits(branch_name='default'))
408 revisions = list(self.repo.get_commits(branch_name='default'))
410 revision = revisions[-5].raw_id
409 revision = revisions[-5].raw_id
411 self.repo._local_push(revision, target_repo.path)
410 self.repo._local_push(revision, target_repo.path)
412
411
413 target_repo = MercurialRepository(target_repo.path)
412 target_repo = MercurialRepository(target_repo.path)
414
413
415 assert target_repo.branches['default'] == revision
414 assert target_repo.branches['default'] == revision
416
415
417 def test_hooks_can_be_enabled_for_local_push(self):
416 def test_hooks_can_be_enabled_for_local_push(self):
418 revision = 'deadbeef'
417 revision = 'deadbeef'
419 repo_path = 'test_group/test_repo'
418 repo_path = 'test_group/test_repo'
420 with mock.patch.object(self.repo, '_remote') as remote_mock:
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
421 self.repo._local_push(revision, repo_path, enable_hooks=True)
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
422 remote_mock.push.assert_called_once_with(
421 remote_mock.push.assert_called_once_with(
423 [revision], repo_path, hooks=True, push_branches=False)
422 [revision], repo_path, hooks=True, push_branches=False)
424
423
425 def test_local_merge(self, vcsbackend_hg):
424 def test_local_merge(self, vcsbackend_hg):
426 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
427 source_repo = vcsbackend_hg.clone_repo(target_repo)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
428 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
429 target_repo = MercurialRepository(target_repo.path)
428 target_repo = MercurialRepository(target_repo.path)
430 target_rev = target_repo.branches['default']
429 target_rev = target_repo.branches['default']
431 target_ref = Reference(
430 target_ref = Reference(
432 type='branch', name='default', commit_id=target_rev)
431 type='branch', name='default', commit_id=target_rev)
433 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
434 source_repo = MercurialRepository(source_repo.path)
433 source_repo = MercurialRepository(source_repo.path)
435 source_rev = source_repo.branches['default']
434 source_rev = source_repo.branches['default']
436 source_ref = Reference(
435 source_ref = Reference(
437 type='branch', name='default', commit_id=source_rev)
436 type='branch', name='default', commit_id=source_rev)
438
437
439 target_repo._local_pull(source_repo.path, source_ref)
438 target_repo._local_pull(source_repo.path, source_ref)
440
439
441 merge_message = 'Merge message\n\nDescription:...'
440 merge_message = 'Merge message\n\nDescription:...'
442 user_name = 'Albert Einstein'
441 user_name = 'Albert Einstein'
443 user_email = 'albert@einstein.com'
442 user_email = 'albert@einstein.com'
444 merge_commit_id, needs_push = target_repo._local_merge(
443 merge_commit_id, needs_push = target_repo._local_merge(
445 target_ref, merge_message, user_name, user_email, source_ref)
444 target_ref, merge_message, user_name, user_email, source_ref)
446 assert needs_push
445 assert needs_push
447
446
448 target_repo = MercurialRepository(target_repo.path)
447 target_repo = MercurialRepository(target_repo.path)
449 assert target_repo.commit_ids[-3] == target_rev
448 assert target_repo.commit_ids[-3] == target_rev
450 assert target_repo.commit_ids[-2] == source_rev
449 assert target_repo.commit_ids[-2] == source_rev
451 last_commit = target_repo.get_commit(merge_commit_id)
450 last_commit = target_repo.get_commit(merge_commit_id)
452 assert last_commit.message.strip() == merge_message
451 assert last_commit.message.strip() == merge_message
453 assert last_commit.author == '%s <%s>' % (user_name, user_email)
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
454
453
455 assert not os.path.exists(
454 assert not os.path.exists(
456 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
457
456
458 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
459 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
460 source_repo = vcsbackend_hg.clone_repo(target_repo)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
461 target_rev = target_repo.branches['default']
460 target_rev = target_repo.branches['default']
462 target_ref = Reference(
461 target_ref = Reference(
463 type='branch', name='default', commit_id=target_rev)
462 type='branch', name='default', commit_id=target_rev)
464 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
465 source_repo = MercurialRepository(source_repo.path)
464 source_repo = MercurialRepository(source_repo.path)
466 source_rev = source_repo.branches['default']
465 source_rev = source_repo.branches['default']
467 source_ref = Reference(
466 source_ref = Reference(
468 type='branch', name='default', commit_id=source_rev)
467 type='branch', name='default', commit_id=source_rev)
469
468
470 target_repo._local_pull(source_repo.path, source_ref)
469 target_repo._local_pull(source_repo.path, source_ref)
471
470
472 merge_message = 'Merge message\n\nDescription:...'
471 merge_message = 'Merge message\n\nDescription:...'
473 user_name = 'Albert Einstein'
472 user_name = 'Albert Einstein'
474 user_email = 'albert@einstein.com'
473 user_email = 'albert@einstein.com'
475 merge_commit_id, needs_push = target_repo._local_merge(
474 merge_commit_id, needs_push = target_repo._local_merge(
476 target_ref, merge_message, user_name, user_email, source_ref)
475 target_ref, merge_message, user_name, user_email, source_ref)
477 assert merge_commit_id == source_rev
476 assert merge_commit_id == source_rev
478 assert needs_push
477 assert needs_push
479
478
480 target_repo = MercurialRepository(target_repo.path)
479 target_repo = MercurialRepository(target_repo.path)
481 assert target_repo.commit_ids[-2] == target_rev
480 assert target_repo.commit_ids[-2] == target_rev
482 assert target_repo.commit_ids[-1] == source_rev
481 assert target_repo.commit_ids[-1] == source_rev
483
482
484 assert not os.path.exists(
483 assert not os.path.exists(
485 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
486
485
487 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
488 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
489 target_rev = target_repo.branches['default']
488 target_rev = target_repo.branches['default']
490 target_ref = Reference(
489 target_ref = Reference(
491 type='branch', name='default', commit_id=target_rev)
490 type='branch', name='default', commit_id=target_rev)
492
491
493 merge_message = 'Merge message\n\nDescription:...'
492 merge_message = 'Merge message\n\nDescription:...'
494 user_name = 'Albert Einstein'
493 user_name = 'Albert Einstein'
495 user_email = 'albert@einstein.com'
494 user_email = 'albert@einstein.com'
496 merge_commit_id, needs_push = target_repo._local_merge(
495 merge_commit_id, needs_push = target_repo._local_merge(
497 target_ref, merge_message, user_name, user_email, target_ref)
496 target_ref, merge_message, user_name, user_email, target_ref)
498 assert merge_commit_id == target_rev
497 assert merge_commit_id == target_rev
499 assert not needs_push
498 assert not needs_push
500
499
501 target_repo = MercurialRepository(target_repo.path)
500 target_repo = MercurialRepository(target_repo.path)
502 assert target_repo.commit_ids[-1] == target_rev
501 assert target_repo.commit_ids[-1] == target_rev
503
502
504 assert not os.path.exists(
503 assert not os.path.exists(
505 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
506
505
507 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
508 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
509 source_repo = vcsbackend_hg.clone_repo(target_repo)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
510 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
511 target_repo = MercurialRepository(target_repo.path)
510 target_repo = MercurialRepository(target_repo.path)
512 target_rev = target_repo.branches['default']
511 target_rev = target_repo.branches['default']
513 target_ref = Reference(
512 target_ref = Reference(
514 type='branch', name='default', commit_id=target_rev)
513 type='branch', name='default', commit_id=target_rev)
515 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
516 source_repo = MercurialRepository(source_repo.path)
515 source_repo = MercurialRepository(source_repo.path)
517 source_rev = source_repo.branches['default']
516 source_rev = source_repo.branches['default']
518 source_ref = Reference(
517 source_ref = Reference(
519 type='branch', name='default', commit_id=source_rev)
518 type='branch', name='default', commit_id=source_rev)
520
519
521 target_repo._local_pull(source_repo.path, source_ref)
520 target_repo._local_pull(source_repo.path, source_ref)
522 with pytest.raises(RepositoryError):
521 with pytest.raises(RepositoryError):
523 target_repo._local_merge(
522 target_repo._local_merge(
524 target_ref, 'merge_message', 'user name', 'user@name.com',
523 target_ref, 'merge_message', 'user name', 'user@name.com',
525 source_ref)
524 source_ref)
526
525
527 # Check we are not left in an intermediate merge state
526 # Check we are not left in an intermediate merge state
528 assert not os.path.exists(
527 assert not os.path.exists(
529 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
530
529
531 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
532 commits = [
531 commits = [
533 {'message': 'a'},
532 {'message': 'a'},
534 {'message': 'b', 'branch': 'b'},
533 {'message': 'b', 'branch': 'b'},
535 ]
534 ]
536 repo = backend_hg.create_repo(commits)
535 repo = backend_hg.create_repo(commits)
537 commit_ids = backend_hg.commit_ids
536 commit_ids = backend_hg.commit_ids
538 target_ref = Reference(
537 target_ref = Reference(
539 type='branch', name='default', commit_id=commit_ids['a'])
538 type='branch', name='default', commit_id=commit_ids['a'])
540 source_ref = Reference(
539 source_ref = Reference(
541 type='branch', name='b', commit_id=commit_ids['b'])
540 type='branch', name='b', commit_id=commit_ids['b'])
542 merge_message = 'Merge message\n\nDescription:...'
541 merge_message = 'Merge message\n\nDescription:...'
543 user_name = 'Albert Einstein'
542 user_name = 'Albert Einstein'
544 user_email = 'albert@einstein.com'
543 user_email = 'albert@einstein.com'
545 vcs_repo = repo.scm_instance()
544 vcs_repo = repo.scm_instance()
546 merge_commit_id, needs_push = vcs_repo._local_merge(
545 merge_commit_id, needs_push = vcs_repo._local_merge(
547 target_ref, merge_message, user_name, user_email, source_ref)
546 target_ref, merge_message, user_name, user_email, source_ref)
548 assert merge_commit_id != source_ref.commit_id
547 assert merge_commit_id != source_ref.commit_id
549 assert needs_push is True
548 assert needs_push is True
550 commit = vcs_repo.get_commit(merge_commit_id)
549 commit = vcs_repo.get_commit(merge_commit_id)
551 assert commit.merge is True
550 assert commit.merge is True
552 assert commit.message == merge_message
551 assert commit.message == merge_message
553
552
554 def test_maybe_prepare_merge_workspace(self):
553 def test_maybe_prepare_merge_workspace(self):
555 workspace = self.repo._maybe_prepare_merge_workspace(
554 workspace = self.repo._maybe_prepare_merge_workspace(
556 'pr2', 'unused', 'unused2')
555 1, 'pr2', 'unused', 'unused2')
557
556
558 assert os.path.isdir(workspace)
557 assert os.path.isdir(workspace)
559 workspace_repo = MercurialRepository(workspace)
558 workspace_repo = MercurialRepository(workspace)
560 assert workspace_repo.branches == self.repo.branches
559 assert workspace_repo.branches == self.repo.branches
561
560
562 # Calling it a second time should also succeed
561 # Calling it a second time should also succeed
563 workspace = self.repo._maybe_prepare_merge_workspace(
562 workspace = self.repo._maybe_prepare_merge_workspace(
564 'pr2', 'unused', 'unused2')
563 1, 'pr2', 'unused', 'unused2')
565 assert os.path.isdir(workspace)
564 assert os.path.isdir(workspace)
566
565
567 def test_cleanup_merge_workspace(self):
566 def test_cleanup_merge_workspace(self):
568 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
569 'pr3', 'unused', 'unused2')
568 1, 'pr3', 'unused', 'unused2')
570 self.repo.cleanup_merge_workspace('pr3')
569
570 assert os.path.isdir(workspace)
571 self.repo.cleanup_merge_workspace(1, 'pr3')
571
572
572 assert not os.path.exists(workspace)
573 assert not os.path.exists(workspace)
573
574
574 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 # No assert: because in case of an inexistent workspace this function
576 # No assert: because in case of an inexistent workspace this function
576 # should still succeed.
577 # should still succeed.
577 self.repo.cleanup_merge_workspace('pr4')
578 self.repo.cleanup_merge_workspace(1, 'pr4')
578
579
579 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 imc = source_repo.in_memory_commit
585 imc = source_repo.in_memory_commit
585 imc.add(FileNode('file_x', content=source_repo.name))
586 imc.add(FileNode('file_x', content=source_repo.name))
586 imc.commit(
587 imc.commit(
587 message=u'Automatic commit from repo merge test',
588 message=u'Automatic commit from repo merge test',
588 author=u'Automatic')
589 author=u'Automatic')
589 target_commit = target_repo.get_commit()
590 target_commit = target_repo.get_commit()
590 source_commit = source_repo.get_commit()
591 source_commit = source_repo.get_commit()
591 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 bookmark_name = 'bookmark'
593 bookmark_name = 'bookmark'
593 target_repo._update(default_branch)
594 target_repo._update(default_branch)
594 target_repo.bookmark(bookmark_name)
595 target_repo.bookmark(bookmark_name)
595 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 workspace = 'test-merge'
598 workspace_id = 'test-merge'
598
599 repo_id = repo_id_generator(target_repo.path)
599 merge_response = target_repo.merge(
600 merge_response = target_repo.merge(
600 target_ref, source_repo, source_ref, workspace,
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
601 'test user', 'test@rhodecode.com', 'merge message 1',
602 'test user', 'test@rhodecode.com', 'merge message 1',
602 dry_run=False)
603 dry_run=False)
603 expected_merge_response = MergeResponse(
604 expected_merge_response = MergeResponse(
604 True, True, merge_response.merge_ref,
605 True, True, merge_response.merge_ref,
605 MergeFailureReason.NONE)
606 MergeFailureReason.NONE)
606 assert merge_response == expected_merge_response
607 assert merge_response == expected_merge_response
607
608
608 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 target_repo.path)
610 target_repo.path)
610 target_commits = list(target_repo.get_commits())
611 target_commits = list(target_repo.get_commits())
611 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 assert source_ref.commit_id in commit_ids
613 assert source_ref.commit_id in commit_ids
613 assert target_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
614
615
615 merge_commit = target_commits[-1]
616 merge_commit = target_commits[-1]
616 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 assert merge_commit.message.strip() == 'merge message 1'
618 assert merge_commit.message.strip() == 'merge message 1'
618 assert merge_commit.author == 'test user <test@rhodecode.com>'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
619
620
620 # Check the bookmark was updated in the target repo
621 # Check the bookmark was updated in the target repo
621 assert (
622 assert (
622 target_repo.bookmarks[bookmark_name] ==
623 target_repo.bookmarks[bookmark_name] ==
623 merge_response.merge_ref.commit_id)
624 merge_response.merge_ref.commit_id)
624
625
625 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 imc = source_repo.in_memory_commit
629 imc = source_repo.in_memory_commit
629 imc.add(FileNode('file_x', content=source_repo.name))
630 imc.add(FileNode('file_x', content=source_repo.name))
630 imc.commit(
631 imc.commit(
631 message=u'Automatic commit from repo merge test',
632 message=u'Automatic commit from repo merge test',
632 author=u'Automatic')
633 author=u'Automatic')
633 target_commit = target_repo.get_commit()
634 target_commit = target_repo.get_commit()
634 source_commit = source_repo.get_commit()
635 source_commit = source_repo.get_commit()
635 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 bookmark_name = 'bookmark'
637 bookmark_name = 'bookmark'
637 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 source_repo._update(default_branch)
639 source_repo._update(default_branch)
639 source_repo.bookmark(bookmark_name)
640 source_repo.bookmark(bookmark_name)
640 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 workspace = 'test-merge'
642 workspace_id = 'test-merge'
642
643 repo_id = repo_id_generator(target_repo.path)
643 merge_response = target_repo.merge(
644 merge_response = target_repo.merge(
644 target_ref, source_repo, source_ref, workspace,
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
645 'test user', 'test@rhodecode.com', 'merge message 1',
646 'test user', 'test@rhodecode.com', 'merge message 1',
646 dry_run=False)
647 dry_run=False)
647 expected_merge_response = MergeResponse(
648 expected_merge_response = MergeResponse(
648 True, True, merge_response.merge_ref,
649 True, True, merge_response.merge_ref,
649 MergeFailureReason.NONE)
650 MergeFailureReason.NONE)
650 assert merge_response == expected_merge_response
651 assert merge_response == expected_merge_response
651
652
652 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 target_repo.path)
654 target_repo.path)
654 target_commits = list(target_repo.get_commits())
655 target_commits = list(target_repo.get_commits())
655 commit_ids = [c.raw_id for c in target_commits]
656 commit_ids = [c.raw_id for c in target_commits]
656 assert source_ref.commit_id == commit_ids[-1]
657 assert source_ref.commit_id == commit_ids[-1]
657 assert target_ref.commit_id == commit_ids[-2]
658 assert target_ref.commit_id == commit_ids[-2]
658
659
659 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664
665
665 # add an extra head to the target repo
666 # add an extra head to the target repo
666 imc = target_repo.in_memory_commit
667 imc = target_repo.in_memory_commit
667 imc.add(FileNode('file_x', content='foo'))
668 imc.add(FileNode('file_x', content='foo'))
668 commits = list(target_repo.get_commits())
669 commits = list(target_repo.get_commits())
669 imc.commit(
670 imc.commit(
670 message=u'Automatic commit from repo merge test',
671 message=u'Automatic commit from repo merge test',
671 author=u'Automatic', parents=commits[0:1])
672 author=u'Automatic', parents=commits[0:1])
672
673
673 target_commit = target_repo.get_commit()
674 target_commit = target_repo.get_commit()
674 source_commit = source_repo.get_commit()
675 source_commit = source_repo.get_commit()
675 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 target_repo._update(default_branch)
677 target_repo._update(default_branch)
677
678
678 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 workspace = 'test-merge'
681 workspace_id = 'test-merge'
681
682
682 assert len(target_repo._heads(branch='default')) == 2
683 assert len(target_repo._heads(branch='default')) == 2
683 expected_merge_response = MergeResponse(
684 expected_merge_response = MergeResponse(
684 False, False, None,
685 False, False, None,
685 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
686 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
687 repo_id = repo_id_generator(target_repo.path)
686 merge_response = target_repo.merge(
688 merge_response = target_repo.merge(
687 target_ref, source_repo, source_ref, workspace,
689 repo_id, workspace_id, target_ref, source_repo, source_ref,
688 'test user', 'test@rhodecode.com', 'merge message 1',
690 'test user', 'test@rhodecode.com', 'merge message 1',
689 dry_run=False)
691 dry_run=False)
690 assert merge_response == expected_merge_response
692 assert merge_response == expected_merge_response
691
693
692 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
694 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
693 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
695 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
694 source_repo = vcsbackend_hg.clone_repo(target_repo)
696 source_repo = vcsbackend_hg.clone_repo(target_repo)
695 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
697 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
696 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
698 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
697 imc = source_repo.in_memory_commit
699 imc = source_repo.in_memory_commit
698 imc.add(FileNode('file_x', content=source_repo.name))
700 imc.add(FileNode('file_x', content=source_repo.name))
699 imc.commit(
701 imc.commit(
700 message=u'Automatic commit from repo merge test',
702 message=u'Automatic commit from repo merge test',
701 author=u'Automatic')
703 author=u'Automatic')
702 target_commit = target_repo.get_commit()
704 target_commit = target_repo.get_commit()
703 source_commit = source_repo.get_commit()
705 source_commit = source_repo.get_commit()
704
706
705 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
707 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
706
708
707 default_branch = target_repo.DEFAULT_BRANCH_NAME
709 default_branch = target_repo.DEFAULT_BRANCH_NAME
708 bookmark_name = 'bookmark'
710 bookmark_name = 'bookmark'
709 source_repo._update(default_branch)
711 source_repo._update(default_branch)
710 source_repo.bookmark(bookmark_name)
712 source_repo.bookmark(bookmark_name)
711
713
712 target_ref = Reference('branch', default_branch, target_commit.raw_id)
714 target_ref = Reference('branch', default_branch, target_commit.raw_id)
713 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
715 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
714 workspace = 'test-merge'
716 repo_id = repo_id_generator(target_repo.path)
717 workspace_id = 'test-merge'
715
718
716 merge_response = target_repo.merge(
719 merge_response = target_repo.merge(
717 target_ref, source_repo, source_ref, workspace,
720 repo_id, workspace_id, target_ref, source_repo, source_ref,
718 'test user', 'test@rhodecode.com', 'merge message 1',
721 'test user', 'test@rhodecode.com', 'merge message 1',
719 dry_run=False, use_rebase=True)
722 dry_run=False, use_rebase=True)
720
723
721 expected_merge_response = MergeResponse(
724 expected_merge_response = MergeResponse(
722 True, True, merge_response.merge_ref,
725 True, True, merge_response.merge_ref,
723 MergeFailureReason.NONE)
726 MergeFailureReason.NONE)
724 assert merge_response == expected_merge_response
727 assert merge_response == expected_merge_response
725
728
726 target_repo = backends.get_backend(vcsbackend_hg.alias)(
729 target_repo = backends.get_backend(vcsbackend_hg.alias)(
727 target_repo.path)
730 target_repo.path)
728 last_commit = target_repo.get_commit()
731 last_commit = target_repo.get_commit()
729 assert last_commit.message == source_commit.message
732 assert last_commit.message == source_commit.message
730 assert last_commit.author == source_commit.author
733 assert last_commit.author == source_commit.author
731 # This checks that we effectively did a rebase
734 # This checks that we effectively did a rebase
732 assert last_commit.raw_id != source_commit.raw_id
735 assert last_commit.raw_id != source_commit.raw_id
733
736
734 # Check the target has only 4 commits: 2 were already in target and
737 # Check the target has only 4 commits: 2 were already in target and
735 # only two should have been added
738 # only two should have been added
736 assert len(target_repo.commit_ids) == 2 + 2
739 assert len(target_repo.commit_ids) == 2 + 2
737
740
738
741
739 class TestGetShadowInstance(object):
742 class TestGetShadowInstance(object):
740
743
741 @pytest.fixture
744 @pytest.fixture
742 def repo(self, vcsbackend_hg, monkeypatch):
745 def repo(self, vcsbackend_hg, monkeypatch):
743 repo = vcsbackend_hg.repo
746 repo = vcsbackend_hg.repo
744 monkeypatch.setattr(repo, 'config', mock.Mock())
747 monkeypatch.setattr(repo, 'config', mock.Mock())
745 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
748 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
746 return repo
749 return repo
747
750
748 def test_passes_config(self, repo):
751 def test_passes_config(self, repo):
749 shadow = repo._get_shadow_instance(repo.path)
752 shadow = repo._get_shadow_instance(repo.path)
750 assert shadow.config == repo.config.copy()
753 assert shadow.config == repo.config.copy()
751
754
752 def test_disables_hooks(self, repo):
755 def test_disables_hooks(self, repo):
753 shadow = repo._get_shadow_instance(repo.path)
756 shadow = repo._get_shadow_instance(repo.path)
754 shadow.config.clear_section.assert_called_once_with('hooks')
757 shadow.config.clear_section.assert_called_once_with('hooks')
755
758
756 def test_allows_to_keep_hooks(self, repo):
759 def test_allows_to_keep_hooks(self, repo):
757 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
760 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
758 assert not shadow.config.clear_section.called
761 assert not shadow.config.clear_section.called
759
762
760
763
761 class TestMercurialCommit(object):
764 class TestMercurialCommit(object):
762
765
763 def _test_equality(self, commit):
766 def _test_equality(self, commit):
764 idx = commit.idx
767 idx = commit.idx
765 assert commit == self.repo.get_commit(commit_idx=idx)
768 assert commit == self.repo.get_commit(commit_idx=idx)
766
769
767 def test_equality(self):
770 def test_equality(self):
768 indexes = [0, 10, 20]
771 indexes = [0, 10, 20]
769 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
772 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
770 for commit in commits:
773 for commit in commits:
771 self._test_equality(commit)
774 self._test_equality(commit)
772
775
773 def test_default_commit(self):
776 def test_default_commit(self):
774 tip = self.repo.get_commit('tip')
777 tip = self.repo.get_commit('tip')
775 assert tip == self.repo.get_commit()
778 assert tip == self.repo.get_commit()
776 assert tip == self.repo.get_commit(commit_id=None)
779 assert tip == self.repo.get_commit(commit_id=None)
777 assert tip == self.repo.get_commit(commit_idx=None)
780 assert tip == self.repo.get_commit(commit_idx=None)
778 assert tip == list(self.repo[-1:])[0]
781 assert tip == list(self.repo[-1:])[0]
779
782
780 def test_root_node(self):
783 def test_root_node(self):
781 tip = self.repo.get_commit('tip')
784 tip = self.repo.get_commit('tip')
782 assert tip.root is tip.get_node('')
785 assert tip.root is tip.get_node('')
783
786
784 def test_lazy_fetch(self):
787 def test_lazy_fetch(self):
785 """
788 """
786 Test if commit's nodes expands and are cached as we walk through
789 Test if commit's nodes expands and are cached as we walk through
787 the commit. This test is somewhat hard to write as order of tests
790 the commit. This test is somewhat hard to write as order of tests
788 is a key here. Written by running command after command in a shell.
791 is a key here. Written by running command after command in a shell.
789 """
792 """
790 commit = self.repo.get_commit(commit_idx=45)
793 commit = self.repo.get_commit(commit_idx=45)
791 assert len(commit.nodes) == 0
794 assert len(commit.nodes) == 0
792 root = commit.root
795 root = commit.root
793 assert len(commit.nodes) == 1
796 assert len(commit.nodes) == 1
794 assert len(root.nodes) == 8
797 assert len(root.nodes) == 8
795 # accessing root.nodes updates commit.nodes
798 # accessing root.nodes updates commit.nodes
796 assert len(commit.nodes) == 9
799 assert len(commit.nodes) == 9
797
800
798 docs = root.get_node('docs')
801 docs = root.get_node('docs')
799 # we haven't yet accessed anything new as docs dir was already cached
802 # we haven't yet accessed anything new as docs dir was already cached
800 assert len(commit.nodes) == 9
803 assert len(commit.nodes) == 9
801 assert len(docs.nodes) == 8
804 assert len(docs.nodes) == 8
802 # accessing docs.nodes updates commit.nodes
805 # accessing docs.nodes updates commit.nodes
803 assert len(commit.nodes) == 17
806 assert len(commit.nodes) == 17
804
807
805 assert docs is commit.get_node('docs')
808 assert docs is commit.get_node('docs')
806 assert docs is root.nodes[0]
809 assert docs is root.nodes[0]
807 assert docs is root.dirs[0]
810 assert docs is root.dirs[0]
808 assert docs is commit.get_node('docs')
811 assert docs is commit.get_node('docs')
809
812
810 def test_nodes_with_commit(self):
813 def test_nodes_with_commit(self):
811 commit = self.repo.get_commit(commit_idx=45)
814 commit = self.repo.get_commit(commit_idx=45)
812 root = commit.root
815 root = commit.root
813 docs = root.get_node('docs')
816 docs = root.get_node('docs')
814 assert docs is commit.get_node('docs')
817 assert docs is commit.get_node('docs')
815 api = docs.get_node('api')
818 api = docs.get_node('api')
816 assert api is commit.get_node('docs/api')
819 assert api is commit.get_node('docs/api')
817 index = api.get_node('index.rst')
820 index = api.get_node('index.rst')
818 assert index is commit.get_node('docs/api/index.rst')
821 assert index is commit.get_node('docs/api/index.rst')
819 assert index is commit.get_node(
822 assert index is commit.get_node(
820 'docs').get_node('api').get_node('index.rst')
823 'docs').get_node('api').get_node('index.rst')
821
824
822 def test_branch_and_tags(self):
825 def test_branch_and_tags(self):
823 commit0 = self.repo.get_commit(commit_idx=0)
826 commit0 = self.repo.get_commit(commit_idx=0)
824 assert commit0.branch == 'default'
827 assert commit0.branch == 'default'
825 assert commit0.tags == []
828 assert commit0.tags == []
826
829
827 commit10 = self.repo.get_commit(commit_idx=10)
830 commit10 = self.repo.get_commit(commit_idx=10)
828 assert commit10.branch == 'default'
831 assert commit10.branch == 'default'
829 assert commit10.tags == []
832 assert commit10.tags == []
830
833
831 commit44 = self.repo.get_commit(commit_idx=44)
834 commit44 = self.repo.get_commit(commit_idx=44)
832 assert commit44.branch == 'web'
835 assert commit44.branch == 'web'
833
836
834 tip = self.repo.get_commit('tip')
837 tip = self.repo.get_commit('tip')
835 assert 'tip' in tip.tags
838 assert 'tip' in tip.tags
836
839
837 def test_bookmarks(self):
840 def test_bookmarks(self):
838 commit0 = self.repo.get_commit(commit_idx=0)
841 commit0 = self.repo.get_commit(commit_idx=0)
839 assert commit0.bookmarks == []
842 assert commit0.bookmarks == []
840
843
841 def _test_file_size(self, idx, path, size):
844 def _test_file_size(self, idx, path, size):
842 node = self.repo.get_commit(commit_idx=idx).get_node(path)
845 node = self.repo.get_commit(commit_idx=idx).get_node(path)
843 assert node.is_file()
846 assert node.is_file()
844 assert node.size == size
847 assert node.size == size
845
848
846 def test_file_size(self):
849 def test_file_size(self):
847 to_check = (
850 to_check = (
848 (10, 'setup.py', 1068),
851 (10, 'setup.py', 1068),
849 (20, 'setup.py', 1106),
852 (20, 'setup.py', 1106),
850 (60, 'setup.py', 1074),
853 (60, 'setup.py', 1074),
851
854
852 (10, 'vcs/backends/base.py', 2921),
855 (10, 'vcs/backends/base.py', 2921),
853 (20, 'vcs/backends/base.py', 3936),
856 (20, 'vcs/backends/base.py', 3936),
854 (60, 'vcs/backends/base.py', 6189),
857 (60, 'vcs/backends/base.py', 6189),
855 )
858 )
856 for idx, path, size in to_check:
859 for idx, path, size in to_check:
857 self._test_file_size(idx, path, size)
860 self._test_file_size(idx, path, size)
858
861
859 def test_file_history_from_commits(self):
862 def test_file_history_from_commits(self):
860 node = self.repo[10].get_node('setup.py')
863 node = self.repo[10].get_node('setup.py')
861 commit_ids = [commit.raw_id for commit in node.history]
864 commit_ids = [commit.raw_id for commit in node.history]
862 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
865 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
863
866
864 node = self.repo[20].get_node('setup.py')
867 node = self.repo[20].get_node('setup.py')
865 node_ids = [commit.raw_id for commit in node.history]
868 node_ids = [commit.raw_id for commit in node.history]
866 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
869 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
867 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
870 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
868
871
869 # special case we check history from commit that has this particular
872 # special case we check history from commit that has this particular
870 # file changed this means we check if it's included as well
873 # file changed this means we check if it's included as well
871 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
874 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
872 .get_node('setup.py')
875 .get_node('setup.py')
873 node_ids = [commit.raw_id for commit in node.history]
876 node_ids = [commit.raw_id for commit in node.history]
874 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
877 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
875 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
878 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
876
879
877 def test_file_history(self):
880 def test_file_history(self):
878 # we can only check if those commits are present in the history
881 # we can only check if those commits are present in the history
879 # as we cannot update this test every time file is changed
882 # as we cannot update this test every time file is changed
880 files = {
883 files = {
881 'setup.py': [7, 18, 45, 46, 47, 69, 77],
884 'setup.py': [7, 18, 45, 46, 47, 69, 77],
882 'vcs/nodes.py': [
885 'vcs/nodes.py': [
883 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
886 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
884 'vcs/backends/hg.py': [
887 'vcs/backends/hg.py': [
885 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
888 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
886 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
889 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
887 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
890 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
888 }
891 }
889 for path, indexes in files.items():
892 for path, indexes in files.items():
890 tip = self.repo.get_commit(commit_idx=indexes[-1])
893 tip = self.repo.get_commit(commit_idx=indexes[-1])
891 node = tip.get_node(path)
894 node = tip.get_node(path)
892 node_indexes = [commit.idx for commit in node.history]
895 node_indexes = [commit.idx for commit in node.history]
893 assert set(indexes).issubset(set(node_indexes)), (
896 assert set(indexes).issubset(set(node_indexes)), (
894 "We assumed that %s is subset of commits for which file %s "
897 "We assumed that %s is subset of commits for which file %s "
895 "has been changed, and history of that node returned: %s"
898 "has been changed, and history of that node returned: %s"
896 % (indexes, path, node_indexes))
899 % (indexes, path, node_indexes))
897
900
898 def test_file_annotate(self):
901 def test_file_annotate(self):
899 files = {
902 files = {
900 'vcs/backends/__init__.py': {
903 'vcs/backends/__init__.py': {
901 89: {
904 89: {
902 'lines_no': 31,
905 'lines_no': 31,
903 'commits': [
906 'commits': [
904 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
907 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
905 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
908 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
906 32, 32, 32, 32, 37, 32, 37, 37, 32,
909 32, 32, 32, 32, 37, 32, 37, 37, 32,
907 32, 32
910 32, 32
908 ]
911 ]
909 },
912 },
910 20: {
913 20: {
911 'lines_no': 1,
914 'lines_no': 1,
912 'commits': [4]
915 'commits': [4]
913 },
916 },
914 55: {
917 55: {
915 'lines_no': 31,
918 'lines_no': 31,
916 'commits': [
919 'commits': [
917 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
920 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
918 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
921 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
919 32, 32, 32, 32, 37, 32, 37, 37, 32,
922 32, 32, 32, 32, 37, 32, 37, 37, 32,
920 32, 32
923 32, 32
921 ]
924 ]
922 }
925 }
923 },
926 },
924 'vcs/exceptions.py': {
927 'vcs/exceptions.py': {
925 89: {
928 89: {
926 'lines_no': 18,
929 'lines_no': 18,
927 'commits': [
930 'commits': [
928 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
931 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
929 16, 16, 17, 16, 16, 18, 18, 18
932 16, 16, 17, 16, 16, 18, 18, 18
930 ]
933 ]
931 },
934 },
932 20: {
935 20: {
933 'lines_no': 18,
936 'lines_no': 18,
934 'commits': [
937 'commits': [
935 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
938 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
936 16, 16, 17, 16, 16, 18, 18, 18
939 16, 16, 17, 16, 16, 18, 18, 18
937 ]
940 ]
938 },
941 },
939 55: {
942 55: {
940 'lines_no': 18,
943 'lines_no': 18,
941 'commits': [
944 'commits': [
942 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
945 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
943 17, 16, 16, 18, 18, 18
946 17, 16, 16, 18, 18, 18
944 ]
947 ]
945 }
948 }
946 },
949 },
947 'MANIFEST.in': {
950 'MANIFEST.in': {
948 89: {
951 89: {
949 'lines_no': 5,
952 'lines_no': 5,
950 'commits': [7, 7, 7, 71, 71]
953 'commits': [7, 7, 7, 71, 71]
951 },
954 },
952 20: {
955 20: {
953 'lines_no': 3,
956 'lines_no': 3,
954 'commits': [7, 7, 7]
957 'commits': [7, 7, 7]
955 },
958 },
956 55: {
959 55: {
957 'lines_no': 3,
960 'lines_no': 3,
958 'commits': [7, 7, 7]
961 'commits': [7, 7, 7]
959 }
962 }
960 }
963 }
961 }
964 }
962
965
963 for fname, commit_dict in files.items():
966 for fname, commit_dict in files.items():
964 for idx, __ in commit_dict.items():
967 for idx, __ in commit_dict.items():
965 commit = self.repo.get_commit(commit_idx=idx)
968 commit = self.repo.get_commit(commit_idx=idx)
966 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
969 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
967 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
970 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
968 assert l1_1 == l1_2
971 assert l1_1 == l1_2
969 l1 = l1_2 = [
972 l1 = l1_2 = [
970 x[2]().idx for x in commit.get_file_annotate(fname)]
973 x[2]().idx for x in commit.get_file_annotate(fname)]
971 l2 = files[fname][idx]['commits']
974 l2 = files[fname][idx]['commits']
972 assert l1 == l2, (
975 assert l1 == l2, (
973 "The lists of commit for %s@commit_id%s"
976 "The lists of commit for %s@commit_id%s"
974 "from annotation list should match each other,"
977 "from annotation list should match each other,"
975 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
978 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
976
979
977 def test_commit_state(self):
980 def test_commit_state(self):
978 """
981 """
979 Tests which files have been added/changed/removed at particular commit
982 Tests which files have been added/changed/removed at particular commit
980 """
983 """
981
984
982 # commit_id 46ad32a4f974:
985 # commit_id 46ad32a4f974:
983 # hg st --rev 46ad32a4f974
986 # hg st --rev 46ad32a4f974
984 # changed: 13
987 # changed: 13
985 # added: 20
988 # added: 20
986 # removed: 1
989 # removed: 1
987 changed = set([
990 changed = set([
988 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
991 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
989 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
992 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
990 'vcs/__init__.py', 'vcs/backends/__init__.py',
993 'vcs/__init__.py', 'vcs/backends/__init__.py',
991 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
994 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
992 'vcs/utils/__init__.py'])
995 'vcs/utils/__init__.py'])
993
996
994 added = set([
997 added = set([
995 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
998 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
996 'docs/api/index.rst', 'docs/api/nodes.rst',
999 'docs/api/index.rst', 'docs/api/nodes.rst',
997 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1000 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
998 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1001 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
999 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1002 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1000 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1003 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1001 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1004 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1002 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1005 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1003 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1006 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1004 'vcs/web/simplevcs/views.py'])
1007 'vcs/web/simplevcs/views.py'])
1005
1008
1006 removed = set(['docs/api.rst'])
1009 removed = set(['docs/api.rst'])
1007
1010
1008 commit64 = self.repo.get_commit('46ad32a4f974')
1011 commit64 = self.repo.get_commit('46ad32a4f974')
1009 assert set((node.path for node in commit64.added)) == added
1012 assert set((node.path for node in commit64.added)) == added
1010 assert set((node.path for node in commit64.changed)) == changed
1013 assert set((node.path for node in commit64.changed)) == changed
1011 assert set((node.path for node in commit64.removed)) == removed
1014 assert set((node.path for node in commit64.removed)) == removed
1012
1015
1013 # commit_id b090f22d27d6:
1016 # commit_id b090f22d27d6:
1014 # hg st --rev b090f22d27d6
1017 # hg st --rev b090f22d27d6
1015 # changed: 13
1018 # changed: 13
1016 # added: 20
1019 # added: 20
1017 # removed: 1
1020 # removed: 1
1018 commit88 = self.repo.get_commit('b090f22d27d6')
1021 commit88 = self.repo.get_commit('b090f22d27d6')
1019 assert set((node.path for node in commit88.added)) == set()
1022 assert set((node.path for node in commit88.added)) == set()
1020 assert set((node.path for node in commit88.changed)) == \
1023 assert set((node.path for node in commit88.changed)) == \
1021 set(['.hgignore'])
1024 set(['.hgignore'])
1022 assert set((node.path for node in commit88.removed)) == set()
1025 assert set((node.path for node in commit88.removed)) == set()
1023
1026
1024 #
1027 #
1025 # 85:
1028 # 85:
1026 # added: 2 [
1029 # added: 2 [
1027 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1030 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1028 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1031 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1029 # removed: 1 ['vcs/utils/web.py']
1032 # removed: 1 ['vcs/utils/web.py']
1030 commit85 = self.repo.get_commit(commit_idx=85)
1033 commit85 = self.repo.get_commit(commit_idx=85)
1031 assert set((node.path for node in commit85.added)) == set([
1034 assert set((node.path for node in commit85.added)) == set([
1032 'vcs/utils/diffs.py',
1035 'vcs/utils/diffs.py',
1033 'vcs/web/simplevcs/views/diffs.py'])
1036 'vcs/web/simplevcs/views/diffs.py'])
1034 assert set((node.path for node in commit85.changed)) == set([
1037 assert set((node.path for node in commit85.changed)) == set([
1035 'vcs/web/simplevcs/models.py',
1038 'vcs/web/simplevcs/models.py',
1036 'vcs/web/simplevcs/utils.py',
1039 'vcs/web/simplevcs/utils.py',
1037 'vcs/web/simplevcs/views/__init__.py',
1040 'vcs/web/simplevcs/views/__init__.py',
1038 'vcs/web/simplevcs/views/repository.py',
1041 'vcs/web/simplevcs/views/repository.py',
1039 ])
1042 ])
1040 assert set((node.path for node in commit85.removed)) == \
1043 assert set((node.path for node in commit85.removed)) == \
1041 set(['vcs/utils/web.py'])
1044 set(['vcs/utils/web.py'])
1042
1045
1043 def test_files_state(self):
1046 def test_files_state(self):
1044 """
1047 """
1045 Tests state of FileNodes.
1048 Tests state of FileNodes.
1046 """
1049 """
1047 commit = self.repo.get_commit(commit_idx=85)
1050 commit = self.repo.get_commit(commit_idx=85)
1048 node = commit.get_node('vcs/utils/diffs.py')
1051 node = commit.get_node('vcs/utils/diffs.py')
1049 assert node.state, NodeState.ADDED
1052 assert node.state, NodeState.ADDED
1050 assert node.added
1053 assert node.added
1051 assert not node.changed
1054 assert not node.changed
1052 assert not node.not_changed
1055 assert not node.not_changed
1053 assert not node.removed
1056 assert not node.removed
1054
1057
1055 commit = self.repo.get_commit(commit_idx=88)
1058 commit = self.repo.get_commit(commit_idx=88)
1056 node = commit.get_node('.hgignore')
1059 node = commit.get_node('.hgignore')
1057 assert node.state, NodeState.CHANGED
1060 assert node.state, NodeState.CHANGED
1058 assert not node.added
1061 assert not node.added
1059 assert node.changed
1062 assert node.changed
1060 assert not node.not_changed
1063 assert not node.not_changed
1061 assert not node.removed
1064 assert not node.removed
1062
1065
1063 commit = self.repo.get_commit(commit_idx=85)
1066 commit = self.repo.get_commit(commit_idx=85)
1064 node = commit.get_node('setup.py')
1067 node = commit.get_node('setup.py')
1065 assert node.state, NodeState.NOT_CHANGED
1068 assert node.state, NodeState.NOT_CHANGED
1066 assert not node.added
1069 assert not node.added
1067 assert not node.changed
1070 assert not node.changed
1068 assert node.not_changed
1071 assert node.not_changed
1069 assert not node.removed
1072 assert not node.removed
1070
1073
1071 # If node has REMOVED state then trying to fetch it would raise
1074 # If node has REMOVED state then trying to fetch it would raise
1072 # CommitError exception
1075 # CommitError exception
1073 commit = self.repo.get_commit(commit_idx=2)
1076 commit = self.repo.get_commit(commit_idx=2)
1074 path = 'vcs/backends/BaseRepository.py'
1077 path = 'vcs/backends/BaseRepository.py'
1075 with pytest.raises(NodeDoesNotExistError):
1078 with pytest.raises(NodeDoesNotExistError):
1076 commit.get_node(path)
1079 commit.get_node(path)
1077 # but it would be one of ``removed`` (commit's attribute)
1080 # but it would be one of ``removed`` (commit's attribute)
1078 assert path in [rf.path for rf in commit.removed]
1081 assert path in [rf.path for rf in commit.removed]
1079
1082
1080 def test_commit_message_is_unicode(self):
1083 def test_commit_message_is_unicode(self):
1081 for cm in self.repo:
1084 for cm in self.repo:
1082 assert type(cm.message) == unicode
1085 assert type(cm.message) == unicode
1083
1086
1084 def test_commit_author_is_unicode(self):
1087 def test_commit_author_is_unicode(self):
1085 for cm in self.repo:
1088 for cm in self.repo:
1086 assert type(cm.author) == unicode
1089 assert type(cm.author) == unicode
1087
1090
1088 def test_repo_files_content_is_unicode(self):
1091 def test_repo_files_content_is_unicode(self):
1089 test_commit = self.repo.get_commit(commit_idx=100)
1092 test_commit = self.repo.get_commit(commit_idx=100)
1090 for node in test_commit.get_node('/'):
1093 for node in test_commit.get_node('/'):
1091 if node.is_file():
1094 if node.is_file():
1092 assert type(node.content) == unicode
1095 assert type(node.content) == unicode
1093
1096
1094 def test_wrong_path(self):
1097 def test_wrong_path(self):
1095 # There is 'setup.py' in the root dir but not there:
1098 # There is 'setup.py' in the root dir but not there:
1096 path = 'foo/bar/setup.py'
1099 path = 'foo/bar/setup.py'
1097 with pytest.raises(VCSError):
1100 with pytest.raises(VCSError):
1098 self.repo.get_commit().get_node(path)
1101 self.repo.get_commit().get_node(path)
1099
1102
1100 def test_author_email(self):
1103 def test_author_email(self):
1101 assert 'marcin@python-blog.com' == \
1104 assert 'marcin@python-blog.com' == \
1102 self.repo.get_commit('b986218ba1c9').author_email
1105 self.repo.get_commit('b986218ba1c9').author_email
1103 assert 'lukasz.balcerzak@python-center.pl' == \
1106 assert 'lukasz.balcerzak@python-center.pl' == \
1104 self.repo.get_commit('3803844fdbd3').author_email
1107 self.repo.get_commit('3803844fdbd3').author_email
1105 assert '' == self.repo.get_commit('84478366594b').author_email
1108 assert '' == self.repo.get_commit('84478366594b').author_email
1106
1109
1107 def test_author_username(self):
1110 def test_author_username(self):
1108 assert 'Marcin Kuzminski' == \
1111 assert 'Marcin Kuzminski' == \
1109 self.repo.get_commit('b986218ba1c9').author_name
1112 self.repo.get_commit('b986218ba1c9').author_name
1110 assert 'Lukasz Balcerzak' == \
1113 assert 'Lukasz Balcerzak' == \
1111 self.repo.get_commit('3803844fdbd3').author_name
1114 self.repo.get_commit('3803844fdbd3').author_name
1112 assert 'marcink' == \
1115 assert 'marcink' == \
1113 self.repo.get_commit('84478366594b').author_name
1116 self.repo.get_commit('84478366594b').author_name
1114
1117
1115
1118
1116 class TestLargeFileRepo(object):
1119 class TestLargeFileRepo(object):
1117
1120
1118 def test_large_file(self, backend_hg):
1121 def test_large_file(self, backend_hg):
1119 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1122 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1120
1123
1121 tip = repo.scm_instance().get_commit()
1124 tip = repo.scm_instance().get_commit()
1122 node = tip.get_node('.hglf/thisfileislarge')
1125 node = tip.get_node('.hglf/thisfileislarge')
1123
1126
1124 lf_node = node.get_largefile_node()
1127 lf_node = node.get_largefile_node()
1125
1128
1126 assert lf_node.is_largefile() is True
1129 assert lf_node.is_largefile() is True
1127 assert lf_node.size == 1024000
1130 assert lf_node.size == 1024000
1128 assert lf_node.name == '.hglf/thisfileislarge'
1131 assert lf_node.name == '.hglf/thisfileislarge'
1129
1132
1130
1133
1131 class TestGetBranchName(object):
1134 class TestGetBranchName(object):
1132 def test_returns_ref_name_when_type_is_branch(self):
1135 def test_returns_ref_name_when_type_is_branch(self):
1133 ref = self._create_ref('branch', 'fake-name')
1136 ref = self._create_ref('branch', 'fake-name')
1134 result = self.repo._get_branch_name(ref)
1137 result = self.repo._get_branch_name(ref)
1135 assert result == ref.name
1138 assert result == ref.name
1136
1139
1137 @pytest.mark.parametrize("type_", ("book", "tag"))
1140 @pytest.mark.parametrize("type_", ("book", "tag"))
1138 def test_queries_remote_when_type_is_not_branch(self, type_):
1141 def test_queries_remote_when_type_is_not_branch(self, type_):
1139 ref = self._create_ref(type_, 'wrong-fake-name')
1142 ref = self._create_ref(type_, 'wrong-fake-name')
1140 with mock.patch.object(self.repo, "_remote") as remote_mock:
1143 with mock.patch.object(self.repo, "_remote") as remote_mock:
1141 remote_mock.ctx_branch.return_value = "fake-name"
1144 remote_mock.ctx_branch.return_value = "fake-name"
1142 result = self.repo._get_branch_name(ref)
1145 result = self.repo._get_branch_name(ref)
1143 assert result == "fake-name"
1146 assert result == "fake-name"
1144 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1147 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1145
1148
1146 def _create_ref(self, type_, name):
1149 def _create_ref(self, type_, name):
1147 ref = mock.Mock()
1150 ref = mock.Mock()
1148 ref.type = type_
1151 ref.type = type_
1149 ref.name = 'wrong-fake-name'
1152 ref.name = 'wrong-fake-name'
1150 ref.commit_id = "deadbeef"
1153 ref.commit_id = "deadbeef"
1151 return ref
1154 return ref
1152
1155
1153
1156
1154 class TestIsTheSameBranch(object):
1157 class TestIsTheSameBranch(object):
1155 def test_returns_true_when_branches_are_equal(self):
1158 def test_returns_true_when_branches_are_equal(self):
1156 source_ref = mock.Mock(name="source-ref")
1159 source_ref = mock.Mock(name="source-ref")
1157 target_ref = mock.Mock(name="target-ref")
1160 target_ref = mock.Mock(name="target-ref")
1158 branch_name_patcher = mock.patch.object(
1161 branch_name_patcher = mock.patch.object(
1159 self.repo, "_get_branch_name", return_value="default")
1162 self.repo, "_get_branch_name", return_value="default")
1160 with branch_name_patcher as branch_name_mock:
1163 with branch_name_patcher as branch_name_mock:
1161 result = self.repo._is_the_same_branch(source_ref, target_ref)
1164 result = self.repo._is_the_same_branch(source_ref, target_ref)
1162
1165
1163 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1166 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1164 assert branch_name_mock.call_args_list == expected_calls
1167 assert branch_name_mock.call_args_list == expected_calls
1165 assert result is True
1168 assert result is True
1166
1169
1167 def test_returns_false_when_branches_are_not_equal(self):
1170 def test_returns_false_when_branches_are_not_equal(self):
1168 source_ref = mock.Mock(name="source-ref")
1171 source_ref = mock.Mock(name="source-ref")
1169 source_ref.name = "source-branch"
1172 source_ref.name = "source-branch"
1170 target_ref = mock.Mock(name="target-ref")
1173 target_ref = mock.Mock(name="target-ref")
1171 source_ref.name = "target-branch"
1174 source_ref.name = "target-branch"
1172
1175
1173 def side_effect(ref):
1176 def side_effect(ref):
1174 return ref.name
1177 return ref.name
1175
1178
1176 branch_name_patcher = mock.patch.object(
1179 branch_name_patcher = mock.patch.object(
1177 self.repo, "_get_branch_name", side_effect=side_effect)
1180 self.repo, "_get_branch_name", side_effect=side_effect)
1178 with branch_name_patcher as branch_name_mock:
1181 with branch_name_patcher as branch_name_mock:
1179 result = self.repo._is_the_same_branch(source_ref, target_ref)
1182 result = self.repo._is_the_same_branch(source_ref, target_ref)
1180
1183
1181 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1184 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1182 assert branch_name_mock.call_args_list == expected_calls
1185 assert branch_name_mock.call_args_list == expected_calls
1183 assert result is False
1186 assert result is False
@@ -1,183 +1,183 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2018 RhodeCode GmbH
3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from mock import call, patch
23 from mock import call, patch
24
24
25 from rhodecode.lib.vcs.backends.base import Reference
25 from rhodecode.lib.vcs.backends.base import Reference
26
26
27
27
28 class TestMercurialRemoteRepoInvalidation(object):
28 class TestMercurialRemoteRepoInvalidation(object):
29 """
29 """
30 If the VCSServer is running with multiple processes or/and instances.
30 If the VCSServer is running with multiple processes or/and instances.
31 Operations on repositories are potentially handled by different processes
31 Operations on repositories are potentially handled by different processes
32 in a random fashion. The mercurial repository objects used in the VCSServer
32 in a random fashion. The mercurial repository objects used in the VCSServer
33 are caching the commits of the repo. Therefore we have to invalidate the
33 are caching the commits of the repo. Therefore we have to invalidate the
34 VCSServer caching of these objects after a writing operation.
34 VCSServer caching of these objects after a writing operation.
35 """
35 """
36
36
37 # Default reference used as a dummy during tests.
37 # Default reference used as a dummy during tests.
38 default_ref = Reference('branch', 'default', None)
38 default_ref = Reference('branch', 'default', None)
39
39
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
41 writing_methods = [
41 writing_methods = [
42 'bookmark',
42 'bookmark',
43 'commit',
43 'commit',
44 'merge',
44 'merge',
45 'pull',
45 'pull',
46 'pull_cmd',
46 'pull_cmd',
47 'rebase',
47 'rebase',
48 'strip',
48 'strip',
49 'tag',
49 'tag',
50 ]
50 ]
51
51
52 @pytest.mark.parametrize('method_name, method_args', [
52 @pytest.mark.parametrize('method_name, method_args', [
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
54 ('_local_pull', ['', default_ref]),
54 ('_local_pull', ['', default_ref]),
55 ('bookmark', [None]),
55 ('bookmark', [None]),
56 ('pull', ['', default_ref]),
56 ('pull', ['', default_ref]),
57 ('remove_tag', ['mytag', None]),
57 ('remove_tag', ['mytag', None]),
58 ('strip', [None]),
58 ('strip', [None]),
59 ('tag', ['newtag', None]),
59 ('tag', ['newtag', None]),
60 ])
60 ])
61 def test_method_invokes_invalidate_on_remote_repo(
61 def test_method_invokes_invalidate_on_remote_repo(
62 self, method_name, method_args, backend_hg):
62 self, method_name, method_args, backend_hg):
63 """
63 """
64 Check that the listed methods are invalidating the VCSServer cache
64 Check that the listed methods are invalidating the VCSServer cache
65 after invoking a writing method of their remote repository object.
65 after invoking a writing method of their remote repository object.
66 """
66 """
67 tags = {'mytag': 'mytag-id'}
67 tags = {'mytag': 'mytag-id'}
68
68
69 def add_tag(name, raw_id, *args, **kwds):
69 def add_tag(name, raw_id, *args, **kwds):
70 tags[name] = raw_id
70 tags[name] = raw_id
71
71
72 repo = backend_hg.repo.scm_instance()
72 repo = backend_hg.repo.scm_instance()
73 with patch.object(repo, '_remote') as remote:
73 with patch.object(repo, '_remote') as remote:
74 remote.lookup.return_value = ('commit-id', 'commit-idx')
74 remote.lookup.return_value = ('commit-id', 'commit-idx')
75 remote.tags.return_value = tags
75 remote.tags.return_value = tags
76 remote._get_tags.return_value = tags
76 remote._get_tags.return_value = tags
77 remote.tag.side_effect = add_tag
77 remote.tag.side_effect = add_tag
78
78
79 # Invoke method.
79 # Invoke method.
80 method = getattr(repo, method_name)
80 method = getattr(repo, method_name)
81 method(*method_args)
81 method(*method_args)
82
82
83 # Assert that every "writing" method is followed by an invocation
83 # Assert that every "writing" method is followed by an invocation
84 # of the cache invalidation method.
84 # of the cache invalidation method.
85 for counter, method_call in enumerate(remote.method_calls):
85 for counter, method_call in enumerate(remote.method_calls):
86 call_name = method_call[0]
86 call_name = method_call[0]
87 if call_name in self.writing_methods:
87 if call_name in self.writing_methods:
88 next_call = remote.method_calls[counter + 1]
88 next_call = remote.method_calls[counter + 1]
89 assert next_call == call.invalidate_vcs_cache()
89 assert next_call == call.invalidate_vcs_cache()
90
90
91 def _prepare_shadow_repo(self, pull_request):
91 def _prepare_shadow_repo(self, pull_request):
92 """
92 """
93 Helper that creates a shadow repo that can be used to reproduce the
93 Helper that creates a shadow repo that can be used to reproduce the
94 CommitDoesNotExistError when pulling in from target and source
94 CommitDoesNotExistError when pulling in from target and source
95 references.
95 references.
96 """
96 """
97 from rhodecode.model.pull_request import PullRequestModel
97 from rhodecode.model.pull_request import PullRequestModel
98
98 repo_id = pull_request.target_repo
99 target_vcs = pull_request.target_repo.scm_instance()
99 target_vcs = pull_request.target_repo.scm_instance()
100 target_ref = pull_request.target_ref_parts
100 target_ref = pull_request.target_ref_parts
101 source_ref = pull_request.source_ref_parts
101 source_ref = pull_request.source_ref_parts
102
102
103 # Create shadow repository.
103 # Create shadow repository.
104 pr = PullRequestModel()
104 pr = PullRequestModel()
105 workspace_id = pr._workspace_id(pull_request)
105 workspace_id = pr._workspace_id(pull_request)
106 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
106 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
107 workspace_id, target_ref, source_ref)
107 repo_id, workspace_id, target_ref, source_ref)
108 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
108 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
109
109
110 # This will populate the cache of the mercurial repository object
110 # This will populate the cache of the mercurial repository object
111 # inside of the VCSServer.
111 # inside of the VCSServer.
112 shadow_repo.get_commit()
112 shadow_repo.get_commit()
113
113
114 return shadow_repo, source_ref, target_ref
114 return shadow_repo, source_ref, target_ref
115
115
116 @pytest.mark.backends('hg')
116 @pytest.mark.backends('hg')
117 def test_commit_does_not_exist_error_happens(self, pr_util, app):
117 def test_commit_does_not_exist_error_happens(self, pr_util, app):
118 """
118 """
119 This test is somewhat special. It does not really test the system
119 This test is somewhat special. It does not really test the system
120 instead it is more or less a precondition for the
120 instead it is more or less a precondition for the
121 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
121 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
122 cache invalidation and asserts that the error occurs.
122 cache invalidation and asserts that the error occurs.
123 """
123 """
124 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
124 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
125
125
126 pull_request = pr_util.create_pull_request()
126 pull_request = pr_util.create_pull_request()
127 target_vcs = pull_request.target_repo.scm_instance()
127 target_vcs = pull_request.target_repo.scm_instance()
128 source_vcs = pull_request.source_repo.scm_instance()
128 source_vcs = pull_request.source_repo.scm_instance()
129 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
129 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
130 pull_request)
130 pull_request)
131
131
132 # Pull from target and source references but without invalidation of
132 # Pull from target and source references but without invalidation of
133 # RemoteRepo objects and without VCSServer caching of mercurial
133 # RemoteRepo objects and without VCSServer caching of mercurial
134 # repository objects.
134 # repository objects.
135 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
135 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
136 # NOTE: Do not use patch.dict() to disable the cache because it
136 # NOTE: Do not use patch.dict() to disable the cache because it
137 # restores the WHOLE dict and not only the patched keys.
137 # restores the WHOLE dict and not only the patched keys.
138 shadow_repo._remote._wire['cache'] = False
138 shadow_repo._remote._wire['cache'] = False
139 shadow_repo._local_pull(target_vcs.path, target_ref)
139 shadow_repo._local_pull(target_vcs.path, target_ref)
140 shadow_repo._local_pull(source_vcs.path, source_ref)
140 shadow_repo._local_pull(source_vcs.path, source_ref)
141 shadow_repo._remote._wire.pop('cache')
141 shadow_repo._remote._wire.pop('cache')
142
142
143 # Try to lookup the target_ref in shadow repo. This should work because
143 # Try to lookup the target_ref in shadow repo. This should work because
144 # the shadow repo is a clone of the target and always contains all off
144 # the shadow repo is a clone of the target and always contains all off
145 # it's commits in the initial cache.
145 # it's commits in the initial cache.
146 shadow_repo.get_commit(target_ref.commit_id)
146 shadow_repo.get_commit(target_ref.commit_id)
147
147
148 # If we try to lookup the source_ref it should fail because the shadow
148 # If we try to lookup the source_ref it should fail because the shadow
149 # repo commit cache doesn't get invalidated. (Due to patched
149 # repo commit cache doesn't get invalidated. (Due to patched
150 # invalidation and caching above).
150 # invalidation and caching above).
151 with pytest.raises(CommitDoesNotExistError):
151 with pytest.raises(CommitDoesNotExistError):
152 shadow_repo.get_commit(source_ref.commit_id)
152 shadow_repo.get_commit(source_ref.commit_id)
153
153
154 @pytest.mark.backends('hg')
154 @pytest.mark.backends('hg')
155 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
155 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
156 """
156 """
157 This test simulates a pull request merge in which the pull operations
157 This test simulates a pull request merge in which the pull operations
158 are handled by a different VCSServer process than all other operations.
158 are handled by a different VCSServer process than all other operations.
159 Without correct cache invalidation this leads to an error when
159 Without correct cache invalidation this leads to an error when
160 retrieving the pulled commits afterwards.
160 retrieving the pulled commits afterwards.
161 """
161 """
162
162
163 pull_request = pr_util.create_pull_request()
163 pull_request = pr_util.create_pull_request()
164 target_vcs = pull_request.target_repo.scm_instance()
164 target_vcs = pull_request.target_repo.scm_instance()
165 source_vcs = pull_request.source_repo.scm_instance()
165 source_vcs = pull_request.source_repo.scm_instance()
166 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
166 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
167 pull_request)
167 pull_request)
168
168
169 # Pull from target and source references without without VCSServer
169 # Pull from target and source references without without VCSServer
170 # caching of mercurial repository objects but with active invalidation
170 # caching of mercurial repository objects but with active invalidation
171 # of RemoteRepo objects.
171 # of RemoteRepo objects.
172 # NOTE: Do not use patch.dict() to disable the cache because it
172 # NOTE: Do not use patch.dict() to disable the cache because it
173 # restores the WHOLE dict and not only the patched keys.
173 # restores the WHOLE dict and not only the patched keys.
174 shadow_repo._remote._wire['cache'] = False
174 shadow_repo._remote._wire['cache'] = False
175 shadow_repo._local_pull(target_vcs.path, target_ref)
175 shadow_repo._local_pull(target_vcs.path, target_ref)
176 shadow_repo._local_pull(source_vcs.path, source_ref)
176 shadow_repo._local_pull(source_vcs.path, source_ref)
177 shadow_repo._remote._wire.pop('cache')
177 shadow_repo._remote._wire.pop('cache')
178
178
179 # Try to lookup the target and source references in shadow repo. This
179 # Try to lookup the target and source references in shadow repo. This
180 # should work because the RemoteRepo object gets invalidated during the
180 # should work because the RemoteRepo object gets invalidated during the
181 # above pull operations.
181 # above pull operations.
182 shadow_repo.get_commit(target_ref.commit_id)
182 shadow_repo.get_commit(target_ref.commit_id)
183 shadow_repo.get_commit(source_ref.commit_id)
183 shadow_repo.get_commit(source_ref.commit_id)
@@ -1,537 +1,553 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 from urllib2 import URLError
22 from urllib2 import URLError
23
23
24 import mock
24 import mock
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Config, BaseInMemoryCommit, Reference, MergeResponse, MergeFailureReason)
29 Config, BaseInMemoryCommit, Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError
30 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.tests.vcs.conftest import BackendTestMixin
32 from rhodecode.tests.vcs.conftest import BackendTestMixin
33 from rhodecode.tests import repo_id_generator
33
34
34
35
35 @pytest.mark.usefixtures("vcs_repository_support")
36 @pytest.mark.usefixtures("vcs_repository_support")
36 class TestRepositoryBase(BackendTestMixin):
37 class TestRepositoryBase(BackendTestMixin):
37 recreate_repo_per_test = False
38 recreate_repo_per_test = False
38
39
39 def test_init_accepts_unicode_path(self, tmpdir):
40 def test_init_accepts_unicode_path(self, tmpdir):
40 path = unicode(tmpdir.join(u'unicode ä'))
41 path = unicode(tmpdir.join(u'unicode ä'))
41 self.Backend(path, create=True)
42 self.Backend(path, create=True)
42
43
43 def test_init_accepts_str_path(self, tmpdir):
44 def test_init_accepts_str_path(self, tmpdir):
44 path = str(tmpdir.join('str ä'))
45 path = str(tmpdir.join('str ä'))
45 self.Backend(path, create=True)
46 self.Backend(path, create=True)
46
47
47 def test_init_fails_if_path_does_not_exist(self, tmpdir):
48 def test_init_fails_if_path_does_not_exist(self, tmpdir):
48 path = unicode(tmpdir.join('i-do-not-exist'))
49 path = unicode(tmpdir.join('i-do-not-exist'))
49 with pytest.raises(VCSError):
50 with pytest.raises(VCSError):
50 self.Backend(path)
51 self.Backend(path)
51
52
52 def test_init_fails_if_path_is_not_a_valid_repository(self, tmpdir):
53 def test_init_fails_if_path_is_not_a_valid_repository(self, tmpdir):
53 path = unicode(tmpdir.mkdir(u'unicode ä'))
54 path = unicode(tmpdir.mkdir(u'unicode ä'))
54 with pytest.raises(VCSError):
55 with pytest.raises(VCSError):
55 self.Backend(path)
56 self.Backend(path)
56
57
57 def test_has_commits_attribute(self):
58 def test_has_commits_attribute(self):
58 self.repo.commit_ids
59 self.repo.commit_ids
59
60
60 def test_name(self):
61 def test_name(self):
61 assert self.repo.name.startswith('vcs-test')
62 assert self.repo.name.startswith('vcs-test')
62
63
63 @pytest.mark.backends("hg", "git")
64 @pytest.mark.backends("hg", "git")
64 def test_has_default_branch_name(self):
65 def test_has_default_branch_name(self):
65 assert self.repo.DEFAULT_BRANCH_NAME is not None
66 assert self.repo.DEFAULT_BRANCH_NAME is not None
66
67
67 @pytest.mark.backends("svn")
68 @pytest.mark.backends("svn")
68 def test_has_no_default_branch_name(self):
69 def test_has_no_default_branch_name(self):
69 assert self.repo.DEFAULT_BRANCH_NAME is None
70 assert self.repo.DEFAULT_BRANCH_NAME is None
70
71
71 def test_has_empty_commit(self):
72 def test_has_empty_commit(self):
72 assert self.repo.EMPTY_COMMIT_ID is not None
73 assert self.repo.EMPTY_COMMIT_ID is not None
73 assert self.repo.EMPTY_COMMIT is not None
74 assert self.repo.EMPTY_COMMIT is not None
74
75
75 def test_empty_changeset_is_deprecated(self):
76 def test_empty_changeset_is_deprecated(self):
76 def get_empty_changeset(repo):
77 def get_empty_changeset(repo):
77 return repo.EMPTY_CHANGESET
78 return repo.EMPTY_CHANGESET
78 pytest.deprecated_call(get_empty_changeset, self.repo)
79 pytest.deprecated_call(get_empty_changeset, self.repo)
79
80
80 def test_bookmarks(self):
81 def test_bookmarks(self):
81 assert len(self.repo.bookmarks) == 0
82 assert len(self.repo.bookmarks) == 0
82
83
83 # TODO: Cover two cases: Local repo path, remote URL
84 # TODO: Cover two cases: Local repo path, remote URL
84 def test_check_url(self):
85 def test_check_url(self):
85 config = Config()
86 config = Config()
86 assert self.Backend.check_url(self.repo.path, config)
87 assert self.Backend.check_url(self.repo.path, config)
87
88
88 def test_check_url_invalid(self):
89 def test_check_url_invalid(self):
89 config = Config()
90 config = Config()
90 with pytest.raises(URLError):
91 with pytest.raises(URLError):
91 self.Backend.check_url(self.repo.path + "invalid", config)
92 self.Backend.check_url(self.repo.path + "invalid", config)
92
93
93 def test_get_contact(self):
94 def test_get_contact(self):
94 assert self.repo.contact
95 assert self.repo.contact
95
96
96 def test_get_description(self):
97 def test_get_description(self):
97 assert self.repo.description
98 assert self.repo.description
98
99
99 def test_get_hook_location(self):
100 def test_get_hook_location(self):
100 assert len(self.repo.get_hook_location()) != 0
101 assert len(self.repo.get_hook_location()) != 0
101
102
102 def test_last_change(self, local_dt_to_utc):
103 def test_last_change(self, local_dt_to_utc):
103 assert self.repo.last_change >= local_dt_to_utc(
104 assert self.repo.last_change >= local_dt_to_utc(
104 datetime.datetime(2010, 1, 1, 21, 0))
105 datetime.datetime(2010, 1, 1, 21, 0))
105
106
106 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
107 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
107 delta = datetime.timedelta(seconds=1)
108 delta = datetime.timedelta(seconds=1)
108
109
109 start = local_dt_to_utc(datetime.datetime.now())
110 start = local_dt_to_utc(datetime.datetime.now())
110 empty_repo = vcsbackend.create_repo()
111 empty_repo = vcsbackend.create_repo()
111 now = local_dt_to_utc(datetime.datetime.now())
112 now = local_dt_to_utc(datetime.datetime.now())
112 assert empty_repo.last_change >= start - delta
113 assert empty_repo.last_change >= start - delta
113 assert empty_repo.last_change <= now + delta
114 assert empty_repo.last_change <= now + delta
114
115
115 def test_repo_equality(self):
116 def test_repo_equality(self):
116 assert self.repo == self.repo
117 assert self.repo == self.repo
117
118
118 def test_repo_equality_broken_object(self):
119 def test_repo_equality_broken_object(self):
119 import copy
120 import copy
120 _repo = copy.copy(self.repo)
121 _repo = copy.copy(self.repo)
121 delattr(_repo, 'path')
122 delattr(_repo, 'path')
122 assert self.repo != _repo
123 assert self.repo != _repo
123
124
124 def test_repo_equality_other_object(self):
125 def test_repo_equality_other_object(self):
125 class dummy(object):
126 class dummy(object):
126 path = self.repo.path
127 path = self.repo.path
127 assert self.repo != dummy()
128 assert self.repo != dummy()
128
129
129 def test_get_commit_is_implemented(self):
130 def test_get_commit_is_implemented(self):
130 self.repo.get_commit()
131 self.repo.get_commit()
131
132
132 def test_get_commits_is_implemented(self):
133 def test_get_commits_is_implemented(self):
133 commit_iter = iter(self.repo.get_commits())
134 commit_iter = iter(self.repo.get_commits())
134 commit = next(commit_iter)
135 commit = next(commit_iter)
135 assert commit.idx == 0
136 assert commit.idx == 0
136
137
137 def test_supports_iteration(self):
138 def test_supports_iteration(self):
138 repo_iter = iter(self.repo)
139 repo_iter = iter(self.repo)
139 commit = next(repo_iter)
140 commit = next(repo_iter)
140 assert commit.idx == 0
141 assert commit.idx == 0
141
142
142 def test_in_memory_commit(self):
143 def test_in_memory_commit(self):
143 imc = self.repo.in_memory_commit
144 imc = self.repo.in_memory_commit
144 assert isinstance(imc, BaseInMemoryCommit)
145 assert isinstance(imc, BaseInMemoryCommit)
145
146
146 @pytest.mark.backends("hg")
147 @pytest.mark.backends("hg")
147 def test__get_url_unicode(self):
148 def test__get_url_unicode(self):
148 url = u'/home/repos/malmö'
149 url = u'/home/repos/malmö'
149 assert self.repo._get_url(url)
150 assert self.repo._get_url(url)
150
151
151
152
152 @pytest.mark.usefixtures("vcs_repository_support")
153 @pytest.mark.usefixtures("vcs_repository_support")
153 class TestDeprecatedRepositoryAPI(BackendTestMixin):
154 class TestDeprecatedRepositoryAPI(BackendTestMixin):
154 recreate_repo_per_test = False
155 recreate_repo_per_test = False
155
156
156 def test_revisions_is_deprecated(self):
157 def test_revisions_is_deprecated(self):
157 def get_revisions(repo):
158 def get_revisions(repo):
158 return repo.revisions
159 return repo.revisions
159 pytest.deprecated_call(get_revisions, self.repo)
160 pytest.deprecated_call(get_revisions, self.repo)
160
161
161 def test_get_changeset_is_deprecated(self):
162 def test_get_changeset_is_deprecated(self):
162 pytest.deprecated_call(self.repo.get_changeset)
163 pytest.deprecated_call(self.repo.get_changeset)
163
164
164 def test_get_changesets_is_deprecated(self):
165 def test_get_changesets_is_deprecated(self):
165 pytest.deprecated_call(self.repo.get_changesets)
166 pytest.deprecated_call(self.repo.get_changesets)
166
167
167 def test_in_memory_changeset_is_deprecated(self):
168 def test_in_memory_changeset_is_deprecated(self):
168 def get_imc(repo):
169 def get_imc(repo):
169 return repo.in_memory_changeset
170 return repo.in_memory_changeset
170 pytest.deprecated_call(get_imc, self.repo)
171 pytest.deprecated_call(get_imc, self.repo)
171
172
172
173
173 # TODO: these tests are incomplete, must check the resulting compare result for
174 # TODO: these tests are incomplete, must check the resulting compare result for
174 # correcteness
175 # correcteness
175 class TestRepositoryCompare:
176 class TestRepositoryCompare:
176
177
177 @pytest.mark.parametrize('merge', [True, False])
178 @pytest.mark.parametrize('merge', [True, False])
178 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
179 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
179 target_repo = vcsbackend.create_repo(number_of_commits=5)
180 target_repo = vcsbackend.create_repo(number_of_commits=5)
180 target_repo.compare(
181 target_repo.compare(
181 target_repo[1].raw_id, target_repo[3].raw_id, target_repo,
182 target_repo[1].raw_id, target_repo[3].raw_id, target_repo,
182 merge=merge)
183 merge=merge)
183
184
184 @pytest.mark.xfail_backends('svn')
185 @pytest.mark.xfail_backends('svn')
185 @pytest.mark.parametrize('merge', [True, False])
186 @pytest.mark.parametrize('merge', [True, False])
186 def test_compare_cloned_repositories(self, vcsbackend, merge):
187 def test_compare_cloned_repositories(self, vcsbackend, merge):
187 target_repo = vcsbackend.create_repo(number_of_commits=5)
188 target_repo = vcsbackend.create_repo(number_of_commits=5)
188 source_repo = vcsbackend.clone_repo(target_repo)
189 source_repo = vcsbackend.clone_repo(target_repo)
189 assert target_repo != source_repo
190 assert target_repo != source_repo
190
191
191 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
192 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
192 source_commit = source_repo.get_commit()
193 source_commit = source_repo.get_commit()
193
194
194 target_repo.compare(
195 target_repo.compare(
195 target_repo[1].raw_id, source_repo[3].raw_id, source_repo,
196 target_repo[1].raw_id, source_repo[3].raw_id, source_repo,
196 merge=merge)
197 merge=merge)
197
198
198 @pytest.mark.xfail_backends('svn')
199 @pytest.mark.xfail_backends('svn')
199 @pytest.mark.parametrize('merge', [True, False])
200 @pytest.mark.parametrize('merge', [True, False])
200 def test_compare_unrelated_repositories(self, vcsbackend, merge):
201 def test_compare_unrelated_repositories(self, vcsbackend, merge):
201 orig = vcsbackend.create_repo(number_of_commits=5)
202 orig = vcsbackend.create_repo(number_of_commits=5)
202 unrelated = vcsbackend.create_repo(number_of_commits=5)
203 unrelated = vcsbackend.create_repo(number_of_commits=5)
203 assert orig != unrelated
204 assert orig != unrelated
204
205
205 orig.compare(
206 orig.compare(
206 orig[1].raw_id, unrelated[3].raw_id, unrelated, merge=merge)
207 orig[1].raw_id, unrelated[3].raw_id, unrelated, merge=merge)
207
208
208
209
209 class TestRepositoryGetCommonAncestor:
210 class TestRepositoryGetCommonAncestor:
210
211
211 def test_get_common_ancestor_from_same_repo_existing(self, vcsbackend):
212 def test_get_common_ancestor_from_same_repo_existing(self, vcsbackend):
212 target_repo = vcsbackend.create_repo(number_of_commits=5)
213 target_repo = vcsbackend.create_repo(number_of_commits=5)
213
214
214 expected_ancestor = target_repo[2].raw_id
215 expected_ancestor = target_repo[2].raw_id
215
216
216 assert target_repo.get_common_ancestor(
217 assert target_repo.get_common_ancestor(
217 commit_id1=target_repo[2].raw_id,
218 commit_id1=target_repo[2].raw_id,
218 commit_id2=target_repo[4].raw_id,
219 commit_id2=target_repo[4].raw_id,
219 repo2=target_repo
220 repo2=target_repo
220 ) == expected_ancestor
221 ) == expected_ancestor
221
222
222 assert target_repo.get_common_ancestor(
223 assert target_repo.get_common_ancestor(
223 commit_id1=target_repo[4].raw_id,
224 commit_id1=target_repo[4].raw_id,
224 commit_id2=target_repo[2].raw_id,
225 commit_id2=target_repo[2].raw_id,
225 repo2=target_repo
226 repo2=target_repo
226 ) == expected_ancestor
227 ) == expected_ancestor
227
228
228 @pytest.mark.xfail_backends("svn")
229 @pytest.mark.xfail_backends("svn")
229 def test_get_common_ancestor_from_cloned_repo_existing(self, vcsbackend):
230 def test_get_common_ancestor_from_cloned_repo_existing(self, vcsbackend):
230 target_repo = vcsbackend.create_repo(number_of_commits=5)
231 target_repo = vcsbackend.create_repo(number_of_commits=5)
231 source_repo = vcsbackend.clone_repo(target_repo)
232 source_repo = vcsbackend.clone_repo(target_repo)
232 assert target_repo != source_repo
233 assert target_repo != source_repo
233
234
234 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
235 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
235 source_commit = source_repo.get_commit()
236 source_commit = source_repo.get_commit()
236
237
237 expected_ancestor = target_repo[4].raw_id
238 expected_ancestor = target_repo[4].raw_id
238
239
239 assert target_repo.get_common_ancestor(
240 assert target_repo.get_common_ancestor(
240 commit_id1=target_repo[4].raw_id,
241 commit_id1=target_repo[4].raw_id,
241 commit_id2=source_commit.raw_id,
242 commit_id2=source_commit.raw_id,
242 repo2=source_repo
243 repo2=source_repo
243 ) == expected_ancestor
244 ) == expected_ancestor
244
245
245 assert target_repo.get_common_ancestor(
246 assert target_repo.get_common_ancestor(
246 commit_id1=source_commit.raw_id,
247 commit_id1=source_commit.raw_id,
247 commit_id2=target_repo[4].raw_id,
248 commit_id2=target_repo[4].raw_id,
248 repo2=target_repo
249 repo2=target_repo
249 ) == expected_ancestor
250 ) == expected_ancestor
250
251
251 @pytest.mark.xfail_backends("svn")
252 @pytest.mark.xfail_backends("svn")
252 def test_get_common_ancestor_from_unrelated_repo_missing(self, vcsbackend):
253 def test_get_common_ancestor_from_unrelated_repo_missing(self, vcsbackend):
253 original = vcsbackend.create_repo(number_of_commits=5)
254 original = vcsbackend.create_repo(number_of_commits=5)
254 unrelated = vcsbackend.create_repo(number_of_commits=5)
255 unrelated = vcsbackend.create_repo(number_of_commits=5)
255 assert original != unrelated
256 assert original != unrelated
256
257
257 assert original.get_common_ancestor(
258 assert original.get_common_ancestor(
258 commit_id1=original[0].raw_id,
259 commit_id1=original[0].raw_id,
259 commit_id2=unrelated[0].raw_id,
260 commit_id2=unrelated[0].raw_id,
260 repo2=unrelated
261 repo2=unrelated
261 ) == None
262 ) == None
262
263
263 assert original.get_common_ancestor(
264 assert original.get_common_ancestor(
264 commit_id1=original[-1].raw_id,
265 commit_id1=original[-1].raw_id,
265 commit_id2=unrelated[-1].raw_id,
266 commit_id2=unrelated[-1].raw_id,
266 repo2=unrelated
267 repo2=unrelated
267 ) == None
268 ) == None
268
269
269
270
270 @pytest.mark.backends("git", "hg")
271 @pytest.mark.backends("git", "hg")
271 class TestRepositoryMerge:
272 class TestRepositoryMerge(object):
272 def prepare_for_success(self, vcsbackend):
273 def prepare_for_success(self, vcsbackend):
273 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
274 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
274 self.source_repo = vcsbackend.clone_repo(self.target_repo)
275 self.source_repo = vcsbackend.clone_repo(self.target_repo)
275 vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
276 vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
276 vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
277 vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
277 imc = self.source_repo.in_memory_commit
278 imc = self.source_repo.in_memory_commit
278 imc.add(FileNode('file_x', content=self.source_repo.name))
279 imc.add(FileNode('file_x', content=self.source_repo.name))
279 imc.commit(
280 imc.commit(
280 message=u'Automatic commit from repo merge test',
281 message=u'Automatic commit from repo merge test',
281 author=u'Automatic')
282 author=u'Automatic')
282 self.target_commit = self.target_repo.get_commit()
283 self.target_commit = self.target_repo.get_commit()
283 self.source_commit = self.source_repo.get_commit()
284 self.source_commit = self.source_repo.get_commit()
284 # This only works for Git and Mercurial
285 # This only works for Git and Mercurial
285 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
286 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
286 self.target_ref = Reference(
287 self.target_ref = Reference(
287 'branch', default_branch, self.target_commit.raw_id)
288 'branch', default_branch, self.target_commit.raw_id)
288 self.source_ref = Reference(
289 self.source_ref = Reference(
289 'branch', default_branch, self.source_commit.raw_id)
290 'branch', default_branch, self.source_commit.raw_id)
290 self.workspace = 'test-merge'
291 self.workspace_id = 'test-merge'
292 self.repo_id = repo_id_generator(self.target_repo.path)
291
293
292 def prepare_for_conflict(self, vcsbackend):
294 def prepare_for_conflict(self, vcsbackend):
293 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
295 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
294 self.source_repo = vcsbackend.clone_repo(self.target_repo)
296 self.source_repo = vcsbackend.clone_repo(self.target_repo)
295 vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1')
297 vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1')
296 vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2')
298 vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2')
297 self.target_commit = self.target_repo.get_commit()
299 self.target_commit = self.target_repo.get_commit()
298 self.source_commit = self.source_repo.get_commit()
300 self.source_commit = self.source_repo.get_commit()
299 # This only works for Git and Mercurial
301 # This only works for Git and Mercurial
300 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
302 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
301 self.target_ref = Reference(
303 self.target_ref = Reference(
302 'branch', default_branch, self.target_commit.raw_id)
304 'branch', default_branch, self.target_commit.raw_id)
303 self.source_ref = Reference(
305 self.source_ref = Reference(
304 'branch', default_branch, self.source_commit.raw_id)
306 'branch', default_branch, self.source_commit.raw_id)
305 self.workspace = 'test-merge'
307 self.workspace_id = 'test-merge'
308 self.repo_id = repo_id_generator(self.target_repo.path)
306
309
307 def test_merge_success(self, vcsbackend):
310 def test_merge_success(self, vcsbackend):
308 self.prepare_for_success(vcsbackend)
311 self.prepare_for_success(vcsbackend)
309
312
310 merge_response = self.target_repo.merge(
313 merge_response = self.target_repo.merge(
311 self.target_ref, self.source_repo, self.source_ref, self.workspace,
314 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
315 self.source_ref,
312 'test user', 'test@rhodecode.com', 'merge message 1',
316 'test user', 'test@rhodecode.com', 'merge message 1',
313 dry_run=False)
317 dry_run=False)
314 expected_merge_response = MergeResponse(
318 expected_merge_response = MergeResponse(
315 True, True, merge_response.merge_ref,
319 True, True, merge_response.merge_ref,
316 MergeFailureReason.NONE)
320 MergeFailureReason.NONE)
317 assert merge_response == expected_merge_response
321 assert merge_response == expected_merge_response
318
322
319 target_repo = backends.get_backend(vcsbackend.alias)(
323 target_repo = backends.get_backend(vcsbackend.alias)(
320 self.target_repo.path)
324 self.target_repo.path)
321 target_commits = list(target_repo.get_commits())
325 target_commits = list(target_repo.get_commits())
322 commit_ids = [c.raw_id for c in target_commits[:-1]]
326 commit_ids = [c.raw_id for c in target_commits[:-1]]
323 assert self.source_ref.commit_id in commit_ids
327 assert self.source_ref.commit_id in commit_ids
324 assert self.target_ref.commit_id in commit_ids
328 assert self.target_ref.commit_id in commit_ids
325
329
326 merge_commit = target_commits[-1]
330 merge_commit = target_commits[-1]
327 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
331 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
328 assert merge_commit.message.strip() == 'merge message 1'
332 assert merge_commit.message.strip() == 'merge message 1'
329 assert merge_commit.author == 'test user <test@rhodecode.com>'
333 assert merge_commit.author == 'test user <test@rhodecode.com>'
330
334
331 # We call it twice so to make sure we can handle updates
335 # We call it twice so to make sure we can handle updates
332 target_ref = Reference(
336 target_ref = Reference(
333 self.target_ref.type, self.target_ref.name,
337 self.target_ref.type, self.target_ref.name,
334 merge_response.merge_ref.commit_id)
338 merge_response.merge_ref.commit_id)
335
339
336 merge_response = target_repo.merge(
340 merge_response = target_repo.merge(
337 target_ref, self.source_repo, self.source_ref, self.workspace,
341 self.repo_id, self.workspace_id, target_ref, self.source_repo, self.source_ref,
338 'test user', 'test@rhodecode.com', 'merge message 2',
342 'test user', 'test@rhodecode.com', 'merge message 2',
339 dry_run=False)
343 dry_run=False)
340 expected_merge_response = MergeResponse(
344 expected_merge_response = MergeResponse(
341 True, True, merge_response.merge_ref,
345 True, True, merge_response.merge_ref,
342 MergeFailureReason.NONE)
346 MergeFailureReason.NONE)
343 assert merge_response == expected_merge_response
347 assert merge_response == expected_merge_response
344
348
345 target_repo = backends.get_backend(
349 target_repo = backends.get_backend(
346 vcsbackend.alias)(self.target_repo.path)
350 vcsbackend.alias)(self.target_repo.path)
347 merge_commit = target_repo.get_commit(
351 merge_commit = target_repo.get_commit(
348 merge_response.merge_ref.commit_id)
352 merge_response.merge_ref.commit_id)
349 assert merge_commit.message.strip() == 'merge message 1'
353 assert merge_commit.message.strip() == 'merge message 1'
350 assert merge_commit.author == 'test user <test@rhodecode.com>'
354 assert merge_commit.author == 'test user <test@rhodecode.com>'
351
355
352 def test_merge_success_dry_run(self, vcsbackend):
356 def test_merge_success_dry_run(self, vcsbackend):
353 self.prepare_for_success(vcsbackend)
357 self.prepare_for_success(vcsbackend)
354
358
355 merge_response = self.target_repo.merge(
359 merge_response = self.target_repo.merge(
356 self.target_ref, self.source_repo, self.source_ref, self.workspace,
360 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
357 dry_run=True)
361 self.source_ref, dry_run=True)
358
362
359 # We call it twice so to make sure we can handle updates
363 # We call it twice so to make sure we can handle updates
360 merge_response_update = self.target_repo.merge(
364 merge_response_update = self.target_repo.merge(
361 self.target_ref, self.source_repo, self.source_ref, self.workspace,
365 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
362 dry_run=True)
366 self.source_ref, dry_run=True)
363
367
364 # Multiple merges may differ in their commit id. Therefore we set the
368 # Multiple merges may differ in their commit id. Therefore we set the
365 # commit id to `None` before comparing the merge responses.
369 # commit id to `None` before comparing the merge responses.
366 merge_response = merge_response._replace(
370 merge_response = merge_response._replace(
367 merge_ref=merge_response.merge_ref._replace(commit_id=None))
371 merge_ref=merge_response.merge_ref._replace(commit_id=None))
368 merge_response_update = merge_response_update._replace(
372 merge_response_update = merge_response_update._replace(
369 merge_ref=merge_response_update.merge_ref._replace(commit_id=None))
373 merge_ref=merge_response_update.merge_ref._replace(commit_id=None))
370
374
371 assert merge_response == merge_response_update
375 assert merge_response == merge_response_update
372 assert merge_response.possible is True
376 assert merge_response.possible is True
373 assert merge_response.executed is False
377 assert merge_response.executed is False
374 assert merge_response.merge_ref
378 assert merge_response.merge_ref
375 assert merge_response.failure_reason is MergeFailureReason.NONE
379 assert merge_response.failure_reason is MergeFailureReason.NONE
376
380
377 @pytest.mark.parametrize('dry_run', [True, False])
381 @pytest.mark.parametrize('dry_run', [True, False])
378 def test_merge_conflict(self, vcsbackend, dry_run):
382 def test_merge_conflict(self, vcsbackend, dry_run):
379 self.prepare_for_conflict(vcsbackend)
383 self.prepare_for_conflict(vcsbackend)
380 expected_merge_response = MergeResponse(
384 expected_merge_response = MergeResponse(
381 False, False, None, MergeFailureReason.MERGE_FAILED)
385 False, False, None, MergeFailureReason.MERGE_FAILED)
382
386
383 merge_response = self.target_repo.merge(
387 merge_response = self.target_repo.merge(
384 self.target_ref, self.source_repo, self.source_ref, self.workspace,
388 self.repo_id, self.workspace_id, self.target_ref,
389 self.source_repo, self.source_ref,
385 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
390 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
386 assert merge_response == expected_merge_response
391 assert merge_response == expected_merge_response
387
392
388 # We call it twice so to make sure we can handle updates
393 # We call it twice so to make sure we can handle updates
389 merge_response = self.target_repo.merge(
394 merge_response = self.target_repo.merge(
390 self.target_ref, self.source_repo, self.source_ref, self.workspace,
395 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
396 self.source_ref,
391 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
397 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
392 assert merge_response == expected_merge_response
398 assert merge_response == expected_merge_response
393
399
394 def test_merge_target_is_not_head(self, vcsbackend):
400 def test_merge_target_is_not_head(self, vcsbackend):
395 self.prepare_for_success(vcsbackend)
401 self.prepare_for_success(vcsbackend)
396 expected_merge_response = MergeResponse(
402 expected_merge_response = MergeResponse(
397 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
403 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
398
404
399 target_ref = Reference(
405 target_ref = Reference(
400 self.target_ref.type, self.target_ref.name, '0' * 40)
406 self.target_ref.type, self.target_ref.name, '0' * 40)
401
407
402 merge_response = self.target_repo.merge(
408 merge_response = self.target_repo.merge(
403 target_ref, self.source_repo, self.source_ref, self.workspace,
409 self.repo_id, self.workspace_id, target_ref, self.source_repo,
404 dry_run=True)
410 self.source_ref, dry_run=True)
405
411
406 assert merge_response == expected_merge_response
412 assert merge_response == expected_merge_response
407
413
408 def test_merge_missing_source_reference(self, vcsbackend):
414 def test_merge_missing_source_reference(self, vcsbackend):
409 self.prepare_for_success(vcsbackend)
415 self.prepare_for_success(vcsbackend)
410 expected_merge_response = MergeResponse(
416 expected_merge_response = MergeResponse(
411 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
417 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
412
418
413 source_ref = Reference(
419 source_ref = Reference(
414 self.source_ref.type, 'not_existing', self.source_ref.commit_id)
420 self.source_ref.type, 'not_existing', self.source_ref.commit_id)
415
421
416 merge_response = self.target_repo.merge(
422 merge_response = self.target_repo.merge(
417 self.target_ref, self.source_repo, source_ref, self.workspace,
423 self.repo_id, self.workspace_id, self.target_ref,
424 self.source_repo, source_ref,
418 dry_run=True)
425 dry_run=True)
419
426
420 assert merge_response == expected_merge_response
427 assert merge_response == expected_merge_response
421
428
422 def test_merge_raises_exception(self, vcsbackend):
429 def test_merge_raises_exception(self, vcsbackend):
423 self.prepare_for_success(vcsbackend)
430 self.prepare_for_success(vcsbackend)
424 expected_merge_response = MergeResponse(
431 expected_merge_response = MergeResponse(
425 False, False, None, MergeFailureReason.UNKNOWN)
432 False, False, None, MergeFailureReason.UNKNOWN)
426
433
427 with mock.patch.object(self.target_repo, '_merge_repo',
434 with mock.patch.object(self.target_repo, '_merge_repo',
428 side_effect=RepositoryError()):
435 side_effect=RepositoryError()):
429 merge_response = self.target_repo.merge(
436 merge_response = self.target_repo.merge(
430 self.target_ref, self.source_repo, self.source_ref,
437 self.repo_id, self.workspace_id, self.target_ref,
431 self.workspace, dry_run=True)
438 self.source_repo, self.source_ref,
439 dry_run=True)
432
440
433 assert merge_response == expected_merge_response
441 assert merge_response == expected_merge_response
434
442
435 def test_merge_invalid_user_name(self, vcsbackend):
443 def test_merge_invalid_user_name(self, vcsbackend):
436 repo = vcsbackend.create_repo(number_of_commits=1)
444 repo = vcsbackend.create_repo(number_of_commits=1)
437 ref = Reference('branch', 'master', 'not_used')
445 ref = Reference('branch', 'master', 'not_used')
446 workspace_id = 'test-errors-in-merge'
447 repo_id = repo_id_generator(workspace_id)
438 with pytest.raises(ValueError):
448 with pytest.raises(ValueError):
439 repo.merge(ref, self, ref, 'workspace_id')
449 repo.merge(repo_id, workspace_id, ref, self, ref)
440
450
441 def test_merge_invalid_user_email(self, vcsbackend):
451 def test_merge_invalid_user_email(self, vcsbackend):
442 repo = vcsbackend.create_repo(number_of_commits=1)
452 repo = vcsbackend.create_repo(number_of_commits=1)
443 ref = Reference('branch', 'master', 'not_used')
453 ref = Reference('branch', 'master', 'not_used')
454 workspace_id = 'test-errors-in-merge'
455 repo_id = repo_id_generator(workspace_id)
444 with pytest.raises(ValueError):
456 with pytest.raises(ValueError):
445 repo.merge(ref, self, ref, 'workspace_id', 'user name')
457 repo.merge(
458 repo_id, workspace_id, ref, self, ref, 'user name')
446
459
447 def test_merge_invalid_message(self, vcsbackend):
460 def test_merge_invalid_message(self, vcsbackend):
448 repo = vcsbackend.create_repo(number_of_commits=1)
461 repo = vcsbackend.create_repo(number_of_commits=1)
449 ref = Reference('branch', 'master', 'not_used')
462 ref = Reference('branch', 'master', 'not_used')
463 workspace_id = 'test-errors-in-merge'
464 repo_id = repo_id_generator(workspace_id)
450 with pytest.raises(ValueError):
465 with pytest.raises(ValueError):
451 repo.merge(
466 repo.merge(
452 ref, self, ref, 'workspace_id', 'user name', 'user@email.com')
467 repo_id, workspace_id, ref, self, ref,
468 'user name', 'user@email.com')
453
469
454
470
455 @pytest.mark.usefixtures("vcs_repository_support")
471 @pytest.mark.usefixtures("vcs_repository_support")
456 class TestRepositoryStrip(BackendTestMixin):
472 class TestRepositoryStrip(BackendTestMixin):
457 recreate_repo_per_test = True
473 recreate_repo_per_test = True
458
474
459 @classmethod
475 @classmethod
460 def _get_commits(cls):
476 def _get_commits(cls):
461 commits = [
477 commits = [
462 {
478 {
463 'message': 'Initial commit',
479 'message': 'Initial commit',
464 'author': 'Joe Doe <joe.doe@example.com>',
480 'author': 'Joe Doe <joe.doe@example.com>',
465 'date': datetime.datetime(2010, 1, 1, 20),
481 'date': datetime.datetime(2010, 1, 1, 20),
466 'branch': 'master',
482 'branch': 'master',
467 'added': [
483 'added': [
468 FileNode('foobar', content='foobar'),
484 FileNode('foobar', content='foobar'),
469 FileNode('foobar2', content='foobar2'),
485 FileNode('foobar2', content='foobar2'),
470 ],
486 ],
471 },
487 },
472 ]
488 ]
473 for x in xrange(10):
489 for x in xrange(10):
474 commit_data = {
490 commit_data = {
475 'message': 'Changed foobar - commit%s' % x,
491 'message': 'Changed foobar - commit%s' % x,
476 'author': 'Jane Doe <jane.doe@example.com>',
492 'author': 'Jane Doe <jane.doe@example.com>',
477 'date': datetime.datetime(2010, 1, 1, 21, x),
493 'date': datetime.datetime(2010, 1, 1, 21, x),
478 'branch': 'master',
494 'branch': 'master',
479 'changed': [
495 'changed': [
480 FileNode('foobar', 'FOOBAR - %s' % x),
496 FileNode('foobar', 'FOOBAR - %s' % x),
481 ],
497 ],
482 }
498 }
483 commits.append(commit_data)
499 commits.append(commit_data)
484 return commits
500 return commits
485
501
486 @pytest.mark.backends("git", "hg")
502 @pytest.mark.backends("git", "hg")
487 def test_strip_commit(self):
503 def test_strip_commit(self):
488 tip = self.repo.get_commit()
504 tip = self.repo.get_commit()
489 assert tip.idx == 10
505 assert tip.idx == 10
490 self.repo.strip(tip.raw_id, self.repo.DEFAULT_BRANCH_NAME)
506 self.repo.strip(tip.raw_id, self.repo.DEFAULT_BRANCH_NAME)
491
507
492 tip = self.repo.get_commit()
508 tip = self.repo.get_commit()
493 assert tip.idx == 9
509 assert tip.idx == 9
494
510
495 @pytest.mark.backends("git", "hg")
511 @pytest.mark.backends("git", "hg")
496 def test_strip_multiple_commits(self):
512 def test_strip_multiple_commits(self):
497 tip = self.repo.get_commit()
513 tip = self.repo.get_commit()
498 assert tip.idx == 10
514 assert tip.idx == 10
499
515
500 old = self.repo.get_commit(commit_idx=5)
516 old = self.repo.get_commit(commit_idx=5)
501 self.repo.strip(old.raw_id, self.repo.DEFAULT_BRANCH_NAME)
517 self.repo.strip(old.raw_id, self.repo.DEFAULT_BRANCH_NAME)
502
518
503 tip = self.repo.get_commit()
519 tip = self.repo.get_commit()
504 assert tip.idx == 4
520 assert tip.idx == 4
505
521
506
522
507 @pytest.mark.backends('hg', 'git')
523 @pytest.mark.backends('hg', 'git')
508 class TestRepositoryPull:
524 class TestRepositoryPull(object):
509
525
510 def test_pull(self, vcsbackend):
526 def test_pull(self, vcsbackend):
511 source_repo = vcsbackend.repo
527 source_repo = vcsbackend.repo
512 target_repo = vcsbackend.create_repo()
528 target_repo = vcsbackend.create_repo()
513 assert len(source_repo.commit_ids) > len(target_repo.commit_ids)
529 assert len(source_repo.commit_ids) > len(target_repo.commit_ids)
514
530
515 target_repo.pull(source_repo.path)
531 target_repo.pull(source_repo.path)
516 # Note: Get a fresh instance, avoids caching trouble
532 # Note: Get a fresh instance, avoids caching trouble
517 target_repo = vcsbackend.backend(target_repo.path)
533 target_repo = vcsbackend.backend(target_repo.path)
518 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
534 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
519
535
520 def test_pull_wrong_path(self, vcsbackend):
536 def test_pull_wrong_path(self, vcsbackend):
521 target_repo = vcsbackend.create_repo()
537 target_repo = vcsbackend.create_repo()
522 with pytest.raises(RepositoryError):
538 with pytest.raises(RepositoryError):
523 target_repo.pull(target_repo.path + "wrong")
539 target_repo.pull(target_repo.path + "wrong")
524
540
525 def test_pull_specific_commits(self, vcsbackend):
541 def test_pull_specific_commits(self, vcsbackend):
526 source_repo = vcsbackend.repo
542 source_repo = vcsbackend.repo
527 target_repo = vcsbackend.create_repo()
543 target_repo = vcsbackend.create_repo()
528
544
529 second_commit = source_repo[1].raw_id
545 second_commit = source_repo[1].raw_id
530 if vcsbackend.alias == 'git':
546 if vcsbackend.alias == 'git':
531 second_commit_ref = 'refs/test-refs/a'
547 second_commit_ref = 'refs/test-refs/a'
532 source_repo.set_refs(second_commit_ref, second_commit)
548 source_repo.set_refs(second_commit_ref, second_commit)
533
549
534 target_repo.pull(source_repo.path, commit_ids=[second_commit])
550 target_repo.pull(source_repo.path, commit_ids=[second_commit])
535 target_repo = vcsbackend.backend(target_repo.path)
551 target_repo = vcsbackend.backend(target_repo.path)
536 assert 2 == len(target_repo.commit_ids)
552 assert 2 == len(target_repo.commit_ids)
537 assert second_commit == target_repo.get_commit().raw_id
553 assert second_commit == target_repo.get_commit().raw_id
General Comments 0
You need to be logged in to leave comments. Login now