##// END OF EJS Templates
shadow-repos: use numeric repo id for creation of shadow repos....
marcink -
r2810:a15bd3a8 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,905 +1,905 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 from rhodecode.api.utils import (
26 from rhodecode.api.utils import (
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 validate_repo_permissions, resolve_ref_or_error)
29 validate_repo_permissions, resolve_ref_or_error)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 from rhodecode.lib.base import vcs_operation_context
31 from rhodecode.lib.base import vcs_operation_context
32 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool
33 from rhodecode.model.changeset_status import ChangesetStatusModel
33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.comment import CommentsModel
34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 from rhodecode.model.settings import SettingsModel
37 from rhodecode.model.settings import SettingsModel
38 from rhodecode.model.validation_schema import Invalid
38 from rhodecode.model.validation_schema import Invalid
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 ReviewerListSchema)
40 ReviewerListSchema)
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
@jsonrpc_method()
def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Get a pull request based on the given ID.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID from where
        the pull request was opened.
    :type repoid: str or int
    :param pullrequestid: ID of the requested pull request.
    :type pullrequestid: int

    Returns the serialized pull request (id, url, title, description,
    status, commit_ids, review_status, mergeable, source/target/merge
    references, author and reviewers), or raises JSONRPCError when the
    pull request does not exist or the caller has no read access.
    """
    pull_request = get_pull_request_or_error(pullrequestid)

    # Resolve the repository: an explicitly passed repoid wins (and is
    # validated to exist), otherwise fall back to the PR's target repo.
    repo = (get_repo_or_error(repoid) if Optional.extract(repoid)
            else pull_request.target_repo)

    readable = PullRequestModel().check_user_read(
        pull_request, apiuser, api=True)
    if not readable:
        # Deliberately vague: do not reveal whether the repo or the PR
        # exists to a caller without read permission.
        raise JSONRPCError(
            'repository `%s` or pull request `%s` '
            'does not exist' % (repoid, pullrequestid))

    return pull_request.get_api_data()
137
137
138
138
@jsonrpc_method()
def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
    """
    Get all pull requests from the repository specified in `repoid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param status: Only return pull requests with the specified status.
        Valid options are: ``new`` (default), ``open``, ``closed``.
    :type status: str

    Returns a list of serialized pull requests (same shape as
    `get_pull_request` returns for a single one).
    """
    repo = get_repo_or_error(repoid)

    # Super-admins bypass the per-repository permission check; everyone
    # else needs at least read access on the repository.
    if not has_superadmin_permission(apiuser):
        required_perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, required_perms)

    wanted_status = Optional.extract(status)
    matching = PullRequestModel().get_all(repo, statuses=[wanted_status])
    return [pull_request.get_api_data() for pull_request in matching]
234
234
235
235
@jsonrpc_method()
def merge_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Merge the pull request specified by `pullrequestid` into its target
    repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID of the
        target repository to which the |pr| is to be merged.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request which shall be merged.
    :type pullrequestid: int
    :param userid: Merge the pull request as this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "executed": "<bool>",
            "failure_reason": "<int>",
            "merge_commit_id": "<merge_commit_id>",
            "possible": "<bool>",
            "merge_ref": {
                "commit_id": "<commit_id>",
                "type": "<type>",
                "name": "<name>"
            }
        },
        "error": null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    # The repository is only needed for the permission check below; default
    # to the PR's own target repository when no repoid was passed.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Merging on behalf of another user (userid given) requires
    # super-admin or repository-admin rights; everyone else is rejected.
    if not isinstance(userid, Optional):
        if (has_superadmin_permission(apiuser) or
                HasRepoPermissionAnyApi('repository.admin')(
                    user=apiuser, repo_name=repo.repo_name)):
            apiuser = get_user_or_error(userid)
        else:
            raise JSONRPCError('userid is not the same as your user')

    # Run all merge pre-conditions (review state, conflicts, permissions).
    check = MergeCheck.validate(
        pull_request, user=apiuser, translator=request.translate)
    merge_possible = not check.failed

    if not merge_possible:
        # Aggregate all translated failure reasons into a single error.
        error_messages = []
        for err_type, error_msg in check.errors:
            error_msg = request.translate(error_msg)
            error_messages.append(error_msg)

        reasons = ','.join(error_messages)
        raise JSONRPCError(
            'merge not possible for following reasons: {}'.format(reasons))

    # Build the VCS hook context so the merge is processed like a push by
    # `apiuser` against the target repository.
    target_repo = pull_request.target_repo
    extras = vcs_operation_context(
        request.environ, repo_name=target_repo.repo_name,
        username=apiuser.username, action='push',
        scm=target_repo.repo_type)
    merge_response = PullRequestModel().merge_repo(
        pull_request, apiuser, extras=extras)
    if merge_response.executed:
        # Only close the PR and persist the session when the merge was
        # actually carried out.
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, apiuser)

        Session().commit()

    # In previous versions the merge response directly contained the merge
    # commit id. It is now contained in the merge reference object. To be
    # backwards compatible we have to extract it again.
    merge_response = merge_response._asdict()
    merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id

    return merge_response
320
320
321
321
@jsonrpc_method()
def get_pull_request_comments(
        request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Get all comments of pull request specified with the `pullrequestid`

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : [
            {
              "comment_author": {
                "active": true,
                "full_name_or_username": "Tom Gore",
                "username": "admin"
              },
              "comment_created_on": "2017-01-02T18:43:45.533",
              "comment_f_path": null,
              "comment_id": 25,
              "comment_lineno": null,
              "comment_status": {
                "status": "under_review",
                "status_lbl": "Under Review"
              },
              "comment_text": "Example text",
              "comment_type": null,
              "pull_request_version": null
            }
        ],
        error : null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    # Resolve repository for the permission check; fall back to the PR's
    # target repository when no repoid was given.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        # Vague on purpose: don't reveal repo/PR existence without access.
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    # Fetch the PR's version history; only the display object is needed
    # here to build the version map, the other tuple members are unused.
    (pull_request_latest,
     pull_request_at_ver,
     pull_request_display_obj,
     at_version) = PullRequestModel().get_pr_version(
        pull_request.pull_request_id, version=None)

    # Map internal version ids to human-friendly 1-based counters so the
    # API can report "v1", "v2", ... instead of database ids.
    versions = pull_request_display_obj.versions()
    ver_map = {
        ver.pull_request_version_id: cnt
        for cnt, ver in enumerate(versions, 1)
    }

    # GENERAL COMMENTS with versions #
    q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
    q = q.order_by(ChangesetComment.comment_id.asc())
    general_comments = q.all()

    # INLINE COMMENTS with versions #
    q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
    q = q.order_by(ChangesetComment.comment_id.asc())
    inline_comments = q.all()

    data = []
    for comment in inline_comments + general_comments:
        full_data = comment.get_api_data()
        # Translate the version id into its "v<counter>" label; comments
        # made on the live PR (no version) keep None.
        pr_version_id = None
        if comment.pull_request_version_id:
            pr_version_id = 'v{}'.format(
                ver_map[comment.pull_request_version_id])

        # sanitize some entries

        full_data['pull_request_version'] = pr_version_id
        # Replace the author ORM object with a small JSON-safe dict.
        full_data['comment_author'] = {
            'username': full_data['comment_author'].username,
            'full_name_or_username': full_data['comment_author'].full_name_or_username,
            'active': full_data['comment_author'].active,
        }

        # comment_status comes back as a (possibly empty) sequence; expose
        # only the first entry, flattened, or an empty dict.
        if full_data['comment_status']:
            full_data['comment_status'] = {
                'status': full_data['comment_status'][0].status,
                'status_lbl': full_data['comment_status'][0].status_lbl,
            }
        else:
            full_data['comment_status'] = {}

        data.append(full_data)
    return data
423
423
424
424
425 @jsonrpc_method()
425 @jsonrpc_method()
426 def comment_pull_request(
426 def comment_pull_request(
427 request, apiuser, pullrequestid, repoid=Optional(None),
427 request, apiuser, pullrequestid, repoid=Optional(None),
428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
430 resolves_comment_id=Optional(None),
430 resolves_comment_id=Optional(None),
431 userid=Optional(OAttr('apiuser'))):
431 userid=Optional(OAttr('apiuser'))):
432 """
432 """
433 Comment on the pull request specified with the `pullrequestid`,
433 Comment on the pull request specified with the `pullrequestid`,
434 in the |repo| specified by the `repoid`, and optionally change the
434 in the |repo| specified by the `repoid`, and optionally change the
435 review status.
435 review status.
436
436
437 :param apiuser: This is filled automatically from the |authtoken|.
437 :param apiuser: This is filled automatically from the |authtoken|.
438 :type apiuser: AuthUser
438 :type apiuser: AuthUser
439 :param repoid: Optional repository name or repository ID.
439 :param repoid: Optional repository name or repository ID.
440 :type repoid: str or int
440 :type repoid: str or int
441 :param pullrequestid: The pull request ID.
441 :param pullrequestid: The pull request ID.
442 :type pullrequestid: int
442 :type pullrequestid: int
443 :param commit_id: Specify the commit_id for which to set a comment. If
443 :param commit_id: Specify the commit_id for which to set a comment. If
444 given commit_id is different than latest in the PR status
444 given commit_id is different than latest in the PR status
445 change won't be performed.
445 change won't be performed.
446 :type commit_id: str
446 :type commit_id: str
447 :param message: The text content of the comment.
447 :param message: The text content of the comment.
448 :type message: str
448 :type message: str
449 :param status: (**Optional**) Set the approval status of the pull
449 :param status: (**Optional**) Set the approval status of the pull
450 request. One of: 'not_reviewed', 'approved', 'rejected',
450 request. One of: 'not_reviewed', 'approved', 'rejected',
451 'under_review'
451 'under_review'
452 :type status: str
452 :type status: str
453 :param comment_type: Comment type, one of: 'note', 'todo'
453 :param comment_type: Comment type, one of: 'note', 'todo'
454 :type comment_type: Optional(str), default: 'note'
454 :type comment_type: Optional(str), default: 'note'
455 :param userid: Comment on the pull request as this user
455 :param userid: Comment on the pull request as this user
456 :type userid: Optional(str or int)
456 :type userid: Optional(str or int)
457
457
458 Example output:
458 Example output:
459
459
460 .. code-block:: bash
460 .. code-block:: bash
461
461
462 id : <id_given_in_input>
462 id : <id_given_in_input>
463 result : {
463 result : {
464 "pull_request_id": "<Integer>",
464 "pull_request_id": "<Integer>",
465 "comment_id": "<Integer>",
465 "comment_id": "<Integer>",
466 "status": {"given": <given_status>,
466 "status": {"given": <given_status>,
467 "was_changed": <bool status_was_actually_changed> },
467 "was_changed": <bool status_was_actually_changed> },
468 },
468 },
469 error : null
469 error : null
470 """
470 """
471 pull_request = get_pull_request_or_error(pullrequestid)
471 pull_request = get_pull_request_or_error(pullrequestid)
472 if Optional.extract(repoid):
472 if Optional.extract(repoid):
473 repo = get_repo_or_error(repoid)
473 repo = get_repo_or_error(repoid)
474 else:
474 else:
475 repo = pull_request.target_repo
475 repo = pull_request.target_repo
476
476
477 if not isinstance(userid, Optional):
477 if not isinstance(userid, Optional):
478 if (has_superadmin_permission(apiuser) or
478 if (has_superadmin_permission(apiuser) or
479 HasRepoPermissionAnyApi('repository.admin')(
479 HasRepoPermissionAnyApi('repository.admin')(
480 user=apiuser, repo_name=repo.repo_name)):
480 user=apiuser, repo_name=repo.repo_name)):
481 apiuser = get_user_or_error(userid)
481 apiuser = get_user_or_error(userid)
482 else:
482 else:
483 raise JSONRPCError('userid is not the same as your user')
483 raise JSONRPCError('userid is not the same as your user')
484
484
485 if not PullRequestModel().check_user_read(
485 if not PullRequestModel().check_user_read(
486 pull_request, apiuser, api=True):
486 pull_request, apiuser, api=True):
487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
488 message = Optional.extract(message)
488 message = Optional.extract(message)
489 status = Optional.extract(status)
489 status = Optional.extract(status)
490 commit_id = Optional.extract(commit_id)
490 commit_id = Optional.extract(commit_id)
491 comment_type = Optional.extract(comment_type)
491 comment_type = Optional.extract(comment_type)
492 resolves_comment_id = Optional.extract(resolves_comment_id)
492 resolves_comment_id = Optional.extract(resolves_comment_id)
493
493
494 if not message and not status:
494 if not message and not status:
495 raise JSONRPCError(
495 raise JSONRPCError(
496 'Both message and status parameters are missing. '
496 'Both message and status parameters are missing. '
497 'At least one is required.')
497 'At least one is required.')
498
498
499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
500 status is not None):
500 status is not None):
501 raise JSONRPCError('Unknown comment status: `%s`' % status)
501 raise JSONRPCError('Unknown comment status: `%s`' % status)
502
502
503 if commit_id and commit_id not in pull_request.revisions:
503 if commit_id and commit_id not in pull_request.revisions:
504 raise JSONRPCError(
504 raise JSONRPCError(
505 'Invalid commit_id `%s` for this pull request.' % commit_id)
505 'Invalid commit_id `%s` for this pull request.' % commit_id)
506
506
507 allowed_to_change_status = PullRequestModel().check_user_change_status(
507 allowed_to_change_status = PullRequestModel().check_user_change_status(
508 pull_request, apiuser)
508 pull_request, apiuser)
509
509
510 # if commit_id is passed re-validated if user is allowed to change status
510 # if commit_id is passed re-validated if user is allowed to change status
511 # based on latest commit_id from the PR
511 # based on latest commit_id from the PR
512 if commit_id:
512 if commit_id:
513 commit_idx = pull_request.revisions.index(commit_id)
513 commit_idx = pull_request.revisions.index(commit_id)
514 if commit_idx != 0:
514 if commit_idx != 0:
515 allowed_to_change_status = False
515 allowed_to_change_status = False
516
516
517 if resolves_comment_id:
517 if resolves_comment_id:
518 comment = ChangesetComment.get(resolves_comment_id)
518 comment = ChangesetComment.get(resolves_comment_id)
519 if not comment:
519 if not comment:
520 raise JSONRPCError(
520 raise JSONRPCError(
521 'Invalid resolves_comment_id `%s` for this pull request.'
521 'Invalid resolves_comment_id `%s` for this pull request.'
522 % resolves_comment_id)
522 % resolves_comment_id)
523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
524 raise JSONRPCError(
524 raise JSONRPCError(
525 'Comment `%s` is wrong type for setting status to resolved.'
525 'Comment `%s` is wrong type for setting status to resolved.'
526 % resolves_comment_id)
526 % resolves_comment_id)
527
527
528 text = message
528 text = message
529 status_label = ChangesetStatus.get_status_lbl(status)
529 status_label = ChangesetStatus.get_status_lbl(status)
530 if status and allowed_to_change_status:
530 if status and allowed_to_change_status:
531 st_message = ('Status change %(transition_icon)s %(status)s'
531 st_message = ('Status change %(transition_icon)s %(status)s'
532 % {'transition_icon': '>', 'status': status_label})
532 % {'transition_icon': '>', 'status': status_label})
533 text = message or st_message
533 text = message or st_message
534
534
535 rc_config = SettingsModel().get_all_settings()
535 rc_config = SettingsModel().get_all_settings()
536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
537
537
538 status_change = status and allowed_to_change_status
538 status_change = status and allowed_to_change_status
539 comment = CommentsModel().create(
539 comment = CommentsModel().create(
540 text=text,
540 text=text,
541 repo=pull_request.target_repo.repo_id,
541 repo=pull_request.target_repo.repo_id,
542 user=apiuser.user_id,
542 user=apiuser.user_id,
543 pull_request=pull_request.pull_request_id,
543 pull_request=pull_request.pull_request_id,
544 f_path=None,
544 f_path=None,
545 line_no=None,
545 line_no=None,
546 status_change=(status_label if status_change else None),
546 status_change=(status_label if status_change else None),
547 status_change_type=(status if status_change else None),
547 status_change_type=(status if status_change else None),
548 closing_pr=False,
548 closing_pr=False,
549 renderer=renderer,
549 renderer=renderer,
550 comment_type=comment_type,
550 comment_type=comment_type,
551 resolves_comment_id=resolves_comment_id,
551 resolves_comment_id=resolves_comment_id,
552 auth_user=apiuser
552 auth_user=apiuser
553 )
553 )
554
554
555 if allowed_to_change_status and status:
555 if allowed_to_change_status and status:
556 ChangesetStatusModel().set_status(
556 ChangesetStatusModel().set_status(
557 pull_request.target_repo.repo_id,
557 pull_request.target_repo.repo_id,
558 status,
558 status,
559 apiuser.user_id,
559 apiuser.user_id,
560 comment,
560 comment,
561 pull_request=pull_request.pull_request_id
561 pull_request=pull_request.pull_request_id
562 )
562 )
563 Session().flush()
563 Session().flush()
564
564
565 Session().commit()
565 Session().commit()
566 data = {
566 data = {
567 'pull_request_id': pull_request.pull_request_id,
567 'pull_request_id': pull_request.pull_request_id,
568 'comment_id': comment.comment_id if comment else None,
568 'comment_id': comment.comment_id if comment else None,
569 'status': {'given': status, 'was_changed': status_change},
569 'status': {'given': status, 'was_changed': status_change},
570 }
570 }
571 return data
571 return data
572
572
573
573
574 @jsonrpc_method()
574 @jsonrpc_method()
575 def create_pull_request(
575 def create_pull_request(
576 request, apiuser, source_repo, target_repo, source_ref, target_ref,
576 request, apiuser, source_repo, target_repo, source_ref, target_ref,
577 title, description=Optional(''), reviewers=Optional(None)):
577 title, description=Optional(''), reviewers=Optional(None)):
578 """
578 """
579 Creates a new pull request.
579 Creates a new pull request.
580
580
581 Accepts refs in the following formats:
581 Accepts refs in the following formats:
582
582
583 * branch:<branch_name>:<sha>
583 * branch:<branch_name>:<sha>
584 * branch:<branch_name>
584 * branch:<branch_name>
585 * bookmark:<bookmark_name>:<sha> (Mercurial only)
585 * bookmark:<bookmark_name>:<sha> (Mercurial only)
586 * bookmark:<bookmark_name> (Mercurial only)
586 * bookmark:<bookmark_name> (Mercurial only)
587
587
588 :param apiuser: This is filled automatically from the |authtoken|.
588 :param apiuser: This is filled automatically from the |authtoken|.
589 :type apiuser: AuthUser
589 :type apiuser: AuthUser
590 :param source_repo: Set the source repository name.
590 :param source_repo: Set the source repository name.
591 :type source_repo: str
591 :type source_repo: str
592 :param target_repo: Set the target repository name.
592 :param target_repo: Set the target repository name.
593 :type target_repo: str
593 :type target_repo: str
594 :param source_ref: Set the source ref name.
594 :param source_ref: Set the source ref name.
595 :type source_ref: str
595 :type source_ref: str
596 :param target_ref: Set the target ref name.
596 :param target_ref: Set the target ref name.
597 :type target_ref: str
597 :type target_ref: str
598 :param title: Set the pull request title.
598 :param title: Set the pull request title.
599 :type title: str
599 :type title: str
600 :param description: Set the pull request description.
600 :param description: Set the pull request description.
601 :type description: Optional(str)
601 :type description: Optional(str)
602 :param reviewers: Set the new pull request reviewers list.
602 :param reviewers: Set the new pull request reviewers list.
603 Reviewer defined by review rules will be added automatically to the
603 Reviewer defined by review rules will be added automatically to the
604 defined list.
604 defined list.
605 :type reviewers: Optional(list)
605 :type reviewers: Optional(list)
606 Accepts username strings or objects of the format:
606 Accepts username strings or objects of the format:
607
607
608 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
608 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
609 """
609 """
610
610
611 source_db_repo = get_repo_or_error(source_repo)
611 source_db_repo = get_repo_or_error(source_repo)
612 target_db_repo = get_repo_or_error(target_repo)
612 target_db_repo = get_repo_or_error(target_repo)
613 if not has_superadmin_permission(apiuser):
613 if not has_superadmin_permission(apiuser):
614 _perms = ('repository.admin', 'repository.write', 'repository.read',)
614 _perms = ('repository.admin', 'repository.write', 'repository.read',)
615 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
615 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
616
616
617 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
617 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
618 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
618 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
619 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
619 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
620 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
620 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
621 source_scm = source_db_repo.scm_instance()
621 source_scm = source_db_repo.scm_instance()
622 target_scm = target_db_repo.scm_instance()
622 target_scm = target_db_repo.scm_instance()
623
623
624 commit_ranges = target_scm.compare(
624 commit_ranges = target_scm.compare(
625 target_commit.raw_id, source_commit.raw_id, source_scm,
625 target_commit.raw_id, source_commit.raw_id, source_scm,
626 merge=True, pre_load=[])
626 merge=True, pre_load=[])
627
627
628 ancestor = target_scm.get_common_ancestor(
628 ancestor = target_scm.get_common_ancestor(
629 target_commit.raw_id, source_commit.raw_id, source_scm)
629 target_commit.raw_id, source_commit.raw_id, source_scm)
630
630
631 if not commit_ranges:
631 if not commit_ranges:
632 raise JSONRPCError('no commits found')
632 raise JSONRPCError('no commits found')
633
633
634 if not ancestor:
634 if not ancestor:
635 raise JSONRPCError('no common ancestor found')
635 raise JSONRPCError('no common ancestor found')
636
636
637 reviewer_objects = Optional.extract(reviewers) or []
637 reviewer_objects = Optional.extract(reviewers) or []
638
638
639 if reviewer_objects:
639 if reviewer_objects:
640 schema = ReviewerListSchema()
640 schema = ReviewerListSchema()
641 try:
641 try:
642 reviewer_objects = schema.deserialize(reviewer_objects)
642 reviewer_objects = schema.deserialize(reviewer_objects)
643 except Invalid as err:
643 except Invalid as err:
644 raise JSONRPCValidationError(colander_exc=err)
644 raise JSONRPCValidationError(colander_exc=err)
645
645
646 # validate users
646 # validate users
647 for reviewer_object in reviewer_objects:
647 for reviewer_object in reviewer_objects:
648 user = get_user_or_error(reviewer_object['username'])
648 user = get_user_or_error(reviewer_object['username'])
649 reviewer_object['user_id'] = user.user_id
649 reviewer_object['user_id'] = user.user_id
650
650
651 get_default_reviewers_data, get_validated_reviewers = \
651 get_default_reviewers_data, get_validated_reviewers = \
652 PullRequestModel().get_reviewer_functions()
652 PullRequestModel().get_reviewer_functions()
653
653
654 reviewer_rules = get_default_reviewers_data(
654 reviewer_rules = get_default_reviewers_data(
655 apiuser.get_instance(), source_db_repo,
655 apiuser.get_instance(), source_db_repo,
656 source_commit, target_db_repo, target_commit)
656 source_commit, target_db_repo, target_commit)
657
657
658 # specified rules are later re-validated, thus we can assume users will
658 # specified rules are later re-validated, thus we can assume users will
659 # eventually provide those that meet the reviewer criteria.
659 # eventually provide those that meet the reviewer criteria.
660 if not reviewer_objects:
660 if not reviewer_objects:
661 reviewer_objects = reviewer_rules['reviewers']
661 reviewer_objects = reviewer_rules['reviewers']
662
662
663 try:
663 try:
664 reviewers = get_validated_reviewers(
664 reviewers = get_validated_reviewers(
665 reviewer_objects, reviewer_rules)
665 reviewer_objects, reviewer_rules)
666 except ValueError as e:
666 except ValueError as e:
667 raise JSONRPCError('Reviewers Validation: {}'.format(e))
667 raise JSONRPCError('Reviewers Validation: {}'.format(e))
668
668
669 pull_request_model = PullRequestModel()
669 pull_request_model = PullRequestModel()
670 pull_request = pull_request_model.create(
670 pull_request = pull_request_model.create(
671 created_by=apiuser.user_id,
671 created_by=apiuser.user_id,
672 source_repo=source_repo,
672 source_repo=source_repo,
673 source_ref=full_source_ref,
673 source_ref=full_source_ref,
674 target_repo=target_repo,
674 target_repo=target_repo,
675 target_ref=full_target_ref,
675 target_ref=full_target_ref,
676 revisions=reversed(
676 revisions=reversed(
677 [commit.raw_id for commit in reversed(commit_ranges)]),
677 [commit.raw_id for commit in reversed(commit_ranges)]),
678 reviewers=reviewers,
678 reviewers=reviewers,
679 title=title,
679 title=title,
680 description=Optional.extract(description),
680 description=Optional.extract(description),
681 auth_user=apiuser
681 auth_user=apiuser
682 )
682 )
683
683
684 Session().commit()
684 Session().commit()
685 data = {
685 data = {
686 'msg': 'Created new pull request `{}`'.format(title),
686 'msg': 'Created new pull request `{}`'.format(title),
687 'pull_request_id': pull_request.pull_request_id,
687 'pull_request_id': pull_request.pull_request_id,
688 }
688 }
689 return data
689 return data
690
690
691
691
692 @jsonrpc_method()
692 @jsonrpc_method()
693 def update_pull_request(
693 def update_pull_request(
694 request, apiuser, pullrequestid, repoid=Optional(None),
694 request, apiuser, pullrequestid, repoid=Optional(None),
695 title=Optional(''), description=Optional(''), reviewers=Optional(None),
695 title=Optional(''), description=Optional(''), reviewers=Optional(None),
696 update_commits=Optional(None)):
696 update_commits=Optional(None)):
697 """
697 """
698 Updates a pull request.
698 Updates a pull request.
699
699
700 :param apiuser: This is filled automatically from the |authtoken|.
700 :param apiuser: This is filled automatically from the |authtoken|.
701 :type apiuser: AuthUser
701 :type apiuser: AuthUser
702 :param repoid: Optional repository name or repository ID.
702 :param repoid: Optional repository name or repository ID.
703 :type repoid: str or int
703 :type repoid: str or int
704 :param pullrequestid: The pull request ID.
704 :param pullrequestid: The pull request ID.
705 :type pullrequestid: int
705 :type pullrequestid: int
706 :param title: Set the pull request title.
706 :param title: Set the pull request title.
707 :type title: str
707 :type title: str
708 :param description: Update pull request description.
708 :param description: Update pull request description.
709 :type description: Optional(str)
709 :type description: Optional(str)
710 :param reviewers: Update pull request reviewers list with new value.
710 :param reviewers: Update pull request reviewers list with new value.
711 :type reviewers: Optional(list)
711 :type reviewers: Optional(list)
712 Accepts username strings or objects of the format:
712 Accepts username strings or objects of the format:
713
713
714 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
714 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
715
715
716 :param update_commits: Trigger update of commits for this pull request
716 :param update_commits: Trigger update of commits for this pull request
717 :type: update_commits: Optional(bool)
717 :type: update_commits: Optional(bool)
718
718
719 Example output:
719 Example output:
720
720
721 .. code-block:: bash
721 .. code-block:: bash
722
722
723 id : <id_given_in_input>
723 id : <id_given_in_input>
724 result : {
724 result : {
725 "msg": "Updated pull request `63`",
725 "msg": "Updated pull request `63`",
726 "pull_request": <pull_request_object>,
726 "pull_request": <pull_request_object>,
727 "updated_reviewers": {
727 "updated_reviewers": {
728 "added": [
728 "added": [
729 "username"
729 "username"
730 ],
730 ],
731 "removed": []
731 "removed": []
732 },
732 },
733 "updated_commits": {
733 "updated_commits": {
734 "added": [
734 "added": [
735 "<sha1_hash>"
735 "<sha1_hash>"
736 ],
736 ],
737 "common": [
737 "common": [
738 "<sha1_hash>",
738 "<sha1_hash>",
739 "<sha1_hash>",
739 "<sha1_hash>",
740 ],
740 ],
741 "removed": []
741 "removed": []
742 }
742 }
743 }
743 }
744 error : null
744 error : null
745 """
745 """
746
746
747 pull_request = get_pull_request_or_error(pullrequestid)
747 pull_request = get_pull_request_or_error(pullrequestid)
748 if Optional.extract(repoid):
748 if Optional.extract(repoid):
749 repo = get_repo_or_error(repoid)
749 repo = get_repo_or_error(repoid)
750 else:
750 else:
751 repo = pull_request.target_repo
751 repo = pull_request.target_repo
752
752
753 if not PullRequestModel().check_user_update(
753 if not PullRequestModel().check_user_update(
754 pull_request, apiuser, api=True):
754 pull_request, apiuser, api=True):
755 raise JSONRPCError(
755 raise JSONRPCError(
756 'pull request `%s` update failed, no permission to update.' % (
756 'pull request `%s` update failed, no permission to update.' % (
757 pullrequestid,))
757 pullrequestid,))
758 if pull_request.is_closed():
758 if pull_request.is_closed():
759 raise JSONRPCError(
759 raise JSONRPCError(
760 'pull request `%s` update failed, pull request is closed' % (
760 'pull request `%s` update failed, pull request is closed' % (
761 pullrequestid,))
761 pullrequestid,))
762
762
763 reviewer_objects = Optional.extract(reviewers) or []
763 reviewer_objects = Optional.extract(reviewers) or []
764
764
765 if reviewer_objects:
765 if reviewer_objects:
766 schema = ReviewerListSchema()
766 schema = ReviewerListSchema()
767 try:
767 try:
768 reviewer_objects = schema.deserialize(reviewer_objects)
768 reviewer_objects = schema.deserialize(reviewer_objects)
769 except Invalid as err:
769 except Invalid as err:
770 raise JSONRPCValidationError(colander_exc=err)
770 raise JSONRPCValidationError(colander_exc=err)
771
771
772 # validate users
772 # validate users
773 for reviewer_object in reviewer_objects:
773 for reviewer_object in reviewer_objects:
774 user = get_user_or_error(reviewer_object['username'])
774 user = get_user_or_error(reviewer_object['username'])
775 reviewer_object['user_id'] = user.user_id
775 reviewer_object['user_id'] = user.user_id
776
776
777 get_default_reviewers_data, get_validated_reviewers = \
777 get_default_reviewers_data, get_validated_reviewers = \
778 PullRequestModel().get_reviewer_functions()
778 PullRequestModel().get_reviewer_functions()
779
779
780 # re-use stored rules
780 # re-use stored rules
781 reviewer_rules = pull_request.reviewer_data
781 reviewer_rules = pull_request.reviewer_data
782 try:
782 try:
783 reviewers = get_validated_reviewers(
783 reviewers = get_validated_reviewers(
784 reviewer_objects, reviewer_rules)
784 reviewer_objects, reviewer_rules)
785 except ValueError as e:
785 except ValueError as e:
786 raise JSONRPCError('Reviewers Validation: {}'.format(e))
786 raise JSONRPCError('Reviewers Validation: {}'.format(e))
787 else:
787 else:
788 reviewers = []
788 reviewers = []
789
789
790 title = Optional.extract(title)
790 title = Optional.extract(title)
791 description = Optional.extract(description)
791 description = Optional.extract(description)
792 if title or description:
792 if title or description:
793 PullRequestModel().edit(
793 PullRequestModel().edit(
794 pull_request, title or pull_request.title,
794 pull_request, title or pull_request.title,
795 description or pull_request.description, apiuser)
795 description or pull_request.description, apiuser)
796 Session().commit()
796 Session().commit()
797
797
798 commit_changes = {"added": [], "common": [], "removed": []}
798 commit_changes = {"added": [], "common": [], "removed": []}
799 if str2bool(Optional.extract(update_commits)):
799 if str2bool(Optional.extract(update_commits)):
800 if PullRequestModel().has_valid_update_type(pull_request):
800 if PullRequestModel().has_valid_update_type(pull_request):
801 update_response = PullRequestModel().update_commits(
801 update_response = PullRequestModel().update_commits(
802 pull_request)
802 pull_request)
803 commit_changes = update_response.changes or commit_changes
803 commit_changes = update_response.changes or commit_changes
804 Session().commit()
804 Session().commit()
805
805
806 reviewers_changes = {"added": [], "removed": []}
806 reviewers_changes = {"added": [], "removed": []}
807 if reviewers:
807 if reviewers:
808 added_reviewers, removed_reviewers = \
808 added_reviewers, removed_reviewers = \
809 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
809 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
810
810
811 reviewers_changes['added'] = sorted(
811 reviewers_changes['added'] = sorted(
812 [get_user_or_error(n).username for n in added_reviewers])
812 [get_user_or_error(n).username for n in added_reviewers])
813 reviewers_changes['removed'] = sorted(
813 reviewers_changes['removed'] = sorted(
814 [get_user_or_error(n).username for n in removed_reviewers])
814 [get_user_or_error(n).username for n in removed_reviewers])
815 Session().commit()
815 Session().commit()
816
816
817 data = {
817 data = {
818 'msg': 'Updated pull request `{}`'.format(
818 'msg': 'Updated pull request `{}`'.format(
819 pull_request.pull_request_id),
819 pull_request.pull_request_id),
820 'pull_request': pull_request.get_api_data(),
820 'pull_request': pull_request.get_api_data(),
821 'updated_commits': commit_changes,
821 'updated_commits': commit_changes,
822 'updated_reviewers': reviewers_changes
822 'updated_reviewers': reviewers_changes
823 }
823 }
824
824
825 return data
825 return data
826
826
827
827
828 @jsonrpc_method()
828 @jsonrpc_method()
829 def close_pull_request(
829 def close_pull_request(
830 request, apiuser, pullrequestid, repoid=Optional(None),
830 request, apiuser, pullrequestid, repoid=Optional(None),
831 userid=Optional(OAttr('apiuser')), message=Optional('')):
831 userid=Optional(OAttr('apiuser')), message=Optional('')):
832 """
832 """
833 Close the pull request specified by `pullrequestid`.
833 Close the pull request specified by `pullrequestid`.
834
834
835 :param apiuser: This is filled automatically from the |authtoken|.
835 :param apiuser: This is filled automatically from the |authtoken|.
836 :type apiuser: AuthUser
836 :type apiuser: AuthUser
837 :param repoid: Repository name or repository ID to which the pull
837 :param repoid: Repository name or repository ID to which the pull
838 request belongs.
838 request belongs.
839 :type repoid: str or int
839 :type repoid: str or int
840 :param pullrequestid: ID of the pull request to be closed.
840 :param pullrequestid: ID of the pull request to be closed.
841 :type pullrequestid: int
841 :type pullrequestid: int
842 :param userid: Close the pull request as this user.
842 :param userid: Close the pull request as this user.
843 :type userid: Optional(str or int)
843 :type userid: Optional(str or int)
844 :param message: Optional message to close the Pull Request with. If not
844 :param message: Optional message to close the Pull Request with. If not
845 specified it will be generated automatically.
845 specified it will be generated automatically.
846 :type message: Optional(str)
846 :type message: Optional(str)
847
847
848 Example output:
848 Example output:
849
849
850 .. code-block:: bash
850 .. code-block:: bash
851
851
852 "id": <id_given_in_input>,
852 "id": <id_given_in_input>,
853 "result": {
853 "result": {
854 "pull_request_id": "<int>",
854 "pull_request_id": "<int>",
855 "close_status": "<str:status_lbl>,
855 "close_status": "<str:status_lbl>,
856 "closed": "<bool>"
856 "closed": "<bool>"
857 },
857 },
858 "error": null
858 "error": null
859
859
860 """
860 """
861 _ = request.translate
861 _ = request.translate
862
862
863 pull_request = get_pull_request_or_error(pullrequestid)
863 pull_request = get_pull_request_or_error(pullrequestid)
864 if Optional.extract(repoid):
864 if Optional.extract(repoid):
865 repo = get_repo_or_error(repoid)
865 repo = get_repo_or_error(repoid)
866 else:
866 else:
867 repo = pull_request.target_repo
867 repo = pull_request.target_repo
868
868
869 if not isinstance(userid, Optional):
869 if not isinstance(userid, Optional):
870 if (has_superadmin_permission(apiuser) or
870 if (has_superadmin_permission(apiuser) or
871 HasRepoPermissionAnyApi('repository.admin')(
871 HasRepoPermissionAnyApi('repository.admin')(
872 user=apiuser, repo_name=repo.repo_name)):
872 user=apiuser, repo_name=repo.repo_name)):
873 apiuser = get_user_or_error(userid)
873 apiuser = get_user_or_error(userid)
874 else:
874 else:
875 raise JSONRPCError('userid is not the same as your user')
875 raise JSONRPCError('userid is not the same as your user')
876
876
877 if pull_request.is_closed():
877 if pull_request.is_closed():
878 raise JSONRPCError(
878 raise JSONRPCError(
879 'pull request `%s` is already closed' % (pullrequestid,))
879 'pull request `%s` is already closed' % (pullrequestid,))
880
880
881 # only owner or admin or person with write permissions
881 # only owner or admin or person with write permissions
882 allowed_to_close = PullRequestModel().check_user_update(
882 allowed_to_close = PullRequestModel().check_user_update(
883 pull_request, apiuser, api=True)
883 pull_request, apiuser, api=True)
884
884
885 if not allowed_to_close:
885 if not allowed_to_close:
886 raise JSONRPCError(
886 raise JSONRPCError(
887 'pull request `%s` close failed, no permission to close.' % (
887 'pull request `%s` close failed, no permission to close.' % (
888 pullrequestid,))
888 pullrequestid,))
889
889
890 # message we're using to close the PR, else it's automatically generated
890 # message we're using to close the PR, else it's automatically generated
891 message = Optional.extract(message)
891 message = Optional.extract(message)
892
892
893 # finally close the PR, with proper message comment
893 # finally close the PR, with proper message comment
894 comment, status = PullRequestModel().close_pull_request_with_comment(
894 comment, status = PullRequestModel().close_pull_request_with_comment(
895 pull_request, apiuser, repo, message=message)
895 pull_request, apiuser, repo, message=message)
896 status_lbl = ChangesetStatus.get_status_lbl(status)
896 status_lbl = ChangesetStatus.get_status_lbl(status)
897
897
898 Session().commit()
898 Session().commit()
899
899
900 data = {
900 data = {
901 'pull_request_id': pull_request.pull_request_id,
901 'pull_request_id': pull_request.pull_request_id,
902 'close_status': status_lbl,
902 'close_status': status_lbl,
903 'closed': True,
903 'closed': True,
904 }
904 }
905 return data
905 return data
@@ -1,1203 +1,1203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 from rhodecode.tests.utils import AssertResponse
35 from rhodecode.tests.utils import AssertResponse
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 }[name].format(**kwargs)
56 }[name].format(**kwargs)
57
57
58 if params:
58 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
60 return base_url
61
61
62
62
63 @pytest.mark.usefixtures('app', 'autologin_user')
63 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.backends("git", "hg")
64 @pytest.mark.backends("git", "hg")
65 class TestPullrequestsView(object):
65 class TestPullrequestsView(object):
66
66
67 def test_index(self, backend):
67 def test_index(self, backend):
68 self.app.get(route_path(
68 self.app.get(route_path(
69 'pullrequest_new',
69 'pullrequest_new',
70 repo_name=backend.repo_name))
70 repo_name=backend.repo_name))
71
71
72 def test_option_menu_create_pull_request_exists(self, backend):
72 def test_option_menu_create_pull_request_exists(self, backend):
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75
75
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 'pullrequest_new', repo_name=repo_name)
77 'pullrequest_new', repo_name=repo_name)
78 response.mustcontain(create_pr_link)
78 response.mustcontain(create_pr_link)
79
79
80 def test_create_pr_form_with_raw_commit_id(self, backend):
80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 repo = backend.repo
81 repo = backend.repo
82
82
83 self.app.get(
83 self.app.get(
84 route_path('pullrequest_new',
84 route_path('pullrequest_new',
85 repo_name=repo.repo_name,
85 repo_name=repo.repo_name,
86 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 def test_show(self, pr_util, pr_merge_enabled):
90 def test_show(self, pr_util, pr_merge_enabled):
91 pull_request = pr_util.create_pull_request(
91 pull_request = pr_util.create_pull_request(
92 mergeable=pr_merge_enabled, enable_notifications=False)
92 mergeable=pr_merge_enabled, enable_notifications=False)
93
93
94 response = self.app.get(route_path(
94 response = self.app.get(route_path(
95 'pullrequest_show',
95 'pullrequest_show',
96 repo_name=pull_request.target_repo.scm_instance().name,
96 repo_name=pull_request.target_repo.scm_instance().name,
97 pull_request_id=pull_request.pull_request_id))
97 pull_request_id=pull_request.pull_request_id))
98
98
99 for commit_id in pull_request.revisions:
99 for commit_id in pull_request.revisions:
100 response.mustcontain(commit_id)
100 response.mustcontain(commit_id)
101
101
102 assert pull_request.target_ref_parts.type in response
102 assert pull_request.target_ref_parts.type in response
103 assert pull_request.target_ref_parts.name in response
103 assert pull_request.target_ref_parts.name in response
104 target_clone_url = pull_request.target_repo.clone_url()
104 target_clone_url = pull_request.target_repo.clone_url()
105 assert target_clone_url in response
105 assert target_clone_url in response
106
106
107 assert 'class="pull-request-merge"' in response
107 assert 'class="pull-request-merge"' in response
108 assert (
108 assert (
109 'Server-side pull request merging is disabled.'
109 'Server-side pull request merging is disabled.'
110 in response) != pr_merge_enabled
110 in response) != pr_merge_enabled
111
111
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 # Logout
113 # Logout
114 response = self.app.post(
114 response = self.app.post(
115 h.route_path('logout'),
115 h.route_path('logout'),
116 params={'csrf_token': csrf_token})
116 params={'csrf_token': csrf_token})
117 # Login as regular user
117 # Login as regular user
118 response = self.app.post(h.route_path('login'),
118 response = self.app.post(h.route_path('login'),
119 {'username': TEST_USER_REGULAR_LOGIN,
119 {'username': TEST_USER_REGULAR_LOGIN,
120 'password': 'test12'})
120 'password': 'test12'})
121
121
122 pull_request = pr_util.create_pull_request(
122 pull_request = pr_util.create_pull_request(
123 author=TEST_USER_REGULAR_LOGIN)
123 author=TEST_USER_REGULAR_LOGIN)
124
124
125 response = self.app.get(route_path(
125 response = self.app.get(route_path(
126 'pullrequest_show',
126 'pullrequest_show',
127 repo_name=pull_request.target_repo.scm_instance().name,
127 repo_name=pull_request.target_repo.scm_instance().name,
128 pull_request_id=pull_request.pull_request_id))
128 pull_request_id=pull_request.pull_request_id))
129
129
130 response.mustcontain('Server-side pull request merging is disabled.')
130 response.mustcontain('Server-side pull request merging is disabled.')
131
131
132 assert_response = response.assert_response()
132 assert_response = response.assert_response()
133 # for regular user without a merge permissions, we don't see it
133 # for regular user without a merge permissions, we don't see it
134 assert_response.no_element_exists('#close-pull-request-action')
134 assert_response.no_element_exists('#close-pull-request-action')
135
135
136 user_util.grant_user_permission_to_repo(
136 user_util.grant_user_permission_to_repo(
137 pull_request.target_repo,
137 pull_request.target_repo,
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 'repository.write')
139 'repository.write')
140 response = self.app.get(route_path(
140 response = self.app.get(route_path(
141 'pullrequest_show',
141 'pullrequest_show',
142 repo_name=pull_request.target_repo.scm_instance().name,
142 repo_name=pull_request.target_repo.scm_instance().name,
143 pull_request_id=pull_request.pull_request_id))
143 pull_request_id=pull_request.pull_request_id))
144
144
145 response.mustcontain('Server-side pull request merging is disabled.')
145 response.mustcontain('Server-side pull request merging is disabled.')
146
146
147 assert_response = response.assert_response()
147 assert_response = response.assert_response()
148 # now regular user has a merge permissions, we have CLOSE button
148 # now regular user has a merge permissions, we have CLOSE button
149 assert_response.one_element_exists('#close-pull-request-action')
149 assert_response.one_element_exists('#close-pull-request-action')
150
150
151 def test_show_invalid_commit_id(self, pr_util):
151 def test_show_invalid_commit_id(self, pr_util):
152 # Simulating invalid revisions which will cause a lookup error
152 # Simulating invalid revisions which will cause a lookup error
153 pull_request = pr_util.create_pull_request()
153 pull_request = pr_util.create_pull_request()
154 pull_request.revisions = ['invalid']
154 pull_request.revisions = ['invalid']
155 Session().add(pull_request)
155 Session().add(pull_request)
156 Session().commit()
156 Session().commit()
157
157
158 response = self.app.get(route_path(
158 response = self.app.get(route_path(
159 'pullrequest_show',
159 'pullrequest_show',
160 repo_name=pull_request.target_repo.scm_instance().name,
160 repo_name=pull_request.target_repo.scm_instance().name,
161 pull_request_id=pull_request.pull_request_id))
161 pull_request_id=pull_request.pull_request_id))
162
162
163 for commit_id in pull_request.revisions:
163 for commit_id in pull_request.revisions:
164 response.mustcontain(commit_id)
164 response.mustcontain(commit_id)
165
165
166 def test_show_invalid_source_reference(self, pr_util):
166 def test_show_invalid_source_reference(self, pr_util):
167 pull_request = pr_util.create_pull_request()
167 pull_request = pr_util.create_pull_request()
168 pull_request.source_ref = 'branch:b:invalid'
168 pull_request.source_ref = 'branch:b:invalid'
169 Session().add(pull_request)
169 Session().add(pull_request)
170 Session().commit()
170 Session().commit()
171
171
172 self.app.get(route_path(
172 self.app.get(route_path(
173 'pullrequest_show',
173 'pullrequest_show',
174 repo_name=pull_request.target_repo.scm_instance().name,
174 repo_name=pull_request.target_repo.scm_instance().name,
175 pull_request_id=pull_request.pull_request_id))
175 pull_request_id=pull_request.pull_request_id))
176
176
177 def test_edit_title_description(self, pr_util, csrf_token):
177 def test_edit_title_description(self, pr_util, csrf_token):
178 pull_request = pr_util.create_pull_request()
178 pull_request = pr_util.create_pull_request()
179 pull_request_id = pull_request.pull_request_id
179 pull_request_id = pull_request.pull_request_id
180
180
181 response = self.app.post(
181 response = self.app.post(
182 route_path('pullrequest_update',
182 route_path('pullrequest_update',
183 repo_name=pull_request.target_repo.repo_name,
183 repo_name=pull_request.target_repo.repo_name,
184 pull_request_id=pull_request_id),
184 pull_request_id=pull_request_id),
185 params={
185 params={
186 'edit_pull_request': 'true',
186 'edit_pull_request': 'true',
187 'title': 'New title',
187 'title': 'New title',
188 'description': 'New description',
188 'description': 'New description',
189 'csrf_token': csrf_token})
189 'csrf_token': csrf_token})
190
190
191 assert_session_flash(
191 assert_session_flash(
192 response, u'Pull request title & description updated.',
192 response, u'Pull request title & description updated.',
193 category='success')
193 category='success')
194
194
195 pull_request = PullRequest.get(pull_request_id)
195 pull_request = PullRequest.get(pull_request_id)
196 assert pull_request.title == 'New title'
196 assert pull_request.title == 'New title'
197 assert pull_request.description == 'New description'
197 assert pull_request.description == 'New description'
198
198
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 pull_request = pr_util.create_pull_request()
200 pull_request = pr_util.create_pull_request()
201 pull_request_id = pull_request.pull_request_id
201 pull_request_id = pull_request.pull_request_id
202 repo_name = pull_request.target_repo.repo_name
202 repo_name = pull_request.target_repo.repo_name
203 pr_util.close()
203 pr_util.close()
204
204
205 response = self.app.post(
205 response = self.app.post(
206 route_path('pullrequest_update',
206 route_path('pullrequest_update',
207 repo_name=repo_name, pull_request_id=pull_request_id),
207 repo_name=repo_name, pull_request_id=pull_request_id),
208 params={
208 params={
209 'edit_pull_request': 'true',
209 'edit_pull_request': 'true',
210 'title': 'New title',
210 'title': 'New title',
211 'description': 'New description',
211 'description': 'New description',
212 'csrf_token': csrf_token}, status=200)
212 'csrf_token': csrf_token}, status=200)
213 assert_session_flash(
213 assert_session_flash(
214 response, u'Cannot update closed pull requests.',
214 response, u'Cannot update closed pull requests.',
215 category='error')
215 category='error')
216
216
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219
219
220 pull_request = pr_util.create_pull_request()
220 pull_request = pr_util.create_pull_request()
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 Session().add(pull_request)
222 Session().add(pull_request)
223 Session().commit()
223 Session().commit()
224
224
225 pull_request_id = pull_request.pull_request_id
225 pull_request_id = pull_request.pull_request_id
226
226
227 response = self.app.post(
227 response = self.app.post(
228 route_path('pullrequest_update',
228 route_path('pullrequest_update',
229 repo_name=pull_request.target_repo.repo_name,
229 repo_name=pull_request.target_repo.repo_name,
230 pull_request_id=pull_request_id),
230 pull_request_id=pull_request_id),
231 params={'update_commits': 'true',
231 params={'update_commits': 'true',
232 'csrf_token': csrf_token})
232 'csrf_token': csrf_token})
233
233
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 UpdateFailureReason.MISSING_SOURCE_REF])
235 UpdateFailureReason.MISSING_SOURCE_REF])
236 assert_session_flash(response, expected_msg, category='error')
236 assert_session_flash(response, expected_msg, category='error')
237
237
238 def test_missing_target_reference(self, pr_util, csrf_token):
238 def test_missing_target_reference(self, pr_util, csrf_token):
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 pull_request = pr_util.create_pull_request(
240 pull_request = pr_util.create_pull_request(
241 approved=True, mergeable=True)
241 approved=True, mergeable=True)
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 Session().add(pull_request)
243 Session().add(pull_request)
244 Session().commit()
244 Session().commit()
245
245
246 pull_request_id = pull_request.pull_request_id
246 pull_request_id = pull_request.pull_request_id
247 pull_request_url = route_path(
247 pull_request_url = route_path(
248 'pullrequest_show',
248 'pullrequest_show',
249 repo_name=pull_request.target_repo.repo_name,
249 repo_name=pull_request.target_repo.repo_name,
250 pull_request_id=pull_request_id)
250 pull_request_id=pull_request_id)
251
251
252 response = self.app.get(pull_request_url)
252 response = self.app.get(pull_request_url)
253
253
254 assertr = AssertResponse(response)
254 assertr = AssertResponse(response)
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 MergeFailureReason.MISSING_TARGET_REF]
256 MergeFailureReason.MISSING_TARGET_REF]
257 assertr.element_contains(
257 assertr.element_contains(
258 'span[data-role="merge-message"]', str(expected_msg))
258 'span[data-role="merge-message"]', str(expected_msg))
259
259
260 def test_comment_and_close_pull_request_custom_message_approved(
260 def test_comment_and_close_pull_request_custom_message_approved(
261 self, pr_util, csrf_token, xhr_header):
261 self, pr_util, csrf_token, xhr_header):
262
262
263 pull_request = pr_util.create_pull_request(approved=True)
263 pull_request = pr_util.create_pull_request(approved=True)
264 pull_request_id = pull_request.pull_request_id
264 pull_request_id = pull_request.pull_request_id
265 author = pull_request.user_id
265 author = pull_request.user_id
266 repo = pull_request.target_repo.repo_id
266 repo = pull_request.target_repo.repo_id
267
267
268 self.app.post(
268 self.app.post(
269 route_path('pullrequest_comment_create',
269 route_path('pullrequest_comment_create',
270 repo_name=pull_request.target_repo.scm_instance().name,
270 repo_name=pull_request.target_repo.scm_instance().name,
271 pull_request_id=pull_request_id),
271 pull_request_id=pull_request_id),
272 params={
272 params={
273 'close_pull_request': '1',
273 'close_pull_request': '1',
274 'text': 'Closing a PR',
274 'text': 'Closing a PR',
275 'csrf_token': csrf_token},
275 'csrf_token': csrf_token},
276 extra_environ=xhr_header,)
276 extra_environ=xhr_header,)
277
277
278 journal = UserLog.query()\
278 journal = UserLog.query()\
279 .filter(UserLog.user_id == author)\
279 .filter(UserLog.user_id == author)\
280 .filter(UserLog.repository_id == repo) \
280 .filter(UserLog.repository_id == repo) \
281 .order_by('user_log_id') \
281 .order_by('user_log_id') \
282 .all()
282 .all()
283 assert journal[-1].action == 'repo.pull_request.close'
283 assert journal[-1].action == 'repo.pull_request.close'
284
284
285 pull_request = PullRequest.get(pull_request_id)
285 pull_request = PullRequest.get(pull_request_id)
286 assert pull_request.is_closed()
286 assert pull_request.is_closed()
287
287
288 status = ChangesetStatusModel().get_status(
288 status = ChangesetStatusModel().get_status(
289 pull_request.source_repo, pull_request=pull_request)
289 pull_request.source_repo, pull_request=pull_request)
290 assert status == ChangesetStatus.STATUS_APPROVED
290 assert status == ChangesetStatus.STATUS_APPROVED
291 comments = ChangesetComment().query() \
291 comments = ChangesetComment().query() \
292 .filter(ChangesetComment.pull_request == pull_request) \
292 .filter(ChangesetComment.pull_request == pull_request) \
293 .order_by(ChangesetComment.comment_id.asc())\
293 .order_by(ChangesetComment.comment_id.asc())\
294 .all()
294 .all()
295 assert comments[-1].text == 'Closing a PR'
295 assert comments[-1].text == 'Closing a PR'
296
296
297 def test_comment_force_close_pull_request_rejected(
297 def test_comment_force_close_pull_request_rejected(
298 self, pr_util, csrf_token, xhr_header):
298 self, pr_util, csrf_token, xhr_header):
299 pull_request = pr_util.create_pull_request()
299 pull_request = pr_util.create_pull_request()
300 pull_request_id = pull_request.pull_request_id
300 pull_request_id = pull_request.pull_request_id
301 PullRequestModel().update_reviewers(
301 PullRequestModel().update_reviewers(
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 pull_request.author)
303 pull_request.author)
304 author = pull_request.user_id
304 author = pull_request.user_id
305 repo = pull_request.target_repo.repo_id
305 repo = pull_request.target_repo.repo_id
306
306
307 self.app.post(
307 self.app.post(
308 route_path('pullrequest_comment_create',
308 route_path('pullrequest_comment_create',
309 repo_name=pull_request.target_repo.scm_instance().name,
309 repo_name=pull_request.target_repo.scm_instance().name,
310 pull_request_id=pull_request_id),
310 pull_request_id=pull_request_id),
311 params={
311 params={
312 'close_pull_request': '1',
312 'close_pull_request': '1',
313 'csrf_token': csrf_token},
313 'csrf_token': csrf_token},
314 extra_environ=xhr_header)
314 extra_environ=xhr_header)
315
315
316 pull_request = PullRequest.get(pull_request_id)
316 pull_request = PullRequest.get(pull_request_id)
317
317
318 journal = UserLog.query()\
318 journal = UserLog.query()\
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 .order_by('user_log_id') \
320 .order_by('user_log_id') \
321 .all()
321 .all()
322 assert journal[-1].action == 'repo.pull_request.close'
322 assert journal[-1].action == 'repo.pull_request.close'
323
323
324 # check only the latest status, not the review status
324 # check only the latest status, not the review status
325 status = ChangesetStatusModel().get_status(
325 status = ChangesetStatusModel().get_status(
326 pull_request.source_repo, pull_request=pull_request)
326 pull_request.source_repo, pull_request=pull_request)
327 assert status == ChangesetStatus.STATUS_REJECTED
327 assert status == ChangesetStatus.STATUS_REJECTED
328
328
329 def test_comment_and_close_pull_request(
329 def test_comment_and_close_pull_request(
330 self, pr_util, csrf_token, xhr_header):
330 self, pr_util, csrf_token, xhr_header):
331 pull_request = pr_util.create_pull_request()
331 pull_request = pr_util.create_pull_request()
332 pull_request_id = pull_request.pull_request_id
332 pull_request_id = pull_request.pull_request_id
333
333
334 response = self.app.post(
334 response = self.app.post(
335 route_path('pullrequest_comment_create',
335 route_path('pullrequest_comment_create',
336 repo_name=pull_request.target_repo.scm_instance().name,
336 repo_name=pull_request.target_repo.scm_instance().name,
337 pull_request_id=pull_request.pull_request_id),
337 pull_request_id=pull_request.pull_request_id),
338 params={
338 params={
339 'close_pull_request': 'true',
339 'close_pull_request': 'true',
340 'csrf_token': csrf_token},
340 'csrf_token': csrf_token},
341 extra_environ=xhr_header)
341 extra_environ=xhr_header)
342
342
343 assert response.json
343 assert response.json
344
344
345 pull_request = PullRequest.get(pull_request_id)
345 pull_request = PullRequest.get(pull_request_id)
346 assert pull_request.is_closed()
346 assert pull_request.is_closed()
347
347
348 # check only the latest status, not the review status
348 # check only the latest status, not the review status
349 status = ChangesetStatusModel().get_status(
349 status = ChangesetStatusModel().get_status(
350 pull_request.source_repo, pull_request=pull_request)
350 pull_request.source_repo, pull_request=pull_request)
351 assert status == ChangesetStatus.STATUS_REJECTED
351 assert status == ChangesetStatus.STATUS_REJECTED
352
352
353 def test_create_pull_request(self, backend, csrf_token):
353 def test_create_pull_request(self, backend, csrf_token):
354 commits = [
354 commits = [
355 {'message': 'ancestor'},
355 {'message': 'ancestor'},
356 {'message': 'change'},
356 {'message': 'change'},
357 {'message': 'change2'},
357 {'message': 'change2'},
358 ]
358 ]
359 commit_ids = backend.create_master_repo(commits)
359 commit_ids = backend.create_master_repo(commits)
360 target = backend.create_repo(heads=['ancestor'])
360 target = backend.create_repo(heads=['ancestor'])
361 source = backend.create_repo(heads=['change2'])
361 source = backend.create_repo(heads=['change2'])
362
362
363 response = self.app.post(
363 response = self.app.post(
364 route_path('pullrequest_create', repo_name=source.repo_name),
364 route_path('pullrequest_create', repo_name=source.repo_name),
365 [
365 [
366 ('source_repo', source.repo_name),
366 ('source_repo', source.repo_name),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 ('target_repo', target.repo_name),
368 ('target_repo', target.repo_name),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
371 ('pullrequest_desc', 'Description'),
371 ('pullrequest_desc', 'Description'),
372 ('pullrequest_title', 'Title'),
372 ('pullrequest_title', 'Title'),
373 ('__start__', 'review_members:sequence'),
373 ('__start__', 'review_members:sequence'),
374 ('__start__', 'reviewer:mapping'),
374 ('__start__', 'reviewer:mapping'),
375 ('user_id', '1'),
375 ('user_id', '1'),
376 ('__start__', 'reasons:sequence'),
376 ('__start__', 'reasons:sequence'),
377 ('reason', 'Some reason'),
377 ('reason', 'Some reason'),
378 ('__end__', 'reasons:sequence'),
378 ('__end__', 'reasons:sequence'),
379 ('__start__', 'rules:sequence'),
379 ('__start__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
381 ('mandatory', 'False'),
381 ('mandatory', 'False'),
382 ('__end__', 'reviewer:mapping'),
382 ('__end__', 'reviewer:mapping'),
383 ('__end__', 'review_members:sequence'),
383 ('__end__', 'review_members:sequence'),
384 ('__start__', 'revisions:sequence'),
384 ('__start__', 'revisions:sequence'),
385 ('revisions', commit_ids['change']),
385 ('revisions', commit_ids['change']),
386 ('revisions', commit_ids['change2']),
386 ('revisions', commit_ids['change2']),
387 ('__end__', 'revisions:sequence'),
387 ('__end__', 'revisions:sequence'),
388 ('user', ''),
388 ('user', ''),
389 ('csrf_token', csrf_token),
389 ('csrf_token', csrf_token),
390 ],
390 ],
391 status=302)
391 status=302)
392
392
393 location = response.headers['Location']
393 location = response.headers['Location']
394 pull_request_id = location.rsplit('/', 1)[1]
394 pull_request_id = location.rsplit('/', 1)[1]
395 assert pull_request_id != 'new'
395 assert pull_request_id != 'new'
396 pull_request = PullRequest.get(int(pull_request_id))
396 pull_request = PullRequest.get(int(pull_request_id))
397
397
398 # check that we have now both revisions
398 # check that we have now both revisions
399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
402 assert pull_request.target_ref == expected_target_ref
402 assert pull_request.target_ref == expected_target_ref
403
403
404 def test_reviewer_notifications(self, backend, csrf_token):
404 def test_reviewer_notifications(self, backend, csrf_token):
405 # We have to use the app.post for this test so it will create the
405 # We have to use the app.post for this test so it will create the
406 # notifications properly with the new PR
406 # notifications properly with the new PR
407 commits = [
407 commits = [
408 {'message': 'ancestor',
408 {'message': 'ancestor',
409 'added': [FileNode('file_A', content='content_of_ancestor')]},
409 'added': [FileNode('file_A', content='content_of_ancestor')]},
410 {'message': 'change',
410 {'message': 'change',
411 'added': [FileNode('file_a', content='content_of_change')]},
411 'added': [FileNode('file_a', content='content_of_change')]},
412 {'message': 'change-child'},
412 {'message': 'change-child'},
413 {'message': 'ancestor-child', 'parents': ['ancestor'],
413 {'message': 'ancestor-child', 'parents': ['ancestor'],
414 'added': [
414 'added': [
415 FileNode('file_B', content='content_of_ancestor_child')]},
415 FileNode('file_B', content='content_of_ancestor_child')]},
416 {'message': 'ancestor-child-2'},
416 {'message': 'ancestor-child-2'},
417 ]
417 ]
418 commit_ids = backend.create_master_repo(commits)
418 commit_ids = backend.create_master_repo(commits)
419 target = backend.create_repo(heads=['ancestor-child'])
419 target = backend.create_repo(heads=['ancestor-child'])
420 source = backend.create_repo(heads=['change'])
420 source = backend.create_repo(heads=['change'])
421
421
422 response = self.app.post(
422 response = self.app.post(
423 route_path('pullrequest_create', repo_name=source.repo_name),
423 route_path('pullrequest_create', repo_name=source.repo_name),
424 [
424 [
425 ('source_repo', source.repo_name),
425 ('source_repo', source.repo_name),
426 ('source_ref', 'branch:default:' + commit_ids['change']),
426 ('source_ref', 'branch:default:' + commit_ids['change']),
427 ('target_repo', target.repo_name),
427 ('target_repo', target.repo_name),
428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
429 ('common_ancestor', commit_ids['ancestor']),
429 ('common_ancestor', commit_ids['ancestor']),
430 ('pullrequest_desc', 'Description'),
430 ('pullrequest_desc', 'Description'),
431 ('pullrequest_title', 'Title'),
431 ('pullrequest_title', 'Title'),
432 ('__start__', 'review_members:sequence'),
432 ('__start__', 'review_members:sequence'),
433 ('__start__', 'reviewer:mapping'),
433 ('__start__', 'reviewer:mapping'),
434 ('user_id', '2'),
434 ('user_id', '2'),
435 ('__start__', 'reasons:sequence'),
435 ('__start__', 'reasons:sequence'),
436 ('reason', 'Some reason'),
436 ('reason', 'Some reason'),
437 ('__end__', 'reasons:sequence'),
437 ('__end__', 'reasons:sequence'),
438 ('__start__', 'rules:sequence'),
438 ('__start__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
440 ('mandatory', 'False'),
440 ('mandatory', 'False'),
441 ('__end__', 'reviewer:mapping'),
441 ('__end__', 'reviewer:mapping'),
442 ('__end__', 'review_members:sequence'),
442 ('__end__', 'review_members:sequence'),
443 ('__start__', 'revisions:sequence'),
443 ('__start__', 'revisions:sequence'),
444 ('revisions', commit_ids['change']),
444 ('revisions', commit_ids['change']),
445 ('__end__', 'revisions:sequence'),
445 ('__end__', 'revisions:sequence'),
446 ('user', ''),
446 ('user', ''),
447 ('csrf_token', csrf_token),
447 ('csrf_token', csrf_token),
448 ],
448 ],
449 status=302)
449 status=302)
450
450
451 location = response.headers['Location']
451 location = response.headers['Location']
452
452
453 pull_request_id = location.rsplit('/', 1)[1]
453 pull_request_id = location.rsplit('/', 1)[1]
454 assert pull_request_id != 'new'
454 assert pull_request_id != 'new'
455 pull_request = PullRequest.get(int(pull_request_id))
455 pull_request = PullRequest.get(int(pull_request_id))
456
456
457 # Check that a notification was made
457 # Check that a notification was made
458 notifications = Notification.query()\
458 notifications = Notification.query()\
459 .filter(Notification.created_by == pull_request.author.user_id,
459 .filter(Notification.created_by == pull_request.author.user_id,
460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
461 Notification.subject.contains(
461 Notification.subject.contains(
462 "wants you to review pull request #%s" % pull_request_id))
462 "wants you to review pull request #%s" % pull_request_id))
463 assert len(notifications.all()) == 1
463 assert len(notifications.all()) == 1
464
464
465 # Change reviewers and check that a notification was made
465 # Change reviewers and check that a notification was made
466 PullRequestModel().update_reviewers(
466 PullRequestModel().update_reviewers(
467 pull_request.pull_request_id, [(1, [], False, [])],
467 pull_request.pull_request_id, [(1, [], False, [])],
468 pull_request.author)
468 pull_request.author)
469 assert len(notifications.all()) == 2
469 assert len(notifications.all()) == 2
470
470
471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
472 csrf_token):
472 csrf_token):
473 commits = [
473 commits = [
474 {'message': 'ancestor',
474 {'message': 'ancestor',
475 'added': [FileNode('file_A', content='content_of_ancestor')]},
475 'added': [FileNode('file_A', content='content_of_ancestor')]},
476 {'message': 'change',
476 {'message': 'change',
477 'added': [FileNode('file_a', content='content_of_change')]},
477 'added': [FileNode('file_a', content='content_of_change')]},
478 {'message': 'change-child'},
478 {'message': 'change-child'},
479 {'message': 'ancestor-child', 'parents': ['ancestor'],
479 {'message': 'ancestor-child', 'parents': ['ancestor'],
480 'added': [
480 'added': [
481 FileNode('file_B', content='content_of_ancestor_child')]},
481 FileNode('file_B', content='content_of_ancestor_child')]},
482 {'message': 'ancestor-child-2'},
482 {'message': 'ancestor-child-2'},
483 ]
483 ]
484 commit_ids = backend.create_master_repo(commits)
484 commit_ids = backend.create_master_repo(commits)
485 target = backend.create_repo(heads=['ancestor-child'])
485 target = backend.create_repo(heads=['ancestor-child'])
486 source = backend.create_repo(heads=['change'])
486 source = backend.create_repo(heads=['change'])
487
487
488 response = self.app.post(
488 response = self.app.post(
489 route_path('pullrequest_create', repo_name=source.repo_name),
489 route_path('pullrequest_create', repo_name=source.repo_name),
490 [
490 [
491 ('source_repo', source.repo_name),
491 ('source_repo', source.repo_name),
492 ('source_ref', 'branch:default:' + commit_ids['change']),
492 ('source_ref', 'branch:default:' + commit_ids['change']),
493 ('target_repo', target.repo_name),
493 ('target_repo', target.repo_name),
494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
495 ('common_ancestor', commit_ids['ancestor']),
495 ('common_ancestor', commit_ids['ancestor']),
496 ('pullrequest_desc', 'Description'),
496 ('pullrequest_desc', 'Description'),
497 ('pullrequest_title', 'Title'),
497 ('pullrequest_title', 'Title'),
498 ('__start__', 'review_members:sequence'),
498 ('__start__', 'review_members:sequence'),
499 ('__start__', 'reviewer:mapping'),
499 ('__start__', 'reviewer:mapping'),
500 ('user_id', '1'),
500 ('user_id', '1'),
501 ('__start__', 'reasons:sequence'),
501 ('__start__', 'reasons:sequence'),
502 ('reason', 'Some reason'),
502 ('reason', 'Some reason'),
503 ('__end__', 'reasons:sequence'),
503 ('__end__', 'reasons:sequence'),
504 ('__start__', 'rules:sequence'),
504 ('__start__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
506 ('mandatory', 'False'),
506 ('mandatory', 'False'),
507 ('__end__', 'reviewer:mapping'),
507 ('__end__', 'reviewer:mapping'),
508 ('__end__', 'review_members:sequence'),
508 ('__end__', 'review_members:sequence'),
509 ('__start__', 'revisions:sequence'),
509 ('__start__', 'revisions:sequence'),
510 ('revisions', commit_ids['change']),
510 ('revisions', commit_ids['change']),
511 ('__end__', 'revisions:sequence'),
511 ('__end__', 'revisions:sequence'),
512 ('user', ''),
512 ('user', ''),
513 ('csrf_token', csrf_token),
513 ('csrf_token', csrf_token),
514 ],
514 ],
515 status=302)
515 status=302)
516
516
517 location = response.headers['Location']
517 location = response.headers['Location']
518
518
519 pull_request_id = location.rsplit('/', 1)[1]
519 pull_request_id = location.rsplit('/', 1)[1]
520 assert pull_request_id != 'new'
520 assert pull_request_id != 'new'
521 pull_request = PullRequest.get(int(pull_request_id))
521 pull_request = PullRequest.get(int(pull_request_id))
522
522
523 # target_ref has to point to the ancestor's commit_id in order to
523 # target_ref has to point to the ancestor's commit_id in order to
524 # show the correct diff
524 # show the correct diff
525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
526 assert pull_request.target_ref == expected_target_ref
526 assert pull_request.target_ref == expected_target_ref
527
527
528 # Check generated diff contents
528 # Check generated diff contents
529 response = response.follow()
529 response = response.follow()
530 assert 'content_of_ancestor' not in response.body
530 assert 'content_of_ancestor' not in response.body
531 assert 'content_of_ancestor-child' not in response.body
531 assert 'content_of_ancestor-child' not in response.body
532 assert 'content_of_change' in response.body
532 assert 'content_of_change' in response.body
533
533
534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
535 # Clear any previous calls to rcextensions
535 # Clear any previous calls to rcextensions
536 rhodecode.EXTENSIONS.calls.clear()
536 rhodecode.EXTENSIONS.calls.clear()
537
537
538 pull_request = pr_util.create_pull_request(
538 pull_request = pr_util.create_pull_request(
539 approved=True, mergeable=True)
539 approved=True, mergeable=True)
540 pull_request_id = pull_request.pull_request_id
540 pull_request_id = pull_request.pull_request_id
541 repo_name = pull_request.target_repo.scm_instance().name,
541 repo_name = pull_request.target_repo.scm_instance().name,
542
542
543 response = self.app.post(
543 response = self.app.post(
544 route_path('pullrequest_merge',
544 route_path('pullrequest_merge',
545 repo_name=str(repo_name[0]),
545 repo_name=str(repo_name[0]),
546 pull_request_id=pull_request_id),
546 pull_request_id=pull_request_id),
547 params={'csrf_token': csrf_token}).follow()
547 params={'csrf_token': csrf_token}).follow()
548
548
549 pull_request = PullRequest.get(pull_request_id)
549 pull_request = PullRequest.get(pull_request_id)
550
550
551 assert response.status_int == 200
551 assert response.status_int == 200
552 assert pull_request.is_closed()
552 assert pull_request.is_closed()
553 assert_pull_request_status(
553 assert_pull_request_status(
554 pull_request, ChangesetStatus.STATUS_APPROVED)
554 pull_request, ChangesetStatus.STATUS_APPROVED)
555
555
556 # Check the relevant log entries were added
556 # Check the relevant log entries were added
557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
558 actions = [log.action for log in user_logs]
558 actions = [log.action for log in user_logs]
559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
560 expected_actions = [
560 expected_actions = [
561 u'repo.pull_request.close',
561 u'repo.pull_request.close',
562 u'repo.pull_request.merge',
562 u'repo.pull_request.merge',
563 u'repo.pull_request.comment.create'
563 u'repo.pull_request.comment.create'
564 ]
564 ]
565 assert actions == expected_actions
565 assert actions == expected_actions
566
566
567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
568 actions = [log for log in user_logs]
568 actions = [log for log in user_logs]
569 assert actions[-1].action == 'user.push'
569 assert actions[-1].action == 'user.push'
570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
571
571
572 # Check post_push rcextension was really executed
572 # Check post_push rcextension was really executed
573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
574 assert len(push_calls) == 1
574 assert len(push_calls) == 1
575 unused_last_call_args, last_call_kwargs = push_calls[0]
575 unused_last_call_args, last_call_kwargs = push_calls[0]
576 assert last_call_kwargs['action'] == 'push'
576 assert last_call_kwargs['action'] == 'push'
577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
578
578
579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
580 pull_request = pr_util.create_pull_request(mergeable=False)
580 pull_request = pr_util.create_pull_request(mergeable=False)
581 pull_request_id = pull_request.pull_request_id
581 pull_request_id = pull_request.pull_request_id
582 pull_request = PullRequest.get(pull_request_id)
582 pull_request = PullRequest.get(pull_request_id)
583
583
584 response = self.app.post(
584 response = self.app.post(
585 route_path('pullrequest_merge',
585 route_path('pullrequest_merge',
586 repo_name=pull_request.target_repo.scm_instance().name,
586 repo_name=pull_request.target_repo.scm_instance().name,
587 pull_request_id=pull_request.pull_request_id),
587 pull_request_id=pull_request.pull_request_id),
588 params={'csrf_token': csrf_token}).follow()
588 params={'csrf_token': csrf_token}).follow()
589
589
590 assert response.status_int == 200
590 assert response.status_int == 200
591 response.mustcontain(
591 response.mustcontain(
592 'Merge is not currently possible because of below failed checks.')
592 'Merge is not currently possible because of below failed checks.')
593 response.mustcontain('Server-side pull request merging is disabled.')
593 response.mustcontain('Server-side pull request merging is disabled.')
594
594
595 @pytest.mark.skip_backends('svn')
595 @pytest.mark.skip_backends('svn')
596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
597 pull_request = pr_util.create_pull_request(mergeable=True)
597 pull_request = pr_util.create_pull_request(mergeable=True)
598 pull_request_id = pull_request.pull_request_id
598 pull_request_id = pull_request.pull_request_id
599 repo_name = pull_request.target_repo.scm_instance().name
599 repo_name = pull_request.target_repo.scm_instance().name
600
600
601 response = self.app.post(
601 response = self.app.post(
602 route_path('pullrequest_merge',
602 route_path('pullrequest_merge',
603 repo_name=repo_name,
603 repo_name=repo_name,
604 pull_request_id=pull_request_id),
604 pull_request_id=pull_request_id),
605 params={'csrf_token': csrf_token}).follow()
605 params={'csrf_token': csrf_token}).follow()
606
606
607 assert response.status_int == 200
607 assert response.status_int == 200
608
608
609 response.mustcontain(
609 response.mustcontain(
610 'Merge is not currently possible because of below failed checks.')
610 'Merge is not currently possible because of below failed checks.')
611 response.mustcontain('Pull request reviewer approval is pending.')
611 response.mustcontain('Pull request reviewer approval is pending.')
612
612
613 def test_merge_pull_request_renders_failure_reason(
613 def test_merge_pull_request_renders_failure_reason(
614 self, user_regular, csrf_token, pr_util):
614 self, user_regular, csrf_token, pr_util):
615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
616 pull_request_id = pull_request.pull_request_id
616 pull_request_id = pull_request.pull_request_id
617 repo_name = pull_request.target_repo.scm_instance().name
617 repo_name = pull_request.target_repo.scm_instance().name
618
618
619 model_patcher = mock.patch.multiple(
619 model_patcher = mock.patch.multiple(
620 PullRequestModel,
620 PullRequestModel,
621 merge=mock.Mock(return_value=MergeResponse(
621 merge_repo=mock.Mock(return_value=MergeResponse(
622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
624
624
625 with model_patcher:
625 with model_patcher:
626 response = self.app.post(
626 response = self.app.post(
627 route_path('pullrequest_merge',
627 route_path('pullrequest_merge',
628 repo_name=repo_name,
628 repo_name=repo_name,
629 pull_request_id=pull_request_id),
629 pull_request_id=pull_request_id),
630 params={'csrf_token': csrf_token}, status=302)
630 params={'csrf_token': csrf_token}, status=302)
631
631
632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
633 MergeFailureReason.PUSH_FAILED])
633 MergeFailureReason.PUSH_FAILED])
634
634
635 def test_update_source_revision(self, backend, csrf_token):
635 def test_update_source_revision(self, backend, csrf_token):
636 commits = [
636 commits = [
637 {'message': 'ancestor'},
637 {'message': 'ancestor'},
638 {'message': 'change'},
638 {'message': 'change'},
639 {'message': 'change-2'},
639 {'message': 'change-2'},
640 ]
640 ]
641 commit_ids = backend.create_master_repo(commits)
641 commit_ids = backend.create_master_repo(commits)
642 target = backend.create_repo(heads=['ancestor'])
642 target = backend.create_repo(heads=['ancestor'])
643 source = backend.create_repo(heads=['change'])
643 source = backend.create_repo(heads=['change'])
644
644
645 # create pr from a in source to A in target
645 # create pr from a in source to A in target
646 pull_request = PullRequest()
646 pull_request = PullRequest()
647 pull_request.source_repo = source
647 pull_request.source_repo = source
648 # TODO: johbo: Make sure that we write the source ref this way!
648 # TODO: johbo: Make sure that we write the source ref this way!
649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
651 pull_request.target_repo = target
651 pull_request.target_repo = target
652
652
653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
654 branch=backend.default_branch_name,
654 branch=backend.default_branch_name,
655 commit_id=commit_ids['ancestor'])
655 commit_id=commit_ids['ancestor'])
656 pull_request.revisions = [commit_ids['change']]
656 pull_request.revisions = [commit_ids['change']]
657 pull_request.title = u"Test"
657 pull_request.title = u"Test"
658 pull_request.description = u"Description"
658 pull_request.description = u"Description"
659 pull_request.author = UserModel().get_by_username(
659 pull_request.author = UserModel().get_by_username(
660 TEST_USER_ADMIN_LOGIN)
660 TEST_USER_ADMIN_LOGIN)
661 Session().add(pull_request)
661 Session().add(pull_request)
662 Session().commit()
662 Session().commit()
663 pull_request_id = pull_request.pull_request_id
663 pull_request_id = pull_request.pull_request_id
664
664
665 # source has ancestor - change - change-2
665 # source has ancestor - change - change-2
666 backend.pull_heads(source, heads=['change-2'])
666 backend.pull_heads(source, heads=['change-2'])
667
667
668 # update PR
668 # update PR
669 self.app.post(
669 self.app.post(
670 route_path('pullrequest_update',
670 route_path('pullrequest_update',
671 repo_name=target.repo_name,
671 repo_name=target.repo_name,
672 pull_request_id=pull_request_id),
672 pull_request_id=pull_request_id),
673 params={'update_commits': 'true',
673 params={'update_commits': 'true',
674 'csrf_token': csrf_token})
674 'csrf_token': csrf_token})
675
675
676 # check that we have now both revisions
676 # check that we have now both revisions
677 pull_request = PullRequest.get(pull_request_id)
677 pull_request = PullRequest.get(pull_request_id)
678 assert pull_request.revisions == [
678 assert pull_request.revisions == [
679 commit_ids['change-2'], commit_ids['change']]
679 commit_ids['change-2'], commit_ids['change']]
680
680
681 # TODO: johbo: this should be a test on its own
681 # TODO: johbo: this should be a test on its own
682 response = self.app.get(route_path(
682 response = self.app.get(route_path(
683 'pullrequest_new',
683 'pullrequest_new',
684 repo_name=target.repo_name))
684 repo_name=target.repo_name))
685 assert response.status_int == 200
685 assert response.status_int == 200
686 assert 'Pull request updated to' in response.body
686 assert 'Pull request updated to' in response.body
687 assert 'with 1 added, 0 removed commits.' in response.body
687 assert 'with 1 added, 0 removed commits.' in response.body
688
688
689 def test_update_target_revision(self, backend, csrf_token):
689 def test_update_target_revision(self, backend, csrf_token):
690 commits = [
690 commits = [
691 {'message': 'ancestor'},
691 {'message': 'ancestor'},
692 {'message': 'change'},
692 {'message': 'change'},
693 {'message': 'ancestor-new', 'parents': ['ancestor']},
693 {'message': 'ancestor-new', 'parents': ['ancestor']},
694 {'message': 'change-rebased'},
694 {'message': 'change-rebased'},
695 ]
695 ]
696 commit_ids = backend.create_master_repo(commits)
696 commit_ids = backend.create_master_repo(commits)
697 target = backend.create_repo(heads=['ancestor'])
697 target = backend.create_repo(heads=['ancestor'])
698 source = backend.create_repo(heads=['change'])
698 source = backend.create_repo(heads=['change'])
699
699
700 # create pr from a in source to A in target
700 # create pr from a in source to A in target
701 pull_request = PullRequest()
701 pull_request = PullRequest()
702 pull_request.source_repo = source
702 pull_request.source_repo = source
703 # TODO: johbo: Make sure that we write the source ref this way!
703 # TODO: johbo: Make sure that we write the source ref this way!
704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
706 pull_request.target_repo = target
706 pull_request.target_repo = target
707 # TODO: johbo: Target ref should be branch based, since tip can jump
707 # TODO: johbo: Target ref should be branch based, since tip can jump
708 # from branch to branch
708 # from branch to branch
709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
710 branch=backend.default_branch_name,
710 branch=backend.default_branch_name,
711 commit_id=commit_ids['ancestor'])
711 commit_id=commit_ids['ancestor'])
712 pull_request.revisions = [commit_ids['change']]
712 pull_request.revisions = [commit_ids['change']]
713 pull_request.title = u"Test"
713 pull_request.title = u"Test"
714 pull_request.description = u"Description"
714 pull_request.description = u"Description"
715 pull_request.author = UserModel().get_by_username(
715 pull_request.author = UserModel().get_by_username(
716 TEST_USER_ADMIN_LOGIN)
716 TEST_USER_ADMIN_LOGIN)
717 Session().add(pull_request)
717 Session().add(pull_request)
718 Session().commit()
718 Session().commit()
719 pull_request_id = pull_request.pull_request_id
719 pull_request_id = pull_request.pull_request_id
720
720
721 # target has ancestor - ancestor-new
721 # target has ancestor - ancestor-new
722 # source has ancestor - ancestor-new - change-rebased
722 # source has ancestor - ancestor-new - change-rebased
723 backend.pull_heads(target, heads=['ancestor-new'])
723 backend.pull_heads(target, heads=['ancestor-new'])
724 backend.pull_heads(source, heads=['change-rebased'])
724 backend.pull_heads(source, heads=['change-rebased'])
725
725
726 # update PR
726 # update PR
727 self.app.post(
727 self.app.post(
728 route_path('pullrequest_update',
728 route_path('pullrequest_update',
729 repo_name=target.repo_name,
729 repo_name=target.repo_name,
730 pull_request_id=pull_request_id),
730 pull_request_id=pull_request_id),
731 params={'update_commits': 'true',
731 params={'update_commits': 'true',
732 'csrf_token': csrf_token},
732 'csrf_token': csrf_token},
733 status=200)
733 status=200)
734
734
735 # check that we have now both revisions
735 # check that we have now both revisions
736 pull_request = PullRequest.get(pull_request_id)
736 pull_request = PullRequest.get(pull_request_id)
737 assert pull_request.revisions == [commit_ids['change-rebased']]
737 assert pull_request.revisions == [commit_ids['change-rebased']]
738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
739 branch=backend.default_branch_name,
739 branch=backend.default_branch_name,
740 commit_id=commit_ids['ancestor-new'])
740 commit_id=commit_ids['ancestor-new'])
741
741
742 # TODO: johbo: This should be a test on its own
742 # TODO: johbo: This should be a test on its own
743 response = self.app.get(route_path(
743 response = self.app.get(route_path(
744 'pullrequest_new',
744 'pullrequest_new',
745 repo_name=target.repo_name))
745 repo_name=target.repo_name))
746 assert response.status_int == 200
746 assert response.status_int == 200
747 assert 'Pull request updated to' in response.body
747 assert 'Pull request updated to' in response.body
748 assert 'with 1 added, 1 removed commits.' in response.body
748 assert 'with 1 added, 1 removed commits.' in response.body
749
749
750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
751 backend = backend_git
751 backend = backend_git
752 commits = [
752 commits = [
753 {'message': 'master-commit-1'},
753 {'message': 'master-commit-1'},
754 {'message': 'master-commit-2-change-1'},
754 {'message': 'master-commit-2-change-1'},
755 {'message': 'master-commit-3-change-2'},
755 {'message': 'master-commit-3-change-2'},
756
756
757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
758 {'message': 'feat-commit-2'},
758 {'message': 'feat-commit-2'},
759 ]
759 ]
760 commit_ids = backend.create_master_repo(commits)
760 commit_ids = backend.create_master_repo(commits)
761 target = backend.create_repo(heads=['master-commit-3-change-2'])
761 target = backend.create_repo(heads=['master-commit-3-change-2'])
762 source = backend.create_repo(heads=['feat-commit-2'])
762 source = backend.create_repo(heads=['feat-commit-2'])
763
763
764 # create pr from a in source to A in target
764 # create pr from a in source to A in target
765 pull_request = PullRequest()
765 pull_request = PullRequest()
766 pull_request.source_repo = source
766 pull_request.source_repo = source
767 # TODO: johbo: Make sure that we write the source ref this way!
767 # TODO: johbo: Make sure that we write the source ref this way!
768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
769 branch=backend.default_branch_name,
769 branch=backend.default_branch_name,
770 commit_id=commit_ids['master-commit-3-change-2'])
770 commit_id=commit_ids['master-commit-3-change-2'])
771
771
772 pull_request.target_repo = target
772 pull_request.target_repo = target
773 # TODO: johbo: Target ref should be branch based, since tip can jump
773 # TODO: johbo: Target ref should be branch based, since tip can jump
774 # from branch to branch
774 # from branch to branch
775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
776 branch=backend.default_branch_name,
776 branch=backend.default_branch_name,
777 commit_id=commit_ids['feat-commit-2'])
777 commit_id=commit_ids['feat-commit-2'])
778
778
779 pull_request.revisions = [
779 pull_request.revisions = [
780 commit_ids['feat-commit-1'],
780 commit_ids['feat-commit-1'],
781 commit_ids['feat-commit-2']
781 commit_ids['feat-commit-2']
782 ]
782 ]
783 pull_request.title = u"Test"
783 pull_request.title = u"Test"
784 pull_request.description = u"Description"
784 pull_request.description = u"Description"
785 pull_request.author = UserModel().get_by_username(
785 pull_request.author = UserModel().get_by_username(
786 TEST_USER_ADMIN_LOGIN)
786 TEST_USER_ADMIN_LOGIN)
787 Session().add(pull_request)
787 Session().add(pull_request)
788 Session().commit()
788 Session().commit()
789 pull_request_id = pull_request.pull_request_id
789 pull_request_id = pull_request.pull_request_id
790
790
791 # PR is created, now we simulate a force-push into target,
791 # PR is created, now we simulate a force-push into target,
792 # that drops a 2 last commits
792 # that drops a 2 last commits
793 vcsrepo = target.scm_instance()
793 vcsrepo = target.scm_instance()
794 vcsrepo.config.clear_section('hooks')
794 vcsrepo.config.clear_section('hooks')
795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
796
796
797 # update PR
797 # update PR
798 self.app.post(
798 self.app.post(
799 route_path('pullrequest_update',
799 route_path('pullrequest_update',
800 repo_name=target.repo_name,
800 repo_name=target.repo_name,
801 pull_request_id=pull_request_id),
801 pull_request_id=pull_request_id),
802 params={'update_commits': 'true',
802 params={'update_commits': 'true',
803 'csrf_token': csrf_token},
803 'csrf_token': csrf_token},
804 status=200)
804 status=200)
805
805
806 response = self.app.get(route_path(
806 response = self.app.get(route_path(
807 'pullrequest_new',
807 'pullrequest_new',
808 repo_name=target.repo_name))
808 repo_name=target.repo_name))
809 assert response.status_int == 200
809 assert response.status_int == 200
810 response.mustcontain('Pull request updated to')
810 response.mustcontain('Pull request updated to')
811 response.mustcontain('with 0 added, 0 removed commits.')
811 response.mustcontain('with 0 added, 0 removed commits.')
812
812
813 def test_update_of_ancestor_reference(self, backend, csrf_token):
813 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 commits = [
814 commits = [
815 {'message': 'ancestor'},
815 {'message': 'ancestor'},
816 {'message': 'change'},
816 {'message': 'change'},
817 {'message': 'change-2'},
817 {'message': 'change-2'},
818 {'message': 'ancestor-new', 'parents': ['ancestor']},
818 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 {'message': 'change-rebased'},
819 {'message': 'change-rebased'},
820 ]
820 ]
821 commit_ids = backend.create_master_repo(commits)
821 commit_ids = backend.create_master_repo(commits)
822 target = backend.create_repo(heads=['ancestor'])
822 target = backend.create_repo(heads=['ancestor'])
823 source = backend.create_repo(heads=['change'])
823 source = backend.create_repo(heads=['change'])
824
824
825 # create pr from a in source to A in target
825 # create pr from a in source to A in target
826 pull_request = PullRequest()
826 pull_request = PullRequest()
827 pull_request.source_repo = source
827 pull_request.source_repo = source
828 # TODO: johbo: Make sure that we write the source ref this way!
828 # TODO: johbo: Make sure that we write the source ref this way!
829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 branch=backend.default_branch_name,
830 branch=backend.default_branch_name,
831 commit_id=commit_ids['change'])
831 commit_id=commit_ids['change'])
832 pull_request.target_repo = target
832 pull_request.target_repo = target
833 # TODO: johbo: Target ref should be branch based, since tip can jump
833 # TODO: johbo: Target ref should be branch based, since tip can jump
834 # from branch to branch
834 # from branch to branch
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 branch=backend.default_branch_name,
836 branch=backend.default_branch_name,
837 commit_id=commit_ids['ancestor'])
837 commit_id=commit_ids['ancestor'])
838 pull_request.revisions = [commit_ids['change']]
838 pull_request.revisions = [commit_ids['change']]
839 pull_request.title = u"Test"
839 pull_request.title = u"Test"
840 pull_request.description = u"Description"
840 pull_request.description = u"Description"
841 pull_request.author = UserModel().get_by_username(
841 pull_request.author = UserModel().get_by_username(
842 TEST_USER_ADMIN_LOGIN)
842 TEST_USER_ADMIN_LOGIN)
843 Session().add(pull_request)
843 Session().add(pull_request)
844 Session().commit()
844 Session().commit()
845 pull_request_id = pull_request.pull_request_id
845 pull_request_id = pull_request.pull_request_id
846
846
847 # target has ancestor - ancestor-new
847 # target has ancestor - ancestor-new
848 # source has ancestor - ancestor-new - change-rebased
848 # source has ancestor - ancestor-new - change-rebased
849 backend.pull_heads(target, heads=['ancestor-new'])
849 backend.pull_heads(target, heads=['ancestor-new'])
850 backend.pull_heads(source, heads=['change-rebased'])
850 backend.pull_heads(source, heads=['change-rebased'])
851
851
852 # update PR
852 # update PR
853 self.app.post(
853 self.app.post(
854 route_path('pullrequest_update',
854 route_path('pullrequest_update',
855 repo_name=target.repo_name,
855 repo_name=target.repo_name,
856 pull_request_id=pull_request_id),
856 pull_request_id=pull_request_id),
857 params={'update_commits': 'true',
857 params={'update_commits': 'true',
858 'csrf_token': csrf_token},
858 'csrf_token': csrf_token},
859 status=200)
859 status=200)
860
860
861 # Expect the target reference to be updated correctly
861 # Expect the target reference to be updated correctly
862 pull_request = PullRequest.get(pull_request_id)
862 pull_request = PullRequest.get(pull_request_id)
863 assert pull_request.revisions == [commit_ids['change-rebased']]
863 assert pull_request.revisions == [commit_ids['change-rebased']]
864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
865 branch=backend.default_branch_name,
865 branch=backend.default_branch_name,
866 commit_id=commit_ids['ancestor-new'])
866 commit_id=commit_ids['ancestor-new'])
867 assert pull_request.target_ref == expected_target_ref
867 assert pull_request.target_ref == expected_target_ref
868
868
869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
870 branch_name = 'development'
870 branch_name = 'development'
871 commits = [
871 commits = [
872 {'message': 'initial-commit'},
872 {'message': 'initial-commit'},
873 {'message': 'old-feature'},
873 {'message': 'old-feature'},
874 {'message': 'new-feature', 'branch': branch_name},
874 {'message': 'new-feature', 'branch': branch_name},
875 ]
875 ]
876 repo = backend_git.create_repo(commits)
876 repo = backend_git.create_repo(commits)
877 commit_ids = backend_git.commit_ids
877 commit_ids = backend_git.commit_ids
878
878
879 pull_request = PullRequest()
879 pull_request = PullRequest()
880 pull_request.source_repo = repo
880 pull_request.source_repo = repo
881 pull_request.target_repo = repo
881 pull_request.target_repo = repo
882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
883 branch=branch_name, commit_id=commit_ids['new-feature'])
883 branch=branch_name, commit_id=commit_ids['new-feature'])
884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
885 branch=backend_git.default_branch_name,
885 branch=backend_git.default_branch_name,
886 commit_id=commit_ids['old-feature'])
886 commit_id=commit_ids['old-feature'])
887 pull_request.revisions = [commit_ids['new-feature']]
887 pull_request.revisions = [commit_ids['new-feature']]
888 pull_request.title = u"Test"
888 pull_request.title = u"Test"
889 pull_request.description = u"Description"
889 pull_request.description = u"Description"
890 pull_request.author = UserModel().get_by_username(
890 pull_request.author = UserModel().get_by_username(
891 TEST_USER_ADMIN_LOGIN)
891 TEST_USER_ADMIN_LOGIN)
892 Session().add(pull_request)
892 Session().add(pull_request)
893 Session().commit()
893 Session().commit()
894
894
895 vcs = repo.scm_instance()
895 vcs = repo.scm_instance()
896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
897
897
898 response = self.app.get(route_path(
898 response = self.app.get(route_path(
899 'pullrequest_show',
899 'pullrequest_show',
900 repo_name=repo.repo_name,
900 repo_name=repo.repo_name,
901 pull_request_id=pull_request.pull_request_id))
901 pull_request_id=pull_request.pull_request_id))
902
902
903 assert response.status_int == 200
903 assert response.status_int == 200
904 assert_response = AssertResponse(response)
904 assert_response = AssertResponse(response)
905 assert_response.element_contains(
905 assert_response.element_contains(
906 '#changeset_compare_view_content .alert strong',
906 '#changeset_compare_view_content .alert strong',
907 'Missing commits')
907 'Missing commits')
908 assert_response.element_contains(
908 assert_response.element_contains(
909 '#changeset_compare_view_content .alert',
909 '#changeset_compare_view_content .alert',
910 'This pull request cannot be displayed, because one or more'
910 'This pull request cannot be displayed, because one or more'
911 ' commits no longer exist in the source repository.')
911 ' commits no longer exist in the source repository.')
912
912
913 def test_strip_commits_from_pull_request(
913 def test_strip_commits_from_pull_request(
914 self, backend, pr_util, csrf_token):
914 self, backend, pr_util, csrf_token):
915 commits = [
915 commits = [
916 {'message': 'initial-commit'},
916 {'message': 'initial-commit'},
917 {'message': 'old-feature'},
917 {'message': 'old-feature'},
918 {'message': 'new-feature', 'parents': ['initial-commit']},
918 {'message': 'new-feature', 'parents': ['initial-commit']},
919 ]
919 ]
920 pull_request = pr_util.create_pull_request(
920 pull_request = pr_util.create_pull_request(
921 commits, target_head='initial-commit', source_head='new-feature',
921 commits, target_head='initial-commit', source_head='new-feature',
922 revisions=['new-feature'])
922 revisions=['new-feature'])
923
923
924 vcs = pr_util.source_repository.scm_instance()
924 vcs = pr_util.source_repository.scm_instance()
925 if backend.alias == 'git':
925 if backend.alias == 'git':
926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
927 else:
927 else:
928 vcs.strip(pr_util.commit_ids['new-feature'])
928 vcs.strip(pr_util.commit_ids['new-feature'])
929
929
930 response = self.app.get(route_path(
930 response = self.app.get(route_path(
931 'pullrequest_show',
931 'pullrequest_show',
932 repo_name=pr_util.target_repository.repo_name,
932 repo_name=pr_util.target_repository.repo_name,
933 pull_request_id=pull_request.pull_request_id))
933 pull_request_id=pull_request.pull_request_id))
934
934
935 assert response.status_int == 200
935 assert response.status_int == 200
936 assert_response = AssertResponse(response)
936 assert_response = AssertResponse(response)
937 assert_response.element_contains(
937 assert_response.element_contains(
938 '#changeset_compare_view_content .alert strong',
938 '#changeset_compare_view_content .alert strong',
939 'Missing commits')
939 'Missing commits')
940 assert_response.element_contains(
940 assert_response.element_contains(
941 '#changeset_compare_view_content .alert',
941 '#changeset_compare_view_content .alert',
942 'This pull request cannot be displayed, because one or more'
942 'This pull request cannot be displayed, because one or more'
943 ' commits no longer exist in the source repository.')
943 ' commits no longer exist in the source repository.')
944 assert_response.element_contains(
944 assert_response.element_contains(
945 '#update_commits',
945 '#update_commits',
946 'Update commits')
946 'Update commits')
947
947
948 def test_strip_commits_and_update(
948 def test_strip_commits_and_update(
949 self, backend, pr_util, csrf_token):
949 self, backend, pr_util, csrf_token):
950 commits = [
950 commits = [
951 {'message': 'initial-commit'},
951 {'message': 'initial-commit'},
952 {'message': 'old-feature'},
952 {'message': 'old-feature'},
953 {'message': 'new-feature', 'parents': ['old-feature']},
953 {'message': 'new-feature', 'parents': ['old-feature']},
954 ]
954 ]
955 pull_request = pr_util.create_pull_request(
955 pull_request = pr_util.create_pull_request(
956 commits, target_head='old-feature', source_head='new-feature',
956 commits, target_head='old-feature', source_head='new-feature',
957 revisions=['new-feature'], mergeable=True)
957 revisions=['new-feature'], mergeable=True)
958
958
959 vcs = pr_util.source_repository.scm_instance()
959 vcs = pr_util.source_repository.scm_instance()
960 if backend.alias == 'git':
960 if backend.alias == 'git':
961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
962 else:
962 else:
963 vcs.strip(pr_util.commit_ids['new-feature'])
963 vcs.strip(pr_util.commit_ids['new-feature'])
964
964
965 response = self.app.post(
965 response = self.app.post(
966 route_path('pullrequest_update',
966 route_path('pullrequest_update',
967 repo_name=pull_request.target_repo.repo_name,
967 repo_name=pull_request.target_repo.repo_name,
968 pull_request_id=pull_request.pull_request_id),
968 pull_request_id=pull_request.pull_request_id),
969 params={'update_commits': 'true',
969 params={'update_commits': 'true',
970 'csrf_token': csrf_token})
970 'csrf_token': csrf_token})
971
971
972 assert response.status_int == 200
972 assert response.status_int == 200
973 assert response.body == 'true'
973 assert response.body == 'true'
974
974
975 # Make sure that after update, it won't raise 500 errors
975 # Make sure that after update, it won't raise 500 errors
976 response = self.app.get(route_path(
976 response = self.app.get(route_path(
977 'pullrequest_show',
977 'pullrequest_show',
978 repo_name=pr_util.target_repository.repo_name,
978 repo_name=pr_util.target_repository.repo_name,
979 pull_request_id=pull_request.pull_request_id))
979 pull_request_id=pull_request.pull_request_id))
980
980
981 assert response.status_int == 200
981 assert response.status_int == 200
982 assert_response = AssertResponse(response)
982 assert_response = AssertResponse(response)
983 assert_response.element_contains(
983 assert_response.element_contains(
984 '#changeset_compare_view_content .alert strong',
984 '#changeset_compare_view_content .alert strong',
985 'Missing commits')
985 'Missing commits')
986
986
987 def test_branch_is_a_link(self, pr_util):
987 def test_branch_is_a_link(self, pr_util):
988 pull_request = pr_util.create_pull_request()
988 pull_request = pr_util.create_pull_request()
989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
990 pull_request.target_ref = 'branch:target:abcdef1234567890'
990 pull_request.target_ref = 'branch:target:abcdef1234567890'
991 Session().add(pull_request)
991 Session().add(pull_request)
992 Session().commit()
992 Session().commit()
993
993
994 response = self.app.get(route_path(
994 response = self.app.get(route_path(
995 'pullrequest_show',
995 'pullrequest_show',
996 repo_name=pull_request.target_repo.scm_instance().name,
996 repo_name=pull_request.target_repo.scm_instance().name,
997 pull_request_id=pull_request.pull_request_id))
997 pull_request_id=pull_request.pull_request_id))
998 assert response.status_int == 200
998 assert response.status_int == 200
999 assert_response = AssertResponse(response)
999 assert_response = AssertResponse(response)
1000
1000
1001 origin = assert_response.get_element('.pr-origininfo .tag')
1001 origin = assert_response.get_element('.pr-origininfo .tag')
1002 origin_children = origin.getchildren()
1002 origin_children = origin.getchildren()
1003 assert len(origin_children) == 1
1003 assert len(origin_children) == 1
1004 target = assert_response.get_element('.pr-targetinfo .tag')
1004 target = assert_response.get_element('.pr-targetinfo .tag')
1005 target_children = target.getchildren()
1005 target_children = target.getchildren()
1006 assert len(target_children) == 1
1006 assert len(target_children) == 1
1007
1007
1008 expected_origin_link = route_path(
1008 expected_origin_link = route_path(
1009 'repo_changelog',
1009 'repo_changelog',
1010 repo_name=pull_request.source_repo.scm_instance().name,
1010 repo_name=pull_request.source_repo.scm_instance().name,
1011 params=dict(branch='origin'))
1011 params=dict(branch='origin'))
1012 expected_target_link = route_path(
1012 expected_target_link = route_path(
1013 'repo_changelog',
1013 'repo_changelog',
1014 repo_name=pull_request.target_repo.scm_instance().name,
1014 repo_name=pull_request.target_repo.scm_instance().name,
1015 params=dict(branch='target'))
1015 params=dict(branch='target'))
1016 assert origin_children[0].attrib['href'] == expected_origin_link
1016 assert origin_children[0].attrib['href'] == expected_origin_link
1017 assert origin_children[0].text == 'branch: origin'
1017 assert origin_children[0].text == 'branch: origin'
1018 assert target_children[0].attrib['href'] == expected_target_link
1018 assert target_children[0].attrib['href'] == expected_target_link
1019 assert target_children[0].text == 'branch: target'
1019 assert target_children[0].text == 'branch: target'
1020
1020
1021 def test_bookmark_is_not_a_link(self, pr_util):
1021 def test_bookmark_is_not_a_link(self, pr_util):
1022 pull_request = pr_util.create_pull_request()
1022 pull_request = pr_util.create_pull_request()
1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1025 Session().add(pull_request)
1025 Session().add(pull_request)
1026 Session().commit()
1026 Session().commit()
1027
1027
1028 response = self.app.get(route_path(
1028 response = self.app.get(route_path(
1029 'pullrequest_show',
1029 'pullrequest_show',
1030 repo_name=pull_request.target_repo.scm_instance().name,
1030 repo_name=pull_request.target_repo.scm_instance().name,
1031 pull_request_id=pull_request.pull_request_id))
1031 pull_request_id=pull_request.pull_request_id))
1032 assert response.status_int == 200
1032 assert response.status_int == 200
1033 assert_response = AssertResponse(response)
1033 assert_response = AssertResponse(response)
1034
1034
1035 origin = assert_response.get_element('.pr-origininfo .tag')
1035 origin = assert_response.get_element('.pr-origininfo .tag')
1036 assert origin.text.strip() == 'bookmark: origin'
1036 assert origin.text.strip() == 'bookmark: origin'
1037 assert origin.getchildren() == []
1037 assert origin.getchildren() == []
1038
1038
1039 target = assert_response.get_element('.pr-targetinfo .tag')
1039 target = assert_response.get_element('.pr-targetinfo .tag')
1040 assert target.text.strip() == 'bookmark: target'
1040 assert target.text.strip() == 'bookmark: target'
1041 assert target.getchildren() == []
1041 assert target.getchildren() == []
1042
1042
1043 def test_tag_is_not_a_link(self, pr_util):
1043 def test_tag_is_not_a_link(self, pr_util):
1044 pull_request = pr_util.create_pull_request()
1044 pull_request = pr_util.create_pull_request()
1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1047 Session().add(pull_request)
1047 Session().add(pull_request)
1048 Session().commit()
1048 Session().commit()
1049
1049
1050 response = self.app.get(route_path(
1050 response = self.app.get(route_path(
1051 'pullrequest_show',
1051 'pullrequest_show',
1052 repo_name=pull_request.target_repo.scm_instance().name,
1052 repo_name=pull_request.target_repo.scm_instance().name,
1053 pull_request_id=pull_request.pull_request_id))
1053 pull_request_id=pull_request.pull_request_id))
1054 assert response.status_int == 200
1054 assert response.status_int == 200
1055 assert_response = AssertResponse(response)
1055 assert_response = AssertResponse(response)
1056
1056
1057 origin = assert_response.get_element('.pr-origininfo .tag')
1057 origin = assert_response.get_element('.pr-origininfo .tag')
1058 assert origin.text.strip() == 'tag: origin'
1058 assert origin.text.strip() == 'tag: origin'
1059 assert origin.getchildren() == []
1059 assert origin.getchildren() == []
1060
1060
1061 target = assert_response.get_element('.pr-targetinfo .tag')
1061 target = assert_response.get_element('.pr-targetinfo .tag')
1062 assert target.text.strip() == 'tag: target'
1062 assert target.text.strip() == 'tag: target'
1063 assert target.getchildren() == []
1063 assert target.getchildren() == []
1064
1064
1065 @pytest.mark.parametrize('mergeable', [True, False])
1065 @pytest.mark.parametrize('mergeable', [True, False])
1066 def test_shadow_repository_link(
1066 def test_shadow_repository_link(
1067 self, mergeable, pr_util, http_host_only_stub):
1067 self, mergeable, pr_util, http_host_only_stub):
1068 """
1068 """
1069 Check that the pull request summary page displays a link to the shadow
1069 Check that the pull request summary page displays a link to the shadow
1070 repository if the pull request is mergeable. If it is not mergeable
1070 repository if the pull request is mergeable. If it is not mergeable
1071 the link should not be displayed.
1071 the link should not be displayed.
1072 """
1072 """
1073 pull_request = pr_util.create_pull_request(
1073 pull_request = pr_util.create_pull_request(
1074 mergeable=mergeable, enable_notifications=False)
1074 mergeable=mergeable, enable_notifications=False)
1075 target_repo = pull_request.target_repo.scm_instance()
1075 target_repo = pull_request.target_repo.scm_instance()
1076 pr_id = pull_request.pull_request_id
1076 pr_id = pull_request.pull_request_id
1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1079
1079
1080 response = self.app.get(route_path(
1080 response = self.app.get(route_path(
1081 'pullrequest_show',
1081 'pullrequest_show',
1082 repo_name=target_repo.name,
1082 repo_name=target_repo.name,
1083 pull_request_id=pr_id))
1083 pull_request_id=pr_id))
1084
1084
1085 assertr = AssertResponse(response)
1085 assertr = AssertResponse(response)
1086 if mergeable:
1086 if mergeable:
1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1088 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1088 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1089 else:
1089 else:
1090 assertr.no_element_exists('.pr-mergeinfo')
1090 assertr.no_element_exists('.pr-mergeinfo')
1091
1091
1092
1092
1093 @pytest.mark.usefixtures('app')
1093 @pytest.mark.usefixtures('app')
1094 @pytest.mark.backends("git", "hg")
1094 @pytest.mark.backends("git", "hg")
1095 class TestPullrequestsControllerDelete(object):
1095 class TestPullrequestsControllerDelete(object):
1096 def test_pull_request_delete_button_permissions_admin(
1096 def test_pull_request_delete_button_permissions_admin(
1097 self, autologin_user, user_admin, pr_util):
1097 self, autologin_user, user_admin, pr_util):
1098 pull_request = pr_util.create_pull_request(
1098 pull_request = pr_util.create_pull_request(
1099 author=user_admin.username, enable_notifications=False)
1099 author=user_admin.username, enable_notifications=False)
1100
1100
1101 response = self.app.get(route_path(
1101 response = self.app.get(route_path(
1102 'pullrequest_show',
1102 'pullrequest_show',
1103 repo_name=pull_request.target_repo.scm_instance().name,
1103 repo_name=pull_request.target_repo.scm_instance().name,
1104 pull_request_id=pull_request.pull_request_id))
1104 pull_request_id=pull_request.pull_request_id))
1105
1105
1106 response.mustcontain('id="delete_pullrequest"')
1106 response.mustcontain('id="delete_pullrequest"')
1107 response.mustcontain('Confirm to delete this pull request')
1107 response.mustcontain('Confirm to delete this pull request')
1108
1108
1109 def test_pull_request_delete_button_permissions_owner(
1109 def test_pull_request_delete_button_permissions_owner(
1110 self, autologin_regular_user, user_regular, pr_util):
1110 self, autologin_regular_user, user_regular, pr_util):
1111 pull_request = pr_util.create_pull_request(
1111 pull_request = pr_util.create_pull_request(
1112 author=user_regular.username, enable_notifications=False)
1112 author=user_regular.username, enable_notifications=False)
1113
1113
1114 response = self.app.get(route_path(
1114 response = self.app.get(route_path(
1115 'pullrequest_show',
1115 'pullrequest_show',
1116 repo_name=pull_request.target_repo.scm_instance().name,
1116 repo_name=pull_request.target_repo.scm_instance().name,
1117 pull_request_id=pull_request.pull_request_id))
1117 pull_request_id=pull_request.pull_request_id))
1118
1118
1119 response.mustcontain('id="delete_pullrequest"')
1119 response.mustcontain('id="delete_pullrequest"')
1120 response.mustcontain('Confirm to delete this pull request')
1120 response.mustcontain('Confirm to delete this pull request')
1121
1121
1122 def test_pull_request_delete_button_permissions_forbidden(
1122 def test_pull_request_delete_button_permissions_forbidden(
1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1124 pull_request = pr_util.create_pull_request(
1124 pull_request = pr_util.create_pull_request(
1125 author=user_admin.username, enable_notifications=False)
1125 author=user_admin.username, enable_notifications=False)
1126
1126
1127 response = self.app.get(route_path(
1127 response = self.app.get(route_path(
1128 'pullrequest_show',
1128 'pullrequest_show',
1129 repo_name=pull_request.target_repo.scm_instance().name,
1129 repo_name=pull_request.target_repo.scm_instance().name,
1130 pull_request_id=pull_request.pull_request_id))
1130 pull_request_id=pull_request.pull_request_id))
1131 response.mustcontain(no=['id="delete_pullrequest"'])
1131 response.mustcontain(no=['id="delete_pullrequest"'])
1132 response.mustcontain(no=['Confirm to delete this pull request'])
1132 response.mustcontain(no=['Confirm to delete this pull request'])
1133
1133
1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1136 user_util):
1136 user_util):
1137
1137
1138 pull_request = pr_util.create_pull_request(
1138 pull_request = pr_util.create_pull_request(
1139 author=user_admin.username, enable_notifications=False)
1139 author=user_admin.username, enable_notifications=False)
1140
1140
1141 user_util.grant_user_permission_to_repo(
1141 user_util.grant_user_permission_to_repo(
1142 pull_request.target_repo, user_regular,
1142 pull_request.target_repo, user_regular,
1143 'repository.write')
1143 'repository.write')
1144
1144
1145 response = self.app.get(route_path(
1145 response = self.app.get(route_path(
1146 'pullrequest_show',
1146 'pullrequest_show',
1147 repo_name=pull_request.target_repo.scm_instance().name,
1147 repo_name=pull_request.target_repo.scm_instance().name,
1148 pull_request_id=pull_request.pull_request_id))
1148 pull_request_id=pull_request.pull_request_id))
1149
1149
1150 response.mustcontain('id="open_edit_pullrequest"')
1150 response.mustcontain('id="open_edit_pullrequest"')
1151 response.mustcontain('id="delete_pullrequest"')
1151 response.mustcontain('id="delete_pullrequest"')
1152 response.mustcontain(no=['Confirm to delete this pull request'])
1152 response.mustcontain(no=['Confirm to delete this pull request'])
1153
1153
1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1156
1156
1157 pull_request = pr_util.create_pull_request(
1157 pull_request = pr_util.create_pull_request(
1158 author=user_admin.username, enable_notifications=False)
1158 author=user_admin.username, enable_notifications=False)
1159
1159
1160 self.app.post(
1160 self.app.post(
1161 route_path(
1161 route_path(
1162 'pullrequest_comment_delete',
1162 'pullrequest_comment_delete',
1163 repo_name=pull_request.target_repo.scm_instance().name,
1163 repo_name=pull_request.target_repo.scm_instance().name,
1164 pull_request_id=pull_request.pull_request_id,
1164 pull_request_id=pull_request.pull_request_id,
1165 comment_id=1024404),
1165 comment_id=1024404),
1166 extra_environ=xhr_header,
1166 extra_environ=xhr_header,
1167 params={'csrf_token': csrf_token},
1167 params={'csrf_token': csrf_token},
1168 status=404
1168 status=404
1169 )
1169 )
1170
1170
1171 def test_delete_comment(
1171 def test_delete_comment(
1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1173
1173
1174 pull_request = pr_util.create_pull_request(
1174 pull_request = pr_util.create_pull_request(
1175 author=user_admin.username, enable_notifications=False)
1175 author=user_admin.username, enable_notifications=False)
1176 comment = pr_util.create_comment()
1176 comment = pr_util.create_comment()
1177 comment_id = comment.comment_id
1177 comment_id = comment.comment_id
1178
1178
1179 response = self.app.post(
1179 response = self.app.post(
1180 route_path(
1180 route_path(
1181 'pullrequest_comment_delete',
1181 'pullrequest_comment_delete',
1182 repo_name=pull_request.target_repo.scm_instance().name,
1182 repo_name=pull_request.target_repo.scm_instance().name,
1183 pull_request_id=pull_request.pull_request_id,
1183 pull_request_id=pull_request.pull_request_id,
1184 comment_id=comment_id),
1184 comment_id=comment_id),
1185 extra_environ=xhr_header,
1185 extra_environ=xhr_header,
1186 params={'csrf_token': csrf_token},
1186 params={'csrf_token': csrf_token},
1187 status=200
1187 status=200
1188 )
1188 )
1189 assert response.body == 'true'
1189 assert response.body == 'true'
1190
1190
1191
1191
1192 def assert_pull_request_status(pull_request, expected_status):
1192 def assert_pull_request_status(pull_request, expected_status):
1193 status = ChangesetStatusModel().calculated_review_status(
1193 status = ChangesetStatusModel().calculated_review_status(
1194 pull_request=pull_request)
1194 pull_request=pull_request)
1195 assert status == expected_status
1195 assert status == expected_status
1196
1196
1197
1197
1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1199 @pytest.mark.usefixtures("autologin_user")
1199 @pytest.mark.usefixtures("autologin_user")
1200 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1200 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1201 response = app.get(
1201 response = app.get(
1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1203
1203
@@ -1,1306 +1,1307 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34
34
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 RepositoryRequirementError, EmptyRepositoryError)
45 RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 ChangesetComment, ChangesetStatus, Repository)
49 ChangesetComment, ChangesetStatus, Repository)
50 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.forms import PullRequestForm
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 from rhodecode.model.scm import ScmModel
53 from rhodecode.model.scm import ScmModel
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59
59
60 def load_default_context(self):
60 def load_default_context(self):
61 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64
64
65 return c
65 return c
66
66
67 def _get_pull_requests_list(
67 def _get_pull_requests_list(
68 self, repo_name, source, filter_type, opened_by, statuses):
68 self, repo_name, source, filter_type, opened_by, statuses):
69
69
70 draw, start, limit = self._extract_chunk(self.request)
70 draw, start, limit = self._extract_chunk(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 _render = self.request.get_partial_renderer(
72 _render = self.request.get_partial_renderer(
73 'rhodecode:templates/data_table/_dt_elements.mako')
73 'rhodecode:templates/data_table/_dt_elements.mako')
74
74
75 # pagination
75 # pagination
76
76
77 if filter_type == 'awaiting_review':
77 if filter_type == 'awaiting_review':
78 pull_requests = PullRequestModel().get_awaiting_review(
78 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, source=source, opened_by=opened_by,
79 repo_name, source=source, opened_by=opened_by,
80 statuses=statuses, offset=start, length=limit,
80 statuses=statuses, offset=start, length=limit,
81 order_by=order_by, order_dir=order_dir)
81 order_by=order_by, order_dir=order_dir)
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, source=source, statuses=statuses,
83 repo_name, source=source, statuses=statuses,
84 opened_by=opened_by)
84 opened_by=opened_by)
85 elif filter_type == 'awaiting_my_review':
85 elif filter_type == 'awaiting_my_review':
86 pull_requests = PullRequestModel().get_awaiting_my_review(
86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, source=source, opened_by=opened_by,
87 repo_name, source=source, opened_by=opened_by,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 offset=start, length=limit, order_by=order_by,
89 offset=start, length=limit, order_by=order_by,
90 order_dir=order_dir)
90 order_dir=order_dir)
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 statuses=statuses, opened_by=opened_by)
93 statuses=statuses, opened_by=opened_by)
94 else:
94 else:
95 pull_requests = PullRequestModel().get_all(
95 pull_requests = PullRequestModel().get_all(
96 repo_name, source=source, opened_by=opened_by,
96 repo_name, source=source, opened_by=opened_by,
97 statuses=statuses, offset=start, length=limit,
97 statuses=statuses, offset=start, length=limit,
98 order_by=order_by, order_dir=order_dir)
98 order_by=order_by, order_dir=order_dir)
99 pull_requests_total_count = PullRequestModel().count_all(
99 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, source=source, statuses=statuses,
100 repo_name, source=source, statuses=statuses,
101 opened_by=opened_by)
101 opened_by=opened_by)
102
102
103 data = []
103 data = []
104 comments_model = CommentsModel()
104 comments_model = CommentsModel()
105 for pr in pull_requests:
105 for pr in pull_requests:
106 comments = comments_model.get_all_comments(
106 comments = comments_model.get_all_comments(
107 self.db_repo.repo_id, pull_request=pr)
107 self.db_repo.repo_id, pull_request=pr)
108
108
109 data.append({
109 data.append({
110 'name': _render('pullrequest_name',
110 'name': _render('pullrequest_name',
111 pr.pull_request_id, pr.target_repo.repo_name),
111 pr.pull_request_id, pr.target_repo.repo_name),
112 'name_raw': pr.pull_request_id,
112 'name_raw': pr.pull_request_id,
113 'status': _render('pullrequest_status',
113 'status': _render('pullrequest_status',
114 pr.calculated_review_status()),
114 pr.calculated_review_status()),
115 'title': _render(
115 'title': _render(
116 'pullrequest_title', pr.title, pr.description),
116 'pullrequest_title', pr.title, pr.description),
117 'description': h.escape(pr.description),
117 'description': h.escape(pr.description),
118 'updated_on': _render('pullrequest_updated_on',
118 'updated_on': _render('pullrequest_updated_on',
119 h.datetime_to_time(pr.updated_on)),
119 h.datetime_to_time(pr.updated_on)),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'created_on': _render('pullrequest_updated_on',
121 'created_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.created_on)),
122 h.datetime_to_time(pr.created_on)),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'author': _render('pullrequest_author',
124 'author': _render('pullrequest_author',
125 pr.author.full_contact, ),
125 pr.author.full_contact, ),
126 'author_raw': pr.author.full_name,
126 'author_raw': pr.author.full_name,
127 'comments': _render('pullrequest_comments', len(comments)),
127 'comments': _render('pullrequest_comments', len(comments)),
128 'comments_raw': len(comments),
128 'comments_raw': len(comments),
129 'closed': pr.is_closed(),
129 'closed': pr.is_closed(),
130 })
130 })
131
131
132 data = ({
132 data = ({
133 'draw': draw,
133 'draw': draw,
134 'data': data,
134 'data': data,
135 'recordsTotal': pull_requests_total_count,
135 'recordsTotal': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
136 'recordsFiltered': pull_requests_total_count,
137 })
137 })
138 return data
138 return data
139
139
140 @LoginRequired()
140 @LoginRequired()
141 @HasRepoPermissionAnyDecorator(
141 @HasRepoPermissionAnyDecorator(
142 'repository.read', 'repository.write', 'repository.admin')
142 'repository.read', 'repository.write', 'repository.admin')
143 @view_config(
143 @view_config(
144 route_name='pullrequest_show_all', request_method='GET',
144 route_name='pullrequest_show_all', request_method='GET',
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 def pull_request_list(self):
146 def pull_request_list(self):
147 c = self.load_default_context()
147 c = self.load_default_context()
148
148
149 req_get = self.request.GET
149 req_get = self.request.GET
150 c.source = str2bool(req_get.get('source'))
150 c.source = str2bool(req_get.get('source'))
151 c.closed = str2bool(req_get.get('closed'))
151 c.closed = str2bool(req_get.get('closed'))
152 c.my = str2bool(req_get.get('my'))
152 c.my = str2bool(req_get.get('my'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155
155
156 c.active = 'open'
156 c.active = 'open'
157 if c.my:
157 if c.my:
158 c.active = 'my'
158 c.active = 'my'
159 if c.closed:
159 if c.closed:
160 c.active = 'closed'
160 c.active = 'closed'
161 if c.awaiting_review and not c.source:
161 if c.awaiting_review and not c.source:
162 c.active = 'awaiting'
162 c.active = 'awaiting'
163 if c.source and not c.awaiting_review:
163 if c.source and not c.awaiting_review:
164 c.active = 'source'
164 c.active = 'source'
165 if c.awaiting_my_review:
165 if c.awaiting_my_review:
166 c.active = 'awaiting_my'
166 c.active = 'awaiting_my'
167
167
168 return self._get_template_context(c)
168 return self._get_template_context(c)
169
169
170 @LoginRequired()
170 @LoginRequired()
171 @HasRepoPermissionAnyDecorator(
171 @HasRepoPermissionAnyDecorator(
172 'repository.read', 'repository.write', 'repository.admin')
172 'repository.read', 'repository.write', 'repository.admin')
173 @view_config(
173 @view_config(
174 route_name='pullrequest_show_all_data', request_method='GET',
174 route_name='pullrequest_show_all_data', request_method='GET',
175 renderer='json_ext', xhr=True)
175 renderer='json_ext', xhr=True)
176 def pull_request_list_data(self):
176 def pull_request_list_data(self):
177 self.load_default_context()
177 self.load_default_context()
178
178
179 # additional filters
179 # additional filters
180 req_get = self.request.GET
180 req_get = self.request.GET
181 source = str2bool(req_get.get('source'))
181 source = str2bool(req_get.get('source'))
182 closed = str2bool(req_get.get('closed'))
182 closed = str2bool(req_get.get('closed'))
183 my = str2bool(req_get.get('my'))
183 my = str2bool(req_get.get('my'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186
186
187 filter_type = 'awaiting_review' if awaiting_review \
187 filter_type = 'awaiting_review' if awaiting_review \
188 else 'awaiting_my_review' if awaiting_my_review \
188 else 'awaiting_my_review' if awaiting_my_review \
189 else None
189 else None
190
190
191 opened_by = None
191 opened_by = None
192 if my:
192 if my:
193 opened_by = [self._rhodecode_user.user_id]
193 opened_by = [self._rhodecode_user.user_id]
194
194
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 if closed:
196 if closed:
197 statuses = [PullRequest.STATUS_CLOSED]
197 statuses = [PullRequest.STATUS_CLOSED]
198
198
199 data = self._get_pull_requests_list(
199 data = self._get_pull_requests_list(
200 repo_name=self.db_repo_name, source=source,
200 repo_name=self.db_repo_name, source=source,
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202
202
203 return data
203 return data
204
204
205 def _is_diff_cache_enabled(self, target_repo):
205 def _is_diff_cache_enabled(self, target_repo):
206 caching_enabled = self._get_general_setting(
206 caching_enabled = self._get_general_setting(
207 target_repo, 'rhodecode_diff_cache')
207 target_repo, 'rhodecode_diff_cache')
208 log.debug('Diff caching enabled: %s', caching_enabled)
208 log.debug('Diff caching enabled: %s', caching_enabled)
209 return caching_enabled
209 return caching_enabled
210
210
211 def _get_diffset(self, source_repo_name, source_repo,
211 def _get_diffset(self, source_repo_name, source_repo,
212 source_ref_id, target_ref_id,
212 source_ref_id, target_ref_id,
213 target_commit, source_commit, diff_limit, file_limit,
213 target_commit, source_commit, diff_limit, file_limit,
214 fulldiff):
214 fulldiff):
215
215
216 vcs_diff = PullRequestModel().get_diff(
216 vcs_diff = PullRequestModel().get_diff(
217 source_repo, source_ref_id, target_ref_id)
217 source_repo, source_ref_id, target_ref_id)
218
218
219 diff_processor = diffs.DiffProcessor(
219 diff_processor = diffs.DiffProcessor(
220 vcs_diff, format='newdiff', diff_limit=diff_limit,
220 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 file_limit=file_limit, show_full_diff=fulldiff)
221 file_limit=file_limit, show_full_diff=fulldiff)
222
222
223 _parsed = diff_processor.prepare()
223 _parsed = diff_processor.prepare()
224
224
225 diffset = codeblocks.DiffSet(
225 diffset = codeblocks.DiffSet(
226 repo_name=self.db_repo_name,
226 repo_name=self.db_repo_name,
227 source_repo_name=source_repo_name,
227 source_repo_name=source_repo_name,
228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 )
230 )
231 diffset = self.path_filter.render_patchset_filtered(
231 diffset = self.path_filter.render_patchset_filtered(
232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233
233
234 return diffset
234 return diffset
235
235
236 @LoginRequired()
236 @LoginRequired()
237 @HasRepoPermissionAnyDecorator(
237 @HasRepoPermissionAnyDecorator(
238 'repository.read', 'repository.write', 'repository.admin')
238 'repository.read', 'repository.write', 'repository.admin')
239 @view_config(
239 @view_config(
240 route_name='pullrequest_show', request_method='GET',
240 route_name='pullrequest_show', request_method='GET',
241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
242 def pull_request_show(self):
242 def pull_request_show(self):
243 pull_request_id = self.request.matchdict['pull_request_id']
243 pull_request_id = self.request.matchdict['pull_request_id']
244
244
245 c = self.load_default_context()
245 c = self.load_default_context()
246
246
247 version = self.request.GET.get('version')
247 version = self.request.GET.get('version')
248 from_version = self.request.GET.get('from_version') or version
248 from_version = self.request.GET.get('from_version') or version
249 merge_checks = self.request.GET.get('merge_checks')
249 merge_checks = self.request.GET.get('merge_checks')
250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
252
252
253 (pull_request_latest,
253 (pull_request_latest,
254 pull_request_at_ver,
254 pull_request_at_ver,
255 pull_request_display_obj,
255 pull_request_display_obj,
256 at_version) = PullRequestModel().get_pr_version(
256 at_version) = PullRequestModel().get_pr_version(
257 pull_request_id, version=version)
257 pull_request_id, version=version)
258 pr_closed = pull_request_latest.is_closed()
258 pr_closed = pull_request_latest.is_closed()
259
259
260 if pr_closed and (version or from_version):
260 if pr_closed and (version or from_version):
261 # not allow to browse versions
261 # not allow to browse versions
262 raise HTTPFound(h.route_path(
262 raise HTTPFound(h.route_path(
263 'pullrequest_show', repo_name=self.db_repo_name,
263 'pullrequest_show', repo_name=self.db_repo_name,
264 pull_request_id=pull_request_id))
264 pull_request_id=pull_request_id))
265
265
266 versions = pull_request_display_obj.versions()
266 versions = pull_request_display_obj.versions()
267
267
268 c.at_version = at_version
268 c.at_version = at_version
269 c.at_version_num = (at_version
269 c.at_version_num = (at_version
270 if at_version and at_version != 'latest'
270 if at_version and at_version != 'latest'
271 else None)
271 else None)
272 c.at_version_pos = ChangesetComment.get_index_from_version(
272 c.at_version_pos = ChangesetComment.get_index_from_version(
273 c.at_version_num, versions)
273 c.at_version_num, versions)
274
274
275 (prev_pull_request_latest,
275 (prev_pull_request_latest,
276 prev_pull_request_at_ver,
276 prev_pull_request_at_ver,
277 prev_pull_request_display_obj,
277 prev_pull_request_display_obj,
278 prev_at_version) = PullRequestModel().get_pr_version(
278 prev_at_version) = PullRequestModel().get_pr_version(
279 pull_request_id, version=from_version)
279 pull_request_id, version=from_version)
280
280
281 c.from_version = prev_at_version
281 c.from_version = prev_at_version
282 c.from_version_num = (prev_at_version
282 c.from_version_num = (prev_at_version
283 if prev_at_version and prev_at_version != 'latest'
283 if prev_at_version and prev_at_version != 'latest'
284 else None)
284 else None)
285 c.from_version_pos = ChangesetComment.get_index_from_version(
285 c.from_version_pos = ChangesetComment.get_index_from_version(
286 c.from_version_num, versions)
286 c.from_version_num, versions)
287
287
288 # define if we're in COMPARE mode or VIEW at version mode
288 # define if we're in COMPARE mode or VIEW at version mode
289 compare = at_version != prev_at_version
289 compare = at_version != prev_at_version
290
290
291 # pull_requests repo_name we opened it against
291 # pull_requests repo_name we opened it against
292 # ie. target_repo must match
292 # ie. target_repo must match
293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
294 raise HTTPNotFound()
294 raise HTTPNotFound()
295
295
296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
297 pull_request_at_ver)
297 pull_request_at_ver)
298
298
299 c.pull_request = pull_request_display_obj
299 c.pull_request = pull_request_display_obj
300 c.pull_request_latest = pull_request_latest
300 c.pull_request_latest = pull_request_latest
301
301
302 if compare or (at_version and not at_version == 'latest'):
302 if compare or (at_version and not at_version == 'latest'):
303 c.allowed_to_change_status = False
303 c.allowed_to_change_status = False
304 c.allowed_to_update = False
304 c.allowed_to_update = False
305 c.allowed_to_merge = False
305 c.allowed_to_merge = False
306 c.allowed_to_delete = False
306 c.allowed_to_delete = False
307 c.allowed_to_comment = False
307 c.allowed_to_comment = False
308 c.allowed_to_close = False
308 c.allowed_to_close = False
309 else:
309 else:
310 can_change_status = PullRequestModel().check_user_change_status(
310 can_change_status = PullRequestModel().check_user_change_status(
311 pull_request_at_ver, self._rhodecode_user)
311 pull_request_at_ver, self._rhodecode_user)
312 c.allowed_to_change_status = can_change_status and not pr_closed
312 c.allowed_to_change_status = can_change_status and not pr_closed
313
313
314 c.allowed_to_update = PullRequestModel().check_user_update(
314 c.allowed_to_update = PullRequestModel().check_user_update(
315 pull_request_latest, self._rhodecode_user) and not pr_closed
315 pull_request_latest, self._rhodecode_user) and not pr_closed
316 c.allowed_to_merge = PullRequestModel().check_user_merge(
316 c.allowed_to_merge = PullRequestModel().check_user_merge(
317 pull_request_latest, self._rhodecode_user) and not pr_closed
317 pull_request_latest, self._rhodecode_user) and not pr_closed
318 c.allowed_to_delete = PullRequestModel().check_user_delete(
318 c.allowed_to_delete = PullRequestModel().check_user_delete(
319 pull_request_latest, self._rhodecode_user) and not pr_closed
319 pull_request_latest, self._rhodecode_user) and not pr_closed
320 c.allowed_to_comment = not pr_closed
320 c.allowed_to_comment = not pr_closed
321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
322
322
323 c.forbid_adding_reviewers = False
323 c.forbid_adding_reviewers = False
324 c.forbid_author_to_review = False
324 c.forbid_author_to_review = False
325 c.forbid_commit_author_to_review = False
325 c.forbid_commit_author_to_review = False
326
326
327 if pull_request_latest.reviewer_data and \
327 if pull_request_latest.reviewer_data and \
328 'rules' in pull_request_latest.reviewer_data:
328 'rules' in pull_request_latest.reviewer_data:
329 rules = pull_request_latest.reviewer_data['rules'] or {}
329 rules = pull_request_latest.reviewer_data['rules'] or {}
330 try:
330 try:
331 c.forbid_adding_reviewers = rules.get(
331 c.forbid_adding_reviewers = rules.get(
332 'forbid_adding_reviewers')
332 'forbid_adding_reviewers')
333 c.forbid_author_to_review = rules.get(
333 c.forbid_author_to_review = rules.get(
334 'forbid_author_to_review')
334 'forbid_author_to_review')
335 c.forbid_commit_author_to_review = rules.get(
335 c.forbid_commit_author_to_review = rules.get(
336 'forbid_commit_author_to_review')
336 'forbid_commit_author_to_review')
337 except Exception:
337 except Exception:
338 pass
338 pass
339
339
340 # check merge capabilities
340 # check merge capabilities
341 _merge_check = MergeCheck.validate(
341 _merge_check = MergeCheck.validate(
342 pull_request_latest, user=self._rhodecode_user,
342 pull_request_latest, user=self._rhodecode_user,
343 translator=self.request.translate, force_shadow_repo_refresh=force_refresh)
343 translator=self.request.translate,
344 force_shadow_repo_refresh=force_refresh)
344 c.pr_merge_errors = _merge_check.error_details
345 c.pr_merge_errors = _merge_check.error_details
345 c.pr_merge_possible = not _merge_check.failed
346 c.pr_merge_possible = not _merge_check.failed
346 c.pr_merge_message = _merge_check.merge_msg
347 c.pr_merge_message = _merge_check.merge_msg
347
348
348 c.pr_merge_info = MergeCheck.get_merge_conditions(
349 c.pr_merge_info = MergeCheck.get_merge_conditions(
349 pull_request_latest, translator=self.request.translate)
350 pull_request_latest, translator=self.request.translate)
350
351
351 c.pull_request_review_status = _merge_check.review_status
352 c.pull_request_review_status = _merge_check.review_status
352 if merge_checks:
353 if merge_checks:
353 self.request.override_renderer = \
354 self.request.override_renderer = \
354 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
355 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
355 return self._get_template_context(c)
356 return self._get_template_context(c)
356
357
357 comments_model = CommentsModel()
358 comments_model = CommentsModel()
358
359
359 # reviewers and statuses
360 # reviewers and statuses
360 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
361 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
361 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
362 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
362
363
363 # GENERAL COMMENTS with versions #
364 # GENERAL COMMENTS with versions #
364 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
365 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
365 q = q.order_by(ChangesetComment.comment_id.asc())
366 q = q.order_by(ChangesetComment.comment_id.asc())
366 general_comments = q
367 general_comments = q
367
368
368 # pick comments we want to render at current version
369 # pick comments we want to render at current version
369 c.comment_versions = comments_model.aggregate_comments(
370 c.comment_versions = comments_model.aggregate_comments(
370 general_comments, versions, c.at_version_num)
371 general_comments, versions, c.at_version_num)
371 c.comments = c.comment_versions[c.at_version_num]['until']
372 c.comments = c.comment_versions[c.at_version_num]['until']
372
373
373 # INLINE COMMENTS with versions #
374 # INLINE COMMENTS with versions #
374 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
375 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
375 q = q.order_by(ChangesetComment.comment_id.asc())
376 q = q.order_by(ChangesetComment.comment_id.asc())
376 inline_comments = q
377 inline_comments = q
377
378
378 c.inline_versions = comments_model.aggregate_comments(
379 c.inline_versions = comments_model.aggregate_comments(
379 inline_comments, versions, c.at_version_num, inline=True)
380 inline_comments, versions, c.at_version_num, inline=True)
380
381
381 # inject latest version
382 # inject latest version
382 latest_ver = PullRequest.get_pr_display_object(
383 latest_ver = PullRequest.get_pr_display_object(
383 pull_request_latest, pull_request_latest)
384 pull_request_latest, pull_request_latest)
384
385
385 c.versions = versions + [latest_ver]
386 c.versions = versions + [latest_ver]
386
387
387 # if we use version, then do not show later comments
388 # if we use version, then do not show later comments
388 # than current version
389 # than current version
389 display_inline_comments = collections.defaultdict(
390 display_inline_comments = collections.defaultdict(
390 lambda: collections.defaultdict(list))
391 lambda: collections.defaultdict(list))
391 for co in inline_comments:
392 for co in inline_comments:
392 if c.at_version_num:
393 if c.at_version_num:
393 # pick comments that are at least UPTO given version, so we
394 # pick comments that are at least UPTO given version, so we
394 # don't render comments for higher version
395 # don't render comments for higher version
395 should_render = co.pull_request_version_id and \
396 should_render = co.pull_request_version_id and \
396 co.pull_request_version_id <= c.at_version_num
397 co.pull_request_version_id <= c.at_version_num
397 else:
398 else:
398 # showing all, for 'latest'
399 # showing all, for 'latest'
399 should_render = True
400 should_render = True
400
401
401 if should_render:
402 if should_render:
402 display_inline_comments[co.f_path][co.line_no].append(co)
403 display_inline_comments[co.f_path][co.line_no].append(co)
403
404
404 # load diff data into template context, if we use compare mode then
405 # load diff data into template context, if we use compare mode then
405 # diff is calculated based on changes between versions of PR
406 # diff is calculated based on changes between versions of PR
406
407
407 source_repo = pull_request_at_ver.source_repo
408 source_repo = pull_request_at_ver.source_repo
408 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
409 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
409
410
410 target_repo = pull_request_at_ver.target_repo
411 target_repo = pull_request_at_ver.target_repo
411 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
412 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
412
413
413 if compare:
414 if compare:
414 # in compare switch the diff base to latest commit from prev version
415 # in compare switch the diff base to latest commit from prev version
415 target_ref_id = prev_pull_request_display_obj.revisions[0]
416 target_ref_id = prev_pull_request_display_obj.revisions[0]
416
417
417 # despite opening commits for bookmarks/branches/tags, we always
418 # despite opening commits for bookmarks/branches/tags, we always
418 # convert this to rev to prevent changes after bookmark or branch change
419 # convert this to rev to prevent changes after bookmark or branch change
419 c.source_ref_type = 'rev'
420 c.source_ref_type = 'rev'
420 c.source_ref = source_ref_id
421 c.source_ref = source_ref_id
421
422
422 c.target_ref_type = 'rev'
423 c.target_ref_type = 'rev'
423 c.target_ref = target_ref_id
424 c.target_ref = target_ref_id
424
425
425 c.source_repo = source_repo
426 c.source_repo = source_repo
426 c.target_repo = target_repo
427 c.target_repo = target_repo
427
428
428 c.commit_ranges = []
429 c.commit_ranges = []
429 source_commit = EmptyCommit()
430 source_commit = EmptyCommit()
430 target_commit = EmptyCommit()
431 target_commit = EmptyCommit()
431 c.missing_requirements = False
432 c.missing_requirements = False
432
433
433 source_scm = source_repo.scm_instance()
434 source_scm = source_repo.scm_instance()
434 target_scm = target_repo.scm_instance()
435 target_scm = target_repo.scm_instance()
435
436
436 shadow_scm = None
437 shadow_scm = None
437 try:
438 try:
438 shadow_scm = pull_request_latest.get_shadow_repo()
439 shadow_scm = pull_request_latest.get_shadow_repo()
439 except Exception:
440 except Exception:
440 log.debug('Failed to get shadow repo', exc_info=True)
441 log.debug('Failed to get shadow repo', exc_info=True)
441 # try first the existing source_repo, and then shadow
442 # try first the existing source_repo, and then shadow
442 # repo if we can obtain one
443 # repo if we can obtain one
443 commits_source_repo = source_scm or shadow_scm
444 commits_source_repo = source_scm or shadow_scm
444
445
445 c.commits_source_repo = commits_source_repo
446 c.commits_source_repo = commits_source_repo
446 c.ancestor = None # set it to None, to hide it from PR view
447 c.ancestor = None # set it to None, to hide it from PR view
447
448
448 # empty version means latest, so we keep this to prevent
449 # empty version means latest, so we keep this to prevent
449 # double caching
450 # double caching
450 version_normalized = version or 'latest'
451 version_normalized = version or 'latest'
451 from_version_normalized = from_version or 'latest'
452 from_version_normalized = from_version or 'latest'
452
453
453 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
454 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
454 target_repo)
455 target_repo)
455 cache_file_path = diff_cache_exist(
456 cache_file_path = diff_cache_exist(
456 cache_path, 'pull_request', pull_request_id, version_normalized,
457 cache_path, 'pull_request', pull_request_id, version_normalized,
457 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
458 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
458
459
459 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
460 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
460 force_recache = str2bool(self.request.GET.get('force_recache'))
461 force_recache = str2bool(self.request.GET.get('force_recache'))
461
462
462 cached_diff = None
463 cached_diff = None
463 if caching_enabled:
464 if caching_enabled:
464 cached_diff = load_cached_diff(cache_file_path)
465 cached_diff = load_cached_diff(cache_file_path)
465
466
466 has_proper_commit_cache = (
467 has_proper_commit_cache = (
467 cached_diff and cached_diff.get('commits')
468 cached_diff and cached_diff.get('commits')
468 and len(cached_diff.get('commits', [])) == 5
469 and len(cached_diff.get('commits', [])) == 5
469 and cached_diff.get('commits')[0]
470 and cached_diff.get('commits')[0]
470 and cached_diff.get('commits')[3])
471 and cached_diff.get('commits')[3])
471 if not force_recache and has_proper_commit_cache:
472 if not force_recache and has_proper_commit_cache:
472 diff_commit_cache = \
473 diff_commit_cache = \
473 (ancestor_commit, commit_cache, missing_requirements,
474 (ancestor_commit, commit_cache, missing_requirements,
474 source_commit, target_commit) = cached_diff['commits']
475 source_commit, target_commit) = cached_diff['commits']
475 else:
476 else:
476 diff_commit_cache = \
477 diff_commit_cache = \
477 (ancestor_commit, commit_cache, missing_requirements,
478 (ancestor_commit, commit_cache, missing_requirements,
478 source_commit, target_commit) = self.get_commits(
479 source_commit, target_commit) = self.get_commits(
479 commits_source_repo,
480 commits_source_repo,
480 pull_request_at_ver,
481 pull_request_at_ver,
481 source_commit,
482 source_commit,
482 source_ref_id,
483 source_ref_id,
483 source_scm,
484 source_scm,
484 target_commit,
485 target_commit,
485 target_ref_id,
486 target_ref_id,
486 target_scm)
487 target_scm)
487
488
488 # register our commit range
489 # register our commit range
489 for comm in commit_cache.values():
490 for comm in commit_cache.values():
490 c.commit_ranges.append(comm)
491 c.commit_ranges.append(comm)
491
492
492 c.missing_requirements = missing_requirements
493 c.missing_requirements = missing_requirements
493 c.ancestor_commit = ancestor_commit
494 c.ancestor_commit = ancestor_commit
494 c.statuses = source_repo.statuses(
495 c.statuses = source_repo.statuses(
495 [x.raw_id for x in c.commit_ranges])
496 [x.raw_id for x in c.commit_ranges])
496
497
497 # auto collapse if we have more than limit
498 # auto collapse if we have more than limit
498 collapse_limit = diffs.DiffProcessor._collapse_commits_over
499 collapse_limit = diffs.DiffProcessor._collapse_commits_over
499 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
500 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
500 c.compare_mode = compare
501 c.compare_mode = compare
501
502
502 # diff_limit is the old behavior, will cut off the whole diff
503 # diff_limit is the old behavior, will cut off the whole diff
503 # if the limit is applied otherwise will just hide the
504 # if the limit is applied otherwise will just hide the
504 # big files from the front-end
505 # big files from the front-end
505 diff_limit = c.visual.cut_off_limit_diff
506 diff_limit = c.visual.cut_off_limit_diff
506 file_limit = c.visual.cut_off_limit_file
507 file_limit = c.visual.cut_off_limit_file
507
508
508 c.missing_commits = False
509 c.missing_commits = False
509 if (c.missing_requirements
510 if (c.missing_requirements
510 or isinstance(source_commit, EmptyCommit)
511 or isinstance(source_commit, EmptyCommit)
511 or source_commit == target_commit):
512 or source_commit == target_commit):
512
513
513 c.missing_commits = True
514 c.missing_commits = True
514 else:
515 else:
515 c.inline_comments = display_inline_comments
516 c.inline_comments = display_inline_comments
516
517
517 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
518 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
518 if not force_recache and has_proper_diff_cache:
519 if not force_recache and has_proper_diff_cache:
519 c.diffset = cached_diff['diff']
520 c.diffset = cached_diff['diff']
520 (ancestor_commit, commit_cache, missing_requirements,
521 (ancestor_commit, commit_cache, missing_requirements,
521 source_commit, target_commit) = cached_diff['commits']
522 source_commit, target_commit) = cached_diff['commits']
522 else:
523 else:
523 c.diffset = self._get_diffset(
524 c.diffset = self._get_diffset(
524 c.source_repo.repo_name, commits_source_repo,
525 c.source_repo.repo_name, commits_source_repo,
525 source_ref_id, target_ref_id,
526 source_ref_id, target_ref_id,
526 target_commit, source_commit,
527 target_commit, source_commit,
527 diff_limit, file_limit, c.fulldiff)
528 diff_limit, file_limit, c.fulldiff)
528
529
529 # save cached diff
530 # save cached diff
530 if caching_enabled:
531 if caching_enabled:
531 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
532 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
532
533
533 c.limited_diff = c.diffset.limited_diff
534 c.limited_diff = c.diffset.limited_diff
534
535
535 # calculate removed files that are bound to comments
536 # calculate removed files that are bound to comments
536 comment_deleted_files = [
537 comment_deleted_files = [
537 fname for fname in display_inline_comments
538 fname for fname in display_inline_comments
538 if fname not in c.diffset.file_stats]
539 if fname not in c.diffset.file_stats]
539
540
540 c.deleted_files_comments = collections.defaultdict(dict)
541 c.deleted_files_comments = collections.defaultdict(dict)
541 for fname, per_line_comments in display_inline_comments.items():
542 for fname, per_line_comments in display_inline_comments.items():
542 if fname in comment_deleted_files:
543 if fname in comment_deleted_files:
543 c.deleted_files_comments[fname]['stats'] = 0
544 c.deleted_files_comments[fname]['stats'] = 0
544 c.deleted_files_comments[fname]['comments'] = list()
545 c.deleted_files_comments[fname]['comments'] = list()
545 for lno, comments in per_line_comments.items():
546 for lno, comments in per_line_comments.items():
546 c.deleted_files_comments[fname]['comments'].extend(
547 c.deleted_files_comments[fname]['comments'].extend(
547 comments)
548 comments)
548
549
549 # this is a hack to properly display links, when creating PR, the
550 # this is a hack to properly display links, when creating PR, the
550 # compare view and others uses different notation, and
551 # compare view and others uses different notation, and
551 # compare_commits.mako renders links based on the target_repo.
552 # compare_commits.mako renders links based on the target_repo.
552 # We need to swap that here to generate it properly on the html side
553 # We need to swap that here to generate it properly on the html side
553 c.target_repo = c.source_repo
554 c.target_repo = c.source_repo
554
555
555 c.commit_statuses = ChangesetStatus.STATUSES
556 c.commit_statuses = ChangesetStatus.STATUSES
556
557
557 c.show_version_changes = not pr_closed
558 c.show_version_changes = not pr_closed
558 if c.show_version_changes:
559 if c.show_version_changes:
559 cur_obj = pull_request_at_ver
560 cur_obj = pull_request_at_ver
560 prev_obj = prev_pull_request_at_ver
561 prev_obj = prev_pull_request_at_ver
561
562
562 old_commit_ids = prev_obj.revisions
563 old_commit_ids = prev_obj.revisions
563 new_commit_ids = cur_obj.revisions
564 new_commit_ids = cur_obj.revisions
564 commit_changes = PullRequestModel()._calculate_commit_id_changes(
565 commit_changes = PullRequestModel()._calculate_commit_id_changes(
565 old_commit_ids, new_commit_ids)
566 old_commit_ids, new_commit_ids)
566 c.commit_changes_summary = commit_changes
567 c.commit_changes_summary = commit_changes
567
568
568 # calculate the diff for commits between versions
569 # calculate the diff for commits between versions
569 c.commit_changes = []
570 c.commit_changes = []
570 mark = lambda cs, fw: list(
571 mark = lambda cs, fw: list(
571 h.itertools.izip_longest([], cs, fillvalue=fw))
572 h.itertools.izip_longest([], cs, fillvalue=fw))
572 for c_type, raw_id in mark(commit_changes.added, 'a') \
573 for c_type, raw_id in mark(commit_changes.added, 'a') \
573 + mark(commit_changes.removed, 'r') \
574 + mark(commit_changes.removed, 'r') \
574 + mark(commit_changes.common, 'c'):
575 + mark(commit_changes.common, 'c'):
575
576
576 if raw_id in commit_cache:
577 if raw_id in commit_cache:
577 commit = commit_cache[raw_id]
578 commit = commit_cache[raw_id]
578 else:
579 else:
579 try:
580 try:
580 commit = commits_source_repo.get_commit(raw_id)
581 commit = commits_source_repo.get_commit(raw_id)
581 except CommitDoesNotExistError:
582 except CommitDoesNotExistError:
582 # in case we fail extracting still use "dummy" commit
583 # in case we fail extracting still use "dummy" commit
583 # for display in commit diff
584 # for display in commit diff
584 commit = h.AttributeDict(
585 commit = h.AttributeDict(
585 {'raw_id': raw_id,
586 {'raw_id': raw_id,
586 'message': 'EMPTY or MISSING COMMIT'})
587 'message': 'EMPTY or MISSING COMMIT'})
587 c.commit_changes.append([c_type, commit])
588 c.commit_changes.append([c_type, commit])
588
589
589 # current user review statuses for each version
590 # current user review statuses for each version
590 c.review_versions = {}
591 c.review_versions = {}
591 if self._rhodecode_user.user_id in allowed_reviewers:
592 if self._rhodecode_user.user_id in allowed_reviewers:
592 for co in general_comments:
593 for co in general_comments:
593 if co.author.user_id == self._rhodecode_user.user_id:
594 if co.author.user_id == self._rhodecode_user.user_id:
594 status = co.status_change
595 status = co.status_change
595 if status:
596 if status:
596 _ver_pr = status[0].comment.pull_request_version_id
597 _ver_pr = status[0].comment.pull_request_version_id
597 c.review_versions[_ver_pr] = status[0]
598 c.review_versions[_ver_pr] = status[0]
598
599
599 return self._get_template_context(c)
600 return self._get_template_context(c)
600
601
    def get_commits(
            self, commits_source_repo, pull_request_at_ver, source_commit,
            source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
        """
        Load the commits belonging to a pull request version.

        Returns a 5-tuple
        ``(ancestor_commit, commit_cache, missing_requirements,
        source_commit, target_commit)``:

        * ``ancestor_commit``: common ancestor commit of source/target, or
          ``None`` when it could not be computed.
        * ``commit_cache``: ordered mapping of ``raw_id`` -> commit object for
          every revision of the pull request (insertion order preserved).
        * ``missing_requirements``: ``True`` when reading the repo raised
          ``RepositoryRequirementError``.
        * ``source_commit``/``target_commit``: resolved commit objects; on
          lookup failure the values passed in by the caller (the caller passes
          ``EmptyCommit`` instances) are returned unchanged.
        """
        commit_cache = collections.OrderedDict()
        missing_requirements = False
        try:
            # bulk pre-load the attributes shown in the commit list so each
            # commit is fetched with a single backend call
            pre_load = ["author", "branch", "date", "message"]
            show_revs = pull_request_at_ver.revisions
            for rev in show_revs:
                comm = commits_source_repo.get_commit(
                    commit_id=rev, pre_load=pre_load)
                commit_cache[comm.raw_id] = comm

            # Order here matters, we first need to get target, and then
            # the source
            target_commit = commits_source_repo.get_commit(
                commit_id=safe_str(target_ref_id))

            source_commit = commits_source_repo.get_commit(
                commit_id=safe_str(source_ref_id))
        except CommitDoesNotExistError:
            # commit is gone (e.g. stripped); keep the caller-provided
            # (Empty) commits so the caller can detect the situation
            log.warning(
                'Failed to get commit from `{}` repo'.format(
                    commits_source_repo), exc_info=True)
        except RepositoryRequirementError:
            log.warning(
                'Failed to get all required data from repo', exc_info=True)
            missing_requirements = True
        # best-effort ancestor lookup: any failure (including EmptyCommit
        # fall-through from the block above) yields no ancestor
        ancestor_commit = None
        try:
            ancestor_id = source_scm.get_common_ancestor(
                source_commit.raw_id, target_commit.raw_id, target_scm)
            ancestor_commit = source_scm.get_commit(ancestor_id)
        except Exception:
            ancestor_commit = None
        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
637
638
638 def assure_not_empty_repo(self):
639 def assure_not_empty_repo(self):
639 _ = self.request.translate
640 _ = self.request.translate
640
641
641 try:
642 try:
642 self.db_repo.scm_instance().get_commit()
643 self.db_repo.scm_instance().get_commit()
643 except EmptyRepositoryError:
644 except EmptyRepositoryError:
644 h.flash(h.literal(_('There are no commits yet')),
645 h.flash(h.literal(_('There are no commits yet')),
645 category='warning')
646 category='warning')
646 raise HTTPFound(
647 raise HTTPFound(
647 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
648 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
648
649
649 @LoginRequired()
650 @LoginRequired()
650 @NotAnonymous()
651 @NotAnonymous()
651 @HasRepoPermissionAnyDecorator(
652 @HasRepoPermissionAnyDecorator(
652 'repository.read', 'repository.write', 'repository.admin')
653 'repository.read', 'repository.write', 'repository.admin')
653 @view_config(
654 @view_config(
654 route_name='pullrequest_new', request_method='GET',
655 route_name='pullrequest_new', request_method='GET',
655 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
656 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
656 def pull_request_new(self):
657 def pull_request_new(self):
657 _ = self.request.translate
658 _ = self.request.translate
658 c = self.load_default_context()
659 c = self.load_default_context()
659
660
660 self.assure_not_empty_repo()
661 self.assure_not_empty_repo()
661 source_repo = self.db_repo
662 source_repo = self.db_repo
662
663
663 commit_id = self.request.GET.get('commit')
664 commit_id = self.request.GET.get('commit')
664 branch_ref = self.request.GET.get('branch')
665 branch_ref = self.request.GET.get('branch')
665 bookmark_ref = self.request.GET.get('bookmark')
666 bookmark_ref = self.request.GET.get('bookmark')
666
667
667 try:
668 try:
668 source_repo_data = PullRequestModel().generate_repo_data(
669 source_repo_data = PullRequestModel().generate_repo_data(
669 source_repo, commit_id=commit_id,
670 source_repo, commit_id=commit_id,
670 branch=branch_ref, bookmark=bookmark_ref,
671 branch=branch_ref, bookmark=bookmark_ref,
671 translator=self.request.translate)
672 translator=self.request.translate)
672 except CommitDoesNotExistError as e:
673 except CommitDoesNotExistError as e:
673 log.exception(e)
674 log.exception(e)
674 h.flash(_('Commit does not exist'), 'error')
675 h.flash(_('Commit does not exist'), 'error')
675 raise HTTPFound(
676 raise HTTPFound(
676 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
677 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
677
678
678 default_target_repo = source_repo
679 default_target_repo = source_repo
679
680
680 if source_repo.parent:
681 if source_repo.parent:
681 parent_vcs_obj = source_repo.parent.scm_instance()
682 parent_vcs_obj = source_repo.parent.scm_instance()
682 if parent_vcs_obj and not parent_vcs_obj.is_empty():
683 if parent_vcs_obj and not parent_vcs_obj.is_empty():
683 # change default if we have a parent repo
684 # change default if we have a parent repo
684 default_target_repo = source_repo.parent
685 default_target_repo = source_repo.parent
685
686
686 target_repo_data = PullRequestModel().generate_repo_data(
687 target_repo_data = PullRequestModel().generate_repo_data(
687 default_target_repo, translator=self.request.translate)
688 default_target_repo, translator=self.request.translate)
688
689
689 selected_source_ref = source_repo_data['refs']['selected_ref']
690 selected_source_ref = source_repo_data['refs']['selected_ref']
690 title_source_ref = ''
691 title_source_ref = ''
691 if selected_source_ref:
692 if selected_source_ref:
692 title_source_ref = selected_source_ref.split(':', 2)[1]
693 title_source_ref = selected_source_ref.split(':', 2)[1]
693 c.default_title = PullRequestModel().generate_pullrequest_title(
694 c.default_title = PullRequestModel().generate_pullrequest_title(
694 source=source_repo.repo_name,
695 source=source_repo.repo_name,
695 source_ref=title_source_ref,
696 source_ref=title_source_ref,
696 target=default_target_repo.repo_name
697 target=default_target_repo.repo_name
697 )
698 )
698
699
699 c.default_repo_data = {
700 c.default_repo_data = {
700 'source_repo_name': source_repo.repo_name,
701 'source_repo_name': source_repo.repo_name,
701 'source_refs_json': json.dumps(source_repo_data),
702 'source_refs_json': json.dumps(source_repo_data),
702 'target_repo_name': default_target_repo.repo_name,
703 'target_repo_name': default_target_repo.repo_name,
703 'target_refs_json': json.dumps(target_repo_data),
704 'target_refs_json': json.dumps(target_repo_data),
704 }
705 }
705 c.default_source_ref = selected_source_ref
706 c.default_source_ref = selected_source_ref
706
707
707 return self._get_template_context(c)
708 return self._get_template_context(c)
708
709
709 @LoginRequired()
710 @LoginRequired()
710 @NotAnonymous()
711 @NotAnonymous()
711 @HasRepoPermissionAnyDecorator(
712 @HasRepoPermissionAnyDecorator(
712 'repository.read', 'repository.write', 'repository.admin')
713 'repository.read', 'repository.write', 'repository.admin')
713 @view_config(
714 @view_config(
714 route_name='pullrequest_repo_refs', request_method='GET',
715 route_name='pullrequest_repo_refs', request_method='GET',
715 renderer='json_ext', xhr=True)
716 renderer='json_ext', xhr=True)
716 def pull_request_repo_refs(self):
717 def pull_request_repo_refs(self):
717 self.load_default_context()
718 self.load_default_context()
718 target_repo_name = self.request.matchdict['target_repo_name']
719 target_repo_name = self.request.matchdict['target_repo_name']
719 repo = Repository.get_by_repo_name(target_repo_name)
720 repo = Repository.get_by_repo_name(target_repo_name)
720 if not repo:
721 if not repo:
721 raise HTTPNotFound()
722 raise HTTPNotFound()
722
723
723 target_perm = HasRepoPermissionAny(
724 target_perm = HasRepoPermissionAny(
724 'repository.read', 'repository.write', 'repository.admin')(
725 'repository.read', 'repository.write', 'repository.admin')(
725 target_repo_name)
726 target_repo_name)
726 if not target_perm:
727 if not target_perm:
727 raise HTTPNotFound()
728 raise HTTPNotFound()
728
729
729 return PullRequestModel().generate_repo_data(
730 return PullRequestModel().generate_repo_data(
730 repo, translator=self.request.translate)
731 repo, translator=self.request.translate)
731
732
732 @LoginRequired()
733 @LoginRequired()
733 @NotAnonymous()
734 @NotAnonymous()
734 @HasRepoPermissionAnyDecorator(
735 @HasRepoPermissionAnyDecorator(
735 'repository.read', 'repository.write', 'repository.admin')
736 'repository.read', 'repository.write', 'repository.admin')
736 @view_config(
737 @view_config(
737 route_name='pullrequest_repo_destinations', request_method='GET',
738 route_name='pullrequest_repo_destinations', request_method='GET',
738 renderer='json_ext', xhr=True)
739 renderer='json_ext', xhr=True)
739 def pull_request_repo_destinations(self):
740 def pull_request_repo_destinations(self):
740 _ = self.request.translate
741 _ = self.request.translate
741 filter_query = self.request.GET.get('query')
742 filter_query = self.request.GET.get('query')
742
743
743 query = Repository.query() \
744 query = Repository.query() \
744 .order_by(func.length(Repository.repo_name)) \
745 .order_by(func.length(Repository.repo_name)) \
745 .filter(
746 .filter(
746 or_(Repository.repo_name == self.db_repo.repo_name,
747 or_(Repository.repo_name == self.db_repo.repo_name,
747 Repository.fork_id == self.db_repo.repo_id))
748 Repository.fork_id == self.db_repo.repo_id))
748
749
749 if filter_query:
750 if filter_query:
750 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
751 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
751 query = query.filter(
752 query = query.filter(
752 Repository.repo_name.ilike(ilike_expression))
753 Repository.repo_name.ilike(ilike_expression))
753
754
754 add_parent = False
755 add_parent = False
755 if self.db_repo.parent:
756 if self.db_repo.parent:
756 if filter_query in self.db_repo.parent.repo_name:
757 if filter_query in self.db_repo.parent.repo_name:
757 parent_vcs_obj = self.db_repo.parent.scm_instance()
758 parent_vcs_obj = self.db_repo.parent.scm_instance()
758 if parent_vcs_obj and not parent_vcs_obj.is_empty():
759 if parent_vcs_obj and not parent_vcs_obj.is_empty():
759 add_parent = True
760 add_parent = True
760
761
761 limit = 20 - 1 if add_parent else 20
762 limit = 20 - 1 if add_parent else 20
762 all_repos = query.limit(limit).all()
763 all_repos = query.limit(limit).all()
763 if add_parent:
764 if add_parent:
764 all_repos += [self.db_repo.parent]
765 all_repos += [self.db_repo.parent]
765
766
766 repos = []
767 repos = []
767 for obj in ScmModel().get_repos(all_repos):
768 for obj in ScmModel().get_repos(all_repos):
768 repos.append({
769 repos.append({
769 'id': obj['name'],
770 'id': obj['name'],
770 'text': obj['name'],
771 'text': obj['name'],
771 'type': 'repo',
772 'type': 'repo',
772 'repo_id': obj['dbrepo']['repo_id'],
773 'repo_id': obj['dbrepo']['repo_id'],
773 'repo_type': obj['dbrepo']['repo_type'],
774 'repo_type': obj['dbrepo']['repo_type'],
774 'private': obj['dbrepo']['private'],
775 'private': obj['dbrepo']['private'],
775
776
776 })
777 })
777
778
778 data = {
779 data = {
779 'more': False,
780 'more': False,
780 'results': [{
781 'results': [{
781 'text': _('Repositories'),
782 'text': _('Repositories'),
782 'children': repos
783 'children': repos
783 }] if repos else []
784 }] if repos else []
784 }
785 }
785 return data
786 return data
786
787
787 @LoginRequired()
788 @LoginRequired()
788 @NotAnonymous()
789 @NotAnonymous()
789 @HasRepoPermissionAnyDecorator(
790 @HasRepoPermissionAnyDecorator(
790 'repository.read', 'repository.write', 'repository.admin')
791 'repository.read', 'repository.write', 'repository.admin')
791 @CSRFRequired()
792 @CSRFRequired()
792 @view_config(
793 @view_config(
793 route_name='pullrequest_create', request_method='POST',
794 route_name='pullrequest_create', request_method='POST',
794 renderer=None)
795 renderer=None)
795 def pull_request_create(self):
796 def pull_request_create(self):
796 _ = self.request.translate
797 _ = self.request.translate
797 self.assure_not_empty_repo()
798 self.assure_not_empty_repo()
798 self.load_default_context()
799 self.load_default_context()
799
800
800 controls = peppercorn.parse(self.request.POST.items())
801 controls = peppercorn.parse(self.request.POST.items())
801
802
802 try:
803 try:
803 form = PullRequestForm(
804 form = PullRequestForm(
804 self.request.translate, self.db_repo.repo_id)()
805 self.request.translate, self.db_repo.repo_id)()
805 _form = form.to_python(controls)
806 _form = form.to_python(controls)
806 except formencode.Invalid as errors:
807 except formencode.Invalid as errors:
807 if errors.error_dict.get('revisions'):
808 if errors.error_dict.get('revisions'):
808 msg = 'Revisions: %s' % errors.error_dict['revisions']
809 msg = 'Revisions: %s' % errors.error_dict['revisions']
809 elif errors.error_dict.get('pullrequest_title'):
810 elif errors.error_dict.get('pullrequest_title'):
810 msg = errors.error_dict.get('pullrequest_title')
811 msg = errors.error_dict.get('pullrequest_title')
811 else:
812 else:
812 msg = _('Error creating pull request: {}').format(errors)
813 msg = _('Error creating pull request: {}').format(errors)
813 log.exception(msg)
814 log.exception(msg)
814 h.flash(msg, 'error')
815 h.flash(msg, 'error')
815
816
816 # would rather just go back to form ...
817 # would rather just go back to form ...
817 raise HTTPFound(
818 raise HTTPFound(
818 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
819 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
819
820
820 source_repo = _form['source_repo']
821 source_repo = _form['source_repo']
821 source_ref = _form['source_ref']
822 source_ref = _form['source_ref']
822 target_repo = _form['target_repo']
823 target_repo = _form['target_repo']
823 target_ref = _form['target_ref']
824 target_ref = _form['target_ref']
824 commit_ids = _form['revisions'][::-1]
825 commit_ids = _form['revisions'][::-1]
825
826
826 # find the ancestor for this pr
827 # find the ancestor for this pr
827 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
828 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
828 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
829 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
829
830
830 # re-check permissions again here
831 # re-check permissions again here
831 # source_repo we must have read permissions
832 # source_repo we must have read permissions
832
833
833 source_perm = HasRepoPermissionAny(
834 source_perm = HasRepoPermissionAny(
834 'repository.read',
835 'repository.read',
835 'repository.write', 'repository.admin')(source_db_repo.repo_name)
836 'repository.write', 'repository.admin')(source_db_repo.repo_name)
836 if not source_perm:
837 if not source_perm:
837 msg = _('Not Enough permissions to source repo `{}`.'.format(
838 msg = _('Not Enough permissions to source repo `{}`.'.format(
838 source_db_repo.repo_name))
839 source_db_repo.repo_name))
839 h.flash(msg, category='error')
840 h.flash(msg, category='error')
840 # copy the args back to redirect
841 # copy the args back to redirect
841 org_query = self.request.GET.mixed()
842 org_query = self.request.GET.mixed()
842 raise HTTPFound(
843 raise HTTPFound(
843 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
844 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
844 _query=org_query))
845 _query=org_query))
845
846
846 # target repo we must have read permissions, and also later on
847 # target repo we must have read permissions, and also later on
847 # we want to check branch permissions here
848 # we want to check branch permissions here
848 target_perm = HasRepoPermissionAny(
849 target_perm = HasRepoPermissionAny(
849 'repository.read',
850 'repository.read',
850 'repository.write', 'repository.admin')(target_db_repo.repo_name)
851 'repository.write', 'repository.admin')(target_db_repo.repo_name)
851 if not target_perm:
852 if not target_perm:
852 msg = _('Not Enough permissions to target repo `{}`.'.format(
853 msg = _('Not Enough permissions to target repo `{}`.'.format(
853 target_db_repo.repo_name))
854 target_db_repo.repo_name))
854 h.flash(msg, category='error')
855 h.flash(msg, category='error')
855 # copy the args back to redirect
856 # copy the args back to redirect
856 org_query = self.request.GET.mixed()
857 org_query = self.request.GET.mixed()
857 raise HTTPFound(
858 raise HTTPFound(
858 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
859 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
859 _query=org_query))
860 _query=org_query))
860
861
861 source_scm = source_db_repo.scm_instance()
862 source_scm = source_db_repo.scm_instance()
862 target_scm = target_db_repo.scm_instance()
863 target_scm = target_db_repo.scm_instance()
863
864
864 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
865 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
865 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
866 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
866
867
867 ancestor = source_scm.get_common_ancestor(
868 ancestor = source_scm.get_common_ancestor(
868 source_commit.raw_id, target_commit.raw_id, target_scm)
869 source_commit.raw_id, target_commit.raw_id, target_scm)
869
870
870 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
871 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
871 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
872 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
872
873
873 pullrequest_title = _form['pullrequest_title']
874 pullrequest_title = _form['pullrequest_title']
874 title_source_ref = source_ref.split(':', 2)[1]
875 title_source_ref = source_ref.split(':', 2)[1]
875 if not pullrequest_title:
876 if not pullrequest_title:
876 pullrequest_title = PullRequestModel().generate_pullrequest_title(
877 pullrequest_title = PullRequestModel().generate_pullrequest_title(
877 source=source_repo,
878 source=source_repo,
878 source_ref=title_source_ref,
879 source_ref=title_source_ref,
879 target=target_repo
880 target=target_repo
880 )
881 )
881
882
882 description = _form['pullrequest_desc']
883 description = _form['pullrequest_desc']
883
884
884 get_default_reviewers_data, validate_default_reviewers = \
885 get_default_reviewers_data, validate_default_reviewers = \
885 PullRequestModel().get_reviewer_functions()
886 PullRequestModel().get_reviewer_functions()
886
887
887 # recalculate reviewers logic, to make sure we can validate this
888 # recalculate reviewers logic, to make sure we can validate this
888 reviewer_rules = get_default_reviewers_data(
889 reviewer_rules = get_default_reviewers_data(
889 self._rhodecode_db_user, source_db_repo,
890 self._rhodecode_db_user, source_db_repo,
890 source_commit, target_db_repo, target_commit)
891 source_commit, target_db_repo, target_commit)
891
892
892 given_reviewers = _form['review_members']
893 given_reviewers = _form['review_members']
893 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
894 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
894
895
895 try:
896 try:
896 pull_request = PullRequestModel().create(
897 pull_request = PullRequestModel().create(
897 self._rhodecode_user.user_id, source_repo, source_ref,
898 self._rhodecode_user.user_id, source_repo, source_ref,
898 target_repo, target_ref, commit_ids, reviewers,
899 target_repo, target_ref, commit_ids, reviewers,
899 pullrequest_title, description, reviewer_rules,
900 pullrequest_title, description, reviewer_rules,
900 auth_user=self._rhodecode_user
901 auth_user=self._rhodecode_user
901 )
902 )
902 Session().commit()
903 Session().commit()
903
904
904 h.flash(_('Successfully opened new pull request'),
905 h.flash(_('Successfully opened new pull request'),
905 category='success')
906 category='success')
906 except Exception:
907 except Exception:
907 msg = _('Error occurred during creation of this pull request.')
908 msg = _('Error occurred during creation of this pull request.')
908 log.exception(msg)
909 log.exception(msg)
909 h.flash(msg, category='error')
910 h.flash(msg, category='error')
910
911
911 # copy the args back to redirect
912 # copy the args back to redirect
912 org_query = self.request.GET.mixed()
913 org_query = self.request.GET.mixed()
913 raise HTTPFound(
914 raise HTTPFound(
914 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
915 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
915 _query=org_query))
916 _query=org_query))
916
917
917 raise HTTPFound(
918 raise HTTPFound(
918 h.route_path('pullrequest_show', repo_name=target_repo,
919 h.route_path('pullrequest_show', repo_name=target_repo,
919 pull_request_id=pull_request.pull_request_id))
920 pull_request_id=pull_request.pull_request_id))
920
921
921 @LoginRequired()
922 @LoginRequired()
922 @NotAnonymous()
923 @NotAnonymous()
923 @HasRepoPermissionAnyDecorator(
924 @HasRepoPermissionAnyDecorator(
924 'repository.read', 'repository.write', 'repository.admin')
925 'repository.read', 'repository.write', 'repository.admin')
925 @CSRFRequired()
926 @CSRFRequired()
926 @view_config(
927 @view_config(
927 route_name='pullrequest_update', request_method='POST',
928 route_name='pullrequest_update', request_method='POST',
928 renderer='json_ext')
929 renderer='json_ext')
929 def pull_request_update(self):
930 def pull_request_update(self):
930 pull_request = PullRequest.get_or_404(
931 pull_request = PullRequest.get_or_404(
931 self.request.matchdict['pull_request_id'])
932 self.request.matchdict['pull_request_id'])
932 _ = self.request.translate
933 _ = self.request.translate
933
934
934 self.load_default_context()
935 self.load_default_context()
935
936
936 if pull_request.is_closed():
937 if pull_request.is_closed():
937 log.debug('update: forbidden because pull request is closed')
938 log.debug('update: forbidden because pull request is closed')
938 msg = _(u'Cannot update closed pull requests.')
939 msg = _(u'Cannot update closed pull requests.')
939 h.flash(msg, category='error')
940 h.flash(msg, category='error')
940 return True
941 return True
941
942
942 # only owner or admin can update it
943 # only owner or admin can update it
943 allowed_to_update = PullRequestModel().check_user_update(
944 allowed_to_update = PullRequestModel().check_user_update(
944 pull_request, self._rhodecode_user)
945 pull_request, self._rhodecode_user)
945 if allowed_to_update:
946 if allowed_to_update:
946 controls = peppercorn.parse(self.request.POST.items())
947 controls = peppercorn.parse(self.request.POST.items())
947
948
948 if 'review_members' in controls:
949 if 'review_members' in controls:
949 self._update_reviewers(
950 self._update_reviewers(
950 pull_request, controls['review_members'],
951 pull_request, controls['review_members'],
951 pull_request.reviewer_data)
952 pull_request.reviewer_data)
952 elif str2bool(self.request.POST.get('update_commits', 'false')):
953 elif str2bool(self.request.POST.get('update_commits', 'false')):
953 self._update_commits(pull_request)
954 self._update_commits(pull_request)
954 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
955 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
955 self._edit_pull_request(pull_request)
956 self._edit_pull_request(pull_request)
956 else:
957 else:
957 raise HTTPBadRequest()
958 raise HTTPBadRequest()
958 return True
959 return True
959 raise HTTPForbidden()
960 raise HTTPForbidden()
960
961
961 def _edit_pull_request(self, pull_request):
962 def _edit_pull_request(self, pull_request):
962 _ = self.request.translate
963 _ = self.request.translate
963 try:
964 try:
964 PullRequestModel().edit(
965 PullRequestModel().edit(
965 pull_request, self.request.POST.get('title'),
966 pull_request, self.request.POST.get('title'),
966 self.request.POST.get('description'), self._rhodecode_user)
967 self.request.POST.get('description'), self._rhodecode_user)
967 except ValueError:
968 except ValueError:
968 msg = _(u'Cannot update closed pull requests.')
969 msg = _(u'Cannot update closed pull requests.')
969 h.flash(msg, category='error')
970 h.flash(msg, category='error')
970 return
971 return
971 else:
972 else:
972 Session().commit()
973 Session().commit()
973
974
974 msg = _(u'Pull request title & description updated.')
975 msg = _(u'Pull request title & description updated.')
975 h.flash(msg, category='success')
976 h.flash(msg, category='success')
976 return
977 return
977
978
978 def _update_commits(self, pull_request):
979 def _update_commits(self, pull_request):
979 _ = self.request.translate
980 _ = self.request.translate
980 resp = PullRequestModel().update_commits(pull_request)
981 resp = PullRequestModel().update_commits(pull_request)
981
982
982 if resp.executed:
983 if resp.executed:
983
984
984 if resp.target_changed and resp.source_changed:
985 if resp.target_changed and resp.source_changed:
985 changed = 'target and source repositories'
986 changed = 'target and source repositories'
986 elif resp.target_changed and not resp.source_changed:
987 elif resp.target_changed and not resp.source_changed:
987 changed = 'target repository'
988 changed = 'target repository'
988 elif not resp.target_changed and resp.source_changed:
989 elif not resp.target_changed and resp.source_changed:
989 changed = 'source repository'
990 changed = 'source repository'
990 else:
991 else:
991 changed = 'nothing'
992 changed = 'nothing'
992
993
993 msg = _(
994 msg = _(
994 u'Pull request updated to "{source_commit_id}" with '
995 u'Pull request updated to "{source_commit_id}" with '
995 u'{count_added} added, {count_removed} removed commits. '
996 u'{count_added} added, {count_removed} removed commits. '
996 u'Source of changes: {change_source}')
997 u'Source of changes: {change_source}')
997 msg = msg.format(
998 msg = msg.format(
998 source_commit_id=pull_request.source_ref_parts.commit_id,
999 source_commit_id=pull_request.source_ref_parts.commit_id,
999 count_added=len(resp.changes.added),
1000 count_added=len(resp.changes.added),
1000 count_removed=len(resp.changes.removed),
1001 count_removed=len(resp.changes.removed),
1001 change_source=changed)
1002 change_source=changed)
1002 h.flash(msg, category='success')
1003 h.flash(msg, category='success')
1003
1004
1004 channel = '/repo${}$/pr/{}'.format(
1005 channel = '/repo${}$/pr/{}'.format(
1005 pull_request.target_repo.repo_name,
1006 pull_request.target_repo.repo_name,
1006 pull_request.pull_request_id)
1007 pull_request.pull_request_id)
1007 message = msg + (
1008 message = msg + (
1008 ' - <a onclick="window.location.reload()">'
1009 ' - <a onclick="window.location.reload()">'
1009 '<strong>{}</strong></a>'.format(_('Reload page')))
1010 '<strong>{}</strong></a>'.format(_('Reload page')))
1010 channelstream.post_message(
1011 channelstream.post_message(
1011 channel, message, self._rhodecode_user.username,
1012 channel, message, self._rhodecode_user.username,
1012 registry=self.request.registry)
1013 registry=self.request.registry)
1013 else:
1014 else:
1014 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1015 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1015 warning_reasons = [
1016 warning_reasons = [
1016 UpdateFailureReason.NO_CHANGE,
1017 UpdateFailureReason.NO_CHANGE,
1017 UpdateFailureReason.WRONG_REF_TYPE,
1018 UpdateFailureReason.WRONG_REF_TYPE,
1018 ]
1019 ]
1019 category = 'warning' if resp.reason in warning_reasons else 'error'
1020 category = 'warning' if resp.reason in warning_reasons else 'error'
1020 h.flash(msg, category=category)
1021 h.flash(msg, category=category)
1021
1022
1022 @LoginRequired()
1023 @LoginRequired()
1023 @NotAnonymous()
1024 @NotAnonymous()
1024 @HasRepoPermissionAnyDecorator(
1025 @HasRepoPermissionAnyDecorator(
1025 'repository.read', 'repository.write', 'repository.admin')
1026 'repository.read', 'repository.write', 'repository.admin')
1026 @CSRFRequired()
1027 @CSRFRequired()
1027 @view_config(
1028 @view_config(
1028 route_name='pullrequest_merge', request_method='POST',
1029 route_name='pullrequest_merge', request_method='POST',
1029 renderer='json_ext')
1030 renderer='json_ext')
1030 def pull_request_merge(self):
1031 def pull_request_merge(self):
1031 """
1032 """
1032 Merge will perform a server-side merge of the specified
1033 Merge will perform a server-side merge of the specified
1033 pull request, if the pull request is approved and mergeable.
1034 pull request, if the pull request is approved and mergeable.
1034 After successful merging, the pull request is automatically
1035 After successful merging, the pull request is automatically
1035 closed, with a relevant comment.
1036 closed, with a relevant comment.
1036 """
1037 """
1037 pull_request = PullRequest.get_or_404(
1038 pull_request = PullRequest.get_or_404(
1038 self.request.matchdict['pull_request_id'])
1039 self.request.matchdict['pull_request_id'])
1039
1040
1040 self.load_default_context()
1041 self.load_default_context()
1041 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1042 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1042 translator=self.request.translate)
1043 translator=self.request.translate)
1043 merge_possible = not check.failed
1044 merge_possible = not check.failed
1044
1045
1045 for err_type, error_msg in check.errors:
1046 for err_type, error_msg in check.errors:
1046 h.flash(error_msg, category=err_type)
1047 h.flash(error_msg, category=err_type)
1047
1048
1048 if merge_possible:
1049 if merge_possible:
1049 log.debug("Pre-conditions checked, trying to merge.")
1050 log.debug("Pre-conditions checked, trying to merge.")
1050 extras = vcs_operation_context(
1051 extras = vcs_operation_context(
1051 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1052 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1052 username=self._rhodecode_db_user.username, action='push',
1053 username=self._rhodecode_db_user.username, action='push',
1053 scm=pull_request.target_repo.repo_type)
1054 scm=pull_request.target_repo.repo_type)
1054 self._merge_pull_request(
1055 self._merge_pull_request(
1055 pull_request, self._rhodecode_db_user, extras)
1056 pull_request, self._rhodecode_db_user, extras)
1056 else:
1057 else:
1057 log.debug("Pre-conditions failed, NOT merging.")
1058 log.debug("Pre-conditions failed, NOT merging.")
1058
1059
1059 raise HTTPFound(
1060 raise HTTPFound(
1060 h.route_path('pullrequest_show',
1061 h.route_path('pullrequest_show',
1061 repo_name=pull_request.target_repo.repo_name,
1062 repo_name=pull_request.target_repo.repo_name,
1062 pull_request_id=pull_request.pull_request_id))
1063 pull_request_id=pull_request.pull_request_id))
1063
1064
1064 def _merge_pull_request(self, pull_request, user, extras):
1065 def _merge_pull_request(self, pull_request, user, extras):
1065 _ = self.request.translate
1066 _ = self.request.translate
1066 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1067 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1067
1068
1068 if merge_resp.executed:
1069 if merge_resp.executed:
1069 log.debug("The merge was successful, closing the pull request.")
1070 log.debug("The merge was successful, closing the pull request.")
1070 PullRequestModel().close_pull_request(
1071 PullRequestModel().close_pull_request(
1071 pull_request.pull_request_id, user)
1072 pull_request.pull_request_id, user)
1072 Session().commit()
1073 Session().commit()
1073 msg = _('Pull request was successfully merged and closed.')
1074 msg = _('Pull request was successfully merged and closed.')
1074 h.flash(msg, category='success')
1075 h.flash(msg, category='success')
1075 else:
1076 else:
1076 log.debug(
1077 log.debug(
1077 "The merge was not successful. Merge response: %s",
1078 "The merge was not successful. Merge response: %s",
1078 merge_resp)
1079 merge_resp)
1079 msg = PullRequestModel().merge_status_message(
1080 msg = PullRequestModel().merge_status_message(
1080 merge_resp.failure_reason)
1081 merge_resp.failure_reason)
1081 h.flash(msg, category='error')
1082 h.flash(msg, category='error')
1082
1083
1083 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1084 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1084 _ = self.request.translate
1085 _ = self.request.translate
1085 get_default_reviewers_data, validate_default_reviewers = \
1086 get_default_reviewers_data, validate_default_reviewers = \
1086 PullRequestModel().get_reviewer_functions()
1087 PullRequestModel().get_reviewer_functions()
1087
1088
1088 try:
1089 try:
1089 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1090 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1090 except ValueError as e:
1091 except ValueError as e:
1091 log.error('Reviewers Validation: {}'.format(e))
1092 log.error('Reviewers Validation: {}'.format(e))
1092 h.flash(e, category='error')
1093 h.flash(e, category='error')
1093 return
1094 return
1094
1095
1095 PullRequestModel().update_reviewers(
1096 PullRequestModel().update_reviewers(
1096 pull_request, reviewers, self._rhodecode_user)
1097 pull_request, reviewers, self._rhodecode_user)
1097 h.flash(_('Pull request reviewers updated.'), category='success')
1098 h.flash(_('Pull request reviewers updated.'), category='success')
1098 Session().commit()
1099 Session().commit()
1099
1100
1100 @LoginRequired()
1101 @LoginRequired()
1101 @NotAnonymous()
1102 @NotAnonymous()
1102 @HasRepoPermissionAnyDecorator(
1103 @HasRepoPermissionAnyDecorator(
1103 'repository.read', 'repository.write', 'repository.admin')
1104 'repository.read', 'repository.write', 'repository.admin')
1104 @CSRFRequired()
1105 @CSRFRequired()
1105 @view_config(
1106 @view_config(
1106 route_name='pullrequest_delete', request_method='POST',
1107 route_name='pullrequest_delete', request_method='POST',
1107 renderer='json_ext')
1108 renderer='json_ext')
1108 def pull_request_delete(self):
1109 def pull_request_delete(self):
1109 _ = self.request.translate
1110 _ = self.request.translate
1110
1111
1111 pull_request = PullRequest.get_or_404(
1112 pull_request = PullRequest.get_or_404(
1112 self.request.matchdict['pull_request_id'])
1113 self.request.matchdict['pull_request_id'])
1113 self.load_default_context()
1114 self.load_default_context()
1114
1115
1115 pr_closed = pull_request.is_closed()
1116 pr_closed = pull_request.is_closed()
1116 allowed_to_delete = PullRequestModel().check_user_delete(
1117 allowed_to_delete = PullRequestModel().check_user_delete(
1117 pull_request, self._rhodecode_user) and not pr_closed
1118 pull_request, self._rhodecode_user) and not pr_closed
1118
1119
1119 # only owner can delete it !
1120 # only owner can delete it !
1120 if allowed_to_delete:
1121 if allowed_to_delete:
1121 PullRequestModel().delete(pull_request, self._rhodecode_user)
1122 PullRequestModel().delete(pull_request, self._rhodecode_user)
1122 Session().commit()
1123 Session().commit()
1123 h.flash(_('Successfully deleted pull request'),
1124 h.flash(_('Successfully deleted pull request'),
1124 category='success')
1125 category='success')
1125 raise HTTPFound(h.route_path('pullrequest_show_all',
1126 raise HTTPFound(h.route_path('pullrequest_show_all',
1126 repo_name=self.db_repo_name))
1127 repo_name=self.db_repo_name))
1127
1128
1128 log.warning('user %s tried to delete pull request without access',
1129 log.warning('user %s tried to delete pull request without access',
1129 self._rhodecode_user)
1130 self._rhodecode_user)
1130 raise HTTPNotFound()
1131 raise HTTPNotFound()
1131
1132
1132 @LoginRequired()
1133 @LoginRequired()
1133 @NotAnonymous()
1134 @NotAnonymous()
1134 @HasRepoPermissionAnyDecorator(
1135 @HasRepoPermissionAnyDecorator(
1135 'repository.read', 'repository.write', 'repository.admin')
1136 'repository.read', 'repository.write', 'repository.admin')
1136 @CSRFRequired()
1137 @CSRFRequired()
1137 @view_config(
1138 @view_config(
1138 route_name='pullrequest_comment_create', request_method='POST',
1139 route_name='pullrequest_comment_create', request_method='POST',
1139 renderer='json_ext')
1140 renderer='json_ext')
1140 def pull_request_comment_create(self):
1141 def pull_request_comment_create(self):
1141 _ = self.request.translate
1142 _ = self.request.translate
1142
1143
1143 pull_request = PullRequest.get_or_404(
1144 pull_request = PullRequest.get_or_404(
1144 self.request.matchdict['pull_request_id'])
1145 self.request.matchdict['pull_request_id'])
1145 pull_request_id = pull_request.pull_request_id
1146 pull_request_id = pull_request.pull_request_id
1146
1147
1147 if pull_request.is_closed():
1148 if pull_request.is_closed():
1148 log.debug('comment: forbidden because pull request is closed')
1149 log.debug('comment: forbidden because pull request is closed')
1149 raise HTTPForbidden()
1150 raise HTTPForbidden()
1150
1151
1151 allowed_to_comment = PullRequestModel().check_user_comment(
1152 allowed_to_comment = PullRequestModel().check_user_comment(
1152 pull_request, self._rhodecode_user)
1153 pull_request, self._rhodecode_user)
1153 if not allowed_to_comment:
1154 if not allowed_to_comment:
1154 log.debug(
1155 log.debug(
1155 'comment: forbidden because pull request is from forbidden repo')
1156 'comment: forbidden because pull request is from forbidden repo')
1156 raise HTTPForbidden()
1157 raise HTTPForbidden()
1157
1158
1158 c = self.load_default_context()
1159 c = self.load_default_context()
1159
1160
1160 status = self.request.POST.get('changeset_status', None)
1161 status = self.request.POST.get('changeset_status', None)
1161 text = self.request.POST.get('text')
1162 text = self.request.POST.get('text')
1162 comment_type = self.request.POST.get('comment_type')
1163 comment_type = self.request.POST.get('comment_type')
1163 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1164 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1164 close_pull_request = self.request.POST.get('close_pull_request')
1165 close_pull_request = self.request.POST.get('close_pull_request')
1165
1166
1166 # the logic here should work like following, if we submit close
1167 # the logic here should work like following, if we submit close
1167 # pr comment, use `close_pull_request_with_comment` function
1168 # pr comment, use `close_pull_request_with_comment` function
1168 # else handle regular comment logic
1169 # else handle regular comment logic
1169
1170
1170 if close_pull_request:
1171 if close_pull_request:
1171 # only owner or admin or person with write permissions
1172 # only owner or admin or person with write permissions
1172 allowed_to_close = PullRequestModel().check_user_update(
1173 allowed_to_close = PullRequestModel().check_user_update(
1173 pull_request, self._rhodecode_user)
1174 pull_request, self._rhodecode_user)
1174 if not allowed_to_close:
1175 if not allowed_to_close:
1175 log.debug('comment: forbidden because not allowed to close '
1176 log.debug('comment: forbidden because not allowed to close '
1176 'pull request %s', pull_request_id)
1177 'pull request %s', pull_request_id)
1177 raise HTTPForbidden()
1178 raise HTTPForbidden()
1178 comment, status = PullRequestModel().close_pull_request_with_comment(
1179 comment, status = PullRequestModel().close_pull_request_with_comment(
1179 pull_request, self._rhodecode_user, self.db_repo, message=text)
1180 pull_request, self._rhodecode_user, self.db_repo, message=text)
1180 Session().flush()
1181 Session().flush()
1181 events.trigger(
1182 events.trigger(
1182 events.PullRequestCommentEvent(pull_request, comment))
1183 events.PullRequestCommentEvent(pull_request, comment))
1183
1184
1184 else:
1185 else:
1185 # regular comment case, could be inline, or one with status.
1186 # regular comment case, could be inline, or one with status.
1186 # for that one we check also permissions
1187 # for that one we check also permissions
1187
1188
1188 allowed_to_change_status = PullRequestModel().check_user_change_status(
1189 allowed_to_change_status = PullRequestModel().check_user_change_status(
1189 pull_request, self._rhodecode_user)
1190 pull_request, self._rhodecode_user)
1190
1191
1191 if status and allowed_to_change_status:
1192 if status and allowed_to_change_status:
1192 message = (_('Status change %(transition_icon)s %(status)s')
1193 message = (_('Status change %(transition_icon)s %(status)s')
1193 % {'transition_icon': '>',
1194 % {'transition_icon': '>',
1194 'status': ChangesetStatus.get_status_lbl(status)})
1195 'status': ChangesetStatus.get_status_lbl(status)})
1195 text = text or message
1196 text = text or message
1196
1197
1197 comment = CommentsModel().create(
1198 comment = CommentsModel().create(
1198 text=text,
1199 text=text,
1199 repo=self.db_repo.repo_id,
1200 repo=self.db_repo.repo_id,
1200 user=self._rhodecode_user.user_id,
1201 user=self._rhodecode_user.user_id,
1201 pull_request=pull_request,
1202 pull_request=pull_request,
1202 f_path=self.request.POST.get('f_path'),
1203 f_path=self.request.POST.get('f_path'),
1203 line_no=self.request.POST.get('line'),
1204 line_no=self.request.POST.get('line'),
1204 status_change=(ChangesetStatus.get_status_lbl(status)
1205 status_change=(ChangesetStatus.get_status_lbl(status)
1205 if status and allowed_to_change_status else None),
1206 if status and allowed_to_change_status else None),
1206 status_change_type=(status
1207 status_change_type=(status
1207 if status and allowed_to_change_status else None),
1208 if status and allowed_to_change_status else None),
1208 comment_type=comment_type,
1209 comment_type=comment_type,
1209 resolves_comment_id=resolves_comment_id,
1210 resolves_comment_id=resolves_comment_id,
1210 auth_user=self._rhodecode_user
1211 auth_user=self._rhodecode_user
1211 )
1212 )
1212
1213
1213 if allowed_to_change_status:
1214 if allowed_to_change_status:
1214 # calculate old status before we change it
1215 # calculate old status before we change it
1215 old_calculated_status = pull_request.calculated_review_status()
1216 old_calculated_status = pull_request.calculated_review_status()
1216
1217
1217 # get status if set !
1218 # get status if set !
1218 if status:
1219 if status:
1219 ChangesetStatusModel().set_status(
1220 ChangesetStatusModel().set_status(
1220 self.db_repo.repo_id,
1221 self.db_repo.repo_id,
1221 status,
1222 status,
1222 self._rhodecode_user.user_id,
1223 self._rhodecode_user.user_id,
1223 comment,
1224 comment,
1224 pull_request=pull_request
1225 pull_request=pull_request
1225 )
1226 )
1226
1227
1227 Session().flush()
1228 Session().flush()
1228 # this is somehow required to get access to some relationship
1229 # this is somehow required to get access to some relationship
1229 # loaded on comment
1230 # loaded on comment
1230 Session().refresh(comment)
1231 Session().refresh(comment)
1231
1232
1232 events.trigger(
1233 events.trigger(
1233 events.PullRequestCommentEvent(pull_request, comment))
1234 events.PullRequestCommentEvent(pull_request, comment))
1234
1235
1235 # we now calculate the status of pull request, and based on that
1236 # we now calculate the status of pull request, and based on that
1236 # calculation we set the commits status
1237 # calculation we set the commits status
1237 calculated_status = pull_request.calculated_review_status()
1238 calculated_status = pull_request.calculated_review_status()
1238 if old_calculated_status != calculated_status:
1239 if old_calculated_status != calculated_status:
1239 PullRequestModel()._trigger_pull_request_hook(
1240 PullRequestModel()._trigger_pull_request_hook(
1240 pull_request, self._rhodecode_user, 'review_status_change')
1241 pull_request, self._rhodecode_user, 'review_status_change')
1241
1242
1242 Session().commit()
1243 Session().commit()
1243
1244
1244 data = {
1245 data = {
1245 'target_id': h.safeid(h.safe_unicode(
1246 'target_id': h.safeid(h.safe_unicode(
1246 self.request.POST.get('f_path'))),
1247 self.request.POST.get('f_path'))),
1247 }
1248 }
1248 if comment:
1249 if comment:
1249 c.co = comment
1250 c.co = comment
1250 rendered_comment = render(
1251 rendered_comment = render(
1251 'rhodecode:templates/changeset/changeset_comment_block.mako',
1252 'rhodecode:templates/changeset/changeset_comment_block.mako',
1252 self._get_template_context(c), self.request)
1253 self._get_template_context(c), self.request)
1253
1254
1254 data.update(comment.get_dict())
1255 data.update(comment.get_dict())
1255 data.update({'rendered_text': rendered_comment})
1256 data.update({'rendered_text': rendered_comment})
1256
1257
1257 return data
1258 return data
1258
1259
1259 @LoginRequired()
1260 @LoginRequired()
1260 @NotAnonymous()
1261 @NotAnonymous()
1261 @HasRepoPermissionAnyDecorator(
1262 @HasRepoPermissionAnyDecorator(
1262 'repository.read', 'repository.write', 'repository.admin')
1263 'repository.read', 'repository.write', 'repository.admin')
1263 @CSRFRequired()
1264 @CSRFRequired()
1264 @view_config(
1265 @view_config(
1265 route_name='pullrequest_comment_delete', request_method='POST',
1266 route_name='pullrequest_comment_delete', request_method='POST',
1266 renderer='json_ext')
1267 renderer='json_ext')
1267 def pull_request_comment_delete(self):
1268 def pull_request_comment_delete(self):
1268 pull_request = PullRequest.get_or_404(
1269 pull_request = PullRequest.get_or_404(
1269 self.request.matchdict['pull_request_id'])
1270 self.request.matchdict['pull_request_id'])
1270
1271
1271 comment = ChangesetComment.get_or_404(
1272 comment = ChangesetComment.get_or_404(
1272 self.request.matchdict['comment_id'])
1273 self.request.matchdict['comment_id'])
1273 comment_id = comment.comment_id
1274 comment_id = comment.comment_id
1274
1275
1275 if pull_request.is_closed():
1276 if pull_request.is_closed():
1276 log.debug('comment: forbidden because pull request is closed')
1277 log.debug('comment: forbidden because pull request is closed')
1277 raise HTTPForbidden()
1278 raise HTTPForbidden()
1278
1279
1279 if not comment:
1280 if not comment:
1280 log.debug('Comment with id:%s not found, skipping', comment_id)
1281 log.debug('Comment with id:%s not found, skipping', comment_id)
1281 # comment already deleted in another call probably
1282 # comment already deleted in another call probably
1282 return True
1283 return True
1283
1284
1284 if comment.pull_request.is_closed():
1285 if comment.pull_request.is_closed():
1285 # don't allow deleting comments on closed pull request
1286 # don't allow deleting comments on closed pull request
1286 raise HTTPForbidden()
1287 raise HTTPForbidden()
1287
1288
1288 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1289 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1289 super_admin = h.HasPermissionAny('hg.admin')()
1290 super_admin = h.HasPermissionAny('hg.admin')()
1290 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1291 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1291 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1292 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1292 comment_repo_admin = is_repo_admin and is_repo_comment
1293 comment_repo_admin = is_repo_admin and is_repo_comment
1293
1294
1294 if super_admin or comment_owner or comment_repo_admin:
1295 if super_admin or comment_owner or comment_repo_admin:
1295 old_calculated_status = comment.pull_request.calculated_review_status()
1296 old_calculated_status = comment.pull_request.calculated_review_status()
1296 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1297 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1297 Session().commit()
1298 Session().commit()
1298 calculated_status = comment.pull_request.calculated_review_status()
1299 calculated_status = comment.pull_request.calculated_review_status()
1299 if old_calculated_status != calculated_status:
1300 if old_calculated_status != calculated_status:
1300 PullRequestModel()._trigger_pull_request_hook(
1301 PullRequestModel()._trigger_pull_request_hook(
1301 comment.pull_request, self._rhodecode_user, 'review_status_change')
1302 comment.pull_request, self._rhodecode_user, 'review_status_change')
1302 return True
1303 return True
1303 else:
1304 else:
1304 log.warning('No permissions for user %s to delete comment_id: %s',
1305 log.warning('No permissions for user %s to delete comment_id: %s',
1305 self._rhodecode_db_user, comment_id)
1306 self._rhodecode_db_user, comment_id)
1306 raise HTTPNotFound()
1307 raise HTTPNotFound()
@@ -1,673 +1,674 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 It's implemented with basic auth function
23 It's implemented with basic auth function
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import importlib
29 import importlib
30 from functools import wraps
30 from functools import wraps
31 from StringIO import StringIO
31 from StringIO import StringIO
32 from lxml import etree
32 from lxml import etree
33
33
34 import time
34 import time
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36
36
37 from pyramid.httpexceptions import (
37 from pyramid.httpexceptions import (
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 from zope.cachedescriptors.property import Lazy as LazyProperty
39 from zope.cachedescriptors.property import Lazy as LazyProperty
40
40
41 import rhodecode
41 import rhodecode
42 from rhodecode.authentication.base import (
42 from rhodecode.authentication.base import (
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
44 from rhodecode.lib import caches
44 from rhodecode.lib import caches
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
46 from rhodecode.lib.base import (
46 from rhodecode.lib.base import (
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 from rhodecode.lib.middleware import appenlight
50 from rhodecode.lib.middleware import appenlight
51 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.middleware.utils import scm_app_http
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 from rhodecode.lib.vcs.backends import base
55 from rhodecode.lib.vcs.backends import base
56
56
57 from rhodecode.model import meta
57 from rhodecode.model import meta
58 from rhodecode.model.db import User, Repository, PullRequest
58 from rhodecode.model.db import User, Repository, PullRequest
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.pull_request import PullRequestModel
60 from rhodecode.model.pull_request import PullRequestModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 def extract_svn_txn_id(acl_repo_name, data):
66 def extract_svn_txn_id(acl_repo_name, data):
67 """
67 """
68 Helper method for extraction of svn txn_id from submited XML data during
68 Helper method for extraction of svn txn_id from submited XML data during
69 POST operations
69 POST operations
70 """
70 """
71 try:
71 try:
72 root = etree.fromstring(data)
72 root = etree.fromstring(data)
73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 for el in root:
74 for el in root:
75 if el.tag == '{DAV:}source':
75 if el.tag == '{DAV:}source':
76 for sub_el in el:
76 for sub_el in el:
77 if sub_el.tag == '{DAV:}href':
77 if sub_el.tag == '{DAV:}href':
78 match = pat.search(sub_el.text)
78 match = pat.search(sub_el.text)
79 if match:
79 if match:
80 svn_tx_id = match.groupdict()['txn_id']
80 svn_tx_id = match.groupdict()['txn_id']
81 txn_id = caches.compute_key_from_params(
81 txn_id = caches.compute_key_from_params(
82 acl_repo_name, svn_tx_id)
82 acl_repo_name, svn_tx_id)
83 return txn_id
83 return txn_id
84 except Exception:
84 except Exception:
85 log.exception('Failed to extract txn_id')
85 log.exception('Failed to extract txn_id')
86
86
87
87
88 def initialize_generator(factory):
88 def initialize_generator(factory):
89 """
89 """
90 Initializes the returned generator by draining its first element.
90 Initializes the returned generator by draining its first element.
91
91
92 This can be used to give a generator an initializer, which is the code
92 This can be used to give a generator an initializer, which is the code
93 up to the first yield statement. This decorator enforces that the first
93 up to the first yield statement. This decorator enforces that the first
94 produced element has the value ``"__init__"`` to make its special
94 produced element has the value ``"__init__"`` to make its special
95 purpose very explicit in the using code.
95 purpose very explicit in the using code.
96 """
96 """
97
97
98 @wraps(factory)
98 @wraps(factory)
99 def wrapper(*args, **kwargs):
99 def wrapper(*args, **kwargs):
100 gen = factory(*args, **kwargs)
100 gen = factory(*args, **kwargs)
101 try:
101 try:
102 init = gen.next()
102 init = gen.next()
103 except StopIteration:
103 except StopIteration:
104 raise ValueError('Generator must yield at least one element.')
104 raise ValueError('Generator must yield at least one element.')
105 if init != "__init__":
105 if init != "__init__":
106 raise ValueError('First yielded element must be "__init__".')
106 raise ValueError('First yielded element must be "__init__".')
107 return gen
107 return gen
108 return wrapper
108 return wrapper
109
109
110
110
111 class SimpleVCS(object):
111 class SimpleVCS(object):
112 """Common functionality for SCM HTTP handlers."""
112 """Common functionality for SCM HTTP handlers."""
113
113
114 SCM = 'unknown'
114 SCM = 'unknown'
115
115
116 acl_repo_name = None
116 acl_repo_name = None
117 url_repo_name = None
117 url_repo_name = None
118 vcs_repo_name = None
118 vcs_repo_name = None
119 rc_extras = {}
119 rc_extras = {}
120
120
121 # We have to handle requests to shadow repositories different than requests
121 # We have to handle requests to shadow repositories different than requests
122 # to normal repositories. Therefore we have to distinguish them. To do this
122 # to normal repositories. Therefore we have to distinguish them. To do this
123 # we use this regex which will match only on URLs pointing to shadow
123 # we use this regex which will match only on URLs pointing to shadow
124 # repositories.
124 # repositories.
125 shadow_repo_re = re.compile(
125 shadow_repo_re = re.compile(
126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
127 '(?P<target>{slug_pat})/' # target repo
127 '(?P<target>{slug_pat})/' # target repo
128 'pull-request/(?P<pr_id>\d+)/' # pull request
128 'pull-request/(?P<pr_id>\d+)/' # pull request
129 'repository$' # shadow repo
129 'repository$' # shadow repo
130 .format(slug_pat=SLUG_RE.pattern))
130 .format(slug_pat=SLUG_RE.pattern))
131
131
132 def __init__(self, config, registry):
132 def __init__(self, config, registry):
133 self.registry = registry
133 self.registry = registry
134 self.config = config
134 self.config = config
135 # re-populated by specialized middleware
135 # re-populated by specialized middleware
136 self.repo_vcs_config = base.Config()
136 self.repo_vcs_config = base.Config()
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
138
138
139 registry.rhodecode_settings = self.rhodecode_settings
139 registry.rhodecode_settings = self.rhodecode_settings
140 # authenticate this VCS request using authfunc
140 # authenticate this VCS request using authfunc
141 auth_ret_code_detection = \
141 auth_ret_code_detection = \
142 str2bool(self.config.get('auth_ret_code_detection', False))
142 str2bool(self.config.get('auth_ret_code_detection', False))
143 self.authenticate = BasicAuth(
143 self.authenticate = BasicAuth(
144 '', authenticate, registry, config.get('auth_ret_code'),
144 '', authenticate, registry, config.get('auth_ret_code'),
145 auth_ret_code_detection)
145 auth_ret_code_detection)
146 self.ip_addr = '0.0.0.0'
146 self.ip_addr = '0.0.0.0'
147
147
148 @LazyProperty
148 @LazyProperty
149 def global_vcs_config(self):
149 def global_vcs_config(self):
150 try:
150 try:
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
152 except Exception:
152 except Exception:
153 return base.Config()
153 return base.Config()
154
154
155 @property
155 @property
156 def base_path(self):
156 def base_path(self):
157 settings_path = self.repo_vcs_config.get(
157 settings_path = self.repo_vcs_config.get(
158 *VcsSettingsModel.PATH_SETTING)
158 *VcsSettingsModel.PATH_SETTING)
159
159
160 if not settings_path:
160 if not settings_path:
161 settings_path = self.global_vcs_config.get(
161 settings_path = self.global_vcs_config.get(
162 *VcsSettingsModel.PATH_SETTING)
162 *VcsSettingsModel.PATH_SETTING)
163
163
164 if not settings_path:
164 if not settings_path:
165 # try, maybe we passed in explicitly as config option
165 # try, maybe we passed in explicitly as config option
166 settings_path = self.config.get('base_path')
166 settings_path = self.config.get('base_path')
167
167
168 if not settings_path:
168 if not settings_path:
169 raise ValueError('FATAL: base_path is empty')
169 raise ValueError('FATAL: base_path is empty')
170 return settings_path
170 return settings_path
171
171
172 def set_repo_names(self, environ):
172 def set_repo_names(self, environ):
173 """
173 """
174 This will populate the attributes acl_repo_name, url_repo_name,
174 This will populate the attributes acl_repo_name, url_repo_name,
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
176 shadow) repositories all names are equal. In case of requests to a
176 shadow) repositories all names are equal. In case of requests to a
177 shadow repository the acl-name points to the target repo of the pull
177 shadow repository the acl-name points to the target repo of the pull
178 request and the vcs-name points to the shadow repo file system path.
178 request and the vcs-name points to the shadow repo file system path.
179 The url-name is always the URL used by the vcs client program.
179 The url-name is always the URL used by the vcs client program.
180
180
181 Example in case of a shadow repo:
181 Example in case of a shadow repo:
182 acl_repo_name = RepoGroup/MyRepo
182 acl_repo_name = RepoGroup/MyRepo
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
185 """
185 """
186 # First we set the repo name from URL for all attributes. This is the
186 # First we set the repo name from URL for all attributes. This is the
187 # default if handling normal (non shadow) repo requests.
187 # default if handling normal (non shadow) repo requests.
188 self.url_repo_name = self._get_repository_name(environ)
188 self.url_repo_name = self._get_repository_name(environ)
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
190 self.is_shadow_repo = False
190 self.is_shadow_repo = False
191
191
192 # Check if this is a request to a shadow repository.
192 # Check if this is a request to a shadow repository.
193 match = self.shadow_repo_re.match(self.url_repo_name)
193 match = self.shadow_repo_re.match(self.url_repo_name)
194 if match:
194 if match:
195 match_dict = match.groupdict()
195 match_dict = match.groupdict()
196
196
197 # Build acl repo name from regex match.
197 # Build acl repo name from regex match.
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
199 groups=match_dict['groups'] or '',
199 groups=match_dict['groups'] or '',
200 target=match_dict['target']))
200 target=match_dict['target']))
201
201
202 # Retrieve pull request instance by ID from regex match.
202 # Retrieve pull request instance by ID from regex match.
203 pull_request = PullRequest.get(match_dict['pr_id'])
203 pull_request = PullRequest.get(match_dict['pr_id'])
204
204
205 # Only proceed if we got a pull request and if acl repo name from
205 # Only proceed if we got a pull request and if acl repo name from
206 # URL equals the target repo name of the pull request.
206 # URL equals the target repo name of the pull request.
207 if pull_request and (acl_repo_name ==
207 if pull_request and \
208 pull_request.target_repo.repo_name):
208 (acl_repo_name == pull_request.target_repo.repo_name):
209 repo_id = pull_request.target_repo.repo_id
209 # Get file system path to shadow repository.
210 # Get file system path to shadow repository.
210 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 target_vcs = pull_request.target_repo.scm_instance()
212 target_vcs = pull_request.target_repo.scm_instance()
212 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 workspace_id)
214 repo_id, workspace_id)
214
215
215 # Store names for later usage.
216 # Store names for later usage.
216 self.vcs_repo_name = vcs_repo_name
217 self.vcs_repo_name = vcs_repo_name
217 self.acl_repo_name = acl_repo_name
218 self.acl_repo_name = acl_repo_name
218 self.is_shadow_repo = True
219 self.is_shadow_repo = True
219
220
220 log.debug('Setting all VCS repository names: %s', {
221 log.debug('Setting all VCS repository names: %s', {
221 'acl_repo_name': self.acl_repo_name,
222 'acl_repo_name': self.acl_repo_name,
222 'url_repo_name': self.url_repo_name,
223 'url_repo_name': self.url_repo_name,
223 'vcs_repo_name': self.vcs_repo_name,
224 'vcs_repo_name': self.vcs_repo_name,
224 })
225 })
225
226
226 @property
227 @property
227 def scm_app(self):
228 def scm_app(self):
228 custom_implementation = self.config['vcs.scm_app_implementation']
229 custom_implementation = self.config['vcs.scm_app_implementation']
229 if custom_implementation == 'http':
230 if custom_implementation == 'http':
230 log.info('Using HTTP implementation of scm app.')
231 log.info('Using HTTP implementation of scm app.')
231 scm_app_impl = scm_app_http
232 scm_app_impl = scm_app_http
232 else:
233 else:
233 log.info('Using custom implementation of scm_app: "{}"'.format(
234 log.info('Using custom implementation of scm_app: "{}"'.format(
234 custom_implementation))
235 custom_implementation))
235 scm_app_impl = importlib.import_module(custom_implementation)
236 scm_app_impl = importlib.import_module(custom_implementation)
236 return scm_app_impl
237 return scm_app_impl
237
238
238 def _get_by_id(self, repo_name):
239 def _get_by_id(self, repo_name):
239 """
240 """
240 Gets a special pattern _<ID> from clone url and tries to replace it
241 Gets a special pattern _<ID> from clone url and tries to replace it
241 with a repository_name for support of _<ID> non changeable urls
242 with a repository_name for support of _<ID> non changeable urls
242 """
243 """
243
244
244 data = repo_name.split('/')
245 data = repo_name.split('/')
245 if len(data) >= 2:
246 if len(data) >= 2:
246 from rhodecode.model.repo import RepoModel
247 from rhodecode.model.repo import RepoModel
247 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 if by_id_match:
249 if by_id_match:
249 data[1] = by_id_match.repo_name
250 data[1] = by_id_match.repo_name
250
251
251 return safe_str('/'.join(data))
252 return safe_str('/'.join(data))
252
253
253 def _invalidate_cache(self, repo_name):
254 def _invalidate_cache(self, repo_name):
254 """
255 """
255 Set's cache for this repository for invalidation on next access
256 Set's cache for this repository for invalidation on next access
256
257
257 :param repo_name: full repo name, also a cache key
258 :param repo_name: full repo name, also a cache key
258 """
259 """
259 ScmModel().mark_for_invalidation(repo_name)
260 ScmModel().mark_for_invalidation(repo_name)
260
261
261 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 db_repo = Repository.get_by_repo_name(repo_name)
263 db_repo = Repository.get_by_repo_name(repo_name)
263 if not db_repo:
264 if not db_repo:
264 log.debug('Repository `%s` not found inside the database.',
265 log.debug('Repository `%s` not found inside the database.',
265 repo_name)
266 repo_name)
266 return False
267 return False
267
268
268 if db_repo.repo_type != scm_type:
269 if db_repo.repo_type != scm_type:
269 log.warning(
270 log.warning(
270 'Repository `%s` have incorrect scm_type, expected %s got %s',
271 'Repository `%s` have incorrect scm_type, expected %s got %s',
271 repo_name, db_repo.repo_type, scm_type)
272 repo_name, db_repo.repo_type, scm_type)
272 return False
273 return False
273
274
274 config = db_repo._config
275 config = db_repo._config
275 config.set('extensions', 'largefiles', '')
276 config.set('extensions', 'largefiles', '')
276 return is_valid_repo(
277 return is_valid_repo(
277 repo_name, base_path,
278 repo_name, base_path,
278 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279
280
280 def valid_and_active_user(self, user):
281 def valid_and_active_user(self, user):
281 """
282 """
282 Checks if that user is not empty, and if it's actually object it checks
283 Checks if that user is not empty, and if it's actually object it checks
283 if he's active.
284 if he's active.
284
285
285 :param user: user object or None
286 :param user: user object or None
286 :return: boolean
287 :return: boolean
287 """
288 """
288 if user is None:
289 if user is None:
289 return False
290 return False
290
291
291 elif user.active:
292 elif user.active:
292 return True
293 return True
293
294
294 return False
295 return False
295
296
296 @property
297 @property
297 def is_shadow_repo_dir(self):
298 def is_shadow_repo_dir(self):
298 return os.path.isdir(self.vcs_repo_name)
299 return os.path.isdir(self.vcs_repo_name)
299
300
300 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 """
303 """
303 Checks permissions using action (push/pull) user and repository
304 Checks permissions using action (push/pull) user and repository
304 name. If plugin_cache and ttl is set it will use the plugin which
305 name. If plugin_cache and ttl is set it will use the plugin which
305 authenticated the user to store the cached permissions result for N
306 authenticated the user to store the cached permissions result for N
306 amount of seconds as in cache_ttl
307 amount of seconds as in cache_ttl
307
308
308 :param action: push or pull action
309 :param action: push or pull action
309 :param user: user instance
310 :param user: user instance
310 :param repo_name: repository name
311 :param repo_name: repository name
311 """
312 """
312
313
313 # get instance of cache manager configured for a namespace
314 # get instance of cache manager configured for a namespace
314 cache_manager = get_perms_cache_manager(
315 cache_manager = get_perms_cache_manager(
315 custom_ttl=cache_ttl, suffix=user.user_id)
316 custom_ttl=cache_ttl, suffix=user.user_id)
316 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
317 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
317 plugin_id, plugin_cache_active, cache_ttl)
318 plugin_id, plugin_cache_active, cache_ttl)
318
319
319 # for environ based password can be empty, but then the validation is
320 # for environ based password can be empty, but then the validation is
320 # on the server that fills in the env data needed for authentication
321 # on the server that fills in the env data needed for authentication
321 _perm_calc_hash = caches.compute_key_from_params(
322 _perm_calc_hash = caches.compute_key_from_params(
322 plugin_id, action, user.user_id, repo_name, ip_addr)
323 plugin_id, action, user.user_id, repo_name, ip_addr)
323
324
324 # _authenticate is a wrapper for .auth() method of plugin.
325 # _authenticate is a wrapper for .auth() method of plugin.
325 # it checks if .auth() sends proper data.
326 # it checks if .auth() sends proper data.
326 # For RhodeCodeExternalAuthPlugin it also maps users to
327 # For RhodeCodeExternalAuthPlugin it also maps users to
327 # Database and maps the attributes returned from .auth()
328 # Database and maps the attributes returned from .auth()
328 # to RhodeCode database. If this function returns data
329 # to RhodeCode database. If this function returns data
329 # then auth is correct.
330 # then auth is correct.
330 start = time.time()
331 start = time.time()
331 log.debug('Running plugin `%s` permissions check', plugin_id)
332 log.debug('Running plugin `%s` permissions check', plugin_id)
332
333
333 def perm_func():
334 def perm_func():
334 """
335 """
335 This function is used internally in Cache of Beaker to calculate
336 This function is used internally in Cache of Beaker to calculate
336 Results
337 Results
337 """
338 """
338 log.debug('auth: calculating permission access now...')
339 log.debug('auth: calculating permission access now...')
339 # check IP
340 # check IP
340 inherit = user.inherit_default_permissions
341 inherit = user.inherit_default_permissions
341 ip_allowed = AuthUser.check_ip_allowed(
342 ip_allowed = AuthUser.check_ip_allowed(
342 user.user_id, ip_addr, inherit_from_default=inherit)
343 user.user_id, ip_addr, inherit_from_default=inherit)
343 if ip_allowed:
344 if ip_allowed:
344 log.info('Access for IP:%s allowed', ip_addr)
345 log.info('Access for IP:%s allowed', ip_addr)
345 else:
346 else:
346 return False
347 return False
347
348
348 if action == 'push':
349 if action == 'push':
349 perms = ('repository.write', 'repository.admin')
350 perms = ('repository.write', 'repository.admin')
350 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
351 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
351 return False
352 return False
352
353
353 else:
354 else:
354 # any other action need at least read permission
355 # any other action need at least read permission
355 perms = (
356 perms = (
356 'repository.read', 'repository.write', 'repository.admin')
357 'repository.read', 'repository.write', 'repository.admin')
357 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
358 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
358 return False
359 return False
359
360
360 return True
361 return True
361
362
362 if plugin_cache_active:
363 if plugin_cache_active:
363 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
364 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
364 perm_result = cache_manager.get(
365 perm_result = cache_manager.get(
365 _perm_calc_hash, createfunc=perm_func)
366 _perm_calc_hash, createfunc=perm_func)
366 else:
367 else:
367 perm_result = perm_func()
368 perm_result = perm_func()
368
369
369 auth_time = time.time() - start
370 auth_time = time.time() - start
370 log.debug('Permissions for plugin `%s` completed in %.3fs, '
371 log.debug('Permissions for plugin `%s` completed in %.3fs, '
371 'expiration time of fetched cache %.1fs.',
372 'expiration time of fetched cache %.1fs.',
372 plugin_id, auth_time, cache_ttl)
373 plugin_id, auth_time, cache_ttl)
373
374
374 return perm_result
375 return perm_result
375
376
376 def _check_ssl(self, environ, start_response):
377 def _check_ssl(self, environ, start_response):
377 """
378 """
378 Checks the SSL check flag and returns False if SSL is not present
379 Checks the SSL check flag and returns False if SSL is not present
379 and required True otherwise
380 and required True otherwise
380 """
381 """
381 org_proto = environ['wsgi._org_proto']
382 org_proto = environ['wsgi._org_proto']
382 # check if we have SSL required ! if not it's a bad request !
383 # check if we have SSL required ! if not it's a bad request !
383 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 if require_ssl and org_proto == 'http':
385 if require_ssl and org_proto == 'http':
385 log.debug(
386 log.debug(
386 'Bad request: detected protocol is `%s` and '
387 'Bad request: detected protocol is `%s` and '
387 'SSL/HTTPS is required.', org_proto)
388 'SSL/HTTPS is required.', org_proto)
388 return False
389 return False
389 return True
390 return True
390
391
391 def _get_default_cache_ttl(self):
392 def _get_default_cache_ttl(self):
392 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
393 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
393 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
394 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
394 plugin_settings = plugin.get_settings()
395 plugin_settings = plugin.get_settings()
395 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
396 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
396 plugin_settings) or (False, 0)
397 plugin_settings) or (False, 0)
397 return plugin_cache_active, cache_ttl
398 return plugin_cache_active, cache_ttl
398
399
399 def __call__(self, environ, start_response):
400 def __call__(self, environ, start_response):
400 try:
401 try:
401 return self._handle_request(environ, start_response)
402 return self._handle_request(environ, start_response)
402 except Exception:
403 except Exception:
403 log.exception("Exception while handling request")
404 log.exception("Exception while handling request")
404 appenlight.track_exception(environ)
405 appenlight.track_exception(environ)
405 return HTTPInternalServerError()(environ, start_response)
406 return HTTPInternalServerError()(environ, start_response)
406 finally:
407 finally:
407 meta.Session.remove()
408 meta.Session.remove()
408
409
409 def _handle_request(self, environ, start_response):
410 def _handle_request(self, environ, start_response):
410
411
411 if not self._check_ssl(environ, start_response):
412 if not self._check_ssl(environ, start_response):
412 reason = ('SSL required, while RhodeCode was unable '
413 reason = ('SSL required, while RhodeCode was unable '
413 'to detect this as SSL request')
414 'to detect this as SSL request')
414 log.debug('User not allowed to proceed, %s', reason)
415 log.debug('User not allowed to proceed, %s', reason)
415 return HTTPNotAcceptable(reason)(environ, start_response)
416 return HTTPNotAcceptable(reason)(environ, start_response)
416
417
417 if not self.url_repo_name:
418 if not self.url_repo_name:
418 log.warning('Repository name is empty: %s', self.url_repo_name)
419 log.warning('Repository name is empty: %s', self.url_repo_name)
419 # failed to get repo name, we fail now
420 # failed to get repo name, we fail now
420 return HTTPNotFound()(environ, start_response)
421 return HTTPNotFound()(environ, start_response)
421 log.debug('Extracted repo name is %s', self.url_repo_name)
422 log.debug('Extracted repo name is %s', self.url_repo_name)
422
423
423 ip_addr = get_ip_addr(environ)
424 ip_addr = get_ip_addr(environ)
424 user_agent = get_user_agent(environ)
425 user_agent = get_user_agent(environ)
425 username = None
426 username = None
426
427
427 # skip passing error to error controller
428 # skip passing error to error controller
428 environ['pylons.status_code_redirect'] = True
429 environ['pylons.status_code_redirect'] = True
429
430
430 # ======================================================================
431 # ======================================================================
431 # GET ACTION PULL or PUSH
432 # GET ACTION PULL or PUSH
432 # ======================================================================
433 # ======================================================================
433 action = self._get_action(environ)
434 action = self._get_action(environ)
434
435
435 # ======================================================================
436 # ======================================================================
436 # Check if this is a request to a shadow repository of a pull request.
437 # Check if this is a request to a shadow repository of a pull request.
437 # In this case only pull action is allowed.
438 # In this case only pull action is allowed.
438 # ======================================================================
439 # ======================================================================
439 if self.is_shadow_repo and action != 'pull':
440 if self.is_shadow_repo and action != 'pull':
440 reason = 'Only pull action is allowed for shadow repositories.'
441 reason = 'Only pull action is allowed for shadow repositories.'
441 log.debug('User not allowed to proceed, %s', reason)
442 log.debug('User not allowed to proceed, %s', reason)
442 return HTTPNotAcceptable(reason)(environ, start_response)
443 return HTTPNotAcceptable(reason)(environ, start_response)
443
444
444 # Check if the shadow repo actually exists, in case someone refers
445 # Check if the shadow repo actually exists, in case someone refers
445 # to it, and it has been deleted because of successful merge.
446 # to it, and it has been deleted because of successful merge.
446 if self.is_shadow_repo and not self.is_shadow_repo_dir:
447 if self.is_shadow_repo and not self.is_shadow_repo_dir:
447 log.debug(
448 log.debug(
448 'Shadow repo detected, and shadow repo dir `%s` is missing',
449 'Shadow repo detected, and shadow repo dir `%s` is missing',
449 self.is_shadow_repo_dir)
450 self.is_shadow_repo_dir)
450 return HTTPNotFound()(environ, start_response)
451 return HTTPNotFound()(environ, start_response)
451
452
452 # ======================================================================
453 # ======================================================================
453 # CHECK ANONYMOUS PERMISSION
454 # CHECK ANONYMOUS PERMISSION
454 # ======================================================================
455 # ======================================================================
455 if action in ['pull', 'push']:
456 if action in ['pull', 'push']:
456 anonymous_user = User.get_default_user()
457 anonymous_user = User.get_default_user()
457 username = anonymous_user.username
458 username = anonymous_user.username
458 if anonymous_user.active:
459 if anonymous_user.active:
459 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
460 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
460 # ONLY check permissions if the user is activated
461 # ONLY check permissions if the user is activated
461 anonymous_perm = self._check_permission(
462 anonymous_perm = self._check_permission(
462 action, anonymous_user, self.acl_repo_name, ip_addr,
463 action, anonymous_user, self.acl_repo_name, ip_addr,
463 plugin_id='anonymous_access',
464 plugin_id='anonymous_access',
464 plugin_cache_active=plugin_cache_active,
465 plugin_cache_active=plugin_cache_active,
465 cache_ttl=cache_ttl,
466 cache_ttl=cache_ttl,
466 )
467 )
467 else:
468 else:
468 anonymous_perm = False
469 anonymous_perm = False
469
470
470 if not anonymous_user.active or not anonymous_perm:
471 if not anonymous_user.active or not anonymous_perm:
471 if not anonymous_user.active:
472 if not anonymous_user.active:
472 log.debug('Anonymous access is disabled, running '
473 log.debug('Anonymous access is disabled, running '
473 'authentication')
474 'authentication')
474
475
475 if not anonymous_perm:
476 if not anonymous_perm:
476 log.debug('Not enough credentials to access this '
477 log.debug('Not enough credentials to access this '
477 'repository as anonymous user')
478 'repository as anonymous user')
478
479
479 username = None
480 username = None
480 # ==============================================================
481 # ==============================================================
481 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 # ==============================================================
484 # ==============================================================
484
485
485 # try to auth based on environ, container auth methods
486 # try to auth based on environ, container auth methods
486 log.debug('Running PRE-AUTH for container based authentication')
487 log.debug('Running PRE-AUTH for container based authentication')
487 pre_auth = authenticate(
488 pre_auth = authenticate(
488 '', '', environ, VCS_TYPE, registry=self.registry,
489 '', '', environ, VCS_TYPE, registry=self.registry,
489 acl_repo_name=self.acl_repo_name)
490 acl_repo_name=self.acl_repo_name)
490 if pre_auth and pre_auth.get('username'):
491 if pre_auth and pre_auth.get('username'):
491 username = pre_auth['username']
492 username = pre_auth['username']
492 log.debug('PRE-AUTH got %s as username', username)
493 log.debug('PRE-AUTH got %s as username', username)
493 if pre_auth:
494 if pre_auth:
494 log.debug('PRE-AUTH successful from %s',
495 log.debug('PRE-AUTH successful from %s',
495 pre_auth.get('auth_data', {}).get('_plugin'))
496 pre_auth.get('auth_data', {}).get('_plugin'))
496
497
497 # If not authenticated by the container, running basic auth
498 # If not authenticated by the container, running basic auth
498 # before inject the calling repo_name for special scope checks
499 # before inject the calling repo_name for special scope checks
499 self.authenticate.acl_repo_name = self.acl_repo_name
500 self.authenticate.acl_repo_name = self.acl_repo_name
500
501
501 plugin_cache_active, cache_ttl = False, 0
502 plugin_cache_active, cache_ttl = False, 0
502 plugin = None
503 plugin = None
503 if not username:
504 if not username:
504 self.authenticate.realm = self.authenticate.get_rc_realm()
505 self.authenticate.realm = self.authenticate.get_rc_realm()
505
506
506 try:
507 try:
507 auth_result = self.authenticate(environ)
508 auth_result = self.authenticate(environ)
508 except (UserCreationError, NotAllowedToCreateUserError) as e:
509 except (UserCreationError, NotAllowedToCreateUserError) as e:
509 log.error(e)
510 log.error(e)
510 reason = safe_str(e)
511 reason = safe_str(e)
511 return HTTPNotAcceptable(reason)(environ, start_response)
512 return HTTPNotAcceptable(reason)(environ, start_response)
512
513
513 if isinstance(auth_result, dict):
514 if isinstance(auth_result, dict):
514 AUTH_TYPE.update(environ, 'basic')
515 AUTH_TYPE.update(environ, 'basic')
515 REMOTE_USER.update(environ, auth_result['username'])
516 REMOTE_USER.update(environ, auth_result['username'])
516 username = auth_result['username']
517 username = auth_result['username']
517 plugin = auth_result.get('auth_data', {}).get('_plugin')
518 plugin = auth_result.get('auth_data', {}).get('_plugin')
518 log.info(
519 log.info(
519 'MAIN-AUTH successful for user `%s` from %s plugin',
520 'MAIN-AUTH successful for user `%s` from %s plugin',
520 username, plugin)
521 username, plugin)
521
522
522 plugin_cache_active, cache_ttl = auth_result.get(
523 plugin_cache_active, cache_ttl = auth_result.get(
523 'auth_data', {}).get('_ttl_cache') or (False, 0)
524 'auth_data', {}).get('_ttl_cache') or (False, 0)
524 else:
525 else:
525 return auth_result.wsgi_application(
526 return auth_result.wsgi_application(
526 environ, start_response)
527 environ, start_response)
527
528
528
529
529 # ==============================================================
530 # ==============================================================
530 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
531 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
531 # ==============================================================
532 # ==============================================================
532 user = User.get_by_username(username)
533 user = User.get_by_username(username)
533 if not self.valid_and_active_user(user):
534 if not self.valid_and_active_user(user):
534 return HTTPForbidden()(environ, start_response)
535 return HTTPForbidden()(environ, start_response)
535 username = user.username
536 username = user.username
536 user.update_lastactivity()
537 user.update_lastactivity()
537 meta.Session().commit()
538 meta.Session().commit()
538
539
539 # check user attributes for password change flag
540 # check user attributes for password change flag
540 user_obj = user
541 user_obj = user
541 if user_obj and user_obj.username != User.DEFAULT_USER and \
542 if user_obj and user_obj.username != User.DEFAULT_USER and \
542 user_obj.user_data.get('force_password_change'):
543 user_obj.user_data.get('force_password_change'):
543 reason = 'password change required'
544 reason = 'password change required'
544 log.debug('User not allowed to authenticate, %s', reason)
545 log.debug('User not allowed to authenticate, %s', reason)
545 return HTTPNotAcceptable(reason)(environ, start_response)
546 return HTTPNotAcceptable(reason)(environ, start_response)
546
547
547 # check permissions for this repository
548 # check permissions for this repository
548 perm = self._check_permission(
549 perm = self._check_permission(
549 action, user, self.acl_repo_name, ip_addr,
550 action, user, self.acl_repo_name, ip_addr,
550 plugin, plugin_cache_active, cache_ttl)
551 plugin, plugin_cache_active, cache_ttl)
551 if not perm:
552 if not perm:
552 return HTTPForbidden()(environ, start_response)
553 return HTTPForbidden()(environ, start_response)
553
554
554 # extras are injected into UI object and later available
555 # extras are injected into UI object and later available
555 # in hooks executed by RhodeCode
556 # in hooks executed by RhodeCode
556 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
557 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
557 extras = vcs_operation_context(
558 extras = vcs_operation_context(
558 environ, repo_name=self.acl_repo_name, username=username,
559 environ, repo_name=self.acl_repo_name, username=username,
559 action=action, scm=self.SCM, check_locking=check_locking,
560 action=action, scm=self.SCM, check_locking=check_locking,
560 is_shadow_repo=self.is_shadow_repo
561 is_shadow_repo=self.is_shadow_repo
561 )
562 )
562
563
563 # ======================================================================
564 # ======================================================================
564 # REQUEST HANDLING
565 # REQUEST HANDLING
565 # ======================================================================
566 # ======================================================================
566 repo_path = os.path.join(
567 repo_path = os.path.join(
567 safe_str(self.base_path), safe_str(self.vcs_repo_name))
568 safe_str(self.base_path), safe_str(self.vcs_repo_name))
568 log.debug('Repository path is %s', repo_path)
569 log.debug('Repository path is %s', repo_path)
569
570
570 fix_PATH()
571 fix_PATH()
571
572
572 log.info(
573 log.info(
573 '%s action on %s repo "%s" by "%s" from %s %s',
574 '%s action on %s repo "%s" by "%s" from %s %s',
574 action, self.SCM, safe_str(self.url_repo_name),
575 action, self.SCM, safe_str(self.url_repo_name),
575 safe_str(username), ip_addr, user_agent)
576 safe_str(username), ip_addr, user_agent)
576
577
577 return self._generate_vcs_response(
578 return self._generate_vcs_response(
578 environ, start_response, repo_path, extras, action)
579 environ, start_response, repo_path, extras, action)
579
580
580 @initialize_generator
581 @initialize_generator
581 def _generate_vcs_response(
582 def _generate_vcs_response(
582 self, environ, start_response, repo_path, extras, action):
583 self, environ, start_response, repo_path, extras, action):
583 """
584 """
584 Returns a generator for the response content.
585 Returns a generator for the response content.
585
586
586 This method is implemented as a generator, so that it can trigger
587 This method is implemented as a generator, so that it can trigger
587 the cache validation after all content sent back to the client. It
588 the cache validation after all content sent back to the client. It
588 also handles the locking exceptions which will be triggered when
589 also handles the locking exceptions which will be triggered when
589 the first chunk is produced by the underlying WSGI application.
590 the first chunk is produced by the underlying WSGI application.
590 """
591 """
591 txn_id = ''
592 txn_id = ''
592 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 # case for SVN, we want to re-use the callback daemon port
594 # case for SVN, we want to re-use the callback daemon port
594 # so we use the txn_id, for this we peek the body, and still save
595 # so we use the txn_id, for this we peek the body, and still save
595 # it as wsgi.input
596 # it as wsgi.input
596 data = environ['wsgi.input'].read()
597 data = environ['wsgi.input'].read()
597 environ['wsgi.input'] = StringIO(data)
598 environ['wsgi.input'] = StringIO(data)
598 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
599 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
599
600
600 callback_daemon, extras = self._prepare_callback_daemon(
601 callback_daemon, extras = self._prepare_callback_daemon(
601 extras, environ, action, txn_id=txn_id)
602 extras, environ, action, txn_id=txn_id)
602 log.debug('HOOKS extras is %s', extras)
603 log.debug('HOOKS extras is %s', extras)
603
604
604 config = self._create_config(extras, self.acl_repo_name)
605 config = self._create_config(extras, self.acl_repo_name)
605 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 with callback_daemon:
607 with callback_daemon:
607 app.rc_extras = extras
608 app.rc_extras = extras
608
609
609 try:
610 try:
610 response = app(environ, start_response)
611 response = app(environ, start_response)
611 finally:
612 finally:
612 # This statement works together with the decorator
613 # This statement works together with the decorator
613 # "initialize_generator" above. The decorator ensures that
614 # "initialize_generator" above. The decorator ensures that
614 # we hit the first yield statement before the generator is
615 # we hit the first yield statement before the generator is
615 # returned back to the WSGI server. This is needed to
616 # returned back to the WSGI server. This is needed to
616 # ensure that the call to "app" above triggers the
617 # ensure that the call to "app" above triggers the
617 # needed callback to "start_response" before the
618 # needed callback to "start_response" before the
618 # generator is actually used.
619 # generator is actually used.
619 yield "__init__"
620 yield "__init__"
620
621
621 # iter content
622 # iter content
622 for chunk in response:
623 for chunk in response:
623 yield chunk
624 yield chunk
624
625
625 try:
626 try:
626 # invalidate cache on push
627 # invalidate cache on push
627 if action == 'push':
628 if action == 'push':
628 self._invalidate_cache(self.url_repo_name)
629 self._invalidate_cache(self.url_repo_name)
629 finally:
630 finally:
630 meta.Session.remove()
631 meta.Session.remove()
631
632
632 def _get_repository_name(self, environ):
633 def _get_repository_name(self, environ):
633 """Get repository name out of the environmnent
634 """Get repository name out of the environmnent
634
635
635 :param environ: WSGI environment
636 :param environ: WSGI environment
636 """
637 """
637 raise NotImplementedError()
638 raise NotImplementedError()
638
639
639 def _get_action(self, environ):
640 def _get_action(self, environ):
640 """Map request commands into a pull or push command.
641 """Map request commands into a pull or push command.
641
642
642 :param environ: WSGI environment
643 :param environ: WSGI environment
643 """
644 """
644 raise NotImplementedError()
645 raise NotImplementedError()
645
646
646 def _create_wsgi_app(self, repo_path, repo_name, config):
647 def _create_wsgi_app(self, repo_path, repo_name, config):
647 """Return the WSGI app that will finally handle the request."""
648 """Return the WSGI app that will finally handle the request."""
648 raise NotImplementedError()
649 raise NotImplementedError()
649
650
650 def _create_config(self, extras, repo_name):
651 def _create_config(self, extras, repo_name):
651 """Create a safe config representation."""
652 """Create a safe config representation."""
652 raise NotImplementedError()
653 raise NotImplementedError()
653
654
654 def _should_use_callback_daemon(self, extras, environ, action):
655 def _should_use_callback_daemon(self, extras, environ, action):
655 return True
656 return True
656
657
657 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 if not self._should_use_callback_daemon(extras, environ, action):
660 if not self._should_use_callback_daemon(extras, environ, action):
660 # disable callback daemon for actions that don't require it
661 # disable callback daemon for actions that don't require it
661 direct_calls = True
662 direct_calls = True
662
663
663 return prepare_callback_daemon(
664 return prepare_callback_daemon(
664 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
665 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
665 use_direct_calls=direct_calls, txn_id=txn_id)
666 use_direct_calls=direct_calls, txn_id=txn_id)
666
667
667
668
668 def _should_check_locking(query_string):
669 def _should_check_locking(query_string):
669 # this is kind of hacky, but due to how mercurial handles client-server
670 # this is kind of hacky, but due to how mercurial handles client-server
670 # server see all operation on commit; bookmarks, phases and
671 # server see all operation on commit; bookmarks, phases and
671 # obsolescence marker in different transaction, we don't want to check
672 # obsolescence marker in different transaction, we don't want to check
672 # locking on those
673 # locking on those
673 return query_string not in ['cmd=listkeys']
674 return query_string not in ['cmd=listkeys']
@@ -1,1730 +1,1746 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34 import shutil
34 import shutil
35
35
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
37
37
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 FILEMODE_DEFAULT = 0100644
53 FILEMODE_DEFAULT = 0100644
54 FILEMODE_EXECUTABLE = 0100755
54 FILEMODE_EXECUTABLE = 0100755
55
55
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 MergeResponse = collections.namedtuple(
57 MergeResponse = collections.namedtuple(
58 'MergeResponse',
58 'MergeResponse',
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
60
61
61
62 class MergeFailureReason(object):
62 class MergeFailureReason(object):
63 """
63 """
64 Enumeration with all the reasons why the server side merge could fail.
64 Enumeration with all the reasons why the server side merge could fail.
65
65
66 DO NOT change the number of the reasons, as they may be stored in the
66 DO NOT change the number of the reasons, as they may be stored in the
67 database.
67 database.
68
68
69 Changing the name of a reason is acceptable and encouraged to deprecate old
69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 reasons.
70 reasons.
71 """
71 """
72
72
73 # Everything went well.
73 # Everything went well.
74 NONE = 0
74 NONE = 0
75
75
76 # An unexpected exception was raised. Check the logs for more details.
76 # An unexpected exception was raised. Check the logs for more details.
77 UNKNOWN = 1
77 UNKNOWN = 1
78
78
79 # The merge was not successful, there are conflicts.
79 # The merge was not successful, there are conflicts.
80 MERGE_FAILED = 2
80 MERGE_FAILED = 2
81
81
82 # The merge succeeded but we could not push it to the target repository.
82 # The merge succeeded but we could not push it to the target repository.
83 PUSH_FAILED = 3
83 PUSH_FAILED = 3
84
84
85 # The specified target is not a head in the target repository.
85 # The specified target is not a head in the target repository.
86 TARGET_IS_NOT_HEAD = 4
86 TARGET_IS_NOT_HEAD = 4
87
87
88 # The source repository contains more branches than the target. Pushing
88 # The source repository contains more branches than the target. Pushing
89 # the merge will create additional branches in the target.
89 # the merge will create additional branches in the target.
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91
91
92 # The target reference has multiple heads. That does not allow to correctly
92 # The target reference has multiple heads. That does not allow to correctly
93 # identify the target location. This could only happen for mercurial
93 # identify the target location. This could only happen for mercurial
94 # branches.
94 # branches.
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96
96
97 # The target repository is locked
97 # The target repository is locked
98 TARGET_IS_LOCKED = 7
98 TARGET_IS_LOCKED = 7
99
99
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # A involved commit could not be found.
101 # A involved commit could not be found.
102 _DEPRECATED_MISSING_COMMIT = 8
102 _DEPRECATED_MISSING_COMMIT = 8
103
103
104 # The target repo reference is missing.
104 # The target repo reference is missing.
105 MISSING_TARGET_REF = 9
105 MISSING_TARGET_REF = 9
106
106
107 # The source repo reference is missing.
107 # The source repo reference is missing.
108 MISSING_SOURCE_REF = 10
108 MISSING_SOURCE_REF = 10
109
109
110 # The merge was not successful, there are conflicts related to sub
110 # The merge was not successful, there are conflicts related to sub
111 # repositories.
111 # repositories.
112 SUBREPO_MERGE_FAILED = 11
112 SUBREPO_MERGE_FAILED = 11
113
113
114
114
115 class UpdateFailureReason(object):
115 class UpdateFailureReason(object):
116 """
116 """
117 Enumeration with all the reasons why the pull request update could fail.
117 Enumeration with all the reasons why the pull request update could fail.
118
118
119 DO NOT change the number of the reasons, as they may be stored in the
119 DO NOT change the number of the reasons, as they may be stored in the
120 database.
120 database.
121
121
122 Changing the name of a reason is acceptable and encouraged to deprecate old
122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 reasons.
123 reasons.
124 """
124 """
125
125
126 # Everything went well.
126 # Everything went well.
127 NONE = 0
127 NONE = 0
128
128
129 # An unexpected exception was raised. Check the logs for more details.
129 # An unexpected exception was raised. Check the logs for more details.
130 UNKNOWN = 1
130 UNKNOWN = 1
131
131
132 # The pull request is up to date.
132 # The pull request is up to date.
133 NO_CHANGE = 2
133 NO_CHANGE = 2
134
134
135 # The pull request has a reference type that is not supported for update.
135 # The pull request has a reference type that is not supported for update.
136 WRONG_REF_TYPE = 3
136 WRONG_REF_TYPE = 3
137
137
138 # Update failed because the target reference is missing.
138 # Update failed because the target reference is missing.
139 MISSING_TARGET_REF = 4
139 MISSING_TARGET_REF = 4
140
140
141 # Update failed because the source reference is missing.
141 # Update failed because the source reference is missing.
142 MISSING_SOURCE_REF = 5
142 MISSING_SOURCE_REF = 5
143
143
144
144
145 class BaseRepository(object):
145 class BaseRepository(object):
146 """
146 """
147 Base Repository for final backends
147 Base Repository for final backends
148
148
149 .. attribute:: DEFAULT_BRANCH_NAME
149 .. attribute:: DEFAULT_BRANCH_NAME
150
150
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
152
152
153 .. attribute:: commit_ids
153 .. attribute:: commit_ids
154
154
155 list of all available commit ids, in ascending order
155 list of all available commit ids, in ascending order
156
156
157 .. attribute:: path
157 .. attribute:: path
158
158
159 absolute path to the repository
159 absolute path to the repository
160
160
161 .. attribute:: bookmarks
161 .. attribute:: bookmarks
162
162
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
164 there are no bookmarks or the backend implementation does not support
165 bookmarks.
165 bookmarks.
166
166
167 .. attribute:: tags
167 .. attribute:: tags
168
168
169 Mapping from name to :term:`Commit ID` of the tag.
169 Mapping from name to :term:`Commit ID` of the tag.
170
170
171 """
171 """
172
172
173 DEFAULT_BRANCH_NAME = None
173 DEFAULT_BRANCH_NAME = None
174 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_CONTACT = u"Unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
176 EMPTY_COMMIT_ID = '0' * 40
176 EMPTY_COMMIT_ID = '0' * 40
177
177
178 path = None
178 path = None
179 _remote = None
179 _remote = None
180
180
181 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 def __init__(self, repo_path, config=None, create=False, **kwargs):
182 """
182 """
183 Initializes repository. Raises RepositoryError if repository could
183 Initializes repository. Raises RepositoryError if repository could
184 not be find at the given ``repo_path`` or directory at ``repo_path``
184 not be find at the given ``repo_path`` or directory at ``repo_path``
185 exists and ``create`` is set to True.
185 exists and ``create`` is set to True.
186
186
187 :param repo_path: local path of the repository
187 :param repo_path: local path of the repository
188 :param config: repository configuration
188 :param config: repository configuration
189 :param create=False: if set to True, would try to create repository.
189 :param create=False: if set to True, would try to create repository.
190 :param src_url=None: if set, should be proper url from which repository
190 :param src_url=None: if set, should be proper url from which repository
191 would be cloned; requires ``create`` parameter to be set to True -
191 would be cloned; requires ``create`` parameter to be set to True -
192 raises RepositoryError if src_url is set and create evaluates to
192 raises RepositoryError if src_url is set and create evaluates to
193 False
193 False
194 """
194 """
195 raise NotImplementedError
195 raise NotImplementedError
196
196
197 def __repr__(self):
197 def __repr__(self):
198 return '<%s at %s>' % (self.__class__.__name__, self.path)
198 return '<%s at %s>' % (self.__class__.__name__, self.path)
199
199
200 def __len__(self):
200 def __len__(self):
201 return self.count()
201 return self.count()
202
202
203 def __eq__(self, other):
203 def __eq__(self, other):
204 same_instance = isinstance(other, self.__class__)
204 same_instance = isinstance(other, self.__class__)
205 return same_instance and other.path == self.path
205 return same_instance and other.path == self.path
206
206
207 def __ne__(self, other):
207 def __ne__(self, other):
208 return not self.__eq__(other)
208 return not self.__eq__(other)
209
209
210 def get_create_shadow_cache_pr_path(self, db_repo):
210 def get_create_shadow_cache_pr_path(self, db_repo):
211 path = db_repo.cached_diffs_dir
211 path = db_repo.cached_diffs_dir
212 if not os.path.exists(path):
212 if not os.path.exists(path):
213 os.makedirs(path, 0755)
213 os.makedirs(path, 0755)
214 return path
214 return path
215
215
216 @classmethod
216 @classmethod
217 def get_default_config(cls, default=None):
217 def get_default_config(cls, default=None):
218 config = Config()
218 config = Config()
219 if default and isinstance(default, list):
219 if default and isinstance(default, list):
220 for section, key, val in default:
220 for section, key, val in default:
221 config.set(section, key, val)
221 config.set(section, key, val)
222 return config
222 return config
223
223
224 @LazyProperty
224 @LazyProperty
225 def EMPTY_COMMIT(self):
225 def EMPTY_COMMIT(self):
226 return EmptyCommit(self.EMPTY_COMMIT_ID)
226 return EmptyCommit(self.EMPTY_COMMIT_ID)
227
227
228 @LazyProperty
228 @LazyProperty
229 def alias(self):
229 def alias(self):
230 for k, v in settings.BACKENDS.items():
230 for k, v in settings.BACKENDS.items():
231 if v.split('.')[-1] == str(self.__class__.__name__):
231 if v.split('.')[-1] == str(self.__class__.__name__):
232 return k
232 return k
233
233
234 @LazyProperty
234 @LazyProperty
235 def name(self):
235 def name(self):
236 return safe_unicode(os.path.basename(self.path))
236 return safe_unicode(os.path.basename(self.path))
237
237
238 @LazyProperty
238 @LazyProperty
239 def description(self):
239 def description(self):
240 raise NotImplementedError
240 raise NotImplementedError
241
241
242 def refs(self):
242 def refs(self):
243 """
243 """
244 returns a `dict` with branches, bookmarks, tags, and closed_branches
244 returns a `dict` with branches, bookmarks, tags, and closed_branches
245 for this repository
245 for this repository
246 """
246 """
247 return dict(
247 return dict(
248 branches=self.branches,
248 branches=self.branches,
249 branches_closed=self.branches_closed,
249 branches_closed=self.branches_closed,
250 tags=self.tags,
250 tags=self.tags,
251 bookmarks=self.bookmarks
251 bookmarks=self.bookmarks
252 )
252 )
253
253
254 @LazyProperty
254 @LazyProperty
255 def branches(self):
255 def branches(self):
256 """
256 """
257 A `dict` which maps branch names to commit ids.
257 A `dict` which maps branch names to commit ids.
258 """
258 """
259 raise NotImplementedError
259 raise NotImplementedError
260
260
261 @LazyProperty
261 @LazyProperty
262 def branches_closed(self):
262 def branches_closed(self):
263 """
263 """
264 A `dict` which maps tags names to commit ids.
264 A `dict` which maps tags names to commit ids.
265 """
265 """
266 raise NotImplementedError
266 raise NotImplementedError
267
267
268 @LazyProperty
268 @LazyProperty
269 def bookmarks(self):
269 def bookmarks(self):
270 """
270 """
271 A `dict` which maps tags names to commit ids.
271 A `dict` which maps tags names to commit ids.
272 """
272 """
273 raise NotImplementedError
273 raise NotImplementedError
274
274
275 @LazyProperty
275 @LazyProperty
276 def tags(self):
276 def tags(self):
277 """
277 """
278 A `dict` which maps tags names to commit ids.
278 A `dict` which maps tags names to commit ids.
279 """
279 """
280 raise NotImplementedError
280 raise NotImplementedError
281
281
282 @LazyProperty
282 @LazyProperty
283 def size(self):
283 def size(self):
284 """
284 """
285 Returns combined size in bytes for all repository files
285 Returns combined size in bytes for all repository files
286 """
286 """
287 tip = self.get_commit()
287 tip = self.get_commit()
288 return tip.size
288 return tip.size
289
289
290 def size_at_commit(self, commit_id):
290 def size_at_commit(self, commit_id):
291 commit = self.get_commit(commit_id)
291 commit = self.get_commit(commit_id)
292 return commit.size
292 return commit.size
293
293
294 def is_empty(self):
294 def is_empty(self):
295 return not bool(self.commit_ids)
295 return not bool(self.commit_ids)
296
296
297 @staticmethod
297 @staticmethod
298 def check_url(url, config):
298 def check_url(url, config):
299 """
299 """
300 Function will check given url and try to verify if it's a valid
300 Function will check given url and try to verify if it's a valid
301 link.
301 link.
302 """
302 """
303 raise NotImplementedError
303 raise NotImplementedError
304
304
305 @staticmethod
305 @staticmethod
306 def is_valid_repository(path):
306 def is_valid_repository(path):
307 """
307 """
308 Check if given `path` contains a valid repository of this backend
308 Check if given `path` contains a valid repository of this backend
309 """
309 """
310 raise NotImplementedError
310 raise NotImplementedError
311
311
312 # ==========================================================================
312 # ==========================================================================
313 # COMMITS
313 # COMMITS
314 # ==========================================================================
314 # ==========================================================================
315
315
316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
317 """
317 """
318 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
318 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
319 are both None, most recent commit is returned.
319 are both None, most recent commit is returned.
320
320
321 :param pre_load: Optional. List of commit attributes to load.
321 :param pre_load: Optional. List of commit attributes to load.
322
322
323 :raises ``EmptyRepositoryError``: if there are no commits
323 :raises ``EmptyRepositoryError``: if there are no commits
324 """
324 """
325 raise NotImplementedError
325 raise NotImplementedError
326
326
327 def __iter__(self):
327 def __iter__(self):
328 for commit_id in self.commit_ids:
328 for commit_id in self.commit_ids:
329 yield self.get_commit(commit_id=commit_id)
329 yield self.get_commit(commit_id=commit_id)
330
330
331 def get_commits(
331 def get_commits(
332 self, start_id=None, end_id=None, start_date=None, end_date=None,
332 self, start_id=None, end_id=None, start_date=None, end_date=None,
333 branch_name=None, show_hidden=False, pre_load=None):
333 branch_name=None, show_hidden=False, pre_load=None):
334 """
334 """
335 Returns iterator of `BaseCommit` objects from start to end
335 Returns iterator of `BaseCommit` objects from start to end
336 not inclusive. This should behave just like a list, ie. end is not
336 not inclusive. This should behave just like a list, ie. end is not
337 inclusive.
337 inclusive.
338
338
339 :param start_id: None or str, must be a valid commit id
339 :param start_id: None or str, must be a valid commit id
340 :param end_id: None or str, must be a valid commit id
340 :param end_id: None or str, must be a valid commit id
341 :param start_date:
341 :param start_date:
342 :param end_date:
342 :param end_date:
343 :param branch_name:
343 :param branch_name:
344 :param show_hidden:
344 :param show_hidden:
345 :param pre_load:
345 :param pre_load:
346 """
346 """
347 raise NotImplementedError
347 raise NotImplementedError
348
348
349 def __getitem__(self, key):
349 def __getitem__(self, key):
350 """
350 """
351 Allows index based access to the commit objects of this repository.
351 Allows index based access to the commit objects of this repository.
352 """
352 """
353 pre_load = ["author", "branch", "date", "message", "parents"]
353 pre_load = ["author", "branch", "date", "message", "parents"]
354 if isinstance(key, slice):
354 if isinstance(key, slice):
355 return self._get_range(key, pre_load)
355 return self._get_range(key, pre_load)
356 return self.get_commit(commit_idx=key, pre_load=pre_load)
356 return self.get_commit(commit_idx=key, pre_load=pre_load)
357
357
358 def _get_range(self, slice_obj, pre_load):
358 def _get_range(self, slice_obj, pre_load):
359 for commit_id in self.commit_ids.__getitem__(slice_obj):
359 for commit_id in self.commit_ids.__getitem__(slice_obj):
360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
361
361
362 def count(self):
362 def count(self):
363 return len(self.commit_ids)
363 return len(self.commit_ids)
364
364
365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
366 """
366 """
367 Creates and returns a tag for the given ``commit_id``.
367 Creates and returns a tag for the given ``commit_id``.
368
368
369 :param name: name for new tag
369 :param name: name for new tag
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
371 :param commit_id: commit id for which new tag would be created
371 :param commit_id: commit id for which new tag would be created
372 :param message: message of the tag's commit
372 :param message: message of the tag's commit
373 :param date: date of tag's commit
373 :param date: date of tag's commit
374
374
375 :raises TagAlreadyExistError: if tag with same name already exists
375 :raises TagAlreadyExistError: if tag with same name already exists
376 """
376 """
377 raise NotImplementedError
377 raise NotImplementedError
378
378
379 def remove_tag(self, name, user, message=None, date=None):
379 def remove_tag(self, name, user, message=None, date=None):
380 """
380 """
381 Removes tag with the given ``name``.
381 Removes tag with the given ``name``.
382
382
383 :param name: name of the tag to be removed
383 :param name: name of the tag to be removed
384 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
384 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
385 :param message: message of the tag's removal commit
385 :param message: message of the tag's removal commit
386 :param date: date of tag's removal commit
386 :param date: date of tag's removal commit
387
387
388 :raises TagDoesNotExistError: if tag with given name does not exists
388 :raises TagDoesNotExistError: if tag with given name does not exists
389 """
389 """
390 raise NotImplementedError
390 raise NotImplementedError
391
391
392 def get_diff(
392 def get_diff(
393 self, commit1, commit2, path=None, ignore_whitespace=False,
393 self, commit1, commit2, path=None, ignore_whitespace=False,
394 context=3, path1=None):
394 context=3, path1=None):
395 """
395 """
396 Returns (git like) *diff*, as plain text. Shows changes introduced by
396 Returns (git like) *diff*, as plain text. Shows changes introduced by
397 `commit2` since `commit1`.
397 `commit2` since `commit1`.
398
398
399 :param commit1: Entry point from which diff is shown. Can be
399 :param commit1: Entry point from which diff is shown. Can be
400 ``self.EMPTY_COMMIT`` - in this case, patch showing all
400 ``self.EMPTY_COMMIT`` - in this case, patch showing all
401 the changes since empty state of the repository until `commit2`
401 the changes since empty state of the repository until `commit2`
402 :param commit2: Until which commit changes should be shown.
402 :param commit2: Until which commit changes should be shown.
403 :param path: Can be set to a path of a file to create a diff of that
403 :param path: Can be set to a path of a file to create a diff of that
404 file. If `path1` is also set, this value is only associated to
404 file. If `path1` is also set, this value is only associated to
405 `commit2`.
405 `commit2`.
406 :param ignore_whitespace: If set to ``True``, would not show whitespace
406 :param ignore_whitespace: If set to ``True``, would not show whitespace
407 changes. Defaults to ``False``.
407 changes. Defaults to ``False``.
408 :param context: How many lines before/after changed lines should be
408 :param context: How many lines before/after changed lines should be
409 shown. Defaults to ``3``.
409 shown. Defaults to ``3``.
410 :param path1: Can be set to a path to associate with `commit1`. This
410 :param path1: Can be set to a path to associate with `commit1`. This
411 parameter works only for backends which support diff generation for
411 parameter works only for backends which support diff generation for
412 different paths. Other backends will raise a `ValueError` if `path1`
412 different paths. Other backends will raise a `ValueError` if `path1`
413 is set and has a different value than `path`.
413 is set and has a different value than `path`.
414 :param file_path: filter this diff by given path pattern
414 :param file_path: filter this diff by given path pattern
415 """
415 """
416 raise NotImplementedError
416 raise NotImplementedError
417
417
418 def strip(self, commit_id, branch=None):
418 def strip(self, commit_id, branch=None):
419 """
419 """
420 Strip given commit_id from the repository
420 Strip given commit_id from the repository
421 """
421 """
422 raise NotImplementedError
422 raise NotImplementedError
423
423
424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
425 """
425 """
426 Return a latest common ancestor commit if one exists for this repo
426 Return a latest common ancestor commit if one exists for this repo
427 `commit_id1` vs `commit_id2` from `repo2`.
427 `commit_id1` vs `commit_id2` from `repo2`.
428
428
429 :param commit_id1: Commit it from this repository to use as a
429 :param commit_id1: Commit it from this repository to use as a
430 target for the comparison.
430 target for the comparison.
431 :param commit_id2: Source commit id to use for comparison.
431 :param commit_id2: Source commit id to use for comparison.
432 :param repo2: Source repository to use for comparison.
432 :param repo2: Source repository to use for comparison.
433 """
433 """
434 raise NotImplementedError
434 raise NotImplementedError
435
435
436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
437 """
437 """
438 Compare this repository's revision `commit_id1` with `commit_id2`.
438 Compare this repository's revision `commit_id1` with `commit_id2`.
439
439
440 Returns a tuple(commits, ancestor) that would be merged from
440 Returns a tuple(commits, ancestor) that would be merged from
441 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
441 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
442 will be returned as ancestor.
442 will be returned as ancestor.
443
443
444 :param commit_id1: Commit it from this repository to use as a
444 :param commit_id1: Commit it from this repository to use as a
445 target for the comparison.
445 target for the comparison.
446 :param commit_id2: Source commit id to use for comparison.
446 :param commit_id2: Source commit id to use for comparison.
447 :param repo2: Source repository to use for comparison.
447 :param repo2: Source repository to use for comparison.
448 :param merge: If set to ``True`` will do a merge compare which also
448 :param merge: If set to ``True`` will do a merge compare which also
449 returns the common ancestor.
449 returns the common ancestor.
450 :param pre_load: Optional. List of commit attributes to load.
450 :param pre_load: Optional. List of commit attributes to load.
451 """
451 """
452 raise NotImplementedError
452 raise NotImplementedError
453
453
454 def merge(self, target_ref, source_repo, source_ref, workspace_id,
454 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
455 user_name='', user_email='', message='', dry_run=False,
455 user_name='', user_email='', message='', dry_run=False,
456 use_rebase=False, close_branch=False):
456 use_rebase=False, close_branch=False):
457 """
457 """
458 Merge the revisions specified in `source_ref` from `source_repo`
458 Merge the revisions specified in `source_ref` from `source_repo`
459 onto the `target_ref` of this repository.
459 onto the `target_ref` of this repository.
460
460
461 `source_ref` and `target_ref` are named tupls with the following
461 `source_ref` and `target_ref` are named tupls with the following
462 fields `type`, `name` and `commit_id`.
462 fields `type`, `name` and `commit_id`.
463
463
464 Returns a MergeResponse named tuple with the following fields
464 Returns a MergeResponse named tuple with the following fields
465 'possible', 'executed', 'source_commit', 'target_commit',
465 'possible', 'executed', 'source_commit', 'target_commit',
466 'merge_commit'.
466 'merge_commit'.
467
467
468 :param repo_id: `repo_id` target repo id.
469 :param workspace_id: `workspace_id` unique identifier.
468 :param target_ref: `target_ref` points to the commit on top of which
470 :param target_ref: `target_ref` points to the commit on top of which
469 the `source_ref` should be merged.
471 the `source_ref` should be merged.
470 :param source_repo: The repository that contains the commits to be
472 :param source_repo: The repository that contains the commits to be
471 merged.
473 merged.
472 :param source_ref: `source_ref` points to the topmost commit from
474 :param source_ref: `source_ref` points to the topmost commit from
473 the `source_repo` which should be merged.
475 the `source_repo` which should be merged.
474 :param workspace_id: `workspace_id` unique identifier.
475 :param user_name: Merge commit `user_name`.
476 :param user_name: Merge commit `user_name`.
476 :param user_email: Merge commit `user_email`.
477 :param user_email: Merge commit `user_email`.
477 :param message: Merge commit `message`.
478 :param message: Merge commit `message`.
478 :param dry_run: If `True` the merge will not take place.
479 :param dry_run: If `True` the merge will not take place.
479 :param use_rebase: If `True` commits from the source will be rebased
480 :param use_rebase: If `True` commits from the source will be rebased
480 on top of the target instead of being merged.
481 on top of the target instead of being merged.
481 :param close_branch: If `True` branch will be close before merging it
482 :param close_branch: If `True` branch will be close before merging it
482 """
483 """
483 if dry_run:
484 if dry_run:
484 message = message or 'dry_run_merge_message'
485 message = message or 'dry_run_merge_message'
485 user_email = user_email or 'dry-run-merge@rhodecode.com'
486 user_email = user_email or 'dry-run-merge@rhodecode.com'
486 user_name = user_name or 'Dry-Run User'
487 user_name = user_name or 'Dry-Run User'
487 else:
488 else:
488 if not user_name:
489 if not user_name:
489 raise ValueError('user_name cannot be empty')
490 raise ValueError('user_name cannot be empty')
490 if not user_email:
491 if not user_email:
491 raise ValueError('user_email cannot be empty')
492 raise ValueError('user_email cannot be empty')
492 if not message:
493 if not message:
493 raise ValueError('message cannot be empty')
494 raise ValueError('message cannot be empty')
494
495
495 shadow_repository_path = self._maybe_prepare_merge_workspace(
496 workspace_id, target_ref, source_ref)
497
498 try:
496 try:
499 return self._merge_repo(
497 return self._merge_repo(
500 shadow_repository_path, target_ref, source_repo,
498 repo_id, workspace_id, target_ref, source_repo,
501 source_ref, message, user_name, user_email, dry_run=dry_run,
499 source_ref, message, user_name, user_email, dry_run=dry_run,
502 use_rebase=use_rebase, close_branch=close_branch)
500 use_rebase=use_rebase, close_branch=close_branch)
503 except RepositoryError:
501 except RepositoryError:
504 log.exception(
502 log.exception(
505 'Unexpected failure when running merge, dry-run=%s',
503 'Unexpected failure when running merge, dry-run=%s',
506 dry_run)
504 dry_run)
507 return MergeResponse(
505 return MergeResponse(
508 False, False, None, MergeFailureReason.UNKNOWN)
506 False, False, None, MergeFailureReason.UNKNOWN)
509
507
510 def _merge_repo(self, shadow_repository_path, target_ref,
508 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 source_repo, source_ref, merge_message,
509 source_repo, source_ref, merge_message,
512 merger_name, merger_email, dry_run=False,
510 merger_name, merger_email, dry_run=False,
513 use_rebase=False, close_branch=False):
511 use_rebase=False, close_branch=False):
514 """Internal implementation of merge."""
512 """Internal implementation of merge."""
515 raise NotImplementedError
513 raise NotImplementedError
516
514
517 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
515 def _maybe_prepare_merge_workspace(
516 self, repo_id, workspace_id, target_ref, source_ref):
518 """
517 """
519 Create the merge workspace.
518 Create the merge workspace.
520
519
521 :param workspace_id: `workspace_id` unique identifier.
520 :param workspace_id: `workspace_id` unique identifier.
522 """
521 """
523 raise NotImplementedError
522 raise NotImplementedError
524
523
525 def _get_shadow_repository_path(self, workspace_id):
524 def _get_legacy_shadow_repository_path(self, workspace_id):
526 raise NotImplementedError
525 """
526 Legacy version that was used before. We still need it for
527 backward compat
528 """
529 return os.path.join(
530 os.path.dirname(self.path),
531 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
527
532
528 def cleanup_merge_workspace(self, workspace_id):
533 def _get_shadow_repository_path(self, repo_id, workspace_id):
534 # The name of the shadow repository must start with '.', so it is
535 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
536 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
537 if os.path.exists(legacy_repository_path):
538 return legacy_repository_path
539 else:
540 return os.path.join(
541 os.path.dirname(self.path),
542 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
543
544 def cleanup_merge_workspace(self, repo_id, workspace_id):
529 """
545 """
530 Remove merge workspace.
546 Remove merge workspace.
531
547
532 This function MUST not fail in case there is no workspace associated to
548 This function MUST not fail in case there is no workspace associated to
533 the given `workspace_id`.
549 the given `workspace_id`.
534
550
535 :param workspace_id: `workspace_id` unique identifier.
551 :param workspace_id: `workspace_id` unique identifier.
536 """
552 """
537 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
553 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
538 shadow_repository_path_del = '{}.{}.delete'.format(
554 shadow_repository_path_del = '{}.{}.delete'.format(
539 shadow_repository_path, time.time())
555 shadow_repository_path, time.time())
540
556
541 # move the shadow repo, so it never conflicts with the one used.
557 # move the shadow repo, so it never conflicts with the one used.
542 # we use this method because shutil.rmtree had some edge case problems
558 # we use this method because shutil.rmtree had some edge case problems
543 # removing symlinked repositories
559 # removing symlinked repositories
544 if not os.path.isdir(shadow_repository_path):
560 if not os.path.isdir(shadow_repository_path):
545 return
561 return
546
562
547 shutil.move(shadow_repository_path, shadow_repository_path_del)
563 shutil.move(shadow_repository_path, shadow_repository_path_del)
548 try:
564 try:
549 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
565 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
550 except Exception:
566 except Exception:
551 log.exception('Failed to gracefully remove shadow repo under %s',
567 log.exception('Failed to gracefully remove shadow repo under %s',
552 shadow_repository_path_del)
568 shadow_repository_path_del)
553 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
569 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
554
570
555 # ========== #
571 # ========== #
556 # COMMIT API #
572 # COMMIT API #
557 # ========== #
573 # ========== #
558
574
559 @LazyProperty
575 @LazyProperty
560 def in_memory_commit(self):
576 def in_memory_commit(self):
561 """
577 """
562 Returns :class:`InMemoryCommit` object for this repository.
578 Returns :class:`InMemoryCommit` object for this repository.
563 """
579 """
564 raise NotImplementedError
580 raise NotImplementedError
565
581
566 # ======================== #
582 # ======================== #
567 # UTILITIES FOR SUBCLASSES #
583 # UTILITIES FOR SUBCLASSES #
568 # ======================== #
584 # ======================== #
569
585
570 def _validate_diff_commits(self, commit1, commit2):
586 def _validate_diff_commits(self, commit1, commit2):
571 """
587 """
572 Validates that the given commits are related to this repository.
588 Validates that the given commits are related to this repository.
573
589
574 Intended as a utility for sub classes to have a consistent validation
590 Intended as a utility for sub classes to have a consistent validation
575 of input parameters in methods like :meth:`get_diff`.
591 of input parameters in methods like :meth:`get_diff`.
576 """
592 """
577 self._validate_commit(commit1)
593 self._validate_commit(commit1)
578 self._validate_commit(commit2)
594 self._validate_commit(commit2)
579 if (isinstance(commit1, EmptyCommit) and
595 if (isinstance(commit1, EmptyCommit) and
580 isinstance(commit2, EmptyCommit)):
596 isinstance(commit2, EmptyCommit)):
581 raise ValueError("Cannot compare two empty commits")
597 raise ValueError("Cannot compare two empty commits")
582
598
583 def _validate_commit(self, commit):
599 def _validate_commit(self, commit):
584 if not isinstance(commit, BaseCommit):
600 if not isinstance(commit, BaseCommit):
585 raise TypeError(
601 raise TypeError(
586 "%s is not of type BaseCommit" % repr(commit))
602 "%s is not of type BaseCommit" % repr(commit))
587 if commit.repository != self and not isinstance(commit, EmptyCommit):
603 if commit.repository != self and not isinstance(commit, EmptyCommit):
588 raise ValueError(
604 raise ValueError(
589 "Commit %s must be a valid commit from this repository %s, "
605 "Commit %s must be a valid commit from this repository %s, "
590 "related to this repository instead %s." %
606 "related to this repository instead %s." %
591 (commit, self, commit.repository))
607 (commit, self, commit.repository))
592
608
593 def _validate_commit_id(self, commit_id):
609 def _validate_commit_id(self, commit_id):
594 if not isinstance(commit_id, basestring):
610 if not isinstance(commit_id, basestring):
595 raise TypeError("commit_id must be a string value")
611 raise TypeError("commit_id must be a string value")
596
612
597 def _validate_commit_idx(self, commit_idx):
613 def _validate_commit_idx(self, commit_idx):
598 if not isinstance(commit_idx, (int, long)):
614 if not isinstance(commit_idx, (int, long)):
599 raise TypeError("commit_idx must be a numeric value")
615 raise TypeError("commit_idx must be a numeric value")
600
616
601 def _validate_branch_name(self, branch_name):
617 def _validate_branch_name(self, branch_name):
602 if branch_name and branch_name not in self.branches_all:
618 if branch_name and branch_name not in self.branches_all:
603 msg = ("Branch %s not found in %s" % (branch_name, self))
619 msg = ("Branch %s not found in %s" % (branch_name, self))
604 raise BranchDoesNotExistError(msg)
620 raise BranchDoesNotExistError(msg)
605
621
606 #
622 #
607 # Supporting deprecated API parts
623 # Supporting deprecated API parts
608 # TODO: johbo: consider to move this into a mixin
624 # TODO: johbo: consider to move this into a mixin
609 #
625 #
610
626
611 @property
627 @property
612 def EMPTY_CHANGESET(self):
628 def EMPTY_CHANGESET(self):
613 warnings.warn(
629 warnings.warn(
614 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
630 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
615 return self.EMPTY_COMMIT_ID
631 return self.EMPTY_COMMIT_ID
616
632
617 @property
633 @property
618 def revisions(self):
634 def revisions(self):
619 warnings.warn("Use commits attribute instead", DeprecationWarning)
635 warnings.warn("Use commits attribute instead", DeprecationWarning)
620 return self.commit_ids
636 return self.commit_ids
621
637
622 @revisions.setter
638 @revisions.setter
623 def revisions(self, value):
639 def revisions(self, value):
624 warnings.warn("Use commits attribute instead", DeprecationWarning)
640 warnings.warn("Use commits attribute instead", DeprecationWarning)
625 self.commit_ids = value
641 self.commit_ids = value
626
642
627 def get_changeset(self, revision=None, pre_load=None):
643 def get_changeset(self, revision=None, pre_load=None):
628 warnings.warn("Use get_commit instead", DeprecationWarning)
644 warnings.warn("Use get_commit instead", DeprecationWarning)
629 commit_id = None
645 commit_id = None
630 commit_idx = None
646 commit_idx = None
631 if isinstance(revision, basestring):
647 if isinstance(revision, basestring):
632 commit_id = revision
648 commit_id = revision
633 else:
649 else:
634 commit_idx = revision
650 commit_idx = revision
635 return self.get_commit(
651 return self.get_commit(
636 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
652 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
637
653
638 def get_changesets(
654 def get_changesets(
639 self, start=None, end=None, start_date=None, end_date=None,
655 self, start=None, end=None, start_date=None, end_date=None,
640 branch_name=None, pre_load=None):
656 branch_name=None, pre_load=None):
641 warnings.warn("Use get_commits instead", DeprecationWarning)
657 warnings.warn("Use get_commits instead", DeprecationWarning)
642 start_id = self._revision_to_commit(start)
658 start_id = self._revision_to_commit(start)
643 end_id = self._revision_to_commit(end)
659 end_id = self._revision_to_commit(end)
644 return self.get_commits(
660 return self.get_commits(
645 start_id=start_id, end_id=end_id, start_date=start_date,
661 start_id=start_id, end_id=end_id, start_date=start_date,
646 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
662 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
647
663
648 def _revision_to_commit(self, revision):
664 def _revision_to_commit(self, revision):
649 """
665 """
650 Translates a revision to a commit_id
666 Translates a revision to a commit_id
651
667
652 Helps to support the old changeset based API which allows to use
668 Helps to support the old changeset based API which allows to use
653 commit ids and commit indices interchangeable.
669 commit ids and commit indices interchangeable.
654 """
670 """
655 if revision is None:
671 if revision is None:
656 return revision
672 return revision
657
673
658 if isinstance(revision, basestring):
674 if isinstance(revision, basestring):
659 commit_id = revision
675 commit_id = revision
660 else:
676 else:
661 commit_id = self.commit_ids[revision]
677 commit_id = self.commit_ids[revision]
662 return commit_id
678 return commit_id
663
679
664 @property
680 @property
665 def in_memory_changeset(self):
681 def in_memory_changeset(self):
666 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
682 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
667 return self.in_memory_commit
683 return self.in_memory_commit
668
684
669 def get_path_permissions(self, username):
685 def get_path_permissions(self, username):
670 """
686 """
671 Returns a path permission checker or None if not supported
687 Returns a path permission checker or None if not supported
672
688
673 :param username: session user name
689 :param username: session user name
674 :return: an instance of BasePathPermissionChecker or None
690 :return: an instance of BasePathPermissionChecker or None
675 """
691 """
676 return None
692 return None
677
693
678 def install_hooks(self, force=False):
694 def install_hooks(self, force=False):
679 return self._remote.install_hooks(force)
695 return self._remote.install_hooks(force)
680
696
681
697
class BaseCommit(object):
    """
    Each backend should implement it's commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id, may be ``raw_id`` or i.e. for mercurial's tip
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if apply) version of ``raw_id``; it would be simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """

    def __str__(self):
        return '<%s at %s:%s>' % (
            self.__class__.__name__, self.idx, self.short_id)

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        return u'%s:%s' % (self.idx, self.short_id)

    def __eq__(self, other):
        # Commits of different backend classes never compare equal, even if
        # their raw ids happen to match.
        same_instance = isinstance(other, self.__class__)
        return same_instance and self.raw_id == other.raw_id

    def __json__(self):
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }

    def __getstate__(self):
        # Drop handles that must not be pickled (remote connection and the
        # owning repository object); they are re-attached on load elsewhere.
        d = self.__dict__.copy()
        d.pop('_remote', None)
        d.pop('repository', None)
        return d

    def _get_refs(self):
        # Summary of refs pointing at this commit; ``bookmarks`` only exists
        # on backends that support them (Mercurial), hence the getattr.
        return {
            'branches': [self.branch] if self.branch else [],
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }

    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        return len(self.parents) > 1

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        return author_email(self.author)

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_commit(self, path, pre_load=None):
        """
        Returns last commit of the file at the given `path`.

        :param pre_load: Optional. List of commit attributes to load.
        :raises RepositoryError: if no history exists for ``path``.
        """
        commits = self.get_file_history(path, limit=1, pre_load=pre_load)
        if not commits:
            raise RepositoryError(
                'Failed to fetch history for path {}. '
                'Please check if such path exists in your repository'.format(
                    path))
        return commits[0]

    def get_file_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
            instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        return None

    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # Fall back to the commit date if no explicit mtime was requested.
        mtime = mtime or time.mktime(self.date.timetuple())

        # Collect (path, mode, is_link, raw content) for every file in the
        # tree of this commit; the actual archive is written remotely.
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))

        connection.Hg.archive_repo(file_path, mtime, file_info, kind)

    def _validate_archive_prefix(self, prefix):
        # Returns a safe archive prefix: defaults to "<repo>-<short_id>",
        # rejects non-bytes, absolute, or empty prefixes.
        if prefix is None:
            prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
                repo_name=safe_str(self.repository.name),
                short_id=self.short_id)
        elif not isinstance(prefix, str):
            raise ValueError("prefix not a bytes object: %s" % repr(prefix))
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")
        return prefix

    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        return self.get_node('')

    def next(self, branch=None):
        """
        Returns next commit from current, if branch is gives it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        """
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is gives it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        """
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)

    def _find_next(self, indexes, branch=None):
        # Walk the given commit indexes and return the first commit that
        # matches ``branch`` (or the very first one if no branch given).
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError

    def diff(self, ignore_whitespace=True, context=3):
        """
        Returns a `Diff` object representing the change made by this commit.
        """
        # Root commits are diffed against the backend's EMPTY_COMMIT.
        parent = (
            self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
        diff = self.repository.get_diff(
            parent, self,
            ignore_whitespace=ignore_whitespace,
            context=context)
        return diff

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns total number of bytes from contents of all filenodes.
        """
        return sum((node.size for node in self.get_filenodes_generator()))

    def walk(self, topurl=''):
        """
        Similar to os.walk method. Insted of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (topnode, dirnodes, filenodes).
        """
        topnode = self.get_node(topurl)
        if not topnode.is_dir():
            return
        yield (topnode, topnode.dirs, topnode.files)
        for dirnode in topnode.dirs:
            for tup in self.walk(dirnode.path):
                yield tup

    def get_filenodes_generator(self):
        """
        Returns generator that yields *all* file nodes.
        """
        for topnode, dirs, files in self.walk():
            for node in files:
                yield node

    #
    # Utilities for sub classes to support consistent behavior
    #

    def no_node_at_path(self, path):
        # Builds (does not raise) the standard "no such path" error.
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))

    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        return path.rstrip('/')

    #
    # Deprecated API based on changesets
    #

    @property
    def revision(self):
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        warnings.warn("Use get_file_commit instead", DeprecationWarning)
        return self.get_file_commit(path)
class BaseChangesetClass(type):
    """
    Metaclass for the deprecated ``BaseChangeset`` alias.

    Makes ``isinstance(obj, BaseChangeset)`` succeed for any
    :class:`BaseCommit` instance, so legacy isinstance checks keep working.
    """

    def __instancecheck__(self, instance):
        # Every BaseCommit counts as a (legacy) changeset.
        is_commit = isinstance(instance, BaseCommit)
        return is_commit
class BaseChangeset(BaseCommit):
    # Deprecated backwards-compatibility alias for BaseCommit; instantiating
    # it emits a DeprecationWarning and otherwise behaves like BaseCommit.

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1192 class BaseInMemoryCommit(object):
1208 class BaseInMemoryCommit(object):
1193 """
1209 """
1194 Represents differences between repository's state (most recent head) and
1210 Represents differences between repository's state (most recent head) and
1195 changes made *in place*.
1211 changes made *in place*.
1196
1212
1197 **Attributes**
1213 **Attributes**
1198
1214
1199 ``repository``
1215 ``repository``
1200 repository object for this in-memory-commit
1216 repository object for this in-memory-commit
1201
1217
1202 ``added``
1218 ``added``
1203 list of ``FileNode`` objects marked as *added*
1219 list of ``FileNode`` objects marked as *added*
1204
1220
1205 ``changed``
1221 ``changed``
1206 list of ``FileNode`` objects marked as *changed*
1222 list of ``FileNode`` objects marked as *changed*
1207
1223
1208 ``removed``
1224 ``removed``
1209 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1225 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1210 *removed*
1226 *removed*
1211
1227
1212 ``parents``
1228 ``parents``
1213 list of :class:`BaseCommit` instances representing parents of
1229 list of :class:`BaseCommit` instances representing parents of
1214 in-memory commit. Should always be 2-element sequence.
1230 in-memory commit. Should always be 2-element sequence.
1215
1231
1216 """
1232 """
1217
1233
def __init__(self, repository):
    """Bind this in-memory commit to *repository* with empty staging lists."""
    self.repository = repository
    self.parents = []
    # Nodes staged for the next commit, grouped by operation.
    self.added = []
    self.changed = []
    self.removed = []
def add(self, *filenodes):
    """
    Marks given ``FileNode`` objects as *to be committed*.

    :raises ``NodeAlreadyExistsError``: if node with same path exists at
        latest commit
    :raises ``NodeAlreadyAddedError``: if node with same path is already
        marked as *added*
    """
    # Validate against nodes already staged before mutating anything, so a
    # failing call leaves ``self.added`` untouched.
    already_staged = {n.path for n in self.added}
    for node in filenodes:
        if node.path in already_staged:
            raise NodeAlreadyAddedError(
                "Such FileNode %s is already marked for addition"
                % node.path)
    self.added.extend(filenodes)
def change(self, *filenodes):
    """
    Marks given ``FileNode`` objects to be *changed* in next commit.

    :raises ``EmptyRepositoryError``: if there are no commits yet
    :raises ``NodeAlreadyExistsError``: if node with same path is already
        marked to be *changed*
    :raises ``NodeAlreadyRemovedError``: if node with same path is already
        marked to be *removed*
    :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
        commit
    :raises ``NodeNotChangedError``: if node hasn't really be changed
    """
    # A node staged for removal cannot also be staged as changed.
    removed_paths = set(n.path for n in self.removed)
    for node in filenodes:
        if node.path in removed_paths:
            raise NodeAlreadyRemovedError(
                "Node at %s is already marked as removed" % node.path)

    # Changing only makes sense once at least one commit exists.
    try:
        self.repository.get_commit()
    except EmptyRepositoryError:
        raise EmptyRepositoryError(
            "Nothing to change - try to *add* new nodes rather than "
            "changing them")

    # Track staged paths as we go so duplicates within this call also raise,
    # matching the behavior of checking the growing ``self.changed`` list.
    changed_paths = set(n.path for n in self.changed)
    for node in filenodes:
        if node.path in changed_paths:
            raise NodeAlreadyChangedError(
                "Node at '%s' is already marked as changed" % node.path)
        self.changed.append(node)
        changed_paths.add(node.path)
def remove(self, *filenodes):
    """
    Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
    *removed* in next commit.

    :raises ``NodeAlreadyRemovedError``: if node has been already marked to
        be *removed*
    :raises ``NodeAlreadyChangedError``: if node has been already marked to
        be *changed*
    """
    # Track removed paths as we append so duplicates within a single call
    # are detected too, same as scanning the growing ``self.removed`` list.
    removed_paths = set(n.path for n in self.removed)
    changed_paths = set(n.path for n in self.changed)
    for node in filenodes:
        if node.path in removed_paths:
            # fixed message wording ("marked to for removal" -> "marked for
            # removal")
            raise NodeAlreadyRemovedError(
                "Node is already marked for removal at %s" % node.path)
        if node.path in changed_paths:
            raise NodeAlreadyChangedError(
                "Node is already marked to be changed at %s" % node.path)
        # We only mark node as *removed* - real removal is done by
        # commit method
        self.removed.append(node)
        removed_paths.add(node.path)
1293 def reset(self):
1309 def reset(self):
1294 """
1310 """
1295 Resets this instance to initial state (cleans ``added``, ``changed``
1311 Resets this instance to initial state (cleans ``added``, ``changed``
1296 and ``removed`` lists).
1312 and ``removed`` lists).
1297 """
1313 """
1298 self.added = []
1314 self.added = []
1299 self.changed = []
1315 self.changed = []
1300 self.removed = []
1316 self.removed = []
1301 self.parents = []
1317 self.parents = []
1302
1318
1303 def get_ipaths(self):
1319 def get_ipaths(self):
1304 """
1320 """
1305 Returns generator of paths from nodes marked as added, changed or
1321 Returns generator of paths from nodes marked as added, changed or
1306 removed.
1322 removed.
1307 """
1323 """
1308 for node in itertools.chain(self.added, self.changed, self.removed):
1324 for node in itertools.chain(self.added, self.changed, self.removed):
1309 yield node.path
1325 yield node.path
1310
1326
1311 def get_paths(self):
1327 def get_paths(self):
1312 """
1328 """
1313 Returns list of paths from nodes marked as added, changed or removed.
1329 Returns list of paths from nodes marked as added, changed or removed.
1314 """
1330 """
1315 return list(self.get_ipaths())
1331 return list(self.get_ipaths())
1316
1332
1317 def check_integrity(self, parents=None):
1333 def check_integrity(self, parents=None):
1318 """
1334 """
1319 Checks in-memory commit's integrity. Also, sets parents if not
1335 Checks in-memory commit's integrity. Also, sets parents if not
1320 already set.
1336 already set.
1321
1337
1322 :raises CommitError: if any error occurs (i.e.
1338 :raises CommitError: if any error occurs (i.e.
1323 ``NodeDoesNotExistError``).
1339 ``NodeDoesNotExistError``).
1324 """
1340 """
1325 if not self.parents:
1341 if not self.parents:
1326 parents = parents or []
1342 parents = parents or []
1327 if len(parents) == 0:
1343 if len(parents) == 0:
1328 try:
1344 try:
1329 parents = [self.repository.get_commit(), None]
1345 parents = [self.repository.get_commit(), None]
1330 except EmptyRepositoryError:
1346 except EmptyRepositoryError:
1331 parents = [None, None]
1347 parents = [None, None]
1332 elif len(parents) == 1:
1348 elif len(parents) == 1:
1333 parents += [None]
1349 parents += [None]
1334 self.parents = parents
1350 self.parents = parents
1335
1351
1336 # Local parents, only if not None
1352 # Local parents, only if not None
1337 parents = [p for p in self.parents if p]
1353 parents = [p for p in self.parents if p]
1338
1354
1339 # Check nodes marked as added
1355 # Check nodes marked as added
1340 for p in parents:
1356 for p in parents:
1341 for node in self.added:
1357 for node in self.added:
1342 try:
1358 try:
1343 p.get_node(node.path)
1359 p.get_node(node.path)
1344 except NodeDoesNotExistError:
1360 except NodeDoesNotExistError:
1345 pass
1361 pass
1346 else:
1362 else:
1347 raise NodeAlreadyExistsError(
1363 raise NodeAlreadyExistsError(
1348 "Node `%s` already exists at %s" % (node.path, p))
1364 "Node `%s` already exists at %s" % (node.path, p))
1349
1365
1350 # Check nodes marked as changed
1366 # Check nodes marked as changed
1351 missing = set(self.changed)
1367 missing = set(self.changed)
1352 not_changed = set(self.changed)
1368 not_changed = set(self.changed)
1353 if self.changed and not parents:
1369 if self.changed and not parents:
1354 raise NodeDoesNotExistError(str(self.changed[0].path))
1370 raise NodeDoesNotExistError(str(self.changed[0].path))
1355 for p in parents:
1371 for p in parents:
1356 for node in self.changed:
1372 for node in self.changed:
1357 try:
1373 try:
1358 old = p.get_node(node.path)
1374 old = p.get_node(node.path)
1359 missing.remove(node)
1375 missing.remove(node)
1360 # if content actually changed, remove node from not_changed
1376 # if content actually changed, remove node from not_changed
1361 if old.content != node.content:
1377 if old.content != node.content:
1362 not_changed.remove(node)
1378 not_changed.remove(node)
1363 except NodeDoesNotExistError:
1379 except NodeDoesNotExistError:
1364 pass
1380 pass
1365 if self.changed and missing:
1381 if self.changed and missing:
1366 raise NodeDoesNotExistError(
1382 raise NodeDoesNotExistError(
1367 "Node `%s` marked as modified but missing in parents: %s"
1383 "Node `%s` marked as modified but missing in parents: %s"
1368 % (node.path, parents))
1384 % (node.path, parents))
1369
1385
1370 if self.changed and not_changed:
1386 if self.changed and not_changed:
1371 raise NodeNotChangedError(
1387 raise NodeNotChangedError(
1372 "Node `%s` wasn't actually changed (parents: %s)"
1388 "Node `%s` wasn't actually changed (parents: %s)"
1373 % (not_changed.pop().path, parents))
1389 % (not_changed.pop().path, parents))
1374
1390
1375 # Check nodes marked as removed
1391 # Check nodes marked as removed
1376 if self.removed and not parents:
1392 if self.removed and not parents:
1377 raise NodeDoesNotExistError(
1393 raise NodeDoesNotExistError(
1378 "Cannot remove node at %s as there "
1394 "Cannot remove node at %s as there "
1379 "were no parents specified" % self.removed[0].path)
1395 "were no parents specified" % self.removed[0].path)
1380 really_removed = set()
1396 really_removed = set()
1381 for p in parents:
1397 for p in parents:
1382 for node in self.removed:
1398 for node in self.removed:
1383 try:
1399 try:
1384 p.get_node(node.path)
1400 p.get_node(node.path)
1385 really_removed.add(node)
1401 really_removed.add(node)
1386 except CommitError:
1402 except CommitError:
1387 pass
1403 pass
1388 not_removed = set(self.removed) - really_removed
1404 not_removed = set(self.removed) - really_removed
1389 if not_removed:
1405 if not_removed:
1390 # TODO: johbo: This code branch does not seem to be covered
1406 # TODO: johbo: This code branch does not seem to be covered
1391 raise NodeDoesNotExistError(
1407 raise NodeDoesNotExistError(
1392 "Cannot remove node at %s from "
1408 "Cannot remove node at %s from "
1393 "following parents: %s" % (not_removed, parents))
1409 "following parents: %s" % (not_removed, parents))
1394
1410
1395 def commit(
1411 def commit(
1396 self, message, author, parents=None, branch=None, date=None,
1412 self, message, author, parents=None, branch=None, date=None,
1397 **kwargs):
1413 **kwargs):
1398 """
1414 """
1399 Performs in-memory commit (doesn't check workdir in any way) and
1415 Performs in-memory commit (doesn't check workdir in any way) and
1400 returns newly created :class:`BaseCommit`. Updates repository's
1416 returns newly created :class:`BaseCommit`. Updates repository's
1401 attribute `commits`.
1417 attribute `commits`.
1402
1418
1403 .. note::
1419 .. note::
1404
1420
1405 While overriding this method each backend's should call
1421 While overriding this method each backend's should call
1406 ``self.check_integrity(parents)`` in the first place.
1422 ``self.check_integrity(parents)`` in the first place.
1407
1423
1408 :param message: message of the commit
1424 :param message: message of the commit
1409 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1425 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1410 :param parents: single parent or sequence of parents from which commit
1426 :param parents: single parent or sequence of parents from which commit
1411 would be derived
1427 would be derived
1412 :param date: ``datetime.datetime`` instance. Defaults to
1428 :param date: ``datetime.datetime`` instance. Defaults to
1413 ``datetime.datetime.now()``.
1429 ``datetime.datetime.now()``.
1414 :param branch: branch name, as string. If none given, default backend's
1430 :param branch: branch name, as string. If none given, default backend's
1415 branch would be used.
1431 branch would be used.
1416
1432
1417 :raises ``CommitError``: if any error occurs while committing
1433 :raises ``CommitError``: if any error occurs while committing
1418 """
1434 """
1419 raise NotImplementedError
1435 raise NotImplementedError
1420
1436
1421
1437
1422 class BaseInMemoryChangesetClass(type):
1438 class BaseInMemoryChangesetClass(type):
1423
1439
1424 def __instancecheck__(self, instance):
1440 def __instancecheck__(self, instance):
1425 return isinstance(instance, BaseInMemoryCommit)
1441 return isinstance(instance, BaseInMemoryCommit)
1426
1442
1427
1443
1428 class BaseInMemoryChangeset(BaseInMemoryCommit):
1444 class BaseInMemoryChangeset(BaseInMemoryCommit):
1429
1445
1430 __metaclass__ = BaseInMemoryChangesetClass
1446 __metaclass__ = BaseInMemoryChangesetClass
1431
1447
1432 def __new__(cls, *args, **kwargs):
1448 def __new__(cls, *args, **kwargs):
1433 warnings.warn(
1449 warnings.warn(
1434 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1450 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1435 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1451 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1436
1452
1437
1453
1438 class EmptyCommit(BaseCommit):
1454 class EmptyCommit(BaseCommit):
1439 """
1455 """
1440 An dummy empty commit. It's possible to pass hash when creating
1456 An dummy empty commit. It's possible to pass hash when creating
1441 an EmptyCommit
1457 an EmptyCommit
1442 """
1458 """
1443
1459
1444 def __init__(
1460 def __init__(
1445 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1461 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1446 message='', author='', date=None):
1462 message='', author='', date=None):
1447 self._empty_commit_id = commit_id
1463 self._empty_commit_id = commit_id
1448 # TODO: johbo: Solve idx parameter, default value does not make
1464 # TODO: johbo: Solve idx parameter, default value does not make
1449 # too much sense
1465 # too much sense
1450 self.idx = idx
1466 self.idx = idx
1451 self.message = message
1467 self.message = message
1452 self.author = author
1468 self.author = author
1453 self.date = date or datetime.datetime.fromtimestamp(0)
1469 self.date = date or datetime.datetime.fromtimestamp(0)
1454 self.repository = repo
1470 self.repository = repo
1455 self.alias = alias
1471 self.alias = alias
1456
1472
1457 @LazyProperty
1473 @LazyProperty
1458 def raw_id(self):
1474 def raw_id(self):
1459 """
1475 """
1460 Returns raw string identifying this commit, useful for web
1476 Returns raw string identifying this commit, useful for web
1461 representation.
1477 representation.
1462 """
1478 """
1463
1479
1464 return self._empty_commit_id
1480 return self._empty_commit_id
1465
1481
1466 @LazyProperty
1482 @LazyProperty
1467 def branch(self):
1483 def branch(self):
1468 if self.alias:
1484 if self.alias:
1469 from rhodecode.lib.vcs.backends import get_backend
1485 from rhodecode.lib.vcs.backends import get_backend
1470 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1486 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1471
1487
1472 @LazyProperty
1488 @LazyProperty
1473 def short_id(self):
1489 def short_id(self):
1474 return self.raw_id[:12]
1490 return self.raw_id[:12]
1475
1491
1476 @LazyProperty
1492 @LazyProperty
1477 def id(self):
1493 def id(self):
1478 return self.raw_id
1494 return self.raw_id
1479
1495
1480 def get_file_commit(self, path):
1496 def get_file_commit(self, path):
1481 return self
1497 return self
1482
1498
1483 def get_file_content(self, path):
1499 def get_file_content(self, path):
1484 return u''
1500 return u''
1485
1501
1486 def get_file_size(self, path):
1502 def get_file_size(self, path):
1487 return 0
1503 return 0
1488
1504
1489
1505
1490 class EmptyChangesetClass(type):
1506 class EmptyChangesetClass(type):
1491
1507
1492 def __instancecheck__(self, instance):
1508 def __instancecheck__(self, instance):
1493 return isinstance(instance, EmptyCommit)
1509 return isinstance(instance, EmptyCommit)
1494
1510
1495
1511
1496 class EmptyChangeset(EmptyCommit):
1512 class EmptyChangeset(EmptyCommit):
1497
1513
1498 __metaclass__ = EmptyChangesetClass
1514 __metaclass__ = EmptyChangesetClass
1499
1515
1500 def __new__(cls, *args, **kwargs):
1516 def __new__(cls, *args, **kwargs):
1501 warnings.warn(
1517 warnings.warn(
1502 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1518 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1503 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1519 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1504
1520
1505 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1521 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1506 alias=None, revision=-1, message='', author='', date=None):
1522 alias=None, revision=-1, message='', author='', date=None):
1507 if requested_revision is not None:
1523 if requested_revision is not None:
1508 warnings.warn(
1524 warnings.warn(
1509 "Parameter requested_revision not supported anymore",
1525 "Parameter requested_revision not supported anymore",
1510 DeprecationWarning)
1526 DeprecationWarning)
1511 super(EmptyChangeset, self).__init__(
1527 super(EmptyChangeset, self).__init__(
1512 commit_id=cs, repo=repo, alias=alias, idx=revision,
1528 commit_id=cs, repo=repo, alias=alias, idx=revision,
1513 message=message, author=author, date=date)
1529 message=message, author=author, date=date)
1514
1530
1515 @property
1531 @property
1516 def revision(self):
1532 def revision(self):
1517 warnings.warn("Use idx instead", DeprecationWarning)
1533 warnings.warn("Use idx instead", DeprecationWarning)
1518 return self.idx
1534 return self.idx
1519
1535
1520 @revision.setter
1536 @revision.setter
1521 def revision(self, value):
1537 def revision(self, value):
1522 warnings.warn("Use idx instead", DeprecationWarning)
1538 warnings.warn("Use idx instead", DeprecationWarning)
1523 self.idx = value
1539 self.idx = value
1524
1540
1525
1541
1526 class EmptyRepository(BaseRepository):
1542 class EmptyRepository(BaseRepository):
1527 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1543 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1528 pass
1544 pass
1529
1545
1530 def get_diff(self, *args, **kwargs):
1546 def get_diff(self, *args, **kwargs):
1531 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1547 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1532 return GitDiff('')
1548 return GitDiff('')
1533
1549
1534
1550
1535 class CollectionGenerator(object):
1551 class CollectionGenerator(object):
1536
1552
1537 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1553 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1538 self.repo = repo
1554 self.repo = repo
1539 self.commit_ids = commit_ids
1555 self.commit_ids = commit_ids
1540 # TODO: (oliver) this isn't currently hooked up
1556 # TODO: (oliver) this isn't currently hooked up
1541 self.collection_size = None
1557 self.collection_size = None
1542 self.pre_load = pre_load
1558 self.pre_load = pre_load
1543
1559
1544 def __len__(self):
1560 def __len__(self):
1545 if self.collection_size is not None:
1561 if self.collection_size is not None:
1546 return self.collection_size
1562 return self.collection_size
1547 return self.commit_ids.__len__()
1563 return self.commit_ids.__len__()
1548
1564
1549 def __iter__(self):
1565 def __iter__(self):
1550 for commit_id in self.commit_ids:
1566 for commit_id in self.commit_ids:
1551 # TODO: johbo: Mercurial passes in commit indices or commit ids
1567 # TODO: johbo: Mercurial passes in commit indices or commit ids
1552 yield self._commit_factory(commit_id)
1568 yield self._commit_factory(commit_id)
1553
1569
1554 def _commit_factory(self, commit_id):
1570 def _commit_factory(self, commit_id):
1555 """
1571 """
1556 Allows backends to override the way commits are generated.
1572 Allows backends to override the way commits are generated.
1557 """
1573 """
1558 return self.repo.get_commit(commit_id=commit_id,
1574 return self.repo.get_commit(commit_id=commit_id,
1559 pre_load=self.pre_load)
1575 pre_load=self.pre_load)
1560
1576
1561 def __getslice__(self, i, j):
1577 def __getslice__(self, i, j):
1562 """
1578 """
1563 Returns an iterator of sliced repository
1579 Returns an iterator of sliced repository
1564 """
1580 """
1565 commit_ids = self.commit_ids[i:j]
1581 commit_ids = self.commit_ids[i:j]
1566 return self.__class__(
1582 return self.__class__(
1567 self.repo, commit_ids, pre_load=self.pre_load)
1583 self.repo, commit_ids, pre_load=self.pre_load)
1568
1584
1569 def __repr__(self):
1585 def __repr__(self):
1570 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1586 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1571
1587
1572
1588
1573 class Config(object):
1589 class Config(object):
1574 """
1590 """
1575 Represents the configuration for a repository.
1591 Represents the configuration for a repository.
1576
1592
1577 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1593 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1578 standard library. It implements only the needed subset.
1594 standard library. It implements only the needed subset.
1579 """
1595 """
1580
1596
1581 def __init__(self):
1597 def __init__(self):
1582 self._values = {}
1598 self._values = {}
1583
1599
1584 def copy(self):
1600 def copy(self):
1585 clone = Config()
1601 clone = Config()
1586 for section, values in self._values.items():
1602 for section, values in self._values.items():
1587 clone._values[section] = values.copy()
1603 clone._values[section] = values.copy()
1588 return clone
1604 return clone
1589
1605
1590 def __repr__(self):
1606 def __repr__(self):
1591 return '<Config(%s sections) at %s>' % (
1607 return '<Config(%s sections) at %s>' % (
1592 len(self._values), hex(id(self)))
1608 len(self._values), hex(id(self)))
1593
1609
1594 def items(self, section):
1610 def items(self, section):
1595 return self._values.get(section, {}).iteritems()
1611 return self._values.get(section, {}).iteritems()
1596
1612
1597 def get(self, section, option):
1613 def get(self, section, option):
1598 return self._values.get(section, {}).get(option)
1614 return self._values.get(section, {}).get(option)
1599
1615
1600 def set(self, section, option, value):
1616 def set(self, section, option, value):
1601 section_values = self._values.setdefault(section, {})
1617 section_values = self._values.setdefault(section, {})
1602 section_values[option] = value
1618 section_values[option] = value
1603
1619
1604 def clear_section(self, section):
1620 def clear_section(self, section):
1605 self._values[section] = {}
1621 self._values[section] = {}
1606
1622
1607 def serialize(self):
1623 def serialize(self):
1608 """
1624 """
1609 Creates a list of three tuples (section, key, value) representing
1625 Creates a list of three tuples (section, key, value) representing
1610 this config object.
1626 this config object.
1611 """
1627 """
1612 items = []
1628 items = []
1613 for section in self._values:
1629 for section in self._values:
1614 for option, value in self._values[section].items():
1630 for option, value in self._values[section].items():
1615 items.append(
1631 items.append(
1616 (safe_str(section), safe_str(option), safe_str(value)))
1632 (safe_str(section), safe_str(option), safe_str(value)))
1617 return items
1633 return items
1618
1634
1619
1635
1620 class Diff(object):
1636 class Diff(object):
1621 """
1637 """
1622 Represents a diff result from a repository backend.
1638 Represents a diff result from a repository backend.
1623
1639
1624 Subclasses have to provide a backend specific value for
1640 Subclasses have to provide a backend specific value for
1625 :attr:`_header_re` and :attr:`_meta_re`.
1641 :attr:`_header_re` and :attr:`_meta_re`.
1626 """
1642 """
1627 _meta_re = None
1643 _meta_re = None
1628 _header_re = None
1644 _header_re = None
1629
1645
1630 def __init__(self, raw_diff):
1646 def __init__(self, raw_diff):
1631 self.raw = raw_diff
1647 self.raw = raw_diff
1632
1648
1633 def chunks(self):
1649 def chunks(self):
1634 """
1650 """
1635 split the diff in chunks of separate --git a/file b/file chunks
1651 split the diff in chunks of separate --git a/file b/file chunks
1636 to make diffs consistent we must prepend with \n, and make sure
1652 to make diffs consistent we must prepend with \n, and make sure
1637 we can detect last chunk as this was also has special rule
1653 we can detect last chunk as this was also has special rule
1638 """
1654 """
1639
1655
1640 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1656 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1641 header = diff_parts[0]
1657 header = diff_parts[0]
1642
1658
1643 if self._meta_re:
1659 if self._meta_re:
1644 match = self._meta_re.match(header)
1660 match = self._meta_re.match(header)
1645
1661
1646 chunks = diff_parts[1:]
1662 chunks = diff_parts[1:]
1647 total_chunks = len(chunks)
1663 total_chunks = len(chunks)
1648
1664
1649 return (
1665 return (
1650 DiffChunk(chunk, self, cur_chunk == total_chunks)
1666 DiffChunk(chunk, self, cur_chunk == total_chunks)
1651 for cur_chunk, chunk in enumerate(chunks, start=1))
1667 for cur_chunk, chunk in enumerate(chunks, start=1))
1652
1668
1653
1669
1654 class DiffChunk(object):
1670 class DiffChunk(object):
1655
1671
1656 def __init__(self, chunk, diff, last_chunk):
1672 def __init__(self, chunk, diff, last_chunk):
1657 self._diff = diff
1673 self._diff = diff
1658
1674
1659 # since we split by \ndiff --git that part is lost from original diff
1675 # since we split by \ndiff --git that part is lost from original diff
1660 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1676 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1661 if not last_chunk:
1677 if not last_chunk:
1662 chunk += '\n'
1678 chunk += '\n'
1663
1679
1664 match = self._diff._header_re.match(chunk)
1680 match = self._diff._header_re.match(chunk)
1665 self.header = match.groupdict()
1681 self.header = match.groupdict()
1666 self.diff = chunk[match.end():]
1682 self.diff = chunk[match.end():]
1667 self.raw = chunk
1683 self.raw = chunk
1668
1684
1669
1685
1670 class BasePathPermissionChecker(object):
1686 class BasePathPermissionChecker(object):
1671
1687
1672 @staticmethod
1688 @staticmethod
1673 def create_from_patterns(includes, excludes):
1689 def create_from_patterns(includes, excludes):
1674 if includes and '*' in includes and not excludes:
1690 if includes and '*' in includes and not excludes:
1675 return AllPathPermissionChecker()
1691 return AllPathPermissionChecker()
1676 elif excludes and '*' in excludes:
1692 elif excludes and '*' in excludes:
1677 return NonePathPermissionChecker()
1693 return NonePathPermissionChecker()
1678 else:
1694 else:
1679 return PatternPathPermissionChecker(includes, excludes)
1695 return PatternPathPermissionChecker(includes, excludes)
1680
1696
1681 @property
1697 @property
1682 def has_full_access(self):
1698 def has_full_access(self):
1683 raise NotImplemented()
1699 raise NotImplemented()
1684
1700
1685 def has_access(self, path):
1701 def has_access(self, path):
1686 raise NotImplemented()
1702 raise NotImplemented()
1687
1703
1688
1704
1689 class AllPathPermissionChecker(BasePathPermissionChecker):
1705 class AllPathPermissionChecker(BasePathPermissionChecker):
1690
1706
1691 @property
1707 @property
1692 def has_full_access(self):
1708 def has_full_access(self):
1693 return True
1709 return True
1694
1710
1695 def has_access(self, path):
1711 def has_access(self, path):
1696 return True
1712 return True
1697
1713
1698
1714
1699 class NonePathPermissionChecker(BasePathPermissionChecker):
1715 class NonePathPermissionChecker(BasePathPermissionChecker):
1700
1716
1701 @property
1717 @property
1702 def has_full_access(self):
1718 def has_full_access(self):
1703 return False
1719 return False
1704
1720
1705 def has_access(self, path):
1721 def has_access(self, path):
1706 return False
1722 return False
1707
1723
1708
1724
1709 class PatternPathPermissionChecker(BasePathPermissionChecker):
1725 class PatternPathPermissionChecker(BasePathPermissionChecker):
1710
1726
1711 def __init__(self, includes, excludes):
1727 def __init__(self, includes, excludes):
1712 self.includes = includes
1728 self.includes = includes
1713 self.excludes = excludes
1729 self.excludes = excludes
1714 self.includes_re = [] if not includes else [
1730 self.includes_re = [] if not includes else [
1715 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1731 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1716 self.excludes_re = [] if not excludes else [
1732 self.excludes_re = [] if not excludes else [
1717 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1733 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1718
1734
1719 @property
1735 @property
1720 def has_full_access(self):
1736 def has_full_access(self):
1721 return '*' in self.includes and not self.excludes
1737 return '*' in self.includes and not self.excludes
1722
1738
1723 def has_access(self, path):
1739 def has_access(self, path):
1724 for regex in self.excludes_re:
1740 for regex in self.excludes_re:
1725 if regex.match(path):
1741 if regex.match(path):
1726 return False
1742 return False
1727 for regex in self.includes_re:
1743 for regex in self.includes_re:
1728 if regex.match(path):
1744 if regex.match(path):
1729 return True
1745 return True
1730 return False
1746 return False
@@ -1,1006 +1,1006 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
# Matches abbreviated (12 char) and full (40 char) hexadecimal commit ids.
# NOTE(review): the square brackets make this laxer than intended -- the
# first alternative has no trailing ``$``, so any string starting with 12
# hex (or '[') characters matches. The strict form would be
# ``^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$``; callers may rely on the lax
# behavior for abbreviated ids, so the pattern is left unchanged here.
SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')

log = logging.getLogger(__name__)
50
50
51
51
52 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
53 """
53 """
54 Git repository backend.
54 Git repository backend.
55 """
55 """
56 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 update_after_clone=False, with_wire=None, bare=False):
        """
        Open (or create) a git repository at ``repo_path``.

        :param repo_path: filesystem path of the repository.
        :param config: optional ``Config``; falls back to
            ``self.get_default_config()`` when not given.
        :param create: create the repository via ``_init_repo``.
        :param src_url: when creating, clone from this source url.
        :param update_after_clone: forwarded to ``clone`` during creation.
        :param with_wire: extra connection data forwarded to the remote.
        :param bare: create/treat the repository as bare.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        # Proxy object performing all git operations on the vcsserver side.
        self._remote = connection.Git(
            self.path, self.config, with_wire=with_wire)

        self._init_repo(create, src_url, update_after_clone, bare)

        # caches
        self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def bare(self):
74 def bare(self):
75 return self._remote.bare()
75 return self._remote.bare()
76
76
77 @LazyProperty
77 @LazyProperty
78 def head(self):
78 def head(self):
79 return self._remote.head()
79 return self._remote.head()
80
80
81 @LazyProperty
81 @LazyProperty
82 def commit_ids(self):
82 def commit_ids(self):
83 """
83 """
84 Returns list of commit ids, in ascending order. Being lazy
84 Returns list of commit ids, in ascending order. Being lazy
85 attribute allows external tools to inject commit ids from cache.
85 attribute allows external tools to inject commit ids from cache.
86 """
86 """
87 commit_ids = self._get_all_commit_ids()
87 commit_ids = self._get_all_commit_ids()
88 self._rebuild_cache(commit_ids)
88 self._rebuild_cache(commit_ids)
89 return commit_ids
89 return commit_ids
90
90
91 def _rebuild_cache(self, commit_ids):
91 def _rebuild_cache(self, commit_ids):
92 self._commit_ids = dict((commit_id, index)
92 self._commit_ids = dict((commit_id, index)
93 for index, commit_id in enumerate(commit_ids))
93 for index, commit_id in enumerate(commit_ids))
94
94
95 def run_git_command(self, cmd, **opts):
95 def run_git_command(self, cmd, **opts):
96 """
96 """
97 Runs given ``cmd`` as git command and returns tuple
97 Runs given ``cmd`` as git command and returns tuple
98 (stdout, stderr).
98 (stdout, stderr).
99
99
100 :param cmd: git command to be executed
100 :param cmd: git command to be executed
101 :param opts: env options to pass into Subprocess command
101 :param opts: env options to pass into Subprocess command
102 """
102 """
103 if not isinstance(cmd, list):
103 if not isinstance(cmd, list):
104 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
104 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
105
105
106 skip_stderr_log = opts.pop('skip_stderr_log', False)
106 skip_stderr_log = opts.pop('skip_stderr_log', False)
107 out, err = self._remote.run_git_command(cmd, **opts)
107 out, err = self._remote.run_git_command(cmd, **opts)
108 if err and not skip_stderr_log:
108 if err and not skip_stderr_log:
109 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
109 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
110 return out, err
110 return out, err
111
111
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that git will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Strip tool prefixes such as 'git+http://' down to the plain
        # scheme before handing off to the remote.
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
132
132
133 @staticmethod
133 @staticmethod
134 def is_valid_repository(path):
134 def is_valid_repository(path):
135 if os.path.isdir(os.path.join(path, '.git')):
135 if os.path.isdir(os.path.join(path, '.git')):
136 return True
136 return True
137 # check case of bare repository
137 # check case of bare repository
138 try:
138 try:
139 GitRepository(path)
139 GitRepository(path)
140 return True
140 return True
141 except VCSError:
141 except VCSError:
142 pass
142 pass
143 return False
143 return False
144
144
    def _init_repo(self, create, src_url=None, update_after_clone=False,
                   bare=False):
        """
        Create or validate the on-disk repository.

        When ``create`` is set, either clones from ``src_url`` or
        initializes a fresh (optionally bare) repository; otherwise
        verifies that ``self.path`` already contains a git repository.

        :raises RepositoryError: when creation collides with an existing
            path, when the path is not a git repository, or on OS errors.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        try:
            if create and src_url:
                # Validate the source first so we fail before touching disk.
                GitRepository.check_url(src_url, self.config)
                self.clone(src_url, update_after_clone, bare)
            elif create:
                os.makedirs(self.path, mode=0755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()
            else:
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
172
172
    def _get_all_commit_ids(self, filters=None):
        """
        Return all commit ids (ascending, date order) via ``git rev-list``.

        :param filters: optional dict with 'since', 'until' and
            'branch_name' keys narrowing the rev-list output.
        :return: list of commit id strings; empty for empty repositories.
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors
        try:
            self._remote.head()
        except KeyError:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # Restrict to a single branch: drop '--branches' and name
                # the branch explicitly (tags stay included).
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
209
209
210 def _get_commit_id(self, commit_id_or_idx):
210 def _get_commit_id(self, commit_id_or_idx):
211 def is_null(value):
211 def is_null(value):
212 return len(value) == commit_id_or_idx.count('0')
212 return len(value) == commit_id_or_idx.count('0')
213
213
214 if self.is_empty():
214 if self.is_empty():
215 raise EmptyRepositoryError("There are no commits yet")
215 raise EmptyRepositoryError("There are no commits yet")
216
216
217 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
217 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
218 return self.commit_ids[-1]
218 return self.commit_ids[-1]
219
219
220 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
220 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
221 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
221 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
222 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
222 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
223 try:
223 try:
224 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
224 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
225 except Exception:
225 except Exception:
226 msg = "Commit %s does not exist for %s" % (
226 msg = "Commit %s does not exist for %s" % (
227 commit_id_or_idx, self)
227 commit_id_or_idx, self)
228 raise CommitDoesNotExistError(msg)
228 raise CommitDoesNotExistError(msg)
229
229
230 elif is_bstr:
230 elif is_bstr:
231 # check full path ref, eg. refs/heads/master
231 # check full path ref, eg. refs/heads/master
232 ref_id = self._refs.get(commit_id_or_idx)
232 ref_id = self._refs.get(commit_id_or_idx)
233 if ref_id:
233 if ref_id:
234 return ref_id
234 return ref_id
235
235
236 # check branch name
236 # check branch name
237 branch_ids = self.branches.values()
237 branch_ids = self.branches.values()
238 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
238 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
239 if ref_id:
239 if ref_id:
240 return ref_id
240 return ref_id
241
241
242 # check tag name
242 # check tag name
243 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
243 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
244 if ref_id:
244 if ref_id:
245 return ref_id
245 return ref_id
246
246
247 if (not SHA_PATTERN.match(commit_id_or_idx) or
247 if (not SHA_PATTERN.match(commit_id_or_idx) or
248 commit_id_or_idx not in self.commit_ids):
248 commit_id_or_idx not in self.commit_ids):
249 msg = "Commit %s does not exist for %s" % (
249 msg = "Commit %s does not exist for %s" % (
250 commit_id_or_idx, self)
250 commit_id_or_idx, self)
251 raise CommitDoesNotExistError(msg)
251 raise CommitDoesNotExistError(msg)
252
252
253 # Ensure we return full id
253 # Ensure we return full id
254 if not SHA_PATTERN.match(str(commit_id_or_idx)):
254 if not SHA_PATTERN.match(str(commit_id_or_idx)):
255 raise CommitDoesNotExistError(
255 raise CommitDoesNotExistError(
256 "Given commit id %s not recognized" % commit_id_or_idx)
256 "Given commit id %s not recognized" % commit_id_or_idx)
257 return commit_id_or_idx
257 return commit_id_or_idx
258
258
259 def get_hook_location(self):
259 def get_hook_location(self):
260 """
260 """
261 returns absolute path to location where hooks are stored
261 returns absolute path to location where hooks are stored
262 """
262 """
263 loc = os.path.join(self.path, 'hooks')
263 loc = os.path.join(self.path, 'hooks')
264 if not self.bare:
264 if not self.bare:
265 loc = os.path.join(self.path, '.git', 'hooks')
265 loc = os.path.join(self.path, '.git', 'hooks')
266 return loc
266 return loc
267
267
268 @LazyProperty
268 @LazyProperty
269 def last_change(self):
269 def last_change(self):
270 """
270 """
271 Returns last change made on this repository as
271 Returns last change made on this repository as
272 `datetime.datetime` object.
272 `datetime.datetime` object.
273 """
273 """
274 try:
274 try:
275 return self.get_commit().date
275 return self.get_commit().date
276 except RepositoryError:
276 except RepositoryError:
277 tzoffset = makedate()[1]
277 tzoffset = makedate()[1]
278 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
278 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
279
279
280 def _get_fs_mtime(self):
280 def _get_fs_mtime(self):
281 idx_loc = '' if self.bare else '.git'
281 idx_loc = '' if self.bare else '.git'
282 # fallback to filesystem
282 # fallback to filesystem
283 in_path = os.path.join(self.path, idx_loc, "index")
283 in_path = os.path.join(self.path, idx_loc, "index")
284 he_path = os.path.join(self.path, idx_loc, "HEAD")
284 he_path = os.path.join(self.path, idx_loc, "HEAD")
285 if os.path.exists(in_path):
285 if os.path.exists(in_path):
286 return os.stat(in_path).st_mtime
286 return os.stat(in_path).st_mtime
287 else:
287 else:
288 return os.stat(he_path).st_mtime
288 return os.stat(he_path).st_mtime
289
289
290 @LazyProperty
290 @LazyProperty
291 def description(self):
291 def description(self):
292 description = self._remote.get_description()
292 description = self._remote.get_description()
293 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
293 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
294
294
295 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
295 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
296 if self.is_empty():
296 if self.is_empty():
297 return OrderedDict()
297 return OrderedDict()
298
298
299 result = []
299 result = []
300 for ref, sha in self._refs.iteritems():
300 for ref, sha in self._refs.iteritems():
301 if ref.startswith(prefix):
301 if ref.startswith(prefix):
302 ref_name = ref
302 ref_name = ref
303 if strip_prefix:
303 if strip_prefix:
304 ref_name = ref[len(prefix):]
304 ref_name = ref[len(prefix):]
305 result.append((safe_unicode(ref_name), sha))
305 result.append((safe_unicode(ref_name), sha))
306
306
307 def get_name(entry):
307 def get_name(entry):
308 return entry[0]
308 return entry[0]
309
309
310 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
310 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
311
311
312 def _get_branches(self):
312 def _get_branches(self):
313 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
313 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
314
314
315 @LazyProperty
315 @LazyProperty
316 def branches(self):
316 def branches(self):
317 return self._get_branches()
317 return self._get_branches()
318
318
319 @LazyProperty
319 @LazyProperty
320 def branches_closed(self):
320 def branches_closed(self):
321 return {}
321 return {}
322
322
323 @LazyProperty
323 @LazyProperty
324 def bookmarks(self):
324 def bookmarks(self):
325 return {}
325 return {}
326
326
327 @LazyProperty
327 @LazyProperty
328 def branches_all(self):
328 def branches_all(self):
329 all_branches = {}
329 all_branches = {}
330 all_branches.update(self.branches)
330 all_branches.update(self.branches)
331 all_branches.update(self.branches_closed)
331 all_branches.update(self.branches_closed)
332 return all_branches
332 return all_branches
333
333
334 @LazyProperty
334 @LazyProperty
335 def tags(self):
335 def tags(self):
336 return self._get_tags()
336 return self._get_tags()
337
337
338 def _get_tags(self):
338 def _get_tags(self):
339 return self._get_refs_entries(
339 return self._get_refs_entries(
340 prefix='refs/tags/', strip_prefix=True, reverse=True)
340 prefix='refs/tags/', strip_prefix=True, reverse=True)
341
341
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # Refresh the cached ref/tag mappings after mutating refs.
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit
366
366
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        tagpath = vcspath.join(
            self._remote.get_refs_path(), 'refs', 'tags', name)
        try:
            # Tags are plain loose-ref files; deleting the file removes
            # the tag.
            os.remove(tagpath)
            # Refresh the cached ref/tag mappings after mutating refs.
            self._refs = self._get_refs()
            self.tags = self._get_tags()
        except OSError as e:
            raise RepositoryError(e.strerror)
388
388
389 def _get_refs(self):
389 def _get_refs(self):
390 return self._remote.get_refs()
390 return self._remote.get_refs()
391
391
392 @LazyProperty
392 @LazyProperty
393 def _refs(self):
393 def _refs(self):
394 return self._get_refs()
394 return self._get_refs()
395
395
396 @property
396 @property
397 def _ref_tree(self):
397 def _ref_tree(self):
398 node = tree = {}
398 node = tree = {}
399 for ref, sha in self._refs.iteritems():
399 for ref, sha in self._refs.iteritems():
400 path = ref.split('/')
400 path = ref.split('/')
401 for bit in path[:-1]:
401 for bit in path[:-1]:
402 node = node.setdefault(bit, {})
402 node = node.setdefault(bit, {})
403 node[path[-1]] = sha
403 node[path[-1]] = sha
404 node = tree
404 node = tree
405 return tree
405 return tree
406
406
407 def get_remote_ref(self, ref_name):
407 def get_remote_ref(self, ref_name):
408 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
409 try:
409 try:
410 return self._refs[ref_key]
410 return self._refs[ref_key]
411 except Exception:
411 except Exception:
412 return
412 return
413
413
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: sha, ref or alias accepted by ``_get_commit_id``.
        :param commit_idx: numeric index alternative to ``commit_id``.
        :param pre_load: attributes to pre-load on the returned commit.
        :raises RepositoryError: when the id cannot be resolved.
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
433
433
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # Resolve the id-range endpoints to positions in commit_ids.
        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # Make the range inclusive of the end commit.
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #    filter_.append({'start': start_pos})
        #    filter_.append({'end': end_pos})

        if filter_:
            # Branch/date filters require re-running rev-list with the
            # filters applied; otherwise the cached full list is used.
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
517
517
518 def get_diff(
518 def get_diff(
519 self, commit1, commit2, path='', ignore_whitespace=False,
519 self, commit1, commit2, path='', ignore_whitespace=False,
520 context=3, path1=None):
520 context=3, path1=None):
521 """
521 """
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 ``commit2`` since ``commit1``.
523 ``commit2`` since ``commit1``.
524
524
525 :param commit1: Entry point from which diff is shown. Can be
525 :param commit1: Entry point from which diff is shown. Can be
526 ``self.EMPTY_COMMIT`` - in this case, patch showing all
526 ``self.EMPTY_COMMIT`` - in this case, patch showing all
527 the changes since empty state of the repository until ``commit2``
527 the changes since empty state of the repository until ``commit2``
528 :param commit2: Until which commits changes should be shown.
528 :param commit2: Until which commits changes should be shown.
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
529 :param ignore_whitespace: If set to ``True``, would not show whitespace
530 changes. Defaults to ``False``.
530 changes. Defaults to ``False``.
531 :param context: How many lines before/after changed lines should be
531 :param context: How many lines before/after changed lines should be
532 shown. Defaults to ``3``.
532 shown. Defaults to ``3``.
533 """
533 """
534 self._validate_diff_commits(commit1, commit2)
534 self._validate_diff_commits(commit1, commit2)
535 if path1 is not None and path1 != path:
535 if path1 is not None and path1 != path:
536 raise ValueError("Diff of two different paths not supported.")
536 raise ValueError("Diff of two different paths not supported.")
537
537
538 flags = [
538 flags = [
539 '-U%s' % context, '--full-index', '--binary', '-p',
539 '-U%s' % context, '--full-index', '--binary', '-p',
540 '-M', '--abbrev=40']
540 '-M', '--abbrev=40']
541 if ignore_whitespace:
541 if ignore_whitespace:
542 flags.append('-w')
542 flags.append('-w')
543
543
544 if commit1 == self.EMPTY_COMMIT:
544 if commit1 == self.EMPTY_COMMIT:
545 cmd = ['show'] + flags + [commit2.raw_id]
545 cmd = ['show'] + flags + [commit2.raw_id]
546 else:
546 else:
547 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
547 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
548
548
549 if path:
549 if path:
550 cmd.extend(['--', path])
550 cmd.extend(['--', path])
551
551
552 stdout, __ = self.run_git_command(cmd)
552 stdout, __ = self.run_git_command(cmd)
553 # If we used 'show' command, strip first few lines (until actual diff
553 # If we used 'show' command, strip first few lines (until actual diff
554 # starts)
554 # starts)
555 if commit1 == self.EMPTY_COMMIT:
555 if commit1 == self.EMPTY_COMMIT:
556 lines = stdout.splitlines()
556 lines = stdout.splitlines()
557 x = 0
557 x = 0
558 for line in lines:
558 for line in lines:
559 if line.startswith('diff'):
559 if line.startswith('diff'):
560 break
560 break
561 x += 1
561 x += 1
562 # Append new line just like 'diff' command do
562 # Append new line just like 'diff' command do
563 stdout = '\n'.join(lines[x:]) + '\n'
563 stdout = '\n'.join(lines[x:]) + '\n'
564 return GitDiff(stdout)
564 return GitDiff(stdout)
565
565
566 def strip(self, commit_id, branch_name):
566 def strip(self, commit_id, branch_name):
567 commit = self.get_commit(commit_id=commit_id)
567 commit = self.get_commit(commit_id=commit_id)
568 if commit.merge:
568 if commit.merge:
569 raise Exception('Cannot reset to merge commit')
569 raise Exception('Cannot reset to merge commit')
570
570
571 # parent is going to be the new head now
571 # parent is going to be the new head now
572 commit = commit.parents[0]
572 commit = commit.parents[0]
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574
574
575 self.commit_ids = self._get_all_commit_ids()
575 self.commit_ids = self._get_all_commit_ids()
576 self._rebuild_cache(self.commit_ids)
576 self._rebuild_cache(self.commit_ids)
577
577
578 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
578 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
579 if commit_id1 == commit_id2:
579 if commit_id1 == commit_id2:
580 return commit_id1
580 return commit_id1
581
581
582 if self != repo2:
582 if self != repo2:
583 commits = self._remote.get_missing_revs(
583 commits = self._remote.get_missing_revs(
584 commit_id1, commit_id2, repo2.path)
584 commit_id1, commit_id2, repo2.path)
585 if commits:
585 if commits:
586 commit = repo2.get_commit(commits[-1])
586 commit = repo2.get_commit(commits[-1])
587 if commit.parents:
587 if commit.parents:
588 ancestor_id = commit.parents[0].raw_id
588 ancestor_id = commit.parents[0].raw_id
589 else:
589 else:
590 ancestor_id = None
590 ancestor_id = None
591 else:
591 else:
592 # no commits from other repo, ancestor_id is the commit_id2
592 # no commits from other repo, ancestor_id is the commit_id2
593 ancestor_id = commit_id2
593 ancestor_id = commit_id2
594 else:
594 else:
595 output, __ = self.run_git_command(
595 output, __ = self.run_git_command(
596 ['merge-base', commit_id1, commit_id2])
596 ['merge-base', commit_id1, commit_id2])
597 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
597 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
598
598
599 return ancestor_id
599 return ancestor_id
600
600
601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 repo1 = self
602 repo1 = self
603 ancestor_id = None
603 ancestor_id = None
604
604
605 if commit_id1 == commit_id2:
605 if commit_id1 == commit_id2:
606 commits = []
606 commits = []
607 elif repo1 != repo2:
607 elif repo1 != repo2:
608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 repo2.path)
609 repo2.path)
610 commits = [
610 commits = [
611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 for commit_id in reversed(missing_ids)]
612 for commit_id in reversed(missing_ids)]
613 else:
613 else:
614 output, __ = repo1.run_git_command(
614 output, __ = repo1.run_git_command(
615 ['log', '--reverse', '--pretty=format: %H', '-s',
615 ['log', '--reverse', '--pretty=format: %H', '-s',
616 '%s..%s' % (commit_id1, commit_id2)])
616 '%s..%s' % (commit_id1, commit_id2)])
617 commits = [
617 commits = [
618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620
620
621 return commits
621 return commits
622
622
623 @LazyProperty
623 @LazyProperty
624 def in_memory_commit(self):
624 def in_memory_commit(self):
625 """
625 """
626 Returns ``GitInMemoryCommit`` object for this repository.
626 Returns ``GitInMemoryCommit`` object for this repository.
627 """
627 """
628 return GitInMemoryCommit(self)
628 return GitInMemoryCommit(self)
629
629
630 def clone(self, url, update_after_clone=True, bare=False):
630 def clone(self, url, update_after_clone=True, bare=False):
631 """
631 """
632 Tries to clone commits from external location.
632 Tries to clone commits from external location.
633
633
634 :param update_after_clone: If set to ``False``, git won't checkout
634 :param update_after_clone: If set to ``False``, git won't checkout
635 working directory
635 working directory
636 :param bare: If set to ``True``, repository would be cloned into
636 :param bare: If set to ``True``, repository would be cloned into
637 *bare* git repository (no working directory at all).
637 *bare* git repository (no working directory at all).
638 """
638 """
639 # init_bare and init expect empty dir created to proceed
639 # init_bare and init expect empty dir created to proceed
640 if not os.path.exists(self.path):
640 if not os.path.exists(self.path):
641 os.mkdir(self.path)
641 os.mkdir(self.path)
642
642
643 if bare:
643 if bare:
644 self._remote.init_bare()
644 self._remote.init_bare()
645 else:
645 else:
646 self._remote.init()
646 self._remote.init()
647
647
648 deferred = '^{}'
648 deferred = '^{}'
649 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
649 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
650
650
651 return self._remote.clone(
651 return self._remote.clone(
652 url, deferred, valid_refs, update_after_clone)
652 url, deferred, valid_refs, update_after_clone)
653
653
654 def pull(self, url, commit_ids=None):
654 def pull(self, url, commit_ids=None):
655 """
655 """
656 Tries to pull changes from external location. We use fetch here since
656 Tries to pull changes from external location. We use fetch here since
657 pull in get does merges and we want to be compatible with hg backend so
657 pull in get does merges and we want to be compatible with hg backend so
658 pull == fetch in this case
658 pull == fetch in this case
659 """
659 """
660 self.fetch(url, commit_ids=commit_ids)
660 self.fetch(url, commit_ids=commit_ids)
661
661
662 def fetch(self, url, commit_ids=None):
662 def fetch(self, url, commit_ids=None):
663 """
663 """
664 Tries to fetch changes from external location.
664 Tries to fetch changes from external location.
665 """
665 """
666 refs = None
666 refs = None
667
667
668 if commit_ids is not None:
668 if commit_ids is not None:
669 remote_refs = self._remote.get_remote_refs(url)
669 remote_refs = self._remote.get_remote_refs(url)
670 refs = [
670 refs = [
671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
672 self._remote.fetch(url, refs=refs)
672 self._remote.fetch(url, refs=refs)
673
673
674 def push(self, url):
674 def push(self, url):
675 refs = None
675 refs = None
676 self._remote.sync_push(url, refs=refs)
676 self._remote.sync_push(url, refs=refs)
677
677
678 def set_refs(self, ref_name, commit_id):
678 def set_refs(self, ref_name, commit_id):
679 self._remote.set_refs(ref_name, commit_id)
679 self._remote.set_refs(ref_name, commit_id)
680
680
681 def remove_ref(self, ref_name):
681 def remove_ref(self, ref_name):
682 self._remote.remove_ref(ref_name)
682 self._remote.remove_ref(ref_name)
683
683
684 def _update_server_info(self):
684 def _update_server_info(self):
685 """
685 """
686 runs gits update-server-info command in this repo instance
686 runs gits update-server-info command in this repo instance
687 """
687 """
688 self._remote.update_server_info()
688 self._remote.update_server_info()
689
689
690 def _current_branch(self):
690 def _current_branch(self):
691 """
691 """
692 Return the name of the current branch.
692 Return the name of the current branch.
693
693
694 It only works for non bare repositories (i.e. repositories with a
694 It only works for non bare repositories (i.e. repositories with a
695 working copy)
695 working copy)
696 """
696 """
697 if self.bare:
697 if self.bare:
698 raise RepositoryError('Bare git repos do not have active branches')
698 raise RepositoryError('Bare git repos do not have active branches')
699
699
700 if self.is_empty():
700 if self.is_empty():
701 return None
701 return None
702
702
703 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
703 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
704 return stdout.strip()
704 return stdout.strip()
705
705
706 def _checkout(self, branch_name, create=False, force=False):
706 def _checkout(self, branch_name, create=False, force=False):
707 """
707 """
708 Checkout a branch in the working directory.
708 Checkout a branch in the working directory.
709
709
710 It tries to create the branch if create is True, failing if the branch
710 It tries to create the branch if create is True, failing if the branch
711 already exists.
711 already exists.
712
712
713 It only works for non bare repositories (i.e. repositories with a
713 It only works for non bare repositories (i.e. repositories with a
714 working copy)
714 working copy)
715 """
715 """
716 if self.bare:
716 if self.bare:
717 raise RepositoryError('Cannot checkout branches in a bare git repo')
717 raise RepositoryError('Cannot checkout branches in a bare git repo')
718
718
719 cmd = ['checkout']
719 cmd = ['checkout']
720 if force:
720 if force:
721 cmd.append('-f')
721 cmd.append('-f')
722 if create:
722 if create:
723 cmd.append('-b')
723 cmd.append('-b')
724 cmd.append(branch_name)
724 cmd.append(branch_name)
725 self.run_git_command(cmd, fail_on_stderr=False)
725 self.run_git_command(cmd, fail_on_stderr=False)
726
726
727 def _identify(self):
727 def _identify(self):
728 """
728 """
729 Return the current state of the working directory.
729 Return the current state of the working directory.
730 """
730 """
731 if self.bare:
731 if self.bare:
732 raise RepositoryError('Bare git repos do not have active branches')
732 raise RepositoryError('Bare git repos do not have active branches')
733
733
734 if self.is_empty():
734 if self.is_empty():
735 return None
735 return None
736
736
737 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
737 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
738 return stdout.strip()
738 return stdout.strip()
739
739
740 def _local_clone(self, clone_path, branch_name, source_branch=None):
740 def _local_clone(self, clone_path, branch_name, source_branch=None):
741 """
741 """
742 Create a local clone of the current repo.
742 Create a local clone of the current repo.
743 """
743 """
744 # N.B.(skreft): the --branch option is required as otherwise the shallow
744 # N.B.(skreft): the --branch option is required as otherwise the shallow
745 # clone will only fetch the active branch.
745 # clone will only fetch the active branch.
746 cmd = ['clone', '--branch', branch_name,
746 cmd = ['clone', '--branch', branch_name,
747 self.path, os.path.abspath(clone_path)]
747 self.path, os.path.abspath(clone_path)]
748
748
749 self.run_git_command(cmd, fail_on_stderr=False)
749 self.run_git_command(cmd, fail_on_stderr=False)
750
750
751 # if we get the different source branch, make sure we also fetch it for
751 # if we get the different source branch, make sure we also fetch it for
752 # merge conditions
752 # merge conditions
753 if source_branch and source_branch != branch_name:
753 if source_branch and source_branch != branch_name:
754 # check if the ref exists.
754 # check if the ref exists.
755 shadow_repo = GitRepository(os.path.abspath(clone_path))
755 shadow_repo = GitRepository(os.path.abspath(clone_path))
756 if shadow_repo.get_remote_ref(source_branch):
756 if shadow_repo.get_remote_ref(source_branch):
757 cmd = ['fetch', self.path, source_branch]
757 cmd = ['fetch', self.path, source_branch]
758 self.run_git_command(cmd, fail_on_stderr=False)
758 self.run_git_command(cmd, fail_on_stderr=False)
759
759
760 def _local_fetch(self, repository_path, branch_name, use_origin=False):
760 def _local_fetch(self, repository_path, branch_name, use_origin=False):
761 """
761 """
762 Fetch a branch from a local repository.
762 Fetch a branch from a local repository.
763 """
763 """
764 repository_path = os.path.abspath(repository_path)
764 repository_path = os.path.abspath(repository_path)
765 if repository_path == self.path:
765 if repository_path == self.path:
766 raise ValueError('Cannot fetch from the same repository')
766 raise ValueError('Cannot fetch from the same repository')
767
767
768 if use_origin:
768 if use_origin:
769 branch_name = '+{branch}:refs/heads/{branch}'.format(
769 branch_name = '+{branch}:refs/heads/{branch}'.format(
770 branch=branch_name)
770 branch=branch_name)
771
771
772 cmd = ['fetch', '--no-tags', '--update-head-ok',
772 cmd = ['fetch', '--no-tags', '--update-head-ok',
773 repository_path, branch_name]
773 repository_path, branch_name]
774 self.run_git_command(cmd, fail_on_stderr=False)
774 self.run_git_command(cmd, fail_on_stderr=False)
775
775
776 def _local_reset(self, branch_name):
776 def _local_reset(self, branch_name):
777 branch_name = '{}'.format(branch_name)
777 branch_name = '{}'.format(branch_name)
778 cmd = ['reset', '--hard', branch_name]
778 cmd = ['reset', '--hard', branch_name]
779 self.run_git_command(cmd, fail_on_stderr=False)
779 self.run_git_command(cmd, fail_on_stderr=False)
780
780
781 def _last_fetch_heads(self):
781 def _last_fetch_heads(self):
782 """
782 """
783 Return the last fetched heads that need merging.
783 Return the last fetched heads that need merging.
784
784
785 The algorithm is defined at
785 The algorithm is defined at
786 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
786 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
787 """
787 """
788 if not self.bare:
788 if not self.bare:
789 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
789 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
790 else:
790 else:
791 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
791 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
792
792
793 heads = []
793 heads = []
794 with open(fetch_heads_path) as f:
794 with open(fetch_heads_path) as f:
795 for line in f:
795 for line in f:
796 if ' not-for-merge ' in line:
796 if ' not-for-merge ' in line:
797 continue
797 continue
798 line = re.sub('\t.*', '', line, flags=re.DOTALL)
798 line = re.sub('\t.*', '', line, flags=re.DOTALL)
799 heads.append(line)
799 heads.append(line)
800
800
801 return heads
801 return heads
802
802
803 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
803 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
804 return GitRepository(shadow_repository_path)
804 return GitRepository(shadow_repository_path)
805
805
806 def _local_pull(self, repository_path, branch_name, ff_only=True):
806 def _local_pull(self, repository_path, branch_name, ff_only=True):
807 """
807 """
808 Pull a branch from a local repository.
808 Pull a branch from a local repository.
809 """
809 """
810 if self.bare:
810 if self.bare:
811 raise RepositoryError('Cannot pull into a bare git repository')
811 raise RepositoryError('Cannot pull into a bare git repository')
812 # N.B.(skreft): The --ff-only option is to make sure this is a
812 # N.B.(skreft): The --ff-only option is to make sure this is a
813 # fast-forward (i.e., we are only pulling new changes and there are no
813 # fast-forward (i.e., we are only pulling new changes and there are no
814 # conflicts with our current branch)
814 # conflicts with our current branch)
815 # Additionally, that option needs to go before --no-tags, otherwise git
815 # Additionally, that option needs to go before --no-tags, otherwise git
816 # pull complains about it being an unknown flag.
816 # pull complains about it being an unknown flag.
817 cmd = ['pull']
817 cmd = ['pull']
818 if ff_only:
818 if ff_only:
819 cmd.append('--ff-only')
819 cmd.append('--ff-only')
820 cmd.extend(['--no-tags', repository_path, branch_name])
820 cmd.extend(['--no-tags', repository_path, branch_name])
821 self.run_git_command(cmd, fail_on_stderr=False)
821 self.run_git_command(cmd, fail_on_stderr=False)
822
822
823 def _local_merge(self, merge_message, user_name, user_email, heads):
823 def _local_merge(self, merge_message, user_name, user_email, heads):
824 """
824 """
825 Merge the given head into the checked out branch.
825 Merge the given head into the checked out branch.
826
826
827 It will force a merge commit.
827 It will force a merge commit.
828
828
829 Currently it raises an error if the repo is empty, as it is not possible
829 Currently it raises an error if the repo is empty, as it is not possible
830 to create a merge commit in an empty repo.
830 to create a merge commit in an empty repo.
831
831
832 :param merge_message: The message to use for the merge commit.
832 :param merge_message: The message to use for the merge commit.
833 :param heads: the heads to merge.
833 :param heads: the heads to merge.
834 """
834 """
835 if self.bare:
835 if self.bare:
836 raise RepositoryError('Cannot merge into a bare git repository')
836 raise RepositoryError('Cannot merge into a bare git repository')
837
837
838 if not heads:
838 if not heads:
839 return
839 return
840
840
841 if self.is_empty():
841 if self.is_empty():
842 # TODO(skreft): do somehting more robust in this case.
842 # TODO(skreft): do somehting more robust in this case.
843 raise RepositoryError(
843 raise RepositoryError(
844 'Do not know how to merge into empty repositories yet')
844 'Do not know how to merge into empty repositories yet')
845
845
846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
847 # commit message. We also specify the user who is doing the merge.
847 # commit message. We also specify the user who is doing the merge.
848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
849 '-c', 'user.email=%s' % safe_str(user_email),
849 '-c', 'user.email=%s' % safe_str(user_email),
850 'merge', '--no-ff', '-m', safe_str(merge_message)]
850 'merge', '--no-ff', '-m', safe_str(merge_message)]
851 cmd.extend(heads)
851 cmd.extend(heads)
852 try:
852 try:
853 output = self.run_git_command(cmd, fail_on_stderr=False)
853 output = self.run_git_command(cmd, fail_on_stderr=False)
854 except RepositoryError:
854 except RepositoryError:
855 # Cleanup any merge leftovers
855 # Cleanup any merge leftovers
856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
857 raise
857 raise
858
858
859 def _local_push(
859 def _local_push(
860 self, source_branch, repository_path, target_branch,
860 self, source_branch, repository_path, target_branch,
861 enable_hooks=False, rc_scm_data=None):
861 enable_hooks=False, rc_scm_data=None):
862 """
862 """
863 Push the source_branch to the given repository and target_branch.
863 Push the source_branch to the given repository and target_branch.
864
864
865 Currently it if the target_branch is not master and the target repo is
865 Currently it if the target_branch is not master and the target repo is
866 empty, the push will work, but then GitRepository won't be able to find
866 empty, the push will work, but then GitRepository won't be able to find
867 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
867 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
868 pointing to master, which does not exist).
868 pointing to master, which does not exist).
869
869
870 It does not run the hooks in the target repo.
870 It does not run the hooks in the target repo.
871 """
871 """
872 # TODO(skreft): deal with the case in which the target repo is empty,
872 # TODO(skreft): deal with the case in which the target repo is empty,
873 # and the target_branch is not master.
873 # and the target_branch is not master.
874 target_repo = GitRepository(repository_path)
874 target_repo = GitRepository(repository_path)
875 if (not target_repo.bare and
875 if (not target_repo.bare and
876 target_repo._current_branch() == target_branch):
876 target_repo._current_branch() == target_branch):
877 # Git prevents pushing to the checked out branch, so simulate it by
877 # Git prevents pushing to the checked out branch, so simulate it by
878 # pulling into the target repository.
878 # pulling into the target repository.
879 target_repo._local_pull(self.path, source_branch)
879 target_repo._local_pull(self.path, source_branch)
880 else:
880 else:
881 cmd = ['push', os.path.abspath(repository_path),
881 cmd = ['push', os.path.abspath(repository_path),
882 '%s:%s' % (source_branch, target_branch)]
882 '%s:%s' % (source_branch, target_branch)]
883 gitenv = {}
883 gitenv = {}
884 if rc_scm_data:
884 if rc_scm_data:
885 gitenv.update({'RC_SCM_DATA': rc_scm_data})
885 gitenv.update({'RC_SCM_DATA': rc_scm_data})
886
886
887 if not enable_hooks:
887 if not enable_hooks:
888 gitenv['RC_SKIP_HOOKS'] = '1'
888 gitenv['RC_SKIP_HOOKS'] = '1'
889 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
889 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
890
890
891 def _get_new_pr_branch(self, source_branch, target_branch):
891 def _get_new_pr_branch(self, source_branch, target_branch):
892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
893 pr_branches = []
893 pr_branches = []
894 for branch in self.branches:
894 for branch in self.branches:
895 if branch.startswith(prefix):
895 if branch.startswith(prefix):
896 pr_branches.append(int(branch[len(prefix):]))
896 pr_branches.append(int(branch[len(prefix):]))
897
897
898 if not pr_branches:
898 if not pr_branches:
899 branch_id = 0
899 branch_id = 0
900 else:
900 else:
901 branch_id = max(pr_branches) + 1
901 branch_id = max(pr_branches) + 1
902
902
903 return '%s%d' % (prefix, branch_id)
903 return '%s%d' % (prefix, branch_id)
904
904
905 def _merge_repo(self, shadow_repository_path, target_ref,
905 def _maybe_prepare_merge_workspace(
906 self, repo_id, workspace_id, target_ref, source_ref):
907 shadow_repository_path = self._get_shadow_repository_path(
908 repo_id, workspace_id)
909 if not os.path.exists(shadow_repository_path):
910 self._local_clone(
911 shadow_repository_path, target_ref.name, source_ref.name)
912 log.debug(
913 'Prepared shadow repository in %s', shadow_repository_path)
914
915 return shadow_repository_path
916
917 def _merge_repo(self, repo_id, workspace_id, target_ref,
906 source_repo, source_ref, merge_message,
918 source_repo, source_ref, merge_message,
907 merger_name, merger_email, dry_run=False,
919 merger_name, merger_email, dry_run=False,
908 use_rebase=False, close_branch=False):
920 use_rebase=False, close_branch=False):
909 if target_ref.commit_id != self.branches[target_ref.name]:
921 if target_ref.commit_id != self.branches[target_ref.name]:
910 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
911 target_ref.commit_id, self.branches[target_ref.name])
923 target_ref.commit_id, self.branches[target_ref.name])
912 return MergeResponse(
924 return MergeResponse(
913 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
914
926
915 shadow_repo = GitRepository(shadow_repository_path)
927 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 repo_id, workspace_id, target_ref, source_ref)
929 shadow_repo = self._get_shadow_instance(shadow_repository_path)
930
916 # checkout source, if it's different. Otherwise we could not
931 # checkout source, if it's different. Otherwise we could not
917 # fetch proper commits for merge testing
932 # fetch proper commits for merge testing
918 if source_ref.name != target_ref.name:
933 if source_ref.name != target_ref.name:
919 if shadow_repo.get_remote_ref(source_ref.name):
934 if shadow_repo.get_remote_ref(source_ref.name):
920 shadow_repo._checkout(source_ref.name, force=True)
935 shadow_repo._checkout(source_ref.name, force=True)
921
936
922 # checkout target, and fetch changes
937 # checkout target, and fetch changes
923 shadow_repo._checkout(target_ref.name, force=True)
938 shadow_repo._checkout(target_ref.name, force=True)
924
939
925 # fetch/reset pull the target, in case it is changed
940 # fetch/reset pull the target, in case it is changed
926 # this handles even force changes
941 # this handles even force changes
927 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
942 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
928 shadow_repo._local_reset(target_ref.name)
943 shadow_repo._local_reset(target_ref.name)
929
944
930 # Need to reload repo to invalidate the cache, or otherwise we cannot
945 # Need to reload repo to invalidate the cache, or otherwise we cannot
931 # retrieve the last target commit.
946 # retrieve the last target commit.
932 shadow_repo = GitRepository(shadow_repository_path)
947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
933 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
948 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
934 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
949 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
935 target_ref, target_ref.commit_id,
950 target_ref, target_ref.commit_id,
936 shadow_repo.branches[target_ref.name])
951 shadow_repo.branches[target_ref.name])
937 return MergeResponse(
952 return MergeResponse(
938 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
953 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
939
954
940 # calculate new branch
955 # calculate new branch
941 pr_branch = shadow_repo._get_new_pr_branch(
956 pr_branch = shadow_repo._get_new_pr_branch(
942 source_ref.name, target_ref.name)
957 source_ref.name, target_ref.name)
943 log.debug('using pull-request merge branch: `%s`', pr_branch)
958 log.debug('using pull-request merge branch: `%s`', pr_branch)
944 # checkout to temp branch, and fetch changes
959 # checkout to temp branch, and fetch changes
945 shadow_repo._checkout(pr_branch, create=True)
960 shadow_repo._checkout(pr_branch, create=True)
946 try:
961 try:
947 shadow_repo._local_fetch(source_repo.path, source_ref.name)
962 shadow_repo._local_fetch(source_repo.path, source_ref.name)
948 except RepositoryError:
963 except RepositoryError:
949 log.exception('Failure when doing local fetch on git shadow repo')
964 log.exception('Failure when doing local fetch on git shadow repo')
950 return MergeResponse(
965 return MergeResponse(
951 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
966 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
952
967
953 merge_ref = None
968 merge_ref = None
954 merge_failure_reason = MergeFailureReason.NONE
969 merge_failure_reason = MergeFailureReason.NONE
955 try:
970 try:
956 shadow_repo._local_merge(merge_message, merger_name, merger_email,
971 shadow_repo._local_merge(merge_message, merger_name, merger_email,
957 [source_ref.commit_id])
972 [source_ref.commit_id])
958 merge_possible = True
973 merge_possible = True
959
974
960 # Need to reload repo to invalidate the cache, or otherwise we
975 # Need to reload repo to invalidate the cache, or otherwise we
961 # cannot retrieve the merge commit.
976 # cannot retrieve the merge commit.
962 shadow_repo = GitRepository(shadow_repository_path)
977 shadow_repo = GitRepository(shadow_repository_path)
963 merge_commit_id = shadow_repo.branches[pr_branch]
978 merge_commit_id = shadow_repo.branches[pr_branch]
964
979
965 # Set a reference pointing to the merge commit. This reference may
980 # Set a reference pointing to the merge commit. This reference may
966 # be used to easily identify the last successful merge commit in
981 # be used to easily identify the last successful merge commit in
967 # the shadow repository.
982 # the shadow repository.
968 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
983 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
969 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
984 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
970 except RepositoryError:
985 except RepositoryError:
971 log.exception('Failure when doing local merge on git shadow repo')
986 log.exception('Failure when doing local merge on git shadow repo')
972 merge_possible = False
987 merge_possible = False
973 merge_failure_reason = MergeFailureReason.MERGE_FAILED
988 merge_failure_reason = MergeFailureReason.MERGE_FAILED
974
989
975 if merge_possible and not dry_run:
990 if merge_possible and not dry_run:
976 try:
991 try:
977 shadow_repo._local_push(
992 shadow_repo._local_push(
978 pr_branch, self.path, target_ref.name, enable_hooks=True,
993 pr_branch, self.path, target_ref.name, enable_hooks=True,
979 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
994 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
980 merge_succeeded = True
995 merge_succeeded = True
981 except RepositoryError:
996 except RepositoryError:
982 log.exception(
997 log.exception(
983 'Failure when doing local push on git shadow repo')
998 'Failure when doing local push on git shadow repo')
984 merge_succeeded = False
999 merge_succeeded = False
985 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1000 merge_failure_reason = MergeFailureReason.PUSH_FAILED
986 else:
1001 else:
987 merge_succeeded = False
1002 merge_succeeded = False
988
1003
989 return MergeResponse(
1004 return MergeResponse(
990 merge_possible, merge_succeeded, merge_ref,
1005 merge_possible, merge_succeeded, merge_ref,
991 merge_failure_reason)
1006 merge_failure_reason)
992
993 def _get_shadow_repository_path(self, workspace_id):
994 # The name of the shadow repository must start with '.', so it is
995 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
996 return os.path.join(
997 os.path.dirname(self.path),
998 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
999
1000 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
1001 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
1002 if not os.path.exists(shadow_repository_path):
1003 self._local_clone(
1004 shadow_repository_path, target_ref.name, source_ref.name)
1005
1006 return shadow_repository_path
@@ -1,918 +1,915 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 update_after_clone=False, with_wire=None):
60 update_after_clone=False, with_wire=None):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param update_after_clone=False: sets update of working copy after
70 :param update_after_clone=False: sets update of working copy after
71 making a clone
71 making a clone
72 """
72 """
73
73
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 # mercurial since 4.4.X requires certain configuration to be present
75 # mercurial since 4.4.X requires certain configuration to be present
76 # because sometimes we init the repos with config we need to meet
76 # because sometimes we init the repos with config we need to meet
77 # special requirements
77 # special requirements
78 self.config = config if config else self.get_default_config(
78 self.config = config if config else self.get_default_config(
79 default=[('extensions', 'largefiles', '1')])
79 default=[('extensions', 'largefiles', '1')])
80
80
81 self._remote = connection.Hg(
81 self._remote = connection.Hg(
82 self.path, self.config, with_wire=with_wire)
82 self.path, self.config, with_wire=with_wire)
83
83
84 self._init_repo(create, src_url, update_after_clone)
84 self._init_repo(create, src_url, update_after_clone)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def commit_ids(self):
90 def commit_ids(self):
91 """
91 """
92 Returns list of commit ids, in ascending order. Being lazy
92 Returns list of commit ids, in ascending order. Being lazy
93 attribute allows external tools to inject shas from cache.
93 attribute allows external tools to inject shas from cache.
94 """
94 """
95 commit_ids = self._get_all_commit_ids()
95 commit_ids = self._get_all_commit_ids()
96 self._rebuild_cache(commit_ids)
96 self._rebuild_cache(commit_ids)
97 return commit_ids
97 return commit_ids
98
98
99 def _rebuild_cache(self, commit_ids):
99 def _rebuild_cache(self, commit_ids):
100 self._commit_ids = dict((commit_id, index)
100 self._commit_ids = dict((commit_id, index)
101 for index, commit_id in enumerate(commit_ids))
101 for index, commit_id in enumerate(commit_ids))
102
102
103 @LazyProperty
103 @LazyProperty
104 def branches(self):
104 def branches(self):
105 return self._get_branches()
105 return self._get_branches()
106
106
107 @LazyProperty
107 @LazyProperty
108 def branches_closed(self):
108 def branches_closed(self):
109 return self._get_branches(active=False, closed=True)
109 return self._get_branches(active=False, closed=True)
110
110
111 @LazyProperty
111 @LazyProperty
112 def branches_all(self):
112 def branches_all(self):
113 all_branches = {}
113 all_branches = {}
114 all_branches.update(self.branches)
114 all_branches.update(self.branches)
115 all_branches.update(self.branches_closed)
115 all_branches.update(self.branches_closed)
116 return all_branches
116 return all_branches
117
117
118 def _get_branches(self, active=True, closed=False):
118 def _get_branches(self, active=True, closed=False):
119 """
119 """
120 Gets branches for this repository
120 Gets branches for this repository
121 Returns only not closed active branches by default
121 Returns only not closed active branches by default
122
122
123 :param active: return also active branches
123 :param active: return also active branches
124 :param closed: return also closed branches
124 :param closed: return also closed branches
125
125
126 """
126 """
127 if self.is_empty():
127 if self.is_empty():
128 return {}
128 return {}
129
129
130 def get_name(ctx):
130 def get_name(ctx):
131 return ctx[0]
131 return ctx[0]
132
132
133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 self._remote.branches(active, closed).items()]
134 self._remote.branches(active, closed).items()]
135
135
136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137
137
138 @LazyProperty
138 @LazyProperty
139 def tags(self):
139 def tags(self):
140 """
140 """
141 Gets tags for this repository
141 Gets tags for this repository
142 """
142 """
143 return self._get_tags()
143 return self._get_tags()
144
144
145 def _get_tags(self):
145 def _get_tags(self):
146 if self.is_empty():
146 if self.is_empty():
147 return {}
147 return {}
148
148
149 def get_name(ctx):
149 def get_name(ctx):
150 return ctx[0]
150 return ctx[0]
151
151
152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 self._remote.tags().items()]
153 self._remote.tags().items()]
154
154
155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156
156
157 def tag(self, name, user, commit_id=None, message=None, date=None,
157 def tag(self, name, user, commit_id=None, message=None, date=None,
158 **kwargs):
158 **kwargs):
159 """
159 """
160 Creates and returns a tag for the given ``commit_id``.
160 Creates and returns a tag for the given ``commit_id``.
161
161
162 :param name: name for new tag
162 :param name: name for new tag
163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 :param commit_id: commit id for which new tag would be created
164 :param commit_id: commit id for which new tag would be created
165 :param message: message of the tag's commit
165 :param message: message of the tag's commit
166 :param date: date of tag's commit
166 :param date: date of tag's commit
167
167
168 :raises TagAlreadyExistError: if tag with same name already exists
168 :raises TagAlreadyExistError: if tag with same name already exists
169 """
169 """
170 if name in self.tags:
170 if name in self.tags:
171 raise TagAlreadyExistError("Tag %s already exists" % name)
171 raise TagAlreadyExistError("Tag %s already exists" % name)
172 commit = self.get_commit(commit_id=commit_id)
172 commit = self.get_commit(commit_id=commit_id)
173 local = kwargs.setdefault('local', False)
173 local = kwargs.setdefault('local', False)
174
174
175 if message is None:
175 if message is None:
176 message = "Added tag %s for commit %s" % (name, commit.short_id)
176 message = "Added tag %s for commit %s" % (name, commit.short_id)
177
177
178 date, tz = date_to_timestamp_plus_offset(date)
178 date, tz = date_to_timestamp_plus_offset(date)
179
179
180 self._remote.tag(
180 self._remote.tag(
181 name, commit.raw_id, message, local, user, date, tz)
181 name, commit.raw_id, message, local, user, date, tz)
182 self._remote.invalidate_vcs_cache()
182 self._remote.invalidate_vcs_cache()
183
183
184 # Reinitialize tags
184 # Reinitialize tags
185 self.tags = self._get_tags()
185 self.tags = self._get_tags()
186 tag_id = self.tags[name]
186 tag_id = self.tags[name]
187
187
188 return self.get_commit(commit_id=tag_id)
188 return self.get_commit(commit_id=tag_id)
189
189
190 def remove_tag(self, name, user, message=None, date=None):
190 def remove_tag(self, name, user, message=None, date=None):
191 """
191 """
192 Removes tag with the given `name`.
192 Removes tag with the given `name`.
193
193
194 :param name: name of the tag to be removed
194 :param name: name of the tag to be removed
195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 :param message: message of the tag's removal commit
196 :param message: message of the tag's removal commit
197 :param date: date of tag's removal commit
197 :param date: date of tag's removal commit
198
198
199 :raises TagDoesNotExistError: if tag with given name does not exists
199 :raises TagDoesNotExistError: if tag with given name does not exists
200 """
200 """
201 if name not in self.tags:
201 if name not in self.tags:
202 raise TagDoesNotExistError("Tag %s does not exist" % name)
202 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 if message is None:
203 if message is None:
204 message = "Removed tag %s" % name
204 message = "Removed tag %s" % name
205 local = False
205 local = False
206
206
207 date, tz = date_to_timestamp_plus_offset(date)
207 date, tz = date_to_timestamp_plus_offset(date)
208
208
209 self._remote.tag(name, nullid, message, local, user, date, tz)
209 self._remote.tag(name, nullid, message, local, user, date, tz)
210 self._remote.invalidate_vcs_cache()
210 self._remote.invalidate_vcs_cache()
211 self.tags = self._get_tags()
211 self.tags = self._get_tags()
212
212
213 @LazyProperty
213 @LazyProperty
214 def bookmarks(self):
214 def bookmarks(self):
215 """
215 """
216 Gets bookmarks for this repository
216 Gets bookmarks for this repository
217 """
217 """
218 return self._get_bookmarks()
218 return self._get_bookmarks()
219
219
220 def _get_bookmarks(self):
220 def _get_bookmarks(self):
221 if self.is_empty():
221 if self.is_empty():
222 return {}
222 return {}
223
223
224 def get_name(ctx):
224 def get_name(ctx):
225 return ctx[0]
225 return ctx[0]
226
226
227 _bookmarks = [
227 _bookmarks = [
228 (safe_unicode(n), hexlify(h)) for n, h in
228 (safe_unicode(n), hexlify(h)) for n, h in
229 self._remote.bookmarks().items()]
229 self._remote.bookmarks().items()]
230
230
231 return OrderedDict(sorted(_bookmarks, key=get_name))
231 return OrderedDict(sorted(_bookmarks, key=get_name))
232
232
233 def _get_all_commit_ids(self):
233 def _get_all_commit_ids(self):
234 return self._remote.get_all_commit_ids('visible')
234 return self._remote.get_all_commit_ids('visible')
235
235
236 def get_diff(
236 def get_diff(
237 self, commit1, commit2, path='', ignore_whitespace=False,
237 self, commit1, commit2, path='', ignore_whitespace=False,
238 context=3, path1=None):
238 context=3, path1=None):
239 """
239 """
240 Returns (git like) *diff*, as plain text. Shows changes introduced by
240 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 `commit2` since `commit1`.
241 `commit2` since `commit1`.
242
242
243 :param commit1: Entry point from which diff is shown. Can be
243 :param commit1: Entry point from which diff is shown. Can be
244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 the changes since empty state of the repository until `commit2`
245 the changes since empty state of the repository until `commit2`
246 :param commit2: Until which commit changes should be shown.
246 :param commit2: Until which commit changes should be shown.
247 :param ignore_whitespace: If set to ``True``, would not show whitespace
247 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 changes. Defaults to ``False``.
248 changes. Defaults to ``False``.
249 :param context: How many lines before/after changed lines should be
249 :param context: How many lines before/after changed lines should be
250 shown. Defaults to ``3``.
250 shown. Defaults to ``3``.
251 """
251 """
252 self._validate_diff_commits(commit1, commit2)
252 self._validate_diff_commits(commit1, commit2)
253 if path1 is not None and path1 != path:
253 if path1 is not None and path1 != path:
254 raise ValueError("Diff of two different paths not supported.")
254 raise ValueError("Diff of two different paths not supported.")
255
255
256 if path:
256 if path:
257 file_filter = [self.path, path]
257 file_filter = [self.path, path]
258 else:
258 else:
259 file_filter = None
259 file_filter = None
260
260
261 diff = self._remote.diff(
261 diff = self._remote.diff(
262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 opt_git=True, opt_ignorews=ignore_whitespace,
263 opt_git=True, opt_ignorews=ignore_whitespace,
264 context=context)
264 context=context)
265 return MercurialDiff(diff)
265 return MercurialDiff(diff)
266
266
267 def strip(self, commit_id, branch=None):
267 def strip(self, commit_id, branch=None):
268 self._remote.strip(commit_id, update=False, backup="none")
268 self._remote.strip(commit_id, update=False, backup="none")
269
269
270 self._remote.invalidate_vcs_cache()
270 self._remote.invalidate_vcs_cache()
271 self.commit_ids = self._get_all_commit_ids()
271 self.commit_ids = self._get_all_commit_ids()
272 self._rebuild_cache(self.commit_ids)
272 self._rebuild_cache(self.commit_ids)
273
273
274 def verify(self):
274 def verify(self):
275 verify = self._remote.verify()
275 verify = self._remote.verify()
276
276
277 self._remote.invalidate_vcs_cache()
277 self._remote.invalidate_vcs_cache()
278 return verify
278 return verify
279
279
280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 if commit_id1 == commit_id2:
281 if commit_id1 == commit_id2:
282 return commit_id1
282 return commit_id1
283
283
284 ancestors = self._remote.revs_from_revspec(
284 ancestors = self._remote.revs_from_revspec(
285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 other_path=repo2.path)
286 other_path=repo2.path)
287 return repo2[ancestors[0]].raw_id if ancestors else None
287 return repo2[ancestors[0]].raw_id if ancestors else None
288
288
289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 if commit_id1 == commit_id2:
290 if commit_id1 == commit_id2:
291 commits = []
291 commits = []
292 else:
292 else:
293 if merge:
293 if merge:
294 indexes = self._remote.revs_from_revspec(
294 indexes = self._remote.revs_from_revspec(
295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 else:
297 else:
298 indexes = self._remote.revs_from_revspec(
298 indexes = self._remote.revs_from_revspec(
299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 commit_id1, other_path=repo2.path)
300 commit_id1, other_path=repo2.path)
301
301
302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 for idx in indexes]
303 for idx in indexes]
304
304
305 return commits
305 return commits
306
306
307 @staticmethod
307 @staticmethod
308 def check_url(url, config):
308 def check_url(url, config):
309 """
309 """
310 Function will check given url and try to verify if it's a valid
310 Function will check given url and try to verify if it's a valid
311 link. Sometimes it may happened that mercurial will issue basic
311 link. Sometimes it may happened that mercurial will issue basic
312 auth request that can cause whole API to hang when used from python
312 auth request that can cause whole API to hang when used from python
313 or other external calls.
313 or other external calls.
314
314
315 On failures it'll raise urllib2.HTTPError, exception is also thrown
315 On failures it'll raise urllib2.HTTPError, exception is also thrown
316 when the return code is non 200
316 when the return code is non 200
317 """
317 """
318 # check first if it's not an local url
318 # check first if it's not an local url
319 if os.path.isdir(url) or url.startswith('file:'):
319 if os.path.isdir(url) or url.startswith('file:'):
320 return True
320 return True
321
321
322 # Request the _remote to verify the url
322 # Request the _remote to verify the url
323 return connection.Hg.check_url(url, config.serialize())
323 return connection.Hg.check_url(url, config.serialize())
324
324
325 @staticmethod
325 @staticmethod
326 def is_valid_repository(path):
326 def is_valid_repository(path):
327 return os.path.isdir(os.path.join(path, '.hg'))
327 return os.path.isdir(os.path.join(path, '.hg'))
328
328
329 def _init_repo(self, create, src_url=None, update_after_clone=False):
329 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 """
330 """
331 Function will check for mercurial repository in given path. If there
331 Function will check for mercurial repository in given path. If there
332 is no repository in that path it will raise an exception unless
332 is no repository in that path it will raise an exception unless
333 `create` parameter is set to True - in that case repository would
333 `create` parameter is set to True - in that case repository would
334 be created.
334 be created.
335
335
336 If `src_url` is given, would try to clone repository from the
336 If `src_url` is given, would try to clone repository from the
337 location at given clone_point. Additionally it'll make update to
337 location at given clone_point. Additionally it'll make update to
338 working copy accordingly to `update_after_clone` flag.
338 working copy accordingly to `update_after_clone` flag.
339 """
339 """
340 if create and os.path.exists(self.path):
340 if create and os.path.exists(self.path):
341 raise RepositoryError(
341 raise RepositoryError(
342 "Cannot create repository at %s, location already exist"
342 "Cannot create repository at %s, location already exist"
343 % self.path)
343 % self.path)
344
344
345 if src_url:
345 if src_url:
346 url = str(self._get_url(src_url))
346 url = str(self._get_url(src_url))
347 MercurialRepository.check_url(url, self.config)
347 MercurialRepository.check_url(url, self.config)
348
348
349 self._remote.clone(url, self.path, update_after_clone)
349 self._remote.clone(url, self.path, update_after_clone)
350
350
351 # Don't try to create if we've already cloned repo
351 # Don't try to create if we've already cloned repo
352 create = False
352 create = False
353
353
354 if create:
354 if create:
355 os.makedirs(self.path, mode=0755)
355 os.makedirs(self.path, mode=0755)
356
356
357 self._remote.localrepository(create)
357 self._remote.localrepository(create)
358
358
359 @LazyProperty
359 @LazyProperty
360 def in_memory_commit(self):
360 def in_memory_commit(self):
361 return MercurialInMemoryCommit(self)
361 return MercurialInMemoryCommit(self)
362
362
363 @LazyProperty
363 @LazyProperty
364 def description(self):
364 def description(self):
365 description = self._remote.get_config_value(
365 description = self._remote.get_config_value(
366 'web', 'description', untrusted=True)
366 'web', 'description', untrusted=True)
367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368
368
369 @LazyProperty
369 @LazyProperty
370 def contact(self):
370 def contact(self):
371 contact = (
371 contact = (
372 self._remote.get_config_value("web", "contact") or
372 self._remote.get_config_value("web", "contact") or
373 self._remote.get_config_value("ui", "username"))
373 self._remote.get_config_value("ui", "username"))
374 return safe_unicode(contact or self.DEFAULT_CONTACT)
374 return safe_unicode(contact or self.DEFAULT_CONTACT)
375
375
376 @LazyProperty
376 @LazyProperty
377 def last_change(self):
377 def last_change(self):
378 """
378 """
379 Returns last change made on this repository as
379 Returns last change made on this repository as
380 `datetime.datetime` object.
380 `datetime.datetime` object.
381 """
381 """
382 try:
382 try:
383 return self.get_commit().date
383 return self.get_commit().date
384 except RepositoryError:
384 except RepositoryError:
385 tzoffset = makedate()[1]
385 tzoffset = makedate()[1]
386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387
387
388 def _get_fs_mtime(self):
388 def _get_fs_mtime(self):
389 # fallback to filesystem
389 # fallback to filesystem
390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 st_path = os.path.join(self.path, '.hg', "store")
391 st_path = os.path.join(self.path, '.hg', "store")
392 if os.path.exists(cl_path):
392 if os.path.exists(cl_path):
393 return os.stat(cl_path).st_mtime
393 return os.stat(cl_path).st_mtime
394 else:
394 else:
395 return os.stat(st_path).st_mtime
395 return os.stat(st_path).st_mtime
396
396
397 def _get_url(self, url):
397 def _get_url(self, url):
398 """
398 """
399 Returns normalized url. If schema is not given, would fall
399 Returns normalized url. If schema is not given, would fall
400 to filesystem
400 to filesystem
401 (``file:///``) schema.
401 (``file:///``) schema.
402 """
402 """
403 url = url.encode('utf8')
403 url = url.encode('utf8')
404 if url != 'default' and '://' not in url:
404 if url != 'default' and '://' not in url:
405 url = "file:" + urllib.pathname2url(url)
405 url = "file:" + urllib.pathname2url(url)
406 return url
406 return url
407
407
408 def get_hook_location(self):
408 def get_hook_location(self):
409 """
409 """
410 returns absolute path to location where hooks are stored
410 returns absolute path to location where hooks are stored
411 """
411 """
412 return os.path.join(self.path, '.hg', '.hgrc')
412 return os.path.join(self.path, '.hg', '.hgrc')
413
413
414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 """
415 """
416 Returns ``MercurialCommit`` object representing repository's
416 Returns ``MercurialCommit`` object representing repository's
417 commit at the given `commit_id` or `commit_idx`.
417 commit at the given `commit_id` or `commit_idx`.
418 """
418 """
419 if self.is_empty():
419 if self.is_empty():
420 raise EmptyRepositoryError("There are no commits yet")
420 raise EmptyRepositoryError("There are no commits yet")
421
421
422 if commit_id is not None:
422 if commit_id is not None:
423 self._validate_commit_id(commit_id)
423 self._validate_commit_id(commit_id)
424 try:
424 try:
425 idx = self._commit_ids[commit_id]
425 idx = self._commit_ids[commit_id]
426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
427 except KeyError:
428 pass
428 pass
429 elif commit_idx is not None:
429 elif commit_idx is not None:
430 self._validate_commit_idx(commit_idx)
430 self._validate_commit_idx(commit_idx)
431 try:
431 try:
432 id_ = self.commit_ids[commit_idx]
432 id_ = self.commit_ids[commit_idx]
433 if commit_idx < 0:
433 if commit_idx < 0:
434 commit_idx += len(self.commit_ids)
434 commit_idx += len(self.commit_ids)
435 return MercurialCommit(
435 return MercurialCommit(
436 self, id_, commit_idx, pre_load=pre_load)
436 self, id_, commit_idx, pre_load=pre_load)
437 except IndexError:
437 except IndexError:
438 commit_id = commit_idx
438 commit_id = commit_idx
439 else:
439 else:
440 commit_id = "tip"
440 commit_id = "tip"
441
441
442 if isinstance(commit_id, unicode):
442 if isinstance(commit_id, unicode):
443 commit_id = safe_str(commit_id)
443 commit_id = safe_str(commit_id)
444
444
445 try:
445 try:
446 raw_id, idx = self._remote.lookup(commit_id, both=True)
446 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 except CommitDoesNotExistError:
447 except CommitDoesNotExistError:
448 msg = "Commit %s does not exist for %s" % (
448 msg = "Commit %s does not exist for %s" % (
449 commit_id, self)
449 commit_id, self)
450 raise CommitDoesNotExistError(msg)
450 raise CommitDoesNotExistError(msg)
451
451
452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453
453
454 def get_commits(
454 def get_commits(
455 self, start_id=None, end_id=None, start_date=None, end_date=None,
455 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 branch_name=None, show_hidden=False, pre_load=None):
456 branch_name=None, show_hidden=False, pre_load=None):
457 """
457 """
458 Returns generator of ``MercurialCommit`` objects from start to end
458 Returns generator of ``MercurialCommit`` objects from start to end
459 (both are inclusive)
459 (both are inclusive)
460
460
461 :param start_id: None, str(commit_id)
461 :param start_id: None, str(commit_id)
462 :param end_id: None, str(commit_id)
462 :param end_id: None, str(commit_id)
463 :param start_date: if specified, commits with commit date less than
463 :param start_date: if specified, commits with commit date less than
464 ``start_date`` would be filtered out from returned set
464 ``start_date`` would be filtered out from returned set
465 :param end_date: if specified, commits with commit date greater than
465 :param end_date: if specified, commits with commit date greater than
466 ``end_date`` would be filtered out from returned set
466 ``end_date`` would be filtered out from returned set
467 :param branch_name: if specified, commits not reachable from given
467 :param branch_name: if specified, commits not reachable from given
468 branch would be filtered out from returned set
468 branch would be filtered out from returned set
469 :param show_hidden: Show hidden commits such as obsolete or hidden from
469 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 Mercurial evolve
470 Mercurial evolve
471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
472 exist.
472 exist.
473 :raise CommitDoesNotExistError: If commit for given ``start`` or
473 :raise CommitDoesNotExistError: If commit for given ``start`` or
474 ``end`` could not be found.
474 ``end`` could not be found.
475 """
475 """
476 # actually we should check now if it's not an empty repo
476 # actually we should check now if it's not an empty repo
477 branch_ancestors = False
477 branch_ancestors = False
478 if self.is_empty():
478 if self.is_empty():
479 raise EmptyRepositoryError("There are no commits yet")
479 raise EmptyRepositoryError("There are no commits yet")
480 self._validate_branch_name(branch_name)
480 self._validate_branch_name(branch_name)
481
481
482 if start_id is not None:
482 if start_id is not None:
483 self._validate_commit_id(start_id)
483 self._validate_commit_id(start_id)
484 c_start = self.get_commit(commit_id=start_id)
484 c_start = self.get_commit(commit_id=start_id)
485 start_pos = self._commit_ids[c_start.raw_id]
485 start_pos = self._commit_ids[c_start.raw_id]
486 else:
486 else:
487 start_pos = None
487 start_pos = None
488
488
489 if end_id is not None:
489 if end_id is not None:
490 self._validate_commit_id(end_id)
490 self._validate_commit_id(end_id)
491 c_end = self.get_commit(commit_id=end_id)
491 c_end = self.get_commit(commit_id=end_id)
492 end_pos = max(0, self._commit_ids[c_end.raw_id])
492 end_pos = max(0, self._commit_ids[c_end.raw_id])
493 else:
493 else:
494 end_pos = None
494 end_pos = None
495
495
496 if None not in [start_id, end_id] and start_pos > end_pos:
496 if None not in [start_id, end_id] and start_pos > end_pos:
497 raise RepositoryError(
497 raise RepositoryError(
498 "Start commit '%s' cannot be after end commit '%s'" %
498 "Start commit '%s' cannot be after end commit '%s'" %
499 (start_id, end_id))
499 (start_id, end_id))
500
500
501 if end_pos is not None:
501 if end_pos is not None:
502 end_pos += 1
502 end_pos += 1
503
503
504 commit_filter = []
504 commit_filter = []
505
505
506 if branch_name and not branch_ancestors:
506 if branch_name and not branch_ancestors:
507 commit_filter.append('branch("%s")' % (branch_name,))
507 commit_filter.append('branch("%s")' % (branch_name,))
508 elif branch_name and branch_ancestors:
508 elif branch_name and branch_ancestors:
509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
510
510
511 if start_date and not end_date:
511 if start_date and not end_date:
512 commit_filter.append('date(">%s")' % (start_date,))
512 commit_filter.append('date(">%s")' % (start_date,))
513 if end_date and not start_date:
513 if end_date and not start_date:
514 commit_filter.append('date("<%s")' % (end_date,))
514 commit_filter.append('date("<%s")' % (end_date,))
515 if start_date and end_date:
515 if start_date and end_date:
516 commit_filter.append(
516 commit_filter.append(
517 'date(">%s") and date("<%s")' % (start_date, end_date))
517 'date(">%s") and date("<%s")' % (start_date, end_date))
518
518
519 if not show_hidden:
519 if not show_hidden:
520 commit_filter.append('not obsolete()')
520 commit_filter.append('not obsolete()')
521 commit_filter.append('not hidden()')
521 commit_filter.append('not hidden()')
522
522
523 # TODO: johbo: Figure out a simpler way for this solution
523 # TODO: johbo: Figure out a simpler way for this solution
524 collection_generator = CollectionGenerator
524 collection_generator = CollectionGenerator
525 if commit_filter:
525 if commit_filter:
526 commit_filter = ' and '.join(map(safe_str, commit_filter))
526 commit_filter = ' and '.join(map(safe_str, commit_filter))
527 revisions = self._remote.rev_range([commit_filter])
527 revisions = self._remote.rev_range([commit_filter])
528 collection_generator = MercurialIndexBasedCollectionGenerator
528 collection_generator = MercurialIndexBasedCollectionGenerator
529 else:
529 else:
530 revisions = self.commit_ids
530 revisions = self.commit_ids
531
531
532 if start_pos or end_pos:
532 if start_pos or end_pos:
533 revisions = revisions[start_pos:end_pos]
533 revisions = revisions[start_pos:end_pos]
534
534
535 return collection_generator(self, revisions, pre_load=pre_load)
535 return collection_generator(self, revisions, pre_load=pre_load)
536
536
537 def pull(self, url, commit_ids=None):
537 def pull(self, url, commit_ids=None):
538 """
538 """
539 Tries to pull changes from external location.
539 Tries to pull changes from external location.
540
540
541 :param commit_ids: Optional. Can be set to a list of commit ids
541 :param commit_ids: Optional. Can be set to a list of commit ids
542 which shall be pulled from the other repository.
542 which shall be pulled from the other repository.
543 """
543 """
544 url = self._get_url(url)
544 url = self._get_url(url)
545 self._remote.pull(url, commit_ids=commit_ids)
545 self._remote.pull(url, commit_ids=commit_ids)
546 self._remote.invalidate_vcs_cache()
546 self._remote.invalidate_vcs_cache()
547
547
548 def push(self, url):
548 def push(self, url):
549 url = self._get_url(url)
549 url = self._get_url(url)
550 self._remote.sync_push(url)
550 self._remote.sync_push(url)
551
551
552 def _local_clone(self, clone_path):
552 def _local_clone(self, clone_path):
553 """
553 """
554 Create a local clone of the current repo.
554 Create a local clone of the current repo.
555 """
555 """
556 self._remote.clone(self.path, clone_path, update_after_clone=True,
556 self._remote.clone(self.path, clone_path, update_after_clone=True,
557 hooks=False)
557 hooks=False)
558
558
559 def _update(self, revision, clean=False):
559 def _update(self, revision, clean=False):
560 """
560 """
561 Update the working copy to the specified revision.
561 Update the working copy to the specified revision.
562 """
562 """
563 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
563 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
564 self._remote.update(revision, clean=clean)
564 self._remote.update(revision, clean=clean)
565
565
566 def _identify(self):
566 def _identify(self):
567 """
567 """
568 Return the current state of the working directory.
568 Return the current state of the working directory.
569 """
569 """
570 return self._remote.identify().strip().rstrip('+')
570 return self._remote.identify().strip().rstrip('+')
571
571
572 def _heads(self, branch=None):
572 def _heads(self, branch=None):
573 """
573 """
574 Return the commit ids of the repository heads.
574 Return the commit ids of the repository heads.
575 """
575 """
576 return self._remote.heads(branch=branch).strip().split(' ')
576 return self._remote.heads(branch=branch).strip().split(' ')
577
577
578 def _ancestor(self, revision1, revision2):
578 def _ancestor(self, revision1, revision2):
579 """
579 """
580 Return the common ancestor of the two revisions.
580 Return the common ancestor of the two revisions.
581 """
581 """
582 return self._remote.ancestor(revision1, revision2)
582 return self._remote.ancestor(revision1, revision2)
583
583
584 def _local_push(
584 def _local_push(
585 self, revision, repository_path, push_branches=False,
585 self, revision, repository_path, push_branches=False,
586 enable_hooks=False):
586 enable_hooks=False):
587 """
587 """
588 Push the given revision to the specified repository.
588 Push the given revision to the specified repository.
589
589
590 :param push_branches: allow to create branches in the target repo.
590 :param push_branches: allow to create branches in the target repo.
591 """
591 """
592 self._remote.push(
592 self._remote.push(
593 [revision], repository_path, hooks=enable_hooks,
593 [revision], repository_path, hooks=enable_hooks,
594 push_branches=push_branches)
594 push_branches=push_branches)
595
595
596 def _local_merge(self, target_ref, merge_message, user_name, user_email,
596 def _local_merge(self, target_ref, merge_message, user_name, user_email,
597 source_ref, use_rebase=False, dry_run=False):
597 source_ref, use_rebase=False, dry_run=False):
598 """
598 """
599 Merge the given source_revision into the checked out revision.
599 Merge the given source_revision into the checked out revision.
600
600
601 Returns the commit id of the merge and a boolean indicating if the
601 Returns the commit id of the merge and a boolean indicating if the
602 commit needs to be pushed.
602 commit needs to be pushed.
603 """
603 """
604 self._update(target_ref.commit_id)
604 self._update(target_ref.commit_id)
605
605
606 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
606 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
607 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
607 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
608
608
609 if ancestor == source_ref.commit_id:
609 if ancestor == source_ref.commit_id:
610 # Nothing to do, the changes were already integrated
610 # Nothing to do, the changes were already integrated
611 return target_ref.commit_id, False
611 return target_ref.commit_id, False
612
612
613 elif ancestor == target_ref.commit_id and is_the_same_branch:
613 elif ancestor == target_ref.commit_id and is_the_same_branch:
614 # In this case we should force a commit message
614 # In this case we should force a commit message
615 return source_ref.commit_id, True
615 return source_ref.commit_id, True
616
616
617 if use_rebase:
617 if use_rebase:
618 try:
618 try:
619 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
619 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
620 target_ref.commit_id)
620 target_ref.commit_id)
621 self.bookmark(bookmark_name, revision=source_ref.commit_id)
621 self.bookmark(bookmark_name, revision=source_ref.commit_id)
622 self._remote.rebase(
622 self._remote.rebase(
623 source=source_ref.commit_id, dest=target_ref.commit_id)
623 source=source_ref.commit_id, dest=target_ref.commit_id)
624 self._remote.invalidate_vcs_cache()
624 self._remote.invalidate_vcs_cache()
625 self._update(bookmark_name)
625 self._update(bookmark_name)
626 return self._identify(), True
626 return self._identify(), True
627 except RepositoryError:
627 except RepositoryError:
628 # The rebase-abort may raise another exception which 'hides'
628 # The rebase-abort may raise another exception which 'hides'
629 # the original one, therefore we log it here.
629 # the original one, therefore we log it here.
630 log.exception('Error while rebasing shadow repo during merge.')
630 log.exception('Error while rebasing shadow repo during merge.')
631
631
632 # Cleanup any rebase leftovers
632 # Cleanup any rebase leftovers
633 self._remote.invalidate_vcs_cache()
633 self._remote.invalidate_vcs_cache()
634 self._remote.rebase(abort=True)
634 self._remote.rebase(abort=True)
635 self._remote.invalidate_vcs_cache()
635 self._remote.invalidate_vcs_cache()
636 self._remote.update(clean=True)
636 self._remote.update(clean=True)
637 raise
637 raise
638 else:
638 else:
639 try:
639 try:
640 self._remote.merge(source_ref.commit_id)
640 self._remote.merge(source_ref.commit_id)
641 self._remote.invalidate_vcs_cache()
641 self._remote.invalidate_vcs_cache()
642 self._remote.commit(
642 self._remote.commit(
643 message=safe_str(merge_message),
643 message=safe_str(merge_message),
644 username=safe_str('%s <%s>' % (user_name, user_email)))
644 username=safe_str('%s <%s>' % (user_name, user_email)))
645 self._remote.invalidate_vcs_cache()
645 self._remote.invalidate_vcs_cache()
646 return self._identify(), True
646 return self._identify(), True
647 except RepositoryError:
647 except RepositoryError:
648 # Cleanup any merge leftovers
648 # Cleanup any merge leftovers
649 self._remote.update(clean=True)
649 self._remote.update(clean=True)
650 raise
650 raise
651
651
652 def _local_close(self, target_ref, user_name, user_email,
652 def _local_close(self, target_ref, user_name, user_email,
653 source_ref, close_message=''):
653 source_ref, close_message=''):
654 """
654 """
655 Close the branch of the given source_revision
655 Close the branch of the given source_revision
656
656
657 Returns the commit id of the close and a boolean indicating if the
657 Returns the commit id of the close and a boolean indicating if the
658 commit needs to be pushed.
658 commit needs to be pushed.
659 """
659 """
660 self._update(source_ref.commit_id)
660 self._update(source_ref.commit_id)
661 message = close_message or "Closing branch: `{}`".format(source_ref.name)
661 message = close_message or "Closing branch: `{}`".format(source_ref.name)
662 try:
662 try:
663 self._remote.commit(
663 self._remote.commit(
664 message=safe_str(message),
664 message=safe_str(message),
665 username=safe_str('%s <%s>' % (user_name, user_email)),
665 username=safe_str('%s <%s>' % (user_name, user_email)),
666 close_branch=True)
666 close_branch=True)
667 self._remote.invalidate_vcs_cache()
667 self._remote.invalidate_vcs_cache()
668 return self._identify(), True
668 return self._identify(), True
669 except RepositoryError:
669 except RepositoryError:
670 # Cleanup any commit leftovers
670 # Cleanup any commit leftovers
671 self._remote.update(clean=True)
671 self._remote.update(clean=True)
672 raise
672 raise
673
673
674 def _is_the_same_branch(self, target_ref, source_ref):
674 def _is_the_same_branch(self, target_ref, source_ref):
675 return (
675 return (
676 self._get_branch_name(target_ref) ==
676 self._get_branch_name(target_ref) ==
677 self._get_branch_name(source_ref))
677 self._get_branch_name(source_ref))
678
678
679 def _get_branch_name(self, ref):
679 def _get_branch_name(self, ref):
680 if ref.type == 'branch':
680 if ref.type == 'branch':
681 return ref.name
681 return ref.name
682 return self._remote.ctx_branch(ref.commit_id)
682 return self._remote.ctx_branch(ref.commit_id)
683
683
684 def _get_shadow_repository_path(self, workspace_id):
684 def _maybe_prepare_merge_workspace(
685 # The name of the shadow repository must start with '.', so it is
685 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
686 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
686 shadow_repository_path = self._get_shadow_repository_path(
687 return os.path.join(
687 repo_id, workspace_id)
688 os.path.dirname(self.path),
689 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
690
691 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
692 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
693 if not os.path.exists(shadow_repository_path):
688 if not os.path.exists(shadow_repository_path):
694 self._local_clone(shadow_repository_path)
689 self._local_clone(shadow_repository_path)
695 log.debug(
690 log.debug(
696 'Prepared shadow repository in %s', shadow_repository_path)
691 'Prepared shadow repository in %s', shadow_repository_path)
697
692
698 return shadow_repository_path
693 return shadow_repository_path
699
694
700 def _merge_repo(self, shadow_repository_path, target_ref,
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
701 source_repo, source_ref, merge_message,
696 source_repo, source_ref, merge_message,
702 merger_name, merger_email, dry_run=False,
697 merger_name, merger_email, dry_run=False,
703 use_rebase=False, close_branch=False):
698 use_rebase=False, close_branch=False):
704
699
705 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
700 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
706 'rebase' if use_rebase else 'merge', dry_run)
701 'rebase' if use_rebase else 'merge', dry_run)
707 if target_ref.commit_id not in self._heads():
702 if target_ref.commit_id not in self._heads():
708 return MergeResponse(
703 return MergeResponse(
709 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
704 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
710
705
711 try:
706 try:
712 if (target_ref.type == 'branch' and
707 if (target_ref.type == 'branch' and
713 len(self._heads(target_ref.name)) != 1):
708 len(self._heads(target_ref.name)) != 1):
714 return MergeResponse(
709 return MergeResponse(
715 False, False, None,
710 False, False, None,
716 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
711 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
717 except CommitDoesNotExistError:
712 except CommitDoesNotExistError:
718 log.exception('Failure when looking up branch heads on hg target')
713 log.exception('Failure when looking up branch heads on hg target')
719 return MergeResponse(
714 return MergeResponse(
720 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
715 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
721
716
717 shadow_repository_path = self._maybe_prepare_merge_workspace(
718 repo_id, workspace_id, target_ref, source_ref)
722 shadow_repo = self._get_shadow_instance(shadow_repository_path)
719 shadow_repo = self._get_shadow_instance(shadow_repository_path)
723
720
724 log.debug('Pulling in target reference %s', target_ref)
721 log.debug('Pulling in target reference %s', target_ref)
725 self._validate_pull_reference(target_ref)
722 self._validate_pull_reference(target_ref)
726 shadow_repo._local_pull(self.path, target_ref)
723 shadow_repo._local_pull(self.path, target_ref)
727 try:
724 try:
728 log.debug('Pulling in source reference %s', source_ref)
725 log.debug('Pulling in source reference %s', source_ref)
729 source_repo._validate_pull_reference(source_ref)
726 source_repo._validate_pull_reference(source_ref)
730 shadow_repo._local_pull(source_repo.path, source_ref)
727 shadow_repo._local_pull(source_repo.path, source_ref)
731 except CommitDoesNotExistError:
728 except CommitDoesNotExistError:
732 log.exception('Failure when doing local pull on hg shadow repo')
729 log.exception('Failure when doing local pull on hg shadow repo')
733 return MergeResponse(
730 return MergeResponse(
734 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
731 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
735
732
736 merge_ref = None
733 merge_ref = None
737 merge_commit_id = None
734 merge_commit_id = None
738 close_commit_id = None
735 close_commit_id = None
739 merge_failure_reason = MergeFailureReason.NONE
736 merge_failure_reason = MergeFailureReason.NONE
740
737
741 # enforce that close branch should be used only in case we source from
738 # enforce that close branch should be used only in case we source from
742 # an actual Branch
739 # an actual Branch
743 close_branch = close_branch and source_ref.type == 'branch'
740 close_branch = close_branch and source_ref.type == 'branch'
744
741
745 # don't allow to close branch if source and target are the same
742 # don't allow to close branch if source and target are the same
746 close_branch = close_branch and source_ref.name != target_ref.name
743 close_branch = close_branch and source_ref.name != target_ref.name
747
744
748 needs_push_on_close = False
745 needs_push_on_close = False
749 if close_branch and not use_rebase and not dry_run:
746 if close_branch and not use_rebase and not dry_run:
750 try:
747 try:
751 close_commit_id, needs_push_on_close = shadow_repo._local_close(
748 close_commit_id, needs_push_on_close = shadow_repo._local_close(
752 target_ref, merger_name, merger_email, source_ref)
749 target_ref, merger_name, merger_email, source_ref)
753 merge_possible = True
750 merge_possible = True
754 except RepositoryError:
751 except RepositoryError:
755 log.exception(
752 log.exception(
756 'Failure when doing close branch on hg shadow repo')
753 'Failure when doing close branch on hg shadow repo')
757 merge_possible = False
754 merge_possible = False
758 merge_failure_reason = MergeFailureReason.MERGE_FAILED
755 merge_failure_reason = MergeFailureReason.MERGE_FAILED
759 else:
756 else:
760 merge_possible = True
757 merge_possible = True
761
758
762 needs_push = False
759 needs_push = False
763 if merge_possible:
760 if merge_possible:
764 try:
761 try:
765 merge_commit_id, needs_push = shadow_repo._local_merge(
762 merge_commit_id, needs_push = shadow_repo._local_merge(
766 target_ref, merge_message, merger_name, merger_email,
763 target_ref, merge_message, merger_name, merger_email,
767 source_ref, use_rebase=use_rebase, dry_run=dry_run)
764 source_ref, use_rebase=use_rebase, dry_run=dry_run)
768 merge_possible = True
765 merge_possible = True
769
766
770 # read the state of the close action, if it
767 # read the state of the close action, if it
771 # maybe required a push
768 # maybe required a push
772 needs_push = needs_push or needs_push_on_close
769 needs_push = needs_push or needs_push_on_close
773
770
774 # Set a bookmark pointing to the merge commit. This bookmark
771 # Set a bookmark pointing to the merge commit. This bookmark
775 # may be used to easily identify the last successful merge
772 # may be used to easily identify the last successful merge
776 # commit in the shadow repository.
773 # commit in the shadow repository.
777 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
774 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
778 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
775 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
779 except SubrepoMergeError:
776 except SubrepoMergeError:
780 log.exception(
777 log.exception(
781 'Subrepo merge error during local merge on hg shadow repo.')
778 'Subrepo merge error during local merge on hg shadow repo.')
782 merge_possible = False
779 merge_possible = False
783 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
780 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
784 needs_push = False
781 needs_push = False
785 except RepositoryError:
782 except RepositoryError:
786 log.exception('Failure when doing local merge on hg shadow repo')
783 log.exception('Failure when doing local merge on hg shadow repo')
787 merge_possible = False
784 merge_possible = False
788 merge_failure_reason = MergeFailureReason.MERGE_FAILED
785 merge_failure_reason = MergeFailureReason.MERGE_FAILED
789 needs_push = False
786 needs_push = False
790
787
791 if merge_possible and not dry_run:
788 if merge_possible and not dry_run:
792 if needs_push:
789 if needs_push:
793 # In case the target is a bookmark, update it, so after pushing
790 # In case the target is a bookmark, update it, so after pushing
794 # the bookmarks is also updated in the target.
791 # the bookmarks is also updated in the target.
795 if target_ref.type == 'book':
792 if target_ref.type == 'book':
796 shadow_repo.bookmark(
793 shadow_repo.bookmark(
797 target_ref.name, revision=merge_commit_id)
794 target_ref.name, revision=merge_commit_id)
798 try:
795 try:
799 shadow_repo_with_hooks = self._get_shadow_instance(
796 shadow_repo_with_hooks = self._get_shadow_instance(
800 shadow_repository_path,
797 shadow_repository_path,
801 enable_hooks=True)
798 enable_hooks=True)
802 # This is the actual merge action, we push from shadow
799 # This is the actual merge action, we push from shadow
803 # into origin.
800 # into origin.
804 # Note: the push_branches option will push any new branch
801 # Note: the push_branches option will push any new branch
805 # defined in the source repository to the target. This may
802 # defined in the source repository to the target. This may
806 # be dangerous as branches are permanent in Mercurial.
803 # be dangerous as branches are permanent in Mercurial.
807 # This feature was requested in issue #441.
804 # This feature was requested in issue #441.
808 shadow_repo_with_hooks._local_push(
805 shadow_repo_with_hooks._local_push(
809 merge_commit_id, self.path, push_branches=True,
806 merge_commit_id, self.path, push_branches=True,
810 enable_hooks=True)
807 enable_hooks=True)
811
808
812 # maybe we also need to push the close_commit_id
809 # maybe we also need to push the close_commit_id
813 if close_commit_id:
810 if close_commit_id:
814 shadow_repo_with_hooks._local_push(
811 shadow_repo_with_hooks._local_push(
815 close_commit_id, self.path, push_branches=True,
812 close_commit_id, self.path, push_branches=True,
816 enable_hooks=True)
813 enable_hooks=True)
817 merge_succeeded = True
814 merge_succeeded = True
818 except RepositoryError:
815 except RepositoryError:
819 log.exception(
816 log.exception(
820 'Failure when doing local push from the shadow '
817 'Failure when doing local push from the shadow '
821 'repository to the target repository.')
818 'repository to the target repository.')
822 merge_succeeded = False
819 merge_succeeded = False
823 merge_failure_reason = MergeFailureReason.PUSH_FAILED
820 merge_failure_reason = MergeFailureReason.PUSH_FAILED
824 else:
821 else:
825 merge_succeeded = True
822 merge_succeeded = True
826 else:
823 else:
827 merge_succeeded = False
824 merge_succeeded = False
828
825
829 return MergeResponse(
826 return MergeResponse(
830 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
827 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
831
828
832 def _get_shadow_instance(
829 def _get_shadow_instance(
833 self, shadow_repository_path, enable_hooks=False):
830 self, shadow_repository_path, enable_hooks=False):
834 config = self.config.copy()
831 config = self.config.copy()
835 if not enable_hooks:
832 if not enable_hooks:
836 config.clear_section('hooks')
833 config.clear_section('hooks')
837 return MercurialRepository(shadow_repository_path, config)
834 return MercurialRepository(shadow_repository_path, config)
838
835
839 def _validate_pull_reference(self, reference):
836 def _validate_pull_reference(self, reference):
840 if not (reference.name in self.bookmarks or
837 if not (reference.name in self.bookmarks or
841 reference.name in self.branches or
838 reference.name in self.branches or
842 self.get_commit(reference.commit_id)):
839 self.get_commit(reference.commit_id)):
843 raise CommitDoesNotExistError(
840 raise CommitDoesNotExistError(
844 'Unknown branch, bookmark or commit id')
841 'Unknown branch, bookmark or commit id')
845
842
846 def _local_pull(self, repository_path, reference):
843 def _local_pull(self, repository_path, reference):
847 """
844 """
848 Fetch a branch, bookmark or commit from a local repository.
845 Fetch a branch, bookmark or commit from a local repository.
849 """
846 """
850 repository_path = os.path.abspath(repository_path)
847 repository_path = os.path.abspath(repository_path)
851 if repository_path == self.path:
848 if repository_path == self.path:
852 raise ValueError('Cannot pull from the same repository')
849 raise ValueError('Cannot pull from the same repository')
853
850
854 reference_type_to_option_name = {
851 reference_type_to_option_name = {
855 'book': 'bookmark',
852 'book': 'bookmark',
856 'branch': 'branch',
853 'branch': 'branch',
857 }
854 }
858 option_name = reference_type_to_option_name.get(
855 option_name = reference_type_to_option_name.get(
859 reference.type, 'revision')
856 reference.type, 'revision')
860
857
861 if option_name == 'revision':
858 if option_name == 'revision':
862 ref = reference.commit_id
859 ref = reference.commit_id
863 else:
860 else:
864 ref = reference.name
861 ref = reference.name
865
862
866 options = {option_name: [ref]}
863 options = {option_name: [ref]}
867 self._remote.pull_cmd(repository_path, hooks=False, **options)
864 self._remote.pull_cmd(repository_path, hooks=False, **options)
868 self._remote.invalidate_vcs_cache()
865 self._remote.invalidate_vcs_cache()
869
866
870 def bookmark(self, bookmark, revision=None):
867 def bookmark(self, bookmark, revision=None):
871 if isinstance(bookmark, unicode):
868 if isinstance(bookmark, unicode):
872 bookmark = safe_str(bookmark)
869 bookmark = safe_str(bookmark)
873 self._remote.bookmark(bookmark, revision=revision)
870 self._remote.bookmark(bookmark, revision=revision)
874 self._remote.invalidate_vcs_cache()
871 self._remote.invalidate_vcs_cache()
875
872
876 def get_path_permissions(self, username):
873 def get_path_permissions(self, username):
877 hgacl_file = os.path.join(self.path, '.hg/hgacl')
874 hgacl_file = os.path.join(self.path, '.hg/hgacl')
878
875
879 def read_patterns(suffix):
876 def read_patterns(suffix):
880 svalue = None
877 svalue = None
881 try:
878 try:
882 svalue = hgacl.get('narrowhgacl', username + suffix)
879 svalue = hgacl.get('narrowhgacl', username + suffix)
883 except configparser.NoOptionError:
880 except configparser.NoOptionError:
884 try:
881 try:
885 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
882 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
886 except configparser.NoOptionError:
883 except configparser.NoOptionError:
887 pass
884 pass
888 if not svalue:
885 if not svalue:
889 return None
886 return None
890 result = ['/']
887 result = ['/']
891 for pattern in svalue.split():
888 for pattern in svalue.split():
892 result.append(pattern)
889 result.append(pattern)
893 if '*' not in pattern and '?' not in pattern:
890 if '*' not in pattern and '?' not in pattern:
894 result.append(pattern + '/*')
891 result.append(pattern + '/*')
895 return result
892 return result
896
893
897 if os.path.exists(hgacl_file):
894 if os.path.exists(hgacl_file):
898 try:
895 try:
899 hgacl = configparser.RawConfigParser()
896 hgacl = configparser.RawConfigParser()
900 hgacl.read(hgacl_file)
897 hgacl.read(hgacl_file)
901
898
902 includes = read_patterns('.includes')
899 includes = read_patterns('.includes')
903 excludes = read_patterns('.excludes')
900 excludes = read_patterns('.excludes')
904 return BasePathPermissionChecker.create_from_patterns(
901 return BasePathPermissionChecker.create_from_patterns(
905 includes, excludes)
902 includes, excludes)
906 except BaseException as e:
903 except BaseException as e:
907 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
904 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
908 hgacl_file, self.name, e)
905 hgacl_file, self.name, e)
909 raise exceptions.RepositoryRequirementError(msg)
906 raise exceptions.RepositoryRequirementError(msg)
910 else:
907 else:
911 return None
908 return None
912
909
913
910
914 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
911 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
915
912
916 def _commit_factory(self, commit_id):
913 def _commit_factory(self, commit_id):
917 return self.repo.get_commit(
914 return self.repo.get_commit(
918 commit_idx=commit_id, pre_load=self.pre_load)
915 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,4524 +1,4530 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import re
25 import re
26 import os
26 import os
27 import time
27 import time
28 import hashlib
28 import hashlib
29 import logging
29 import logging
30 import datetime
30 import datetime
31 import warnings
31 import warnings
32 import ipaddress
32 import ipaddress
33 import functools
33 import functools
34 import traceback
34 import traceback
35 import collections
35 import collections
36
36
37 from sqlalchemy import (
37 from sqlalchemy import (
38 or_, and_, not_, func, TypeDecorator, event,
38 or_, and_, not_, func, TypeDecorator, event,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType)
41 Text, Float, PickleType)
42 from sqlalchemy.sql.expression import true, false
42 from sqlalchemy.sql.expression import true, false
43 from sqlalchemy.sql.functions import coalesce, count # noqa
43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 from sqlalchemy.orm import (
44 from sqlalchemy.orm import (
45 relationship, joinedload, class_mapper, validates, aliased)
45 relationship, joinedload, class_mapper, validates, aliased)
46 from sqlalchemy.ext.declarative import declared_attr
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # noqa
48 from sqlalchemy.exc import IntegrityError # noqa
49 from sqlalchemy.dialects.mysql import LONGTEXT
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from beaker.cache import cache_region
50 from beaker.cache import cache_region
51 from zope.cachedescriptors.property import Lazy as LazyProperty
51 from zope.cachedescriptors.property import Lazy as LazyProperty
52
52
53 from pyramid.threadlocal import get_current_request
53 from pyramid.threadlocal import get_current_request
54
54
55 from rhodecode.translation import _
55 from rhodecode.translation import _
56 from rhodecode.lib.vcs import get_vcs_instance
56 from rhodecode.lib.vcs import get_vcs_instance
57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 from rhodecode.lib.utils2 import (
58 from rhodecode.lib.utils2 import (
59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 glob2re, StrictAttributeDict, cleaned_uri)
61 glob2re, StrictAttributeDict, cleaned_uri)
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 JsonRaw
63 JsonRaw
64 from rhodecode.lib.ext_json import json
64 from rhodecode.lib.ext_json import json
65 from rhodecode.lib.caching_query import FromCache
65 from rhodecode.lib.caching_query import FromCache
66 from rhodecode.lib.encrypt import AESCipher
66 from rhodecode.lib.encrypt import AESCipher
67
67
68 from rhodecode.model.meta import Base, Session
68 from rhodecode.model.meta import Base, Session
69
69
70 URL_SEP = '/'
70 URL_SEP = '/'
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73 # =============================================================================
73 # =============================================================================
74 # BASE CLASSES
74 # BASE CLASSES
75 # =============================================================================
75 # =============================================================================
76
76
77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 # beaker.session.secret if first is not set.
78 # beaker.session.secret if first is not set.
79 # and initialized at environment.py
79 # and initialized at environment.py
80 ENCRYPTION_KEY = None
80 ENCRYPTION_KEY = None
81
81
82 # used to sort permissions by types, '#' used here is not allowed to be in
82 # used to sort permissions by types, '#' used here is not allowed to be in
83 # usernames, and it's very early in sorted string.printable table.
83 # usernames, and it's very early in sorted string.printable table.
84 PERMISSION_TYPE_SORT = {
84 PERMISSION_TYPE_SORT = {
85 'admin': '####',
85 'admin': '####',
86 'write': '###',
86 'write': '###',
87 'read': '##',
87 'read': '##',
88 'none': '#',
88 'none': '#',
89 }
89 }
90
90
91
91
92 def display_user_sort(obj):
92 def display_user_sort(obj):
93 """
93 """
94 Sort function used to sort permissions in .permissions() function of
94 Sort function used to sort permissions in .permissions() function of
95 Repository, RepoGroup, UserGroup. Also it put the default user in front
95 Repository, RepoGroup, UserGroup. Also it put the default user in front
96 of all other resources
96 of all other resources
97 """
97 """
98
98
99 if obj.username == User.DEFAULT_USER:
99 if obj.username == User.DEFAULT_USER:
100 return '#####'
100 return '#####'
101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 return prefix + obj.username
102 return prefix + obj.username
103
103
104
104
105 def display_user_group_sort(obj):
105 def display_user_group_sort(obj):
106 """
106 """
107 Sort function used to sort permissions in .permissions() function of
107 Sort function used to sort permissions in .permissions() function of
108 Repository, RepoGroup, UserGroup. Also it put the default user in front
108 Repository, RepoGroup, UserGroup. Also it put the default user in front
109 of all other resources
109 of all other resources
110 """
110 """
111
111
112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 return prefix + obj.users_group_name
113 return prefix + obj.users_group_name
114
114
115
115
116 def _hash_key(k):
116 def _hash_key(k):
117 return md5_safe(k)
117 return md5_safe(k)
118
118
119
119
120 def in_filter_generator(qry, items, limit=500):
120 def in_filter_generator(qry, items, limit=500):
121 """
121 """
122 Splits IN() into multiple with OR
122 Splits IN() into multiple with OR
123 e.g.::
123 e.g.::
124 cnt = Repository.query().filter(
124 cnt = Repository.query().filter(
125 or_(
125 or_(
126 *in_filter_generator(Repository.repo_id, range(100000))
126 *in_filter_generator(Repository.repo_id, range(100000))
127 )).count()
127 )).count()
128 """
128 """
129 if not items:
129 if not items:
130 # empty list will cause empty query which might cause security issues
130 # empty list will cause empty query which might cause security issues
131 # this can lead to hidden unpleasant results
131 # this can lead to hidden unpleasant results
132 items = [-1]
132 items = [-1]
133
133
134 parts = []
134 parts = []
135 for chunk in xrange(0, len(items), limit):
135 for chunk in xrange(0, len(items), limit):
136 parts.append(
136 parts.append(
137 qry.in_(items[chunk: chunk + limit])
137 qry.in_(items[chunk: chunk + limit])
138 )
138 )
139
139
140 return parts
140 return parts
141
141
142
142
143 class EncryptedTextValue(TypeDecorator):
143 class EncryptedTextValue(TypeDecorator):
144 """
144 """
145 Special column for encrypted long text data, use like::
145 Special column for encrypted long text data, use like::
146
146
147 value = Column("encrypted_value", EncryptedValue(), nullable=False)
147 value = Column("encrypted_value", EncryptedValue(), nullable=False)
148
148
149 This column is intelligent so if value is in unencrypted form it return
149 This column is intelligent so if value is in unencrypted form it return
150 unencrypted form, but on save it always encrypts
150 unencrypted form, but on save it always encrypts
151 """
151 """
152 impl = Text
152 impl = Text
153
153
154 def process_bind_param(self, value, dialect):
154 def process_bind_param(self, value, dialect):
155 if not value:
155 if not value:
156 return value
156 return value
157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 # protect against double encrypting if someone manually starts
158 # protect against double encrypting if someone manually starts
159 # doing
159 # doing
160 raise ValueError('value needs to be in unencrypted format, ie. '
160 raise ValueError('value needs to be in unencrypted format, ie. '
161 'not starting with enc$aes')
161 'not starting with enc$aes')
162 return 'enc$aes_hmac$%s' % AESCipher(
162 return 'enc$aes_hmac$%s' % AESCipher(
163 ENCRYPTION_KEY, hmac=True).encrypt(value)
163 ENCRYPTION_KEY, hmac=True).encrypt(value)
164
164
165 def process_result_value(self, value, dialect):
165 def process_result_value(self, value, dialect):
166 import rhodecode
166 import rhodecode
167
167
168 if not value:
168 if not value:
169 return value
169 return value
170
170
171 parts = value.split('$', 3)
171 parts = value.split('$', 3)
172 if not len(parts) == 3:
172 if not len(parts) == 3:
173 # probably not encrypted values
173 # probably not encrypted values
174 return value
174 return value
175 else:
175 else:
176 if parts[0] != 'enc':
176 if parts[0] != 'enc':
177 # parts ok but without our header ?
177 # parts ok but without our header ?
178 return value
178 return value
179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 'rhodecode.encrypted_values.strict') or True)
180 'rhodecode.encrypted_values.strict') or True)
181 # at that stage we know it's our encryption
181 # at that stage we know it's our encryption
182 if parts[1] == 'aes':
182 if parts[1] == 'aes':
183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 elif parts[1] == 'aes_hmac':
184 elif parts[1] == 'aes_hmac':
185 decrypted_data = AESCipher(
185 decrypted_data = AESCipher(
186 ENCRYPTION_KEY, hmac=True,
186 ENCRYPTION_KEY, hmac=True,
187 strict_verification=enc_strict_mode).decrypt(parts[2])
187 strict_verification=enc_strict_mode).decrypt(parts[2])
188 else:
188 else:
189 raise ValueError(
189 raise ValueError(
190 'Encryption type part is wrong, must be `aes` '
190 'Encryption type part is wrong, must be `aes` '
191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 return decrypted_data
192 return decrypted_data
193
193
194
194
195 class BaseModel(object):
195 class BaseModel(object):
196 """
196 """
197 Base Model for all classes
197 Base Model for all classes
198 """
198 """
199
199
200 @classmethod
200 @classmethod
201 def _get_keys(cls):
201 def _get_keys(cls):
202 """return column names for this model """
202 """return column names for this model """
203 return class_mapper(cls).c.keys()
203 return class_mapper(cls).c.keys()
204
204
205 def get_dict(self):
205 def get_dict(self):
206 """
206 """
207 return dict with keys and values corresponding
207 return dict with keys and values corresponding
208 to this model data """
208 to this model data """
209
209
210 d = {}
210 d = {}
211 for k in self._get_keys():
211 for k in self._get_keys():
212 d[k] = getattr(self, k)
212 d[k] = getattr(self, k)
213
213
214 # also use __json__() if present to get additional fields
214 # also use __json__() if present to get additional fields
215 _json_attr = getattr(self, '__json__', None)
215 _json_attr = getattr(self, '__json__', None)
216 if _json_attr:
216 if _json_attr:
217 # update with attributes from __json__
217 # update with attributes from __json__
218 if callable(_json_attr):
218 if callable(_json_attr):
219 _json_attr = _json_attr()
219 _json_attr = _json_attr()
220 for k, val in _json_attr.iteritems():
220 for k, val in _json_attr.iteritems():
221 d[k] = val
221 d[k] = val
222 return d
222 return d
223
223
224 def get_appstruct(self):
224 def get_appstruct(self):
225 """return list with keys and values tuples corresponding
225 """return list with keys and values tuples corresponding
226 to this model data """
226 to this model data """
227
227
228 lst = []
228 lst = []
229 for k in self._get_keys():
229 for k in self._get_keys():
230 lst.append((k, getattr(self, k),))
230 lst.append((k, getattr(self, k),))
231 return lst
231 return lst
232
232
233 def populate_obj(self, populate_dict):
233 def populate_obj(self, populate_dict):
234 """populate model with data from given populate_dict"""
234 """populate model with data from given populate_dict"""
235
235
236 for k in self._get_keys():
236 for k in self._get_keys():
237 if k in populate_dict:
237 if k in populate_dict:
238 setattr(self, k, populate_dict[k])
238 setattr(self, k, populate_dict[k])
239
239
240 @classmethod
240 @classmethod
241 def query(cls):
241 def query(cls):
242 return Session().query(cls)
242 return Session().query(cls)
243
243
244 @classmethod
244 @classmethod
245 def get(cls, id_):
245 def get(cls, id_):
246 if id_:
246 if id_:
247 return cls.query().get(id_)
247 return cls.query().get(id_)
248
248
249 @classmethod
249 @classmethod
250 def get_or_404(cls, id_):
250 def get_or_404(cls, id_):
251 from pyramid.httpexceptions import HTTPNotFound
251 from pyramid.httpexceptions import HTTPNotFound
252
252
253 try:
253 try:
254 id_ = int(id_)
254 id_ = int(id_)
255 except (TypeError, ValueError):
255 except (TypeError, ValueError):
256 raise HTTPNotFound()
256 raise HTTPNotFound()
257
257
258 res = cls.query().get(id_)
258 res = cls.query().get(id_)
259 if not res:
259 if not res:
260 raise HTTPNotFound()
260 raise HTTPNotFound()
261 return res
261 return res
262
262
263 @classmethod
263 @classmethod
264 def getAll(cls):
264 def getAll(cls):
265 # deprecated and left for backward compatibility
265 # deprecated and left for backward compatibility
266 return cls.get_all()
266 return cls.get_all()
267
267
268 @classmethod
268 @classmethod
269 def get_all(cls):
269 def get_all(cls):
270 return cls.query().all()
270 return cls.query().all()
271
271
272 @classmethod
272 @classmethod
273 def delete(cls, id_):
273 def delete(cls, id_):
274 obj = cls.query().get(id_)
274 obj = cls.query().get(id_)
275 Session().delete(obj)
275 Session().delete(obj)
276
276
277 @classmethod
277 @classmethod
278 def identity_cache(cls, session, attr_name, value):
278 def identity_cache(cls, session, attr_name, value):
279 exist_in_session = []
279 exist_in_session = []
280 for (item_cls, pkey), instance in session.identity_map.items():
280 for (item_cls, pkey), instance in session.identity_map.items():
281 if cls == item_cls and getattr(instance, attr_name) == value:
281 if cls == item_cls and getattr(instance, attr_name) == value:
282 exist_in_session.append(instance)
282 exist_in_session.append(instance)
283 if exist_in_session:
283 if exist_in_session:
284 if len(exist_in_session) == 1:
284 if len(exist_in_session) == 1:
285 return exist_in_session[0]
285 return exist_in_session[0]
286 log.exception(
286 log.exception(
287 'multiple objects with attr %s and '
287 'multiple objects with attr %s and '
288 'value %s found with same name: %r',
288 'value %s found with same name: %r',
289 attr_name, value, exist_in_session)
289 attr_name, value, exist_in_session)
290
290
291 def __repr__(self):
291 def __repr__(self):
292 if hasattr(self, '__unicode__'):
292 if hasattr(self, '__unicode__'):
293 # python repr needs to return str
293 # python repr needs to return str
294 try:
294 try:
295 return safe_str(self.__unicode__())
295 return safe_str(self.__unicode__())
296 except UnicodeDecodeError:
296 except UnicodeDecodeError:
297 pass
297 pass
298 return '<DB:%s>' % (self.__class__.__name__)
298 return '<DB:%s>' % (self.__class__.__name__)
299
299
300
300
301 class RhodeCodeSetting(Base, BaseModel):
301 class RhodeCodeSetting(Base, BaseModel):
302 __tablename__ = 'rhodecode_settings'
302 __tablename__ = 'rhodecode_settings'
303 __table_args__ = (
303 __table_args__ = (
304 UniqueConstraint('app_settings_name'),
304 UniqueConstraint('app_settings_name'),
305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 )
307 )
308
308
309 SETTINGS_TYPES = {
309 SETTINGS_TYPES = {
310 'str': safe_str,
310 'str': safe_str,
311 'int': safe_int,
311 'int': safe_int,
312 'unicode': safe_unicode,
312 'unicode': safe_unicode,
313 'bool': str2bool,
313 'bool': str2bool,
314 'list': functools.partial(aslist, sep=',')
314 'list': functools.partial(aslist, sep=',')
315 }
315 }
316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 GLOBAL_CONF_KEY = 'app_settings'
317 GLOBAL_CONF_KEY = 'app_settings'
318
318
319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323
323
324 def __init__(self, key='', val='', type='unicode'):
324 def __init__(self, key='', val='', type='unicode'):
325 self.app_settings_name = key
325 self.app_settings_name = key
326 self.app_settings_type = type
326 self.app_settings_type = type
327 self.app_settings_value = val
327 self.app_settings_value = val
328
328
329 @validates('_app_settings_value')
329 @validates('_app_settings_value')
330 def validate_settings_value(self, key, val):
330 def validate_settings_value(self, key, val):
331 assert type(val) == unicode
331 assert type(val) == unicode
332 return val
332 return val
333
333
334 @hybrid_property
334 @hybrid_property
335 def app_settings_value(self):
335 def app_settings_value(self):
336 v = self._app_settings_value
336 v = self._app_settings_value
337 _type = self.app_settings_type
337 _type = self.app_settings_type
338 if _type:
338 if _type:
339 _type = self.app_settings_type.split('.')[0]
339 _type = self.app_settings_type.split('.')[0]
340 # decode the encrypted value
340 # decode the encrypted value
341 if 'encrypted' in self.app_settings_type:
341 if 'encrypted' in self.app_settings_type:
342 cipher = EncryptedTextValue()
342 cipher = EncryptedTextValue()
343 v = safe_unicode(cipher.process_result_value(v, None))
343 v = safe_unicode(cipher.process_result_value(v, None))
344
344
345 converter = self.SETTINGS_TYPES.get(_type) or \
345 converter = self.SETTINGS_TYPES.get(_type) or \
346 self.SETTINGS_TYPES['unicode']
346 self.SETTINGS_TYPES['unicode']
347 return converter(v)
347 return converter(v)
348
348
349 @app_settings_value.setter
349 @app_settings_value.setter
350 def app_settings_value(self, val):
350 def app_settings_value(self, val):
351 """
351 """
352 Setter that will always make sure we use unicode in app_settings_value
352 Setter that will always make sure we use unicode in app_settings_value
353
353
354 :param val:
354 :param val:
355 """
355 """
356 val = safe_unicode(val)
356 val = safe_unicode(val)
357 # encode the encrypted value
357 # encode the encrypted value
358 if 'encrypted' in self.app_settings_type:
358 if 'encrypted' in self.app_settings_type:
359 cipher = EncryptedTextValue()
359 cipher = EncryptedTextValue()
360 val = safe_unicode(cipher.process_bind_param(val, None))
360 val = safe_unicode(cipher.process_bind_param(val, None))
361 self._app_settings_value = val
361 self._app_settings_value = val
362
362
363 @hybrid_property
363 @hybrid_property
364 def app_settings_type(self):
364 def app_settings_type(self):
365 return self._app_settings_type
365 return self._app_settings_type
366
366
367 @app_settings_type.setter
367 @app_settings_type.setter
368 def app_settings_type(self, val):
368 def app_settings_type(self, val):
369 if val.split('.')[0] not in self.SETTINGS_TYPES:
369 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 raise Exception('type must be one of %s got %s'
370 raise Exception('type must be one of %s got %s'
371 % (self.SETTINGS_TYPES.keys(), val))
371 % (self.SETTINGS_TYPES.keys(), val))
372 self._app_settings_type = val
372 self._app_settings_type = val
373
373
374 def __unicode__(self):
374 def __unicode__(self):
375 return u"<%s('%s:%s[%s]')>" % (
375 return u"<%s('%s:%s[%s]')>" % (
376 self.__class__.__name__,
376 self.__class__.__name__,
377 self.app_settings_name, self.app_settings_value,
377 self.app_settings_name, self.app_settings_value,
378 self.app_settings_type
378 self.app_settings_type
379 )
379 )
380
380
381
381
382 class RhodeCodeUi(Base, BaseModel):
382 class RhodeCodeUi(Base, BaseModel):
383 __tablename__ = 'rhodecode_ui'
383 __tablename__ = 'rhodecode_ui'
384 __table_args__ = (
384 __table_args__ = (
385 UniqueConstraint('ui_key'),
385 UniqueConstraint('ui_key'),
386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 )
388 )
389
389
390 HOOK_REPO_SIZE = 'changegroup.repo_size'
390 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 # HG
391 # HG
392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 HOOK_PULL = 'outgoing.pull_logger'
393 HOOK_PULL = 'outgoing.pull_logger'
394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 HOOK_PUSH = 'changegroup.push_logger'
396 HOOK_PUSH = 'changegroup.push_logger'
397 HOOK_PUSH_KEY = 'pushkey.key_push'
397 HOOK_PUSH_KEY = 'pushkey.key_push'
398
398
399 # TODO: johbo: Unify way how hooks are configured for git and hg,
399 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 # git part is currently hardcoded.
400 # git part is currently hardcoded.
401
401
402 # SVN PATTERNS
402 # SVN PATTERNS
403 SVN_BRANCH_ID = 'vcs_svn_branch'
403 SVN_BRANCH_ID = 'vcs_svn_branch'
404 SVN_TAG_ID = 'vcs_svn_tag'
404 SVN_TAG_ID = 'vcs_svn_tag'
405
405
406 ui_id = Column(
406 ui_id = Column(
407 "ui_id", Integer(), nullable=False, unique=True, default=None,
407 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 primary_key=True)
408 primary_key=True)
409 ui_section = Column(
409 ui_section = Column(
410 "ui_section", String(255), nullable=True, unique=None, default=None)
410 "ui_section", String(255), nullable=True, unique=None, default=None)
411 ui_key = Column(
411 ui_key = Column(
412 "ui_key", String(255), nullable=True, unique=None, default=None)
412 "ui_key", String(255), nullable=True, unique=None, default=None)
413 ui_value = Column(
413 ui_value = Column(
414 "ui_value", String(255), nullable=True, unique=None, default=None)
414 "ui_value", String(255), nullable=True, unique=None, default=None)
415 ui_active = Column(
415 ui_active = Column(
416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417
417
418 def __repr__(self):
418 def __repr__(self):
419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 self.ui_key, self.ui_value)
420 self.ui_key, self.ui_value)
421
421
422
422
423 class RepoRhodeCodeSetting(Base, BaseModel):
423 class RepoRhodeCodeSetting(Base, BaseModel):
424 __tablename__ = 'repo_rhodecode_settings'
424 __tablename__ = 'repo_rhodecode_settings'
425 __table_args__ = (
425 __table_args__ = (
426 UniqueConstraint(
426 UniqueConstraint(
427 'app_settings_name', 'repository_id',
427 'app_settings_name', 'repository_id',
428 name='uq_repo_rhodecode_setting_name_repo_id'),
428 name='uq_repo_rhodecode_setting_name_repo_id'),
429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 )
431 )
432
432
433 repository_id = Column(
433 repository_id = Column(
434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 nullable=False)
435 nullable=False)
436 app_settings_id = Column(
436 app_settings_id = Column(
437 "app_settings_id", Integer(), nullable=False, unique=True,
437 "app_settings_id", Integer(), nullable=False, unique=True,
438 default=None, primary_key=True)
438 default=None, primary_key=True)
439 app_settings_name = Column(
439 app_settings_name = Column(
440 "app_settings_name", String(255), nullable=True, unique=None,
440 "app_settings_name", String(255), nullable=True, unique=None,
441 default=None)
441 default=None)
442 _app_settings_value = Column(
442 _app_settings_value = Column(
443 "app_settings_value", String(4096), nullable=True, unique=None,
443 "app_settings_value", String(4096), nullable=True, unique=None,
444 default=None)
444 default=None)
445 _app_settings_type = Column(
445 _app_settings_type = Column(
446 "app_settings_type", String(255), nullable=True, unique=None,
446 "app_settings_type", String(255), nullable=True, unique=None,
447 default=None)
447 default=None)
448
448
449 repository = relationship('Repository')
449 repository = relationship('Repository')
450
450
451 def __init__(self, repository_id, key='', val='', type='unicode'):
451 def __init__(self, repository_id, key='', val='', type='unicode'):
452 self.repository_id = repository_id
452 self.repository_id = repository_id
453 self.app_settings_name = key
453 self.app_settings_name = key
454 self.app_settings_type = type
454 self.app_settings_type = type
455 self.app_settings_value = val
455 self.app_settings_value = val
456
456
457 @validates('_app_settings_value')
457 @validates('_app_settings_value')
458 def validate_settings_value(self, key, val):
458 def validate_settings_value(self, key, val):
459 assert type(val) == unicode
459 assert type(val) == unicode
460 return val
460 return val
461
461
462 @hybrid_property
462 @hybrid_property
463 def app_settings_value(self):
463 def app_settings_value(self):
464 v = self._app_settings_value
464 v = self._app_settings_value
465 type_ = self.app_settings_type
465 type_ = self.app_settings_type
466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 return converter(v)
468 return converter(v)
469
469
470 @app_settings_value.setter
470 @app_settings_value.setter
471 def app_settings_value(self, val):
471 def app_settings_value(self, val):
472 """
472 """
473 Setter that will always make sure we use unicode in app_settings_value
473 Setter that will always make sure we use unicode in app_settings_value
474
474
475 :param val:
475 :param val:
476 """
476 """
477 self._app_settings_value = safe_unicode(val)
477 self._app_settings_value = safe_unicode(val)
478
478
479 @hybrid_property
479 @hybrid_property
480 def app_settings_type(self):
480 def app_settings_type(self):
481 return self._app_settings_type
481 return self._app_settings_type
482
482
483 @app_settings_type.setter
483 @app_settings_type.setter
484 def app_settings_type(self, val):
484 def app_settings_type(self, val):
485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 if val not in SETTINGS_TYPES:
486 if val not in SETTINGS_TYPES:
487 raise Exception('type must be one of %s got %s'
487 raise Exception('type must be one of %s got %s'
488 % (SETTINGS_TYPES.keys(), val))
488 % (SETTINGS_TYPES.keys(), val))
489 self._app_settings_type = val
489 self._app_settings_type = val
490
490
491 def __unicode__(self):
491 def __unicode__(self):
492 return u"<%s('%s:%s:%s[%s]')>" % (
492 return u"<%s('%s:%s:%s[%s]')>" % (
493 self.__class__.__name__, self.repository.repo_name,
493 self.__class__.__name__, self.repository.repo_name,
494 self.app_settings_name, self.app_settings_value,
494 self.app_settings_name, self.app_settings_value,
495 self.app_settings_type
495 self.app_settings_type
496 )
496 )
497
497
498
498
class RepoRhodeCodeUi(Base, BaseModel):
    """Per-repository override of a RhodeCode ``ui`` (vcs config) entry.

    Each row scopes one (section, key, value) ui setting to a single
    repository; uniqueness is enforced per repository/section/key.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        # BUG FIX: the original format string '<%s[%s:%s]%s=>%s]>' had a
        # stray trailing ']' producing unbalanced brackets in the repr.
        return '<%s[%s:%s]%s=>%s>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
530
530
531
531
class User(Base, BaseModel):
    """A RhodeCode user account together with its permission relations."""
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    # the built-in anonymous account
    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    # -- columns ---------------------------------------------------------
    user_id = Column(
        "user_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    username = Column(
        "username", String(255), nullable=True, unique=None, default=None)
    password = Column(
        "password", String(255), nullable=True, unique=None, default=None)
    active = Column(
        "active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column(
        "admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column(
        "firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column(
        "lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column(
        "email", String(255), nullable=True, unique=None, default=None)
    last_login = Column(
        "last_login", DateTime(timezone=False), nullable=True, unique=None,
        default=None)
    last_activity = Column(
        'last_activity', DateTime(timezone=False), nullable=True, unique=None,
        default=None)

    extern_type = Column(
        "extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column(
        "extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column(
        "api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column(
        "inherit_default_permissions", Boolean(), nullable=False, unique=None,
        default=True)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    # JSON-serialized free-form data, exposed via the ``user_data`` property
    _user_data = Column("user_data", LargeBinary(), nullable=True)

    # -- relationships ---------------------------------------------------
    user_log = relationship('UserLog')
    user_perms = relationship(
        'UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id",
        cascade='all')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_user_id==User.user_id',
        cascade='all')
    followings = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship(
        'UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id',
        cascade='all')
    repo_group_to_perm = relationship(
        'UserRepoGroupToPerm',
        primaryjoin='UserRepoGroupToPerm.user_id==User.user_id',
        cascade='all')
    user_group_to_perm = relationship(
        'UserUserGroupToPerm',
        primaryjoin='UserUserGroupToPerm.user_id==User.user_id',
        cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities (NOTE: attribute name keeps the historical typo)
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601
601
602 def __unicode__(self):
602 def __unicode__(self):
603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 self.user_id, self.username)
604 self.user_id, self.username)
605
605
606 @hybrid_property
606 @hybrid_property
607 def email(self):
607 def email(self):
608 return self._email
608 return self._email
609
609
610 @email.setter
610 @email.setter
611 def email(self, val):
611 def email(self, val):
612 self._email = val.lower() if val else None
612 self._email = val.lower() if val else None
613
613
614 @hybrid_property
614 @hybrid_property
615 def first_name(self):
615 def first_name(self):
616 from rhodecode.lib import helpers as h
616 from rhodecode.lib import helpers as h
617 if self.name:
617 if self.name:
618 return h.escape(self.name)
618 return h.escape(self.name)
619 return self.name
619 return self.name
620
620
621 @hybrid_property
621 @hybrid_property
622 def last_name(self):
622 def last_name(self):
623 from rhodecode.lib import helpers as h
623 from rhodecode.lib import helpers as h
624 if self.lastname:
624 if self.lastname:
625 return h.escape(self.lastname)
625 return h.escape(self.lastname)
626 return self.lastname
626 return self.lastname
627
627
628 @hybrid_property
628 @hybrid_property
629 def api_key(self):
629 def api_key(self):
630 """
630 """
631 Fetch if exist an auth-token with role ALL connected to this user
631 Fetch if exist an auth-token with role ALL connected to this user
632 """
632 """
633 user_auth_token = UserApiKeys.query()\
633 user_auth_token = UserApiKeys.query()\
634 .filter(UserApiKeys.user_id == self.user_id)\
634 .filter(UserApiKeys.user_id == self.user_id)\
635 .filter(or_(UserApiKeys.expires == -1,
635 .filter(or_(UserApiKeys.expires == -1,
636 UserApiKeys.expires >= time.time()))\
636 UserApiKeys.expires >= time.time()))\
637 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
637 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
638 if user_auth_token:
638 if user_auth_token:
639 user_auth_token = user_auth_token.api_key
639 user_auth_token = user_auth_token.api_key
640
640
641 return user_auth_token
641 return user_auth_token
642
642
643 @api_key.setter
643 @api_key.setter
644 def api_key(self, val):
644 def api_key(self, val):
645 # don't allow to set API key this is deprecated for now
645 # don't allow to set API key this is deprecated for now
646 self._api_key = None
646 self._api_key = None
647
647
648 @property
648 @property
649 def reviewer_pull_requests(self):
649 def reviewer_pull_requests(self):
650 return PullRequestReviewers.query() \
650 return PullRequestReviewers.query() \
651 .options(joinedload(PullRequestReviewers.pull_request)) \
651 .options(joinedload(PullRequestReviewers.pull_request)) \
652 .filter(PullRequestReviewers.user_id == self.user_id) \
652 .filter(PullRequestReviewers.user_id == self.user_id) \
653 .all()
653 .all()
654
654
655 @property
655 @property
656 def firstname(self):
656 def firstname(self):
657 # alias for future
657 # alias for future
658 return self.name
658 return self.name
659
659
660 @property
660 @property
661 def emails(self):
661 def emails(self):
662 other = UserEmailMap.query()\
662 other = UserEmailMap.query()\
663 .filter(UserEmailMap.user == self) \
663 .filter(UserEmailMap.user == self) \
664 .order_by(UserEmailMap.email_id.asc()) \
664 .order_by(UserEmailMap.email_id.asc()) \
665 .all()
665 .all()
666 return [self.email] + [x.email for x in other]
666 return [self.email] + [x.email for x in other]
667
667
668 @property
668 @property
669 def auth_tokens(self):
669 def auth_tokens(self):
670 auth_tokens = self.get_auth_tokens()
670 auth_tokens = self.get_auth_tokens()
671 return [x.api_key for x in auth_tokens]
671 return [x.api_key for x in auth_tokens]
672
672
673 def get_auth_tokens(self):
673 def get_auth_tokens(self):
674 return UserApiKeys.query()\
674 return UserApiKeys.query()\
675 .filter(UserApiKeys.user == self)\
675 .filter(UserApiKeys.user == self)\
676 .order_by(UserApiKeys.user_api_key_id.asc())\
676 .order_by(UserApiKeys.user_api_key_id.asc())\
677 .all()
677 .all()
678
678
679 @LazyProperty
679 @LazyProperty
680 def feed_token(self):
680 def feed_token(self):
681 return self.get_feed_token()
681 return self.get_feed_token()
682
682
683 def get_feed_token(self, cache=True):
683 def get_feed_token(self, cache=True):
684 feed_tokens = UserApiKeys.query()\
684 feed_tokens = UserApiKeys.query()\
685 .filter(UserApiKeys.user == self)\
685 .filter(UserApiKeys.user == self)\
686 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
686 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
687 if cache:
687 if cache:
688 feed_tokens = feed_tokens.options(
688 feed_tokens = feed_tokens.options(
689 FromCache("long_term", "get_user_feed_token_%s" % self.user_id))
689 FromCache("long_term", "get_user_feed_token_%s" % self.user_id))
690
690
691 feed_tokens = feed_tokens.all()
691 feed_tokens = feed_tokens.all()
692 if feed_tokens:
692 if feed_tokens:
693 return feed_tokens[0].api_key
693 return feed_tokens[0].api_key
694 return 'NO_FEED_TOKEN_AVAILABLE'
694 return 'NO_FEED_TOKEN_AVAILABLE'
695
695
696 @classmethod
696 @classmethod
697 def get(cls, user_id, cache=False):
697 def get(cls, user_id, cache=False):
698 if not user_id:
698 if not user_id:
699 return
699 return
700
700
701 user = cls.query()
701 user = cls.query()
702 if cache:
702 if cache:
703 user = user.options(
703 user = user.options(
704 FromCache("sql_cache_short", "get_users_%s" % user_id))
704 FromCache("sql_cache_short", "get_users_%s" % user_id))
705 return user.get(user_id)
705 return user.get(user_id)
706
706
707 @classmethod
707 @classmethod
708 def extra_valid_auth_tokens(cls, user, role=None):
708 def extra_valid_auth_tokens(cls, user, role=None):
709 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
709 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
710 .filter(or_(UserApiKeys.expires == -1,
710 .filter(or_(UserApiKeys.expires == -1,
711 UserApiKeys.expires >= time.time()))
711 UserApiKeys.expires >= time.time()))
712 if role:
712 if role:
713 tokens = tokens.filter(or_(UserApiKeys.role == role,
713 tokens = tokens.filter(or_(UserApiKeys.role == role,
714 UserApiKeys.role == UserApiKeys.ROLE_ALL))
714 UserApiKeys.role == UserApiKeys.ROLE_ALL))
715 return tokens.all()
715 return tokens.all()
716
716
717 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
717 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
718 from rhodecode.lib import auth
718 from rhodecode.lib import auth
719
719
720 log.debug('Trying to authenticate user: %s via auth-token, '
720 log.debug('Trying to authenticate user: %s via auth-token, '
721 'and roles: %s', self, roles)
721 'and roles: %s', self, roles)
722
722
723 if not auth_token:
723 if not auth_token:
724 return False
724 return False
725
725
726 crypto_backend = auth.crypto_backend()
726 crypto_backend = auth.crypto_backend()
727
727
728 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
728 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
729 tokens_q = UserApiKeys.query()\
729 tokens_q = UserApiKeys.query()\
730 .filter(UserApiKeys.user_id == self.user_id)\
730 .filter(UserApiKeys.user_id == self.user_id)\
731 .filter(or_(UserApiKeys.expires == -1,
731 .filter(or_(UserApiKeys.expires == -1,
732 UserApiKeys.expires >= time.time()))
732 UserApiKeys.expires >= time.time()))
733
733
734 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
734 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
735
735
736 plain_tokens = []
736 plain_tokens = []
737 hash_tokens = []
737 hash_tokens = []
738
738
739 for token in tokens_q.all():
739 for token in tokens_q.all():
740 # verify scope first
740 # verify scope first
741 if token.repo_id:
741 if token.repo_id:
742 # token has a scope, we need to verify it
742 # token has a scope, we need to verify it
743 if scope_repo_id != token.repo_id:
743 if scope_repo_id != token.repo_id:
744 log.debug(
744 log.debug(
745 'Scope mismatch: token has a set repo scope: %s, '
745 'Scope mismatch: token has a set repo scope: %s, '
746 'and calling scope is:%s, skipping further checks',
746 'and calling scope is:%s, skipping further checks',
747 token.repo, scope_repo_id)
747 token.repo, scope_repo_id)
748 # token has a scope, and it doesn't match, skip token
748 # token has a scope, and it doesn't match, skip token
749 continue
749 continue
750
750
751 if token.api_key.startswith(crypto_backend.ENC_PREF):
751 if token.api_key.startswith(crypto_backend.ENC_PREF):
752 hash_tokens.append(token.api_key)
752 hash_tokens.append(token.api_key)
753 else:
753 else:
754 plain_tokens.append(token.api_key)
754 plain_tokens.append(token.api_key)
755
755
756 is_plain_match = auth_token in plain_tokens
756 is_plain_match = auth_token in plain_tokens
757 if is_plain_match:
757 if is_plain_match:
758 return True
758 return True
759
759
760 for hashed in hash_tokens:
760 for hashed in hash_tokens:
761 # TODO(marcink): this is expensive to calculate, but most secure
761 # TODO(marcink): this is expensive to calculate, but most secure
762 match = crypto_backend.hash_check(auth_token, hashed)
762 match = crypto_backend.hash_check(auth_token, hashed)
763 if match:
763 if match:
764 return True
764 return True
765
765
766 return False
766 return False
767
767
768 @property
768 @property
769 def ip_addresses(self):
769 def ip_addresses(self):
770 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
770 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
771 return [x.ip_addr for x in ret]
771 return [x.ip_addr for x in ret]
772
772
773 @property
773 @property
774 def username_and_name(self):
774 def username_and_name(self):
775 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
775 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
776
776
777 @property
777 @property
778 def username_or_name_or_email(self):
778 def username_or_name_or_email(self):
779 full_name = self.full_name if self.full_name is not ' ' else None
779 full_name = self.full_name if self.full_name is not ' ' else None
780 return self.username or full_name or self.email
780 return self.username or full_name or self.email
781
781
782 @property
782 @property
783 def full_name(self):
783 def full_name(self):
784 return '%s %s' % (self.first_name, self.last_name)
784 return '%s %s' % (self.first_name, self.last_name)
785
785
786 @property
786 @property
787 def full_name_or_username(self):
787 def full_name_or_username(self):
788 return ('%s %s' % (self.first_name, self.last_name)
788 return ('%s %s' % (self.first_name, self.last_name)
789 if (self.first_name and self.last_name) else self.username)
789 if (self.first_name and self.last_name) else self.username)
790
790
791 @property
791 @property
792 def full_contact(self):
792 def full_contact(self):
793 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
793 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
794
794
795 @property
795 @property
796 def short_contact(self):
796 def short_contact(self):
797 return '%s %s' % (self.first_name, self.last_name)
797 return '%s %s' % (self.first_name, self.last_name)
798
798
799 @property
799 @property
800 def is_admin(self):
800 def is_admin(self):
801 return self.admin
801 return self.admin
802
802
803 def AuthUser(self, **kwargs):
803 def AuthUser(self, **kwargs):
804 """
804 """
805 Returns instance of AuthUser for this user
805 Returns instance of AuthUser for this user
806 """
806 """
807 from rhodecode.lib.auth import AuthUser
807 from rhodecode.lib.auth import AuthUser
808 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
808 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
809
809
810 @hybrid_property
810 @hybrid_property
811 def user_data(self):
811 def user_data(self):
812 if not self._user_data:
812 if not self._user_data:
813 return {}
813 return {}
814
814
815 try:
815 try:
816 return json.loads(self._user_data)
816 return json.loads(self._user_data)
817 except TypeError:
817 except TypeError:
818 return {}
818 return {}
819
819
820 @user_data.setter
820 @user_data.setter
821 def user_data(self, val):
821 def user_data(self, val):
822 if not isinstance(val, dict):
822 if not isinstance(val, dict):
823 raise Exception('user_data must be dict, got %s' % type(val))
823 raise Exception('user_data must be dict, got %s' % type(val))
824 try:
824 try:
825 self._user_data = json.dumps(val)
825 self._user_data = json.dumps(val)
826 except Exception:
826 except Exception:
827 log.error(traceback.format_exc())
827 log.error(traceback.format_exc())
828
828
829 @classmethod
829 @classmethod
830 def get_by_username(cls, username, case_insensitive=False,
830 def get_by_username(cls, username, case_insensitive=False,
831 cache=False, identity_cache=False):
831 cache=False, identity_cache=False):
832 session = Session()
832 session = Session()
833
833
834 if case_insensitive:
834 if case_insensitive:
835 q = cls.query().filter(
835 q = cls.query().filter(
836 func.lower(cls.username) == func.lower(username))
836 func.lower(cls.username) == func.lower(username))
837 else:
837 else:
838 q = cls.query().filter(cls.username == username)
838 q = cls.query().filter(cls.username == username)
839
839
840 if cache:
840 if cache:
841 if identity_cache:
841 if identity_cache:
842 val = cls.identity_cache(session, 'username', username)
842 val = cls.identity_cache(session, 'username', username)
843 if val:
843 if val:
844 return val
844 return val
845 else:
845 else:
846 cache_key = "get_user_by_name_%s" % _hash_key(username)
846 cache_key = "get_user_by_name_%s" % _hash_key(username)
847 q = q.options(
847 q = q.options(
848 FromCache("sql_cache_short", cache_key))
848 FromCache("sql_cache_short", cache_key))
849
849
850 return q.scalar()
850 return q.scalar()
851
851
852 @classmethod
852 @classmethod
853 def get_by_auth_token(cls, auth_token, cache=False):
853 def get_by_auth_token(cls, auth_token, cache=False):
854 q = UserApiKeys.query()\
854 q = UserApiKeys.query()\
855 .filter(UserApiKeys.api_key == auth_token)\
855 .filter(UserApiKeys.api_key == auth_token)\
856 .filter(or_(UserApiKeys.expires == -1,
856 .filter(or_(UserApiKeys.expires == -1,
857 UserApiKeys.expires >= time.time()))
857 UserApiKeys.expires >= time.time()))
858 if cache:
858 if cache:
859 q = q.options(
859 q = q.options(
860 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
860 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
861
861
862 match = q.first()
862 match = q.first()
863 if match:
863 if match:
864 return match.user
864 return match.user
865
865
866 @classmethod
866 @classmethod
867 def get_by_email(cls, email, case_insensitive=False, cache=False):
867 def get_by_email(cls, email, case_insensitive=False, cache=False):
868
868
869 if case_insensitive:
869 if case_insensitive:
870 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
870 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
871
871
872 else:
872 else:
873 q = cls.query().filter(cls.email == email)
873 q = cls.query().filter(cls.email == email)
874
874
875 email_key = _hash_key(email)
875 email_key = _hash_key(email)
876 if cache:
876 if cache:
877 q = q.options(
877 q = q.options(
878 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
878 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
879
879
880 ret = q.scalar()
880 ret = q.scalar()
881 if ret is None:
881 if ret is None:
882 q = UserEmailMap.query()
882 q = UserEmailMap.query()
883 # try fetching in alternate email map
883 # try fetching in alternate email map
884 if case_insensitive:
884 if case_insensitive:
885 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
885 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
886 else:
886 else:
887 q = q.filter(UserEmailMap.email == email)
887 q = q.filter(UserEmailMap.email == email)
888 q = q.options(joinedload(UserEmailMap.user))
888 q = q.options(joinedload(UserEmailMap.user))
889 if cache:
889 if cache:
890 q = q.options(
890 q = q.options(
891 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
891 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
892 ret = getattr(q.scalar(), 'user', None)
892 ret = getattr(q.scalar(), 'user', None)
893
893
894 return ret
894 return ret
895
895
896 @classmethod
896 @classmethod
897 def get_from_cs_author(cls, author):
897 def get_from_cs_author(cls, author):
898 """
898 """
899 Tries to get User objects out of commit author string
899 Tries to get User objects out of commit author string
900
900
901 :param author:
901 :param author:
902 """
902 """
903 from rhodecode.lib.helpers import email, author_name
903 from rhodecode.lib.helpers import email, author_name
904 # Valid email in the attribute passed, see if they're in the system
904 # Valid email in the attribute passed, see if they're in the system
905 _email = email(author)
905 _email = email(author)
906 if _email:
906 if _email:
907 user = cls.get_by_email(_email, case_insensitive=True)
907 user = cls.get_by_email(_email, case_insensitive=True)
908 if user:
908 if user:
909 return user
909 return user
910 # Maybe we can match by username?
910 # Maybe we can match by username?
911 _author = author_name(author)
911 _author = author_name(author)
912 user = cls.get_by_username(_author, case_insensitive=True)
912 user = cls.get_by_username(_author, case_insensitive=True)
913 if user:
913 if user:
914 return user
914 return user
915
915
916 def update_userdata(self, **kwargs):
916 def update_userdata(self, **kwargs):
917 usr = self
917 usr = self
918 old = usr.user_data
918 old = usr.user_data
919 old.update(**kwargs)
919 old.update(**kwargs)
920 usr.user_data = old
920 usr.user_data = old
921 Session().add(usr)
921 Session().add(usr)
922 log.debug('updated userdata with ', kwargs)
922 log.debug('updated userdata with ', kwargs)
923
923
924 def update_lastlogin(self):
924 def update_lastlogin(self):
925 """Update user lastlogin"""
925 """Update user lastlogin"""
926 self.last_login = datetime.datetime.now()
926 self.last_login = datetime.datetime.now()
927 Session().add(self)
927 Session().add(self)
928 log.debug('updated user %s lastlogin', self.username)
928 log.debug('updated user %s lastlogin', self.username)
929
929
930 def update_lastactivity(self):
930 def update_lastactivity(self):
931 """Update user lastactivity"""
931 """Update user lastactivity"""
932 self.last_activity = datetime.datetime.now()
932 self.last_activity = datetime.datetime.now()
933 Session().add(self)
933 Session().add(self)
934 log.debug('updated user `%s` last activity', self.username)
934 log.debug('updated user `%s` last activity', self.username)
935
935
936 def update_password(self, new_password):
936 def update_password(self, new_password):
937 from rhodecode.lib.auth import get_crypt_password
937 from rhodecode.lib.auth import get_crypt_password
938
938
939 self.password = get_crypt_password(new_password)
939 self.password = get_crypt_password(new_password)
940 Session().add(self)
940 Session().add(self)
941
941
942 @classmethod
942 @classmethod
943 def get_first_super_admin(cls):
943 def get_first_super_admin(cls):
944 user = User.query().filter(User.admin == true()).first()
944 user = User.query().filter(User.admin == true()).first()
945 if user is None:
945 if user is None:
946 raise Exception('FATAL: Missing administrative account!')
946 raise Exception('FATAL: Missing administrative account!')
947 return user
947 return user
948
948
949 @classmethod
949 @classmethod
950 def get_all_super_admins(cls):
950 def get_all_super_admins(cls):
951 """
951 """
952 Returns all admin accounts sorted by username
952 Returns all admin accounts sorted by username
953 """
953 """
954 return User.query().filter(User.admin == true())\
954 return User.query().filter(User.admin == true())\
955 .order_by(User.username.asc()).all()
955 .order_by(User.username.asc()).all()
956
956
957 @classmethod
957 @classmethod
958 def get_default_user(cls, cache=False, refresh=False):
958 def get_default_user(cls, cache=False, refresh=False):
959 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
959 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
960 if user is None:
960 if user is None:
961 raise Exception('FATAL: Missing default account!')
961 raise Exception('FATAL: Missing default account!')
962 if refresh:
962 if refresh:
963 # The default user might be based on outdated state which
963 # The default user might be based on outdated state which
964 # has been loaded from the cache.
964 # has been loaded from the cache.
965 # A call to refresh() ensures that the
965 # A call to refresh() ensures that the
966 # latest state from the database is used.
966 # latest state from the database is used.
967 Session().refresh(user)
967 Session().refresh(user)
968 return user
968 return user
969
969
970 def _get_default_perms(self, user, suffix=''):
970 def _get_default_perms(self, user, suffix=''):
971 from rhodecode.model.permission import PermissionModel
971 from rhodecode.model.permission import PermissionModel
972 return PermissionModel().get_default_perms(user.user_perms, suffix)
972 return PermissionModel().get_default_perms(user.user_perms, suffix)
973
973
974 def get_default_perms(self, suffix=''):
974 def get_default_perms(self, suffix=''):
975 return self._get_default_perms(self, suffix)
975 return self._get_default_perms(self, suffix)
976
976
977 def get_api_data(self, include_secrets=False, details='full'):
977 def get_api_data(self, include_secrets=False, details='full'):
978 """
978 """
979 Common function for generating user related data for API
979 Common function for generating user related data for API
980
980
981 :param include_secrets: By default secrets in the API data will be replaced
981 :param include_secrets: By default secrets in the API data will be replaced
982 by a placeholder value to prevent exposing this data by accident. In case
982 by a placeholder value to prevent exposing this data by accident. In case
983 this data shall be exposed, set this flag to ``True``.
983 this data shall be exposed, set this flag to ``True``.
984
984
985 :param details: details can be 'basic|full' basic gives only a subset of
985 :param details: details can be 'basic|full' basic gives only a subset of
986 the available user information that includes user_id, name and emails.
986 the available user information that includes user_id, name and emails.
987 """
987 """
988 user = self
988 user = self
989 user_data = self.user_data
989 user_data = self.user_data
990 data = {
990 data = {
991 'user_id': user.user_id,
991 'user_id': user.user_id,
992 'username': user.username,
992 'username': user.username,
993 'firstname': user.name,
993 'firstname': user.name,
994 'lastname': user.lastname,
994 'lastname': user.lastname,
995 'email': user.email,
995 'email': user.email,
996 'emails': user.emails,
996 'emails': user.emails,
997 }
997 }
998 if details == 'basic':
998 if details == 'basic':
999 return data
999 return data
1000
1000
1001 auth_token_length = 40
1001 auth_token_length = 40
1002 auth_token_replacement = '*' * auth_token_length
1002 auth_token_replacement = '*' * auth_token_length
1003
1003
1004 extras = {
1004 extras = {
1005 'auth_tokens': [auth_token_replacement],
1005 'auth_tokens': [auth_token_replacement],
1006 'active': user.active,
1006 'active': user.active,
1007 'admin': user.admin,
1007 'admin': user.admin,
1008 'extern_type': user.extern_type,
1008 'extern_type': user.extern_type,
1009 'extern_name': user.extern_name,
1009 'extern_name': user.extern_name,
1010 'last_login': user.last_login,
1010 'last_login': user.last_login,
1011 'last_activity': user.last_activity,
1011 'last_activity': user.last_activity,
1012 'ip_addresses': user.ip_addresses,
1012 'ip_addresses': user.ip_addresses,
1013 'language': user_data.get('language')
1013 'language': user_data.get('language')
1014 }
1014 }
1015 data.update(extras)
1015 data.update(extras)
1016
1016
1017 if include_secrets:
1017 if include_secrets:
1018 data['auth_tokens'] = user.auth_tokens
1018 data['auth_tokens'] = user.auth_tokens
1019 return data
1019 return data
1020
1020
1021 def __json__(self):
1021 def __json__(self):
1022 data = {
1022 data = {
1023 'full_name': self.full_name,
1023 'full_name': self.full_name,
1024 'full_name_or_username': self.full_name_or_username,
1024 'full_name_or_username': self.full_name_or_username,
1025 'short_contact': self.short_contact,
1025 'short_contact': self.short_contact,
1026 'full_contact': self.full_contact,
1026 'full_contact': self.full_contact,
1027 }
1027 }
1028 data.update(self.get_api_data())
1028 data.update(self.get_api_data())
1029 return data
1029 return data
1030
1030
1031
1031
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens ("API keys") owned by users.

    A token carries a role (what it may be used for) and an optional
    scope: a single repository, or a repository group applied recursively.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is deliberately absent here —
    # presumably it is internal-only and not user-selectable; confirm.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        return {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }

    def get_api_data(self, include_secrets=False):
        """API payload; the token is obfuscated unless *include_secrets*."""
        data = self.__json__()
        if not include_secrets:
            data['auth_token'] = self.token_obfuscated
        return data

    @hybrid_property
    def description_safe(self):
        # HTML-escape to make the value safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """True when past expiry; -1 marks a token that never expires."""
        never_expires = self.expires == -1
        return False if never_expires else time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # fall back to the raw role string for unknown roles
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        """Human-readable scope: repo, repo group (recursive) or global."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            return repr(self.repo_group) + ' (recursive)'
        return 'global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # show only the first 4 characters; implicitly None when unset
        if self.api_key:
            return self.api_key[:4] + "****"
1133
1133
1134
1134
class UserEmailMap(Base, BaseModel):
    """Extra (secondary) email addresses mapped to a user."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """
        Reject an email that is already someone's primary address.

        :raises AttributeError: when the address exists in the users table.
        """
        # check if this email is not already used as a main address
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # BUGFIX: corrected grammar of the error message
            # ('is present is user table' -> 'is present in user table')
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lowercase; keep None when unset
        self._email = val.lower() if val else None
1165
1165
1166
1166
class UserIpMap(Base, BaseModel):
    """Per-user IP allow-list entries (single address or CIDR range)."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escape to make the value safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return ``[network_address, broadcast_address]`` for *ip_addr*."""
        # strict=False accepts host addresses with a netmask set
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (
            self.__class__.__name__, self.user_id, self.ip_addr)
1202
1202
1203
1203
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user, unique by fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        return {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }

    def get_api_data(self):
        """API payload; identical to the JSON representation."""
        return self.__json__()
1239
1239
1240
1240
class UserLog(Base, BaseModel):
    """Audit-log entry recording a user action, optionally tied to a repo."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # schema versions of the stored action/user data payloads
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias for the primary key, usable in queries
        return self.user_log_id

    @property
    def action_as_day(self):
        # truncate the timestamp down to its date component
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1289
1289
1290
1290
1291 class UserGroup(Base, BaseModel):
1291 class UserGroup(Base, BaseModel):
1292 __tablename__ = 'users_groups'
1292 __tablename__ = 'users_groups'
1293 __table_args__ = (
1293 __table_args__ = (
1294 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1294 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1295 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1295 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1296 )
1296 )
1297
1297
1298 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1298 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1299 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1299 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1300 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1300 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1301 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1301 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1302 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1302 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1304 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1304 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1305 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1305 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1306
1306
1307 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1307 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1308 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1308 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1309 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1309 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1310 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1310 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1311 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1311 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1312 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1312 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1313
1313
1314 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1314 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1315 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1315 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1316
1316
1317 @classmethod
1317 @classmethod
1318 def _load_group_data(cls, column):
1318 def _load_group_data(cls, column):
1319 if not column:
1319 if not column:
1320 return {}
1320 return {}
1321
1321
1322 try:
1322 try:
1323 return json.loads(column) or {}
1323 return json.loads(column) or {}
1324 except TypeError:
1324 except TypeError:
1325 return {}
1325 return {}
1326
1326
1327 @hybrid_property
1327 @hybrid_property
1328 def description_safe(self):
1328 def description_safe(self):
1329 from rhodecode.lib import helpers as h
1329 from rhodecode.lib import helpers as h
1330 return h.escape(self.user_group_description)
1330 return h.escape(self.user_group_description)
1331
1331
1332 @hybrid_property
1332 @hybrid_property
1333 def group_data(self):
1333 def group_data(self):
1334 return self._load_group_data(self._group_data)
1334 return self._load_group_data(self._group_data)
1335
1335
1336 @group_data.expression
1336 @group_data.expression
1337 def group_data(self, **kwargs):
1337 def group_data(self, **kwargs):
1338 return self._group_data
1338 return self._group_data
1339
1339
1340 @group_data.setter
1340 @group_data.setter
1341 def group_data(self, val):
1341 def group_data(self, val):
1342 try:
1342 try:
1343 self._group_data = json.dumps(val)
1343 self._group_data = json.dumps(val)
1344 except Exception:
1344 except Exception:
1345 log.error(traceback.format_exc())
1345 log.error(traceback.format_exc())
1346
1346
1347 @classmethod
1347 @classmethod
1348 def _load_sync(cls, group_data):
1348 def _load_sync(cls, group_data):
1349 if group_data:
1349 if group_data:
1350 return group_data.get('extern_type')
1350 return group_data.get('extern_type')
1351
1351
1352 @property
1352 @property
1353 def sync(self):
1353 def sync(self):
1354 return self._load_sync(self.group_data)
1354 return self._load_sync(self.group_data)
1355
1355
1356 def __unicode__(self):
1356 def __unicode__(self):
1357 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1357 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1358 self.users_group_id,
1358 self.users_group_id,
1359 self.users_group_name)
1359 self.users_group_name)
1360
1360
1361 @classmethod
1361 @classmethod
1362 def get_by_group_name(cls, group_name, cache=False,
1362 def get_by_group_name(cls, group_name, cache=False,
1363 case_insensitive=False):
1363 case_insensitive=False):
1364 if case_insensitive:
1364 if case_insensitive:
1365 q = cls.query().filter(func.lower(cls.users_group_name) ==
1365 q = cls.query().filter(func.lower(cls.users_group_name) ==
1366 func.lower(group_name))
1366 func.lower(group_name))
1367
1367
1368 else:
1368 else:
1369 q = cls.query().filter(cls.users_group_name == group_name)
1369 q = cls.query().filter(cls.users_group_name == group_name)
1370 if cache:
1370 if cache:
1371 q = q.options(
1371 q = q.options(
1372 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1372 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1373 return q.scalar()
1373 return q.scalar()
1374
1374
1375 @classmethod
1375 @classmethod
1376 def get(cls, user_group_id, cache=False):
1376 def get(cls, user_group_id, cache=False):
1377 if not user_group_id:
1377 if not user_group_id:
1378 return
1378 return
1379
1379
1380 user_group = cls.query()
1380 user_group = cls.query()
1381 if cache:
1381 if cache:
1382 user_group = user_group.options(
1382 user_group = user_group.options(
1383 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1383 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1384 return user_group.get(user_group_id)
1384 return user_group.get(user_group_id)
1385
1385
1386 def permissions(self, with_admins=True, with_owner=True):
1386 def permissions(self, with_admins=True, with_owner=True):
1387 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1387 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1388 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1388 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1389 joinedload(UserUserGroupToPerm.user),
1389 joinedload(UserUserGroupToPerm.user),
1390 joinedload(UserUserGroupToPerm.permission),)
1390 joinedload(UserUserGroupToPerm.permission),)
1391
1391
1392 # get owners and admins and permissions. We do a trick of re-writing
1392 # get owners and admins and permissions. We do a trick of re-writing
1393 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1393 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1394 # has a global reference and changing one object propagates to all
1394 # has a global reference and changing one object propagates to all
1395 # others. This means if admin is also an owner admin_row that change
1395 # others. This means if admin is also an owner admin_row that change
1396 # would propagate to both objects
1396 # would propagate to both objects
1397 perm_rows = []
1397 perm_rows = []
1398 for _usr in q.all():
1398 for _usr in q.all():
1399 usr = AttributeDict(_usr.user.get_dict())
1399 usr = AttributeDict(_usr.user.get_dict())
1400 usr.permission = _usr.permission.permission_name
1400 usr.permission = _usr.permission.permission_name
1401 perm_rows.append(usr)
1401 perm_rows.append(usr)
1402
1402
1403 # filter the perm rows by 'default' first and then sort them by
1403 # filter the perm rows by 'default' first and then sort them by
1404 # admin,write,read,none permissions sorted again alphabetically in
1404 # admin,write,read,none permissions sorted again alphabetically in
1405 # each group
1405 # each group
1406 perm_rows = sorted(perm_rows, key=display_user_sort)
1406 perm_rows = sorted(perm_rows, key=display_user_sort)
1407
1407
1408 _admin_perm = 'usergroup.admin'
1408 _admin_perm = 'usergroup.admin'
1409 owner_row = []
1409 owner_row = []
1410 if with_owner:
1410 if with_owner:
1411 usr = AttributeDict(self.user.get_dict())
1411 usr = AttributeDict(self.user.get_dict())
1412 usr.owner_row = True
1412 usr.owner_row = True
1413 usr.permission = _admin_perm
1413 usr.permission = _admin_perm
1414 owner_row.append(usr)
1414 owner_row.append(usr)
1415
1415
1416 super_admin_rows = []
1416 super_admin_rows = []
1417 if with_admins:
1417 if with_admins:
1418 for usr in User.get_all_super_admins():
1418 for usr in User.get_all_super_admins():
1419 # if this admin is also owner, don't double the record
1419 # if this admin is also owner, don't double the record
1420 if usr.user_id == owner_row[0].user_id:
1420 if usr.user_id == owner_row[0].user_id:
1421 owner_row[0].admin_row = True
1421 owner_row[0].admin_row = True
1422 else:
1422 else:
1423 usr = AttributeDict(usr.get_dict())
1423 usr = AttributeDict(usr.get_dict())
1424 usr.admin_row = True
1424 usr.admin_row = True
1425 usr.permission = _admin_perm
1425 usr.permission = _admin_perm
1426 super_admin_rows.append(usr)
1426 super_admin_rows.append(usr)
1427
1427
1428 return super_admin_rows + owner_row + perm_rows
1428 return super_admin_rows + owner_row + perm_rows
1429
1429
def permission_user_groups(self):
    """Return user groups granted a permission on this user group.

    Each row is an :class:`AttributeDict` copy of the source user group's
    fields with an extra ``permission`` attribute, sorted for display.
    Copies are used so mutating a row cannot leak back into the shared
    sqlalchemy session objects.
    """
    query = UserGroupUserGroupToPerm.query().filter(
        UserGroupUserGroupToPerm.target_user_group == self)
    query = query.options(
        joinedload(UserGroupUserGroupToPerm.user_group),
        joinedload(UserGroupUserGroupToPerm.target_user_group),
        joinedload(UserGroupUserGroupToPerm.permission))

    def _as_row(binding):
        # detached copy of the granted user group + its permission name
        row = AttributeDict(binding.user_group.get_dict())
        row.permission = binding.permission.permission_name
        return row

    return sorted((_as_row(item) for item in query.all()),
                  key=display_user_group_sort)
1444
1444
def _get_default_perms(self, user_group, suffix=''):
    """Delegate default-permission extraction to :class:`PermissionModel`."""
    from rhodecode.model.permission import PermissionModel
    perm_model = PermissionModel()
    return perm_model.get_default_perms(
        user_group.users_group_to_perm, suffix)
1448
1448
def get_default_perms(self, suffix=''):
    """Return the default permissions of this user group itself."""
    return self._get_default_perms(self, suffix=suffix)
1451
1451
def get_api_data(self, with_group_members=True, include_secrets=False):
    """
    Serialize this user group into an API-friendly dict.

    :param with_group_members: also serialize every member user under
        the ``users`` key.
    :param include_secrets: See :meth:`User.get_api_data`, this parameter is
        basically forwarded.
    """
    data = {
        'users_group_id': self.users_group_id,
        'group_name': self.users_group_name,
        'group_description': self.user_group_description,
        'active': self.users_group_active,
        'owner': self.user.username,
        'sync': self.sync,
        'owner_email': self.user.email,
    }

    if with_group_members:
        data['users'] = [
            member.user.get_api_data(include_secrets=include_secrets)
            for member in self.members]

    return data
1477
1477
1478
1478
class UserGroupMember(Base, BaseModel):
    """Association row binding a single user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # surrogate primary key
    users_group_member_id = Column(
        "users_group_member_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    # group side of the membership
    users_group_id = Column(
        "users_group_id", Integer(),
        ForeignKey('users_groups.users_group_id'),
        nullable=False, unique=None, default=None)
    # user side of the membership
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'),
        nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
1496
1496
1497
1497
class RepositoryField(Base, BaseModel):
    """Custom extra field (key/value pair + metadata) attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # prefix used in form to not conflict with already existing fields
    PREFIX = 'ex_'

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Return the field key with the form PREFIX applied.

        Uses :attr:`PREFIX` instead of a duplicated ``'ex_'`` literal so the
        prefix stays consistent with :meth:`un_prefix_key`.
        """
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from *key* if present, else return it as-is."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for (*repo*, *key*) or ``None`` if missing."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1534
1534
1535
1535
class Repository(Base, BaseModel):
    """A single VCS repository tracked by RhodeCode."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # clone/push URI templates rendered in the UI
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repository creation lifecycle states
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # origins that may place a lock on a repository
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    # --- columns --------------------------------------------------------
    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    # --- relationships --------------------------------------------------
    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship(
        'Integration', cascade="all, delete, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")
1642
1642
def __unicode__(self):
    """Readable representation, e.g. ``<Repository('7:group/name')>``."""
    cls_name = self.__class__.__name__
    return u"<%s('%s:%s')>" % (
        cls_name, self.repo_id, safe_unicode(self.repo_name))
1646
1646
@hybrid_property
def description_safe(self):
    """HTML-escaped repository description, safe to render in templates."""
    from rhodecode.lib import helpers
    return helpers.escape(self.description)
1651
1651
@hybrid_property
def landing_rev(self):
    """Return ``[rev_type, rev]``; ``[None, None]`` when unset."""
    stored = self._landing_revision
    if not stored:
        return [None, None]
    parts = stored.split(':')
    if len(parts) < 2:
        # legacy values carry no type prefix; default the type to 'rev'
        parts.insert(0, 'rev')
    return [parts[0], parts[1]]

@landing_rev.setter
def landing_rev(self, val):
    """Store a landing revision given as ``<rev_type>:<rev>``."""
    if ':' not in val:
        raise ValueError('value must be delimited with `:` and consist '
                         'of <rev_type>:<rev>, got %s instead' % val)
    self._landing_revision = val
1668
1668
@hybrid_property
def locked(self):
    """Lock info as ``(user_id, lock_time, reason)``; all-None when unlocked."""
    if not self._locked:
        return [None, None, None]
    owner_id, lock_time, reason = self._locked.split(':')
    return int(owner_id), lock_time, reason

@locked.setter
def locked(self, val):
    """Accept a (user_id, time, reason) sequence, or anything falsy to unlock."""
    if val and isinstance(val, (list, tuple)):
        self._locked = ':'.join(str(item) for item in val)
    else:
        self._locked = None
1684
1684
@hybrid_property
def changeset_cache(self):
    """Cached last-changeset dict; EmptyCommit json on missing/corrupt data."""
    from rhodecode.lib.vcs.backends.base import EmptyCommit
    fallback = EmptyCommit().__json__()
    raw = self._changeset_cache
    if not raw:
        return fallback
    try:
        return json.loads(raw)
    except TypeError:
        return fallback
    except Exception:
        # corrupt cache payload: log it, but never break callers
        log.error(traceback.format_exc())
        return fallback

@changeset_cache.setter
def changeset_cache(self, val):
    """Serialize *val* to JSON; serialization errors are logged, not raised."""
    try:
        self._changeset_cache = json.dumps(val)
    except Exception:
        log.error(traceback.format_exc())
1705
1705
@hybrid_property
def repo_name(self):
    """Repository name (NAME_SEP separated path inside the store)."""
    return self._repo_name

@repo_name.setter
def repo_name(self, value):
    """Set the name and keep the unique sha1 name-hash column in sync."""
    self._repo_name = value
    digest = hashlib.sha1(safe_str(value)).hexdigest()
    self._repo_name_hash = digest
1714
1714
@classmethod
def normalize_repo_name(cls, repo_name):
    """
    Normalizes os specific repo_name to the format internally stored inside
    database using URL_SEP

    :param repo_name: filesystem-style repository name
    """
    segments = repo_name.split(os.sep)
    return cls.NAME_SEP.join(segments)
1725
1725
@classmethod
def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
    """
    Look a repository up by name.

    :param cache: use the short SQL cache for the query.
    :param identity_cache: prefer the session identity-map based cache;
        only consulted when *cache* is also true.
    """
    session = Session()
    query = session.query(cls).filter(cls.repo_name == repo_name)

    if cache:
        if identity_cache:
            cached = cls.identity_cache(session, 'repo_name', repo_name)
            if cached:
                return cached
        else:
            cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
            query = query.options(
                FromCache("sql_cache_short", cache_key))

    return query.scalar()
1742
1742
@classmethod
def get_by_id_or_repo_name(cls, repoid):
    """Fetch a repository by numeric id or by name, based on *repoid* type."""
    if not isinstance(repoid, (int, long)):
        return cls.get_by_repo_name(repoid)
    try:
        return cls.get(repoid)
    except ValueError:
        return None
1753
1753
@classmethod
def get_by_full_path(cls, repo_full_path):
    """Resolve an absolute filesystem path inside the store to a Repository."""
    relative = repo_full_path.split(cls.base_path(), 1)[-1]
    relative = cls.normalize_repo_name(relative)
    return cls.get_by_repo_name(relative.strip(URL_SEP))
1759
1759
@classmethod
def get_repo_forks(cls, repo_id):
    """Return a query over repositories forked from *repo_id*."""
    forks_query = cls.query().filter(Repository.fork_id == repo_id)
    return forks_query
1763
1763
@classmethod
def base_path(cls):
    """
    Return the filesystem base path under which all repositories live,
    read from the RhodeCodeUi row keyed by NAME_SEP via the short SQL cache.
    """
    query = Session().query(RhodeCodeUi).filter(
        RhodeCodeUi.ui_key == cls.NAME_SEP)
    query = query.options(FromCache("sql_cache_short", "repository_repo_path"))
    return query.one().ui_value
1775
1775
@classmethod
def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
                  case_insensitive=True):
    """
    List repositories, optionally filtered by owner and/or repo group.

    An ``Optional(...)`` default means "no filter" for that argument.
    """
    query = Repository.query()

    if not isinstance(user_id, Optional):
        query = query.filter(Repository.user_id == user_id)
    if not isinstance(group_id, Optional):
        query = query.filter(Repository.group_id == group_id)

    if case_insensitive:
        order_expr = func.lower(Repository.repo_name)
    else:
        order_expr = Repository.repo_name
    return query.order_by(order_expr).all()
1792
1792
@property
def forks(self):
    """
    Return forks of this repo
    """
    return Repository.get_repo_forks(self.repo_id)

@property
def parent(self):
    """
    Returns fork parent
    """
    return self.fork

@property
def just_name(self):
    """Last path segment of the repository name."""
    return self.repo_name.rsplit(self.NAME_SEP, 1)[-1]
1810
1810
@property
def groups_with_parents(self):
    """Chain of parent repo groups, outermost first; [] when ungrouped."""
    if self.group is None:
        return []

    chain = [self.group]
    ancestor = self.group.parent_group
    while ancestor is not None:
        chain.insert(0, ancestor)
        ancestor = ancestor.parent_group
    return chain

@property
def groups_and_repo(self):
    """Tuple of (parent group chain, this repository)."""
    return self.groups_with_parents, self
1831
1831
@LazyProperty
def repo_path(self):
    """
    Returns base full path for that repository means where it actually
    exists on a filesystem
    """
    query = Session().query(RhodeCodeUi).filter(
        RhodeCodeUi.ui_key == self.NAME_SEP)
    query = query.options(FromCache("sql_cache_short", "repository_repo_path"))
    return query.one().ui_value
1842
1842
@property
def repo_full_path(self):
    """Absolute filesystem path of this repository."""
    # repo_name is stored with NAME_SEP separators; turn each segment
    # into a path component below the store root
    segments = [self.repo_path] + self.repo_name.split(self.NAME_SEP)
    return os.path.join(*map(safe_unicode, segments))
1851
1851
@property
def cache_keys(self):
    """
    Returns associated cache keys for that repo
    """
    query = CacheKey.query().filter(
        CacheKey.cache_args == self.repo_name)
    return query.order_by(CacheKey.cache_key).all()
1861
1861
@property
def cached_diffs_relative_dir(self):
    """
    Return a relative to the repository store path of cached diffs
    used for safe display for users, who shouldn't know the absolute store
    path
    """
    cache_basename = self.cached_diffs_dir.split(os.path.sep)[-1]
    return os.path.join(os.path.dirname(self.repo_name), cache_basename)

@property
def cached_diffs_dir(self):
    """Absolute path of this repo's shadow diff-cache directory."""
    store_dir = os.path.dirname(self.repo_full_path)
    return os.path.join(
        store_dir, '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1879
1879
def cached_diffs(self):
    """List entries of the diff cache dir, or [] if it does not exist."""
    cache_dir = self.cached_diffs_dir
    if not os.path.isdir(cache_dir):
        return []
    return os.listdir(cache_dir)

def shadow_repos(self):
    """List shadow-repo directory names created for this repository.

    NOTE(review): matched purely by the '.__shadow_repo_<repo_id>' prefix;
    presumably followed by a suffix in real names — confirm ids such as
    1 vs 10 cannot produce overlapping prefixes.
    """
    prefix = '.__shadow_repo_{}'.format(self.repo_id)
    store_dir = os.path.dirname(self.repo_full_path)
    return [entry for entry in os.listdir(store_dir)
            if entry.startswith(prefix)]
1891
def get_new_name(self, repo_name):
    """
    Return the full repository name for *repo_name* under the currently
    assigned repo group (if any).

    :param repo_name: new last-segment name of the repository
    """
    prefix_parts = self.group.full_path_splitted if self.group else []
    return self.NAME_SEP.join(prefix_parts + [repo_name])
1894
1900
1895 @property
1901 @property
1896 def _config(self):
1902 def _config(self):
1897 """
1903 """
1898 Returns db based config object.
1904 Returns db based config object.
1899 """
1905 """
1900 from rhodecode.lib.utils import make_db_config
1906 from rhodecode.lib.utils import make_db_config
1901 return make_db_config(clear_session=False, repo=self)
1907 return make_db_config(clear_session=False, repo=self)
1902
1908
1903 def permissions(self, with_admins=True, with_owner=True):
1909 def permissions(self, with_admins=True, with_owner=True):
1904 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1910 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1905 q = q.options(joinedload(UserRepoToPerm.repository),
1911 q = q.options(joinedload(UserRepoToPerm.repository),
1906 joinedload(UserRepoToPerm.user),
1912 joinedload(UserRepoToPerm.user),
1907 joinedload(UserRepoToPerm.permission),)
1913 joinedload(UserRepoToPerm.permission),)
1908
1914
1909 # get owners and admins and permissions. We do a trick of re-writing
1915 # get owners and admins and permissions. We do a trick of re-writing
1910 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1916 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1911 # has a global reference and changing one object propagates to all
1917 # has a global reference and changing one object propagates to all
1912 # others. This means if admin is also an owner admin_row that change
1918 # others. This means if admin is also an owner admin_row that change
1913 # would propagate to both objects
1919 # would propagate to both objects
1914 perm_rows = []
1920 perm_rows = []
1915 for _usr in q.all():
1921 for _usr in q.all():
1916 usr = AttributeDict(_usr.user.get_dict())
1922 usr = AttributeDict(_usr.user.get_dict())
1917 usr.permission = _usr.permission.permission_name
1923 usr.permission = _usr.permission.permission_name
1918 perm_rows.append(usr)
1924 perm_rows.append(usr)
1919
1925
1920 # filter the perm rows by 'default' first and then sort them by
1926 # filter the perm rows by 'default' first and then sort them by
1921 # admin,write,read,none permissions sorted again alphabetically in
1927 # admin,write,read,none permissions sorted again alphabetically in
1922 # each group
1928 # each group
1923 perm_rows = sorted(perm_rows, key=display_user_sort)
1929 perm_rows = sorted(perm_rows, key=display_user_sort)
1924
1930
1925 _admin_perm = 'repository.admin'
1931 _admin_perm = 'repository.admin'
1926 owner_row = []
1932 owner_row = []
1927 if with_owner:
1933 if with_owner:
1928 usr = AttributeDict(self.user.get_dict())
1934 usr = AttributeDict(self.user.get_dict())
1929 usr.owner_row = True
1935 usr.owner_row = True
1930 usr.permission = _admin_perm
1936 usr.permission = _admin_perm
1931 owner_row.append(usr)
1937 owner_row.append(usr)
1932
1938
1933 super_admin_rows = []
1939 super_admin_rows = []
1934 if with_admins:
1940 if with_admins:
1935 for usr in User.get_all_super_admins():
1941 for usr in User.get_all_super_admins():
1936 # if this admin is also owner, don't double the record
1942 # if this admin is also owner, don't double the record
1937 if usr.user_id == owner_row[0].user_id:
1943 if usr.user_id == owner_row[0].user_id:
1938 owner_row[0].admin_row = True
1944 owner_row[0].admin_row = True
1939 else:
1945 else:
1940 usr = AttributeDict(usr.get_dict())
1946 usr = AttributeDict(usr.get_dict())
1941 usr.admin_row = True
1947 usr.admin_row = True
1942 usr.permission = _admin_perm
1948 usr.permission = _admin_perm
1943 super_admin_rows.append(usr)
1949 super_admin_rows.append(usr)
1944
1950
1945 return super_admin_rows + owner_row + perm_rows
1951 return super_admin_rows + owner_row + perm_rows
1946
1952
1947 def permission_user_groups(self):
1953 def permission_user_groups(self):
1948 q = UserGroupRepoToPerm.query().filter(
1954 q = UserGroupRepoToPerm.query().filter(
1949 UserGroupRepoToPerm.repository == self)
1955 UserGroupRepoToPerm.repository == self)
1950 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1956 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1951 joinedload(UserGroupRepoToPerm.users_group),
1957 joinedload(UserGroupRepoToPerm.users_group),
1952 joinedload(UserGroupRepoToPerm.permission),)
1958 joinedload(UserGroupRepoToPerm.permission),)
1953
1959
1954 perm_rows = []
1960 perm_rows = []
1955 for _user_group in q.all():
1961 for _user_group in q.all():
1956 usr = AttributeDict(_user_group.users_group.get_dict())
1962 usr = AttributeDict(_user_group.users_group.get_dict())
1957 usr.permission = _user_group.permission.permission_name
1963 usr.permission = _user_group.permission.permission_name
1958 perm_rows.append(usr)
1964 perm_rows.append(usr)
1959
1965
1960 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1966 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1961 return perm_rows
1967 return perm_rows
1962
1968
1963 def get_api_data(self, include_secrets=False):
1969 def get_api_data(self, include_secrets=False):
1964 """
1970 """
1965 Common function for generating repo api data
1971 Common function for generating repo api data
1966
1972
1967 :param include_secrets: See :meth:`User.get_api_data`.
1973 :param include_secrets: See :meth:`User.get_api_data`.
1968
1974
1969 """
1975 """
1970 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1976 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1971 # move this methods on models level.
1977 # move this methods on models level.
1972 from rhodecode.model.settings import SettingsModel
1978 from rhodecode.model.settings import SettingsModel
1973 from rhodecode.model.repo import RepoModel
1979 from rhodecode.model.repo import RepoModel
1974
1980
1975 repo = self
1981 repo = self
1976 _user_id, _time, _reason = self.locked
1982 _user_id, _time, _reason = self.locked
1977
1983
1978 data = {
1984 data = {
1979 'repo_id': repo.repo_id,
1985 'repo_id': repo.repo_id,
1980 'repo_name': repo.repo_name,
1986 'repo_name': repo.repo_name,
1981 'repo_type': repo.repo_type,
1987 'repo_type': repo.repo_type,
1982 'clone_uri': repo.clone_uri or '',
1988 'clone_uri': repo.clone_uri or '',
1983 'push_uri': repo.push_uri or '',
1989 'push_uri': repo.push_uri or '',
1984 'url': RepoModel().get_url(self),
1990 'url': RepoModel().get_url(self),
1985 'private': repo.private,
1991 'private': repo.private,
1986 'created_on': repo.created_on,
1992 'created_on': repo.created_on,
1987 'description': repo.description_safe,
1993 'description': repo.description_safe,
1988 'landing_rev': repo.landing_rev,
1994 'landing_rev': repo.landing_rev,
1989 'owner': repo.user.username,
1995 'owner': repo.user.username,
1990 'fork_of': repo.fork.repo_name if repo.fork else None,
1996 'fork_of': repo.fork.repo_name if repo.fork else None,
1991 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1997 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1992 'enable_statistics': repo.enable_statistics,
1998 'enable_statistics': repo.enable_statistics,
1993 'enable_locking': repo.enable_locking,
1999 'enable_locking': repo.enable_locking,
1994 'enable_downloads': repo.enable_downloads,
2000 'enable_downloads': repo.enable_downloads,
1995 'last_changeset': repo.changeset_cache,
2001 'last_changeset': repo.changeset_cache,
1996 'locked_by': User.get(_user_id).get_api_data(
2002 'locked_by': User.get(_user_id).get_api_data(
1997 include_secrets=include_secrets) if _user_id else None,
2003 include_secrets=include_secrets) if _user_id else None,
1998 'locked_date': time_to_datetime(_time) if _time else None,
2004 'locked_date': time_to_datetime(_time) if _time else None,
1999 'lock_reason': _reason if _reason else None,
2005 'lock_reason': _reason if _reason else None,
2000 }
2006 }
2001
2007
2002 # TODO: mikhail: should be per-repo settings here
2008 # TODO: mikhail: should be per-repo settings here
2003 rc_config = SettingsModel().get_all_settings()
2009 rc_config = SettingsModel().get_all_settings()
2004 repository_fields = str2bool(
2010 repository_fields = str2bool(
2005 rc_config.get('rhodecode_repository_fields'))
2011 rc_config.get('rhodecode_repository_fields'))
2006 if repository_fields:
2012 if repository_fields:
2007 for f in self.extra_fields:
2013 for f in self.extra_fields:
2008 data[f.field_key_prefixed] = f.field_value
2014 data[f.field_key_prefixed] = f.field_value
2009
2015
2010 return data
2016 return data
2011
2017
2012 @classmethod
2018 @classmethod
2013 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2019 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2014 if not lock_time:
2020 if not lock_time:
2015 lock_time = time.time()
2021 lock_time = time.time()
2016 if not lock_reason:
2022 if not lock_reason:
2017 lock_reason = cls.LOCK_AUTOMATIC
2023 lock_reason = cls.LOCK_AUTOMATIC
2018 repo.locked = [user_id, lock_time, lock_reason]
2024 repo.locked = [user_id, lock_time, lock_reason]
2019 Session().add(repo)
2025 Session().add(repo)
2020 Session().commit()
2026 Session().commit()
2021
2027
2022 @classmethod
2028 @classmethod
2023 def unlock(cls, repo):
2029 def unlock(cls, repo):
2024 repo.locked = None
2030 repo.locked = None
2025 Session().add(repo)
2031 Session().add(repo)
2026 Session().commit()
2032 Session().commit()
2027
2033
2028 @classmethod
2034 @classmethod
2029 def getlock(cls, repo):
2035 def getlock(cls, repo):
2030 return repo.locked
2036 return repo.locked
2031
2037
2032 def is_user_lock(self, user_id):
2038 def is_user_lock(self, user_id):
2033 if self.lock[0]:
2039 if self.lock[0]:
2034 lock_user_id = safe_int(self.lock[0])
2040 lock_user_id = safe_int(self.lock[0])
2035 user_id = safe_int(user_id)
2041 user_id = safe_int(user_id)
2036 # both are ints, and they are equal
2042 # both are ints, and they are equal
2037 return all([lock_user_id, user_id]) and lock_user_id == user_id
2043 return all([lock_user_id, user_id]) and lock_user_id == user_id
2038
2044
2039 return False
2045 return False
2040
2046
2041 def get_locking_state(self, action, user_id, only_when_enabled=True):
2047 def get_locking_state(self, action, user_id, only_when_enabled=True):
2042 """
2048 """
2043 Checks locking on this repository, if locking is enabled and lock is
2049 Checks locking on this repository, if locking is enabled and lock is
2044 present returns a tuple of make_lock, locked, locked_by.
2050 present returns a tuple of make_lock, locked, locked_by.
2045 make_lock can have 3 states None (do nothing) True, make lock
2051 make_lock can have 3 states None (do nothing) True, make lock
2046 False release lock, This value is later propagated to hooks, which
2052 False release lock, This value is later propagated to hooks, which
2047 do the locking. Think about this as signals passed to hooks what to do.
2053 do the locking. Think about this as signals passed to hooks what to do.
2048
2054
2049 """
2055 """
2050 # TODO: johbo: This is part of the business logic and should be moved
2056 # TODO: johbo: This is part of the business logic and should be moved
2051 # into the RepositoryModel.
2057 # into the RepositoryModel.
2052
2058
2053 if action not in ('push', 'pull'):
2059 if action not in ('push', 'pull'):
2054 raise ValueError("Invalid action value: %s" % repr(action))
2060 raise ValueError("Invalid action value: %s" % repr(action))
2055
2061
2056 # defines if locked error should be thrown to user
2062 # defines if locked error should be thrown to user
2057 currently_locked = False
2063 currently_locked = False
2058 # defines if new lock should be made, tri-state
2064 # defines if new lock should be made, tri-state
2059 make_lock = None
2065 make_lock = None
2060 repo = self
2066 repo = self
2061 user = User.get(user_id)
2067 user = User.get(user_id)
2062
2068
2063 lock_info = repo.locked
2069 lock_info = repo.locked
2064
2070
2065 if repo and (repo.enable_locking or not only_when_enabled):
2071 if repo and (repo.enable_locking or not only_when_enabled):
2066 if action == 'push':
2072 if action == 'push':
2067 # check if it's already locked !, if it is compare users
2073 # check if it's already locked !, if it is compare users
2068 locked_by_user_id = lock_info[0]
2074 locked_by_user_id = lock_info[0]
2069 if user.user_id == locked_by_user_id:
2075 if user.user_id == locked_by_user_id:
2070 log.debug(
2076 log.debug(
2071 'Got `push` action from user %s, now unlocking', user)
2077 'Got `push` action from user %s, now unlocking', user)
2072 # unlock if we have push from user who locked
2078 # unlock if we have push from user who locked
2073 make_lock = False
2079 make_lock = False
2074 else:
2080 else:
2075 # we're not the same user who locked, ban with
2081 # we're not the same user who locked, ban with
2076 # code defined in settings (default is 423 HTTP Locked) !
2082 # code defined in settings (default is 423 HTTP Locked) !
2077 log.debug('Repo %s is currently locked by %s', repo, user)
2083 log.debug('Repo %s is currently locked by %s', repo, user)
2078 currently_locked = True
2084 currently_locked = True
2079 elif action == 'pull':
2085 elif action == 'pull':
2080 # [0] user [1] date
2086 # [0] user [1] date
2081 if lock_info[0] and lock_info[1]:
2087 if lock_info[0] and lock_info[1]:
2082 log.debug('Repo %s is currently locked by %s', repo, user)
2088 log.debug('Repo %s is currently locked by %s', repo, user)
2083 currently_locked = True
2089 currently_locked = True
2084 else:
2090 else:
2085 log.debug('Setting lock on repo %s by %s', repo, user)
2091 log.debug('Setting lock on repo %s by %s', repo, user)
2086 make_lock = True
2092 make_lock = True
2087
2093
2088 else:
2094 else:
2089 log.debug('Repository %s do not have locking enabled', repo)
2095 log.debug('Repository %s do not have locking enabled', repo)
2090
2096
2091 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2097 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2092 make_lock, currently_locked, lock_info)
2098 make_lock, currently_locked, lock_info)
2093
2099
2094 from rhodecode.lib.auth import HasRepoPermissionAny
2100 from rhodecode.lib.auth import HasRepoPermissionAny
2095 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2101 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2096 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2102 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2097 # if we don't have at least write permission we cannot make a lock
2103 # if we don't have at least write permission we cannot make a lock
2098 log.debug('lock state reset back to FALSE due to lack '
2104 log.debug('lock state reset back to FALSE due to lack '
2099 'of at least read permission')
2105 'of at least read permission')
2100 make_lock = False
2106 make_lock = False
2101
2107
2102 return make_lock, currently_locked, lock_info
2108 return make_lock, currently_locked, lock_info
2103
2109
2104 @property
2110 @property
2105 def last_db_change(self):
2111 def last_db_change(self):
2106 return self.updated_on
2112 return self.updated_on
2107
2113
2108 @property
2114 @property
2109 def clone_uri_hidden(self):
2115 def clone_uri_hidden(self):
2110 clone_uri = self.clone_uri
2116 clone_uri = self.clone_uri
2111 if clone_uri:
2117 if clone_uri:
2112 import urlobject
2118 import urlobject
2113 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2119 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2114 if url_obj.password:
2120 if url_obj.password:
2115 clone_uri = url_obj.with_password('*****')
2121 clone_uri = url_obj.with_password('*****')
2116 return clone_uri
2122 return clone_uri
2117
2123
2118 @property
2124 @property
2119 def push_uri_hidden(self):
2125 def push_uri_hidden(self):
2120 push_uri = self.push_uri
2126 push_uri = self.push_uri
2121 if push_uri:
2127 if push_uri:
2122 import urlobject
2128 import urlobject
2123 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2129 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2124 if url_obj.password:
2130 if url_obj.password:
2125 push_uri = url_obj.with_password('*****')
2131 push_uri = url_obj.with_password('*****')
2126 return push_uri
2132 return push_uri
2127
2133
2128 def clone_url(self, **override):
2134 def clone_url(self, **override):
2129 from rhodecode.model.settings import SettingsModel
2135 from rhodecode.model.settings import SettingsModel
2130
2136
2131 uri_tmpl = None
2137 uri_tmpl = None
2132 if 'with_id' in override:
2138 if 'with_id' in override:
2133 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2139 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2134 del override['with_id']
2140 del override['with_id']
2135
2141
2136 if 'uri_tmpl' in override:
2142 if 'uri_tmpl' in override:
2137 uri_tmpl = override['uri_tmpl']
2143 uri_tmpl = override['uri_tmpl']
2138 del override['uri_tmpl']
2144 del override['uri_tmpl']
2139
2145
2140 ssh = False
2146 ssh = False
2141 if 'ssh' in override:
2147 if 'ssh' in override:
2142 ssh = True
2148 ssh = True
2143 del override['ssh']
2149 del override['ssh']
2144
2150
2145 # we didn't override our tmpl from **overrides
2151 # we didn't override our tmpl from **overrides
2146 if not uri_tmpl:
2152 if not uri_tmpl:
2147 rc_config = SettingsModel().get_all_settings(cache=True)
2153 rc_config = SettingsModel().get_all_settings(cache=True)
2148 if ssh:
2154 if ssh:
2149 uri_tmpl = rc_config.get(
2155 uri_tmpl = rc_config.get(
2150 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2156 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2151 else:
2157 else:
2152 uri_tmpl = rc_config.get(
2158 uri_tmpl = rc_config.get(
2153 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2159 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2154
2160
2155 request = get_current_request()
2161 request = get_current_request()
2156 return get_clone_url(request=request,
2162 return get_clone_url(request=request,
2157 uri_tmpl=uri_tmpl,
2163 uri_tmpl=uri_tmpl,
2158 repo_name=self.repo_name,
2164 repo_name=self.repo_name,
2159 repo_id=self.repo_id, **override)
2165 repo_id=self.repo_id, **override)
2160
2166
2161 def set_state(self, state):
2167 def set_state(self, state):
2162 self.repo_state = state
2168 self.repo_state = state
2163 Session().add(self)
2169 Session().add(self)
2164 #==========================================================================
2170 #==========================================================================
2165 # SCM PROPERTIES
2171 # SCM PROPERTIES
2166 #==========================================================================
2172 #==========================================================================
2167
2173
2168 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2174 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2169 return get_commit_safe(
2175 return get_commit_safe(
2170 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2176 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2171
2177
2172 def get_changeset(self, rev=None, pre_load=None):
2178 def get_changeset(self, rev=None, pre_load=None):
2173 warnings.warn("Use get_commit", DeprecationWarning)
2179 warnings.warn("Use get_commit", DeprecationWarning)
2174 commit_id = None
2180 commit_id = None
2175 commit_idx = None
2181 commit_idx = None
2176 if isinstance(rev, basestring):
2182 if isinstance(rev, basestring):
2177 commit_id = rev
2183 commit_id = rev
2178 else:
2184 else:
2179 commit_idx = rev
2185 commit_idx = rev
2180 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2186 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2181 pre_load=pre_load)
2187 pre_load=pre_load)
2182
2188
2183 def get_landing_commit(self):
2189 def get_landing_commit(self):
2184 """
2190 """
2185 Returns landing commit, or if that doesn't exist returns the tip
2191 Returns landing commit, or if that doesn't exist returns the tip
2186 """
2192 """
2187 _rev_type, _rev = self.landing_rev
2193 _rev_type, _rev = self.landing_rev
2188 commit = self.get_commit(_rev)
2194 commit = self.get_commit(_rev)
2189 if isinstance(commit, EmptyCommit):
2195 if isinstance(commit, EmptyCommit):
2190 return self.get_commit()
2196 return self.get_commit()
2191 return commit
2197 return commit
2192
2198
2193 def update_commit_cache(self, cs_cache=None, config=None):
2199 def update_commit_cache(self, cs_cache=None, config=None):
2194 """
2200 """
2195 Update cache of last changeset for repository, keys should be::
2201 Update cache of last changeset for repository, keys should be::
2196
2202
2197 short_id
2203 short_id
2198 raw_id
2204 raw_id
2199 revision
2205 revision
2200 parents
2206 parents
2201 message
2207 message
2202 date
2208 date
2203 author
2209 author
2204
2210
2205 :param cs_cache:
2211 :param cs_cache:
2206 """
2212 """
2207 from rhodecode.lib.vcs.backends.base import BaseChangeset
2213 from rhodecode.lib.vcs.backends.base import BaseChangeset
2208 if cs_cache is None:
2214 if cs_cache is None:
2209 # use no-cache version here
2215 # use no-cache version here
2210 scm_repo = self.scm_instance(cache=False, config=config)
2216 scm_repo = self.scm_instance(cache=False, config=config)
2211 if scm_repo:
2217 if scm_repo:
2212 cs_cache = scm_repo.get_commit(
2218 cs_cache = scm_repo.get_commit(
2213 pre_load=["author", "date", "message", "parents"])
2219 pre_load=["author", "date", "message", "parents"])
2214 else:
2220 else:
2215 cs_cache = EmptyCommit()
2221 cs_cache = EmptyCommit()
2216
2222
2217 if isinstance(cs_cache, BaseChangeset):
2223 if isinstance(cs_cache, BaseChangeset):
2218 cs_cache = cs_cache.__json__()
2224 cs_cache = cs_cache.__json__()
2219
2225
2220 def is_outdated(new_cs_cache):
2226 def is_outdated(new_cs_cache):
2221 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2227 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2222 new_cs_cache['revision'] != self.changeset_cache['revision']):
2228 new_cs_cache['revision'] != self.changeset_cache['revision']):
2223 return True
2229 return True
2224 return False
2230 return False
2225
2231
2226 # check if we have maybe already latest cached revision
2232 # check if we have maybe already latest cached revision
2227 if is_outdated(cs_cache) or not self.changeset_cache:
2233 if is_outdated(cs_cache) or not self.changeset_cache:
2228 _default = datetime.datetime.fromtimestamp(0)
2234 _default = datetime.datetime.fromtimestamp(0)
2229 last_change = cs_cache.get('date') or _default
2235 last_change = cs_cache.get('date') or _default
2230 log.debug('updated repo %s with new cs cache %s',
2236 log.debug('updated repo %s with new cs cache %s',
2231 self.repo_name, cs_cache)
2237 self.repo_name, cs_cache)
2232 self.updated_on = last_change
2238 self.updated_on = last_change
2233 self.changeset_cache = cs_cache
2239 self.changeset_cache = cs_cache
2234 Session().add(self)
2240 Session().add(self)
2235 Session().commit()
2241 Session().commit()
2236 else:
2242 else:
2237 log.debug('Skipping update_commit_cache for repo:`%s` '
2243 log.debug('Skipping update_commit_cache for repo:`%s` '
2238 'commit already with latest changes', self.repo_name)
2244 'commit already with latest changes', self.repo_name)
2239
2245
2240 @property
2246 @property
2241 def tip(self):
2247 def tip(self):
2242 return self.get_commit('tip')
2248 return self.get_commit('tip')
2243
2249
2244 @property
2250 @property
2245 def author(self):
2251 def author(self):
2246 return self.tip.author
2252 return self.tip.author
2247
2253
2248 @property
2254 @property
2249 def last_change(self):
2255 def last_change(self):
2250 return self.scm_instance().last_change
2256 return self.scm_instance().last_change
2251
2257
2252 def get_comments(self, revisions=None):
2258 def get_comments(self, revisions=None):
2253 """
2259 """
2254 Returns comments for this repository grouped by revisions
2260 Returns comments for this repository grouped by revisions
2255
2261
2256 :param revisions: filter query by revisions only
2262 :param revisions: filter query by revisions only
2257 """
2263 """
2258 cmts = ChangesetComment.query()\
2264 cmts = ChangesetComment.query()\
2259 .filter(ChangesetComment.repo == self)
2265 .filter(ChangesetComment.repo == self)
2260 if revisions:
2266 if revisions:
2261 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2267 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2262 grouped = collections.defaultdict(list)
2268 grouped = collections.defaultdict(list)
2263 for cmt in cmts.all():
2269 for cmt in cmts.all():
2264 grouped[cmt.revision].append(cmt)
2270 grouped[cmt.revision].append(cmt)
2265 return grouped
2271 return grouped
2266
2272
2267 def statuses(self, revisions=None):
2273 def statuses(self, revisions=None):
2268 """
2274 """
2269 Returns statuses for this repository
2275 Returns statuses for this repository
2270
2276
2271 :param revisions: list of revisions to get statuses for
2277 :param revisions: list of revisions to get statuses for
2272 """
2278 """
2273 statuses = ChangesetStatus.query()\
2279 statuses = ChangesetStatus.query()\
2274 .filter(ChangesetStatus.repo == self)\
2280 .filter(ChangesetStatus.repo == self)\
2275 .filter(ChangesetStatus.version == 0)
2281 .filter(ChangesetStatus.version == 0)
2276
2282
2277 if revisions:
2283 if revisions:
2278 # Try doing the filtering in chunks to avoid hitting limits
2284 # Try doing the filtering in chunks to avoid hitting limits
2279 size = 500
2285 size = 500
2280 status_results = []
2286 status_results = []
2281 for chunk in xrange(0, len(revisions), size):
2287 for chunk in xrange(0, len(revisions), size):
2282 status_results += statuses.filter(
2288 status_results += statuses.filter(
2283 ChangesetStatus.revision.in_(
2289 ChangesetStatus.revision.in_(
2284 revisions[chunk: chunk+size])
2290 revisions[chunk: chunk+size])
2285 ).all()
2291 ).all()
2286 else:
2292 else:
2287 status_results = statuses.all()
2293 status_results = statuses.all()
2288
2294
2289 grouped = {}
2295 grouped = {}
2290
2296
2291 # maybe we have open new pullrequest without a status?
2297 # maybe we have open new pullrequest without a status?
2292 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2298 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2293 status_lbl = ChangesetStatus.get_status_lbl(stat)
2299 status_lbl = ChangesetStatus.get_status_lbl(stat)
2294 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2300 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2295 for rev in pr.revisions:
2301 for rev in pr.revisions:
2296 pr_id = pr.pull_request_id
2302 pr_id = pr.pull_request_id
2297 pr_repo = pr.target_repo.repo_name
2303 pr_repo = pr.target_repo.repo_name
2298 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2304 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2299
2305
2300 for stat in status_results:
2306 for stat in status_results:
2301 pr_id = pr_repo = None
2307 pr_id = pr_repo = None
2302 if stat.pull_request:
2308 if stat.pull_request:
2303 pr_id = stat.pull_request.pull_request_id
2309 pr_id = stat.pull_request.pull_request_id
2304 pr_repo = stat.pull_request.target_repo.repo_name
2310 pr_repo = stat.pull_request.target_repo.repo_name
2305 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2311 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2306 pr_id, pr_repo]
2312 pr_id, pr_repo]
2307 return grouped
2313 return grouped
2308
2314
2309 # ==========================================================================
2315 # ==========================================================================
2310 # SCM CACHE INSTANCE
2316 # SCM CACHE INSTANCE
2311 # ==========================================================================
2317 # ==========================================================================
2312
2318
2313 def scm_instance(self, **kwargs):
2319 def scm_instance(self, **kwargs):
2314 import rhodecode
2320 import rhodecode
2315
2321
2316 # Passing a config will not hit the cache currently only used
2322 # Passing a config will not hit the cache currently only used
2317 # for repo2dbmapper
2323 # for repo2dbmapper
2318 config = kwargs.pop('config', None)
2324 config = kwargs.pop('config', None)
2319 cache = kwargs.pop('cache', None)
2325 cache = kwargs.pop('cache', None)
2320 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2326 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2321 # if cache is NOT defined use default global, else we have a full
2327 # if cache is NOT defined use default global, else we have a full
2322 # control over cache behaviour
2328 # control over cache behaviour
2323 if cache is None and full_cache and not config:
2329 if cache is None and full_cache and not config:
2324 return self._get_instance_cached()
2330 return self._get_instance_cached()
2325 return self._get_instance(cache=bool(cache), config=config)
2331 return self._get_instance(cache=bool(cache), config=config)
2326
2332
2327 def _get_instance_cached(self):
2333 def _get_instance_cached(self):
2328 @cache_region('long_term')
2334 @cache_region('long_term')
2329 def _get_repo(cache_key):
2335 def _get_repo(cache_key):
2330 return self._get_instance()
2336 return self._get_instance()
2331
2337
2332 invalidator_context = CacheKey.repo_context_cache(
2338 invalidator_context = CacheKey.repo_context_cache(
2333 _get_repo, self.repo_name, None, thread_scoped=True)
2339 _get_repo, self.repo_name, None, thread_scoped=True)
2334
2340
2335 with invalidator_context as context:
2341 with invalidator_context as context:
2336 context.invalidate()
2342 context.invalidate()
2337 repo = context.compute()
2343 repo = context.compute()
2338
2344
2339 return repo
2345 return repo
2340
2346
2341 def _get_instance(self, cache=True, config=None):
2347 def _get_instance(self, cache=True, config=None):
2342 config = config or self._config
2348 config = config or self._config
2343 custom_wire = {
2349 custom_wire = {
2344 'cache': cache # controls the vcs.remote cache
2350 'cache': cache # controls the vcs.remote cache
2345 }
2351 }
2346 repo = get_vcs_instance(
2352 repo = get_vcs_instance(
2347 repo_path=safe_str(self.repo_full_path),
2353 repo_path=safe_str(self.repo_full_path),
2348 config=config,
2354 config=config,
2349 with_wire=custom_wire,
2355 with_wire=custom_wire,
2350 create=False,
2356 create=False,
2351 _vcs_alias=self.repo_type)
2357 _vcs_alias=self.repo_type)
2352
2358
2353 return repo
2359 return repo
2354
2360
2355 def __json__(self):
2361 def __json__(self):
2356 return {'landing_rev': self.landing_rev}
2362 return {'landing_rev': self.landing_rev}
2357
2363
2358 def get_dict(self):
2364 def get_dict(self):
2359
2365
2360 # Since we transformed `repo_name` to a hybrid property, we need to
2366 # Since we transformed `repo_name` to a hybrid property, we need to
2361 # keep compatibility with the code which uses `repo_name` field.
2367 # keep compatibility with the code which uses `repo_name` field.
2362
2368
2363 result = super(Repository, self).get_dict()
2369 result = super(Repository, self).get_dict()
2364 result['repo_name'] = result.pop('_repo_name', None)
2370 result['repo_name'] = result.pop('_repo_name', None)
2365 return result
2371 return result
2366
2372
2367
2373
class RepoGroup(Base, BaseModel):
    """
    A (possibly nested) group of repositories. ``group_name`` stores the
    full slash-separated path; nesting is modelled via the self-referential
    ``group_parent_id`` foreign key.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for direct template rendering.
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def _generate_choice(cls, repo_group):
        # Build a single (id, label) select2 choice for *repo_group*.
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return (id, label) choices for a group selector, sorted by label.

        :param groups: optional iterable of groups; defaults to all groups.
        :param show_empty_group: prepend a ``(-1, '-- No parent --')`` entry.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        # Separator used in group paths/URLs.
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """
        Fetch a group by its full name, optionally case-insensitively and
        optionally through the short SQL cache region.
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the personal repo group owned by *user_id*, or ``None``
        (the default/anonymous user never has one).
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user).scalar()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        Return all repo groups, optionally filtered by owner and/or parent,
        ordered by name. ``Optional`` sentinels mean "no filter".
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self):
        """
        List of ancestor groups, root first; walking stops after 10 levels
        to guard against accidental cycles.
        """
        parents_recursion_limit = 10
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error(('more than %s parents found for group %s, stopping '
                           'recursive parent fetching' % (parents_recursion_limit, self)))
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        # Query (not a list) of the direct child groups.
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # Last path segment of the full group name.
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # Query of repositories directly inside this group, ordered by name.
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Total number of repositories in this group and all descendants."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        # Depth-first collection of this group plus all nested groups
        # (and optionally their repositories).
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def permissions(self, with_admins=True, with_owner=True):
        """
        Return permission rows for this group: explicit user permissions,
        optionally prefixed with super-admin and owner synthetic rows.
        """
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record;
                # owner_row may be empty when with_owner=False, guard it
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return permission rows granted to user groups on this group."""
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data
2651
2657
class Permission(Base, BaseModel):
    """
    Catalogue of all permission names known to the system, plus helpers to
    query a user's effective default permissions on repos/groups.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # (permission_name, human readable description) for every permission
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2756
2762
2757 def __unicode__(self):
2763 def __unicode__(self):
2758 return u"<%s('%s:%s')>" % (
2764 return u"<%s('%s:%s')>" % (
2759 self.__class__.__name__, self.permission_id, self.permission_name
2765 self.__class__.__name__, self.permission_id, self.permission_name
2760 )
2766 )
2761
2767
2762 @classmethod
2768 @classmethod
2763 def get_by_key(cls, key):
2769 def get_by_key(cls, key):
2764 return cls.query().filter(cls.permission_name == key).scalar()
2770 return cls.query().filter(cls.permission_name == key).scalar()
2765
2771
2766 @classmethod
2772 @classmethod
2767 def get_default_repo_perms(cls, user_id, repo_id=None):
2773 def get_default_repo_perms(cls, user_id, repo_id=None):
2768 q = Session().query(UserRepoToPerm, Repository, Permission)\
2774 q = Session().query(UserRepoToPerm, Repository, Permission)\
2769 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2775 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2770 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2776 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2771 .filter(UserRepoToPerm.user_id == user_id)
2777 .filter(UserRepoToPerm.user_id == user_id)
2772 if repo_id:
2778 if repo_id:
2773 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2779 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2774 return q.all()
2780 return q.all()
2775
2781
2776 @classmethod
2782 @classmethod
2777 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2783 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2778 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2784 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2779 .join(
2785 .join(
2780 Permission,
2786 Permission,
2781 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2787 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2782 .join(
2788 .join(
2783 Repository,
2789 Repository,
2784 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2790 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2785 .join(
2791 .join(
2786 UserGroup,
2792 UserGroup,
2787 UserGroupRepoToPerm.users_group_id ==
2793 UserGroupRepoToPerm.users_group_id ==
2788 UserGroup.users_group_id)\
2794 UserGroup.users_group_id)\
2789 .join(
2795 .join(
2790 UserGroupMember,
2796 UserGroupMember,
2791 UserGroupRepoToPerm.users_group_id ==
2797 UserGroupRepoToPerm.users_group_id ==
2792 UserGroupMember.users_group_id)\
2798 UserGroupMember.users_group_id)\
2793 .filter(
2799 .filter(
2794 UserGroupMember.user_id == user_id,
2800 UserGroupMember.user_id == user_id,
2795 UserGroup.users_group_active == true())
2801 UserGroup.users_group_active == true())
2796 if repo_id:
2802 if repo_id:
2797 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2803 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2798 return q.all()
2804 return q.all()
2799
2805
2800 @classmethod
2806 @classmethod
2801 def get_default_group_perms(cls, user_id, repo_group_id=None):
2807 def get_default_group_perms(cls, user_id, repo_group_id=None):
2802 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2808 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2803 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2809 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2804 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2810 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2805 .filter(UserRepoGroupToPerm.user_id == user_id)
2811 .filter(UserRepoGroupToPerm.user_id == user_id)
2806 if repo_group_id:
2812 if repo_group_id:
2807 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2813 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2808 return q.all()
2814 return q.all()
2809
2815
2810 @classmethod
2816 @classmethod
2811 def get_default_group_perms_from_user_group(
2817 def get_default_group_perms_from_user_group(
2812 cls, user_id, repo_group_id=None):
2818 cls, user_id, repo_group_id=None):
2813 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2819 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2814 .join(
2820 .join(
2815 Permission,
2821 Permission,
2816 UserGroupRepoGroupToPerm.permission_id ==
2822 UserGroupRepoGroupToPerm.permission_id ==
2817 Permission.permission_id)\
2823 Permission.permission_id)\
2818 .join(
2824 .join(
2819 RepoGroup,
2825 RepoGroup,
2820 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2826 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2821 .join(
2827 .join(
2822 UserGroup,
2828 UserGroup,
2823 UserGroupRepoGroupToPerm.users_group_id ==
2829 UserGroupRepoGroupToPerm.users_group_id ==
2824 UserGroup.users_group_id)\
2830 UserGroup.users_group_id)\
2825 .join(
2831 .join(
2826 UserGroupMember,
2832 UserGroupMember,
2827 UserGroupRepoGroupToPerm.users_group_id ==
2833 UserGroupRepoGroupToPerm.users_group_id ==
2828 UserGroupMember.users_group_id)\
2834 UserGroupMember.users_group_id)\
2829 .filter(
2835 .filter(
2830 UserGroupMember.user_id == user_id,
2836 UserGroupMember.user_id == user_id,
2831 UserGroup.users_group_active == true())
2837 UserGroup.users_group_active == true())
2832 if repo_group_id:
2838 if repo_group_id:
2833 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2839 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2834 return q.all()
2840 return q.all()
2835
2841
2836 @classmethod
2842 @classmethod
2837 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2843 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2838 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2844 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2839 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2845 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2840 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2846 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2841 .filter(UserUserGroupToPerm.user_id == user_id)
2847 .filter(UserUserGroupToPerm.user_id == user_id)
2842 if user_group_id:
2848 if user_group_id:
2843 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2849 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2844 return q.all()
2850 return q.all()
2845
2851
2846 @classmethod
2852 @classmethod
2847 def get_default_user_group_perms_from_user_group(
2853 def get_default_user_group_perms_from_user_group(
2848 cls, user_id, user_group_id=None):
2854 cls, user_id, user_group_id=None):
2849 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2855 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2850 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2856 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2851 .join(
2857 .join(
2852 Permission,
2858 Permission,
2853 UserGroupUserGroupToPerm.permission_id ==
2859 UserGroupUserGroupToPerm.permission_id ==
2854 Permission.permission_id)\
2860 Permission.permission_id)\
2855 .join(
2861 .join(
2856 TargetUserGroup,
2862 TargetUserGroup,
2857 UserGroupUserGroupToPerm.target_user_group_id ==
2863 UserGroupUserGroupToPerm.target_user_group_id ==
2858 TargetUserGroup.users_group_id)\
2864 TargetUserGroup.users_group_id)\
2859 .join(
2865 .join(
2860 UserGroup,
2866 UserGroup,
2861 UserGroupUserGroupToPerm.user_group_id ==
2867 UserGroupUserGroupToPerm.user_group_id ==
2862 UserGroup.users_group_id)\
2868 UserGroup.users_group_id)\
2863 .join(
2869 .join(
2864 UserGroupMember,
2870 UserGroupMember,
2865 UserGroupUserGroupToPerm.user_group_id ==
2871 UserGroupUserGroupToPerm.user_group_id ==
2866 UserGroupMember.users_group_id)\
2872 UserGroupMember.users_group_id)\
2867 .filter(
2873 .filter(
2868 UserGroupMember.user_id == user_id,
2874 UserGroupMember.user_id == user_id,
2869 UserGroup.users_group_active == true())
2875 UserGroup.users_group_active == true())
2870 if user_group_id:
2876 if user_group_id:
2871 q = q.filter(
2877 q = q.filter(
2872 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2878 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2873
2879
2874 return q.all()
2880 return q.all()
2875
2881
2876
2882
2877 class UserRepoToPerm(Base, BaseModel):
2883 class UserRepoToPerm(Base, BaseModel):
2878 __tablename__ = 'repo_to_perm'
2884 __tablename__ = 'repo_to_perm'
2879 __table_args__ = (
2885 __table_args__ = (
2880 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2886 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2881 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2887 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2882 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2888 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2883 )
2889 )
2884 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2890 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2885 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2891 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2886 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2892 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2887 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2893 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2888
2894
2889 user = relationship('User')
2895 user = relationship('User')
2890 repository = relationship('Repository')
2896 repository = relationship('Repository')
2891 permission = relationship('Permission')
2897 permission = relationship('Permission')
2892
2898
2893 @classmethod
2899 @classmethod
2894 def create(cls, user, repository, permission):
2900 def create(cls, user, repository, permission):
2895 n = cls()
2901 n = cls()
2896 n.user = user
2902 n.user = user
2897 n.repository = repository
2903 n.repository = repository
2898 n.permission = permission
2904 n.permission = permission
2899 Session().add(n)
2905 Session().add(n)
2900 return n
2906 return n
2901
2907
2902 def __unicode__(self):
2908 def __unicode__(self):
2903 return u'<%s => %s >' % (self.user, self.repository)
2909 return u'<%s => %s >' % (self.user, self.repository)
2904
2910
2905
2911
2906 class UserUserGroupToPerm(Base, BaseModel):
2912 class UserUserGroupToPerm(Base, BaseModel):
2907 __tablename__ = 'user_user_group_to_perm'
2913 __tablename__ = 'user_user_group_to_perm'
2908 __table_args__ = (
2914 __table_args__ = (
2909 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2915 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2910 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2916 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2911 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2917 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2912 )
2918 )
2913 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2919 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2914 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2920 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2915 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2921 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2916 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2922 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2917
2923
2918 user = relationship('User')
2924 user = relationship('User')
2919 user_group = relationship('UserGroup')
2925 user_group = relationship('UserGroup')
2920 permission = relationship('Permission')
2926 permission = relationship('Permission')
2921
2927
2922 @classmethod
2928 @classmethod
2923 def create(cls, user, user_group, permission):
2929 def create(cls, user, user_group, permission):
2924 n = cls()
2930 n = cls()
2925 n.user = user
2931 n.user = user
2926 n.user_group = user_group
2932 n.user_group = user_group
2927 n.permission = permission
2933 n.permission = permission
2928 Session().add(n)
2934 Session().add(n)
2929 return n
2935 return n
2930
2936
2931 def __unicode__(self):
2937 def __unicode__(self):
2932 return u'<%s => %s >' % (self.user, self.user_group)
2938 return u'<%s => %s >' % (self.user, self.user_group)
2933
2939
2934
2940
2935 class UserToPerm(Base, BaseModel):
2941 class UserToPerm(Base, BaseModel):
2936 __tablename__ = 'user_to_perm'
2942 __tablename__ = 'user_to_perm'
2937 __table_args__ = (
2943 __table_args__ = (
2938 UniqueConstraint('user_id', 'permission_id'),
2944 UniqueConstraint('user_id', 'permission_id'),
2939 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2945 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2940 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2946 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2941 )
2947 )
2942 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2948 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2943 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2949 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2944 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2950 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2945
2951
2946 user = relationship('User')
2952 user = relationship('User')
2947 permission = relationship('Permission', lazy='joined')
2953 permission = relationship('Permission', lazy='joined')
2948
2954
2949 def __unicode__(self):
2955 def __unicode__(self):
2950 return u'<%s => %s >' % (self.user, self.permission)
2956 return u'<%s => %s >' % (self.user, self.permission)
2951
2957
2952
2958
2953 class UserGroupRepoToPerm(Base, BaseModel):
2959 class UserGroupRepoToPerm(Base, BaseModel):
2954 __tablename__ = 'users_group_repo_to_perm'
2960 __tablename__ = 'users_group_repo_to_perm'
2955 __table_args__ = (
2961 __table_args__ = (
2956 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2962 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2957 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2958 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2959 )
2965 )
2960 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2966 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2961 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2967 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2962 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2968 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2963 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2969 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2964
2970
2965 users_group = relationship('UserGroup')
2971 users_group = relationship('UserGroup')
2966 permission = relationship('Permission')
2972 permission = relationship('Permission')
2967 repository = relationship('Repository')
2973 repository = relationship('Repository')
2968
2974
2969 @classmethod
2975 @classmethod
2970 def create(cls, users_group, repository, permission):
2976 def create(cls, users_group, repository, permission):
2971 n = cls()
2977 n = cls()
2972 n.users_group = users_group
2978 n.users_group = users_group
2973 n.repository = repository
2979 n.repository = repository
2974 n.permission = permission
2980 n.permission = permission
2975 Session().add(n)
2981 Session().add(n)
2976 return n
2982 return n
2977
2983
2978 def __unicode__(self):
2984 def __unicode__(self):
2979 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2985 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2980
2986
2981
2987
2982 class UserGroupUserGroupToPerm(Base, BaseModel):
2988 class UserGroupUserGroupToPerm(Base, BaseModel):
2983 __tablename__ = 'user_group_user_group_to_perm'
2989 __tablename__ = 'user_group_user_group_to_perm'
2984 __table_args__ = (
2990 __table_args__ = (
2985 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2991 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2986 CheckConstraint('target_user_group_id != user_group_id'),
2992 CheckConstraint('target_user_group_id != user_group_id'),
2987 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2988 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2989 )
2995 )
2990 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2996 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2991 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2997 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2992 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2998 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2993 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2999 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2994
3000
2995 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3001 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2996 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3002 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2997 permission = relationship('Permission')
3003 permission = relationship('Permission')
2998
3004
2999 @classmethod
3005 @classmethod
3000 def create(cls, target_user_group, user_group, permission):
3006 def create(cls, target_user_group, user_group, permission):
3001 n = cls()
3007 n = cls()
3002 n.target_user_group = target_user_group
3008 n.target_user_group = target_user_group
3003 n.user_group = user_group
3009 n.user_group = user_group
3004 n.permission = permission
3010 n.permission = permission
3005 Session().add(n)
3011 Session().add(n)
3006 return n
3012 return n
3007
3013
3008 def __unicode__(self):
3014 def __unicode__(self):
3009 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3015 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3010
3016
3011
3017
3012 class UserGroupToPerm(Base, BaseModel):
3018 class UserGroupToPerm(Base, BaseModel):
3013 __tablename__ = 'users_group_to_perm'
3019 __tablename__ = 'users_group_to_perm'
3014 __table_args__ = (
3020 __table_args__ = (
3015 UniqueConstraint('users_group_id', 'permission_id',),
3021 UniqueConstraint('users_group_id', 'permission_id',),
3016 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3022 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3017 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3023 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3018 )
3024 )
3019 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3025 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3020 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3026 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3021 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3027 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3022
3028
3023 users_group = relationship('UserGroup')
3029 users_group = relationship('UserGroup')
3024 permission = relationship('Permission')
3030 permission = relationship('Permission')
3025
3031
3026
3032
3027 class UserRepoGroupToPerm(Base, BaseModel):
3033 class UserRepoGroupToPerm(Base, BaseModel):
3028 __tablename__ = 'user_repo_group_to_perm'
3034 __tablename__ = 'user_repo_group_to_perm'
3029 __table_args__ = (
3035 __table_args__ = (
3030 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3036 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3031 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3032 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3033 )
3039 )
3034
3040
3035 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3041 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3036 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3042 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3037 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3043 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3038 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3044 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3039
3045
3040 user = relationship('User')
3046 user = relationship('User')
3041 group = relationship('RepoGroup')
3047 group = relationship('RepoGroup')
3042 permission = relationship('Permission')
3048 permission = relationship('Permission')
3043
3049
3044 @classmethod
3050 @classmethod
3045 def create(cls, user, repository_group, permission):
3051 def create(cls, user, repository_group, permission):
3046 n = cls()
3052 n = cls()
3047 n.user = user
3053 n.user = user
3048 n.group = repository_group
3054 n.group = repository_group
3049 n.permission = permission
3055 n.permission = permission
3050 Session().add(n)
3056 Session().add(n)
3051 return n
3057 return n
3052
3058
3053
3059
3054 class UserGroupRepoGroupToPerm(Base, BaseModel):
3060 class UserGroupRepoGroupToPerm(Base, BaseModel):
3055 __tablename__ = 'users_group_repo_group_to_perm'
3061 __tablename__ = 'users_group_repo_group_to_perm'
3056 __table_args__ = (
3062 __table_args__ = (
3057 UniqueConstraint('users_group_id', 'group_id'),
3063 UniqueConstraint('users_group_id', 'group_id'),
3058 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3064 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3059 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3065 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3060 )
3066 )
3061
3067
3062 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3068 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3063 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3069 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3064 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3070 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3065 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3071 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3066
3072
3067 users_group = relationship('UserGroup')
3073 users_group = relationship('UserGroup')
3068 permission = relationship('Permission')
3074 permission = relationship('Permission')
3069 group = relationship('RepoGroup')
3075 group = relationship('RepoGroup')
3070
3076
3071 @classmethod
3077 @classmethod
3072 def create(cls, user_group, repository_group, permission):
3078 def create(cls, user_group, repository_group, permission):
3073 n = cls()
3079 n = cls()
3074 n.users_group = user_group
3080 n.users_group = user_group
3075 n.group = repository_group
3081 n.group = repository_group
3076 n.permission = permission
3082 n.permission = permission
3077 Session().add(n)
3083 Session().add(n)
3078 return n
3084 return n
3079
3085
3080 def __unicode__(self):
3086 def __unicode__(self):
3081 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3087 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3082
3088
3083
3089
3084 class Statistics(Base, BaseModel):
3090 class Statistics(Base, BaseModel):
3085 __tablename__ = 'statistics'
3091 __tablename__ = 'statistics'
3086 __table_args__ = (
3092 __table_args__ = (
3087 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3093 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3088 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3094 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3089 )
3095 )
3090 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3096 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3091 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3097 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3092 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3098 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3093 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3099 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3094 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3100 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3095 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3101 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3096
3102
3097 repository = relationship('Repository', single_parent=True)
3103 repository = relationship('Repository', single_parent=True)
3098
3104
3099
3105
3100 class UserFollowing(Base, BaseModel):
3106 class UserFollowing(Base, BaseModel):
3101 __tablename__ = 'user_followings'
3107 __tablename__ = 'user_followings'
3102 __table_args__ = (
3108 __table_args__ = (
3103 UniqueConstraint('user_id', 'follows_repository_id'),
3109 UniqueConstraint('user_id', 'follows_repository_id'),
3104 UniqueConstraint('user_id', 'follows_user_id'),
3110 UniqueConstraint('user_id', 'follows_user_id'),
3105 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3111 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3106 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3112 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3107 )
3113 )
3108
3114
3109 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3115 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3110 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3116 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3111 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3117 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3112 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3118 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3113 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3119 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3114
3120
3115 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3121 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3116
3122
3117 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3123 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3118 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3124 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3119
3125
3120 @classmethod
3126 @classmethod
3121 def get_repo_followers(cls, repo_id):
3127 def get_repo_followers(cls, repo_id):
3122 return cls.query().filter(cls.follows_repo_id == repo_id)
3128 return cls.query().filter(cls.follows_repo_id == repo_id)
3123
3129
3124
3130
3125 class CacheKey(Base, BaseModel):
3131 class CacheKey(Base, BaseModel):
3126 __tablename__ = 'cache_invalidation'
3132 __tablename__ = 'cache_invalidation'
3127 __table_args__ = (
3133 __table_args__ = (
3128 UniqueConstraint('cache_key'),
3134 UniqueConstraint('cache_key'),
3129 Index('key_idx', 'cache_key'),
3135 Index('key_idx', 'cache_key'),
3130 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3136 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3131 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3137 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3132 )
3138 )
3133 CACHE_TYPE_ATOM = 'ATOM'
3139 CACHE_TYPE_ATOM = 'ATOM'
3134 CACHE_TYPE_RSS = 'RSS'
3140 CACHE_TYPE_RSS = 'RSS'
3135 CACHE_TYPE_README = 'README'
3141 CACHE_TYPE_README = 'README'
3136
3142
3137 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3143 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3138 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3144 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3139 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3145 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3140 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3146 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3141
3147
3142 def __init__(self, cache_key, cache_args=''):
3148 def __init__(self, cache_key, cache_args=''):
3143 self.cache_key = cache_key
3149 self.cache_key = cache_key
3144 self.cache_args = cache_args
3150 self.cache_args = cache_args
3145 self.cache_active = False
3151 self.cache_active = False
3146
3152
3147 def __unicode__(self):
3153 def __unicode__(self):
3148 return u"<%s('%s:%s[%s]')>" % (
3154 return u"<%s('%s:%s[%s]')>" % (
3149 self.__class__.__name__,
3155 self.__class__.__name__,
3150 self.cache_id, self.cache_key, self.cache_active)
3156 self.cache_id, self.cache_key, self.cache_active)
3151
3157
3152 def _cache_key_partition(self):
3158 def _cache_key_partition(self):
3153 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3159 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3154 return prefix, repo_name, suffix
3160 return prefix, repo_name, suffix
3155
3161
3156 def get_prefix(self):
3162 def get_prefix(self):
3157 """
3163 """
3158 Try to extract prefix from existing cache key. The key could consist
3164 Try to extract prefix from existing cache key. The key could consist
3159 of prefix, repo_name, suffix
3165 of prefix, repo_name, suffix
3160 """
3166 """
3161 # this returns prefix, repo_name, suffix
3167 # this returns prefix, repo_name, suffix
3162 return self._cache_key_partition()[0]
3168 return self._cache_key_partition()[0]
3163
3169
3164 def get_suffix(self):
3170 def get_suffix(self):
3165 """
3171 """
3166 get suffix that might have been used in _get_cache_key to
3172 get suffix that might have been used in _get_cache_key to
3167 generate self.cache_key. Only used for informational purposes
3173 generate self.cache_key. Only used for informational purposes
3168 in repo_edit.mako.
3174 in repo_edit.mako.
3169 """
3175 """
3170 # prefix, repo_name, suffix
3176 # prefix, repo_name, suffix
3171 return self._cache_key_partition()[2]
3177 return self._cache_key_partition()[2]
3172
3178
3173 @classmethod
3179 @classmethod
3174 def delete_all_cache(cls):
3180 def delete_all_cache(cls):
3175 """
3181 """
3176 Delete all cache keys from database.
3182 Delete all cache keys from database.
3177 Should only be run when all instances are down and all entries
3183 Should only be run when all instances are down and all entries
3178 thus stale.
3184 thus stale.
3179 """
3185 """
3180 cls.query().delete()
3186 cls.query().delete()
3181 Session().commit()
3187 Session().commit()
3182
3188
3183 @classmethod
3189 @classmethod
3184 def get_cache_key(cls, repo_name, cache_type):
3190 def get_cache_key(cls, repo_name, cache_type):
3185 """
3191 """
3186
3192
3187 Generate a cache key for this process of RhodeCode instance.
3193 Generate a cache key for this process of RhodeCode instance.
3188 Prefix most likely will be process id or maybe explicitly set
3194 Prefix most likely will be process id or maybe explicitly set
3189 instance_id from .ini file.
3195 instance_id from .ini file.
3190 """
3196 """
3191 import rhodecode
3197 import rhodecode
3192 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3198 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3193
3199
3194 repo_as_unicode = safe_unicode(repo_name)
3200 repo_as_unicode = safe_unicode(repo_name)
3195 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3201 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3196 if cache_type else repo_as_unicode
3202 if cache_type else repo_as_unicode
3197
3203
3198 return u'{}{}'.format(prefix, key)
3204 return u'{}{}'.format(prefix, key)
3199
3205
3200 @classmethod
3206 @classmethod
3201 def set_invalidate(cls, repo_name, delete=False):
3207 def set_invalidate(cls, repo_name, delete=False):
3202 """
3208 """
3203 Mark all caches of a repo as invalid in the database.
3209 Mark all caches of a repo as invalid in the database.
3204 """
3210 """
3205
3211
3206 try:
3212 try:
3207 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3213 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3208 if delete:
3214 if delete:
3209 log.debug('cache objects deleted for repo %s',
3215 log.debug('cache objects deleted for repo %s',
3210 safe_str(repo_name))
3216 safe_str(repo_name))
3211 qry.delete()
3217 qry.delete()
3212 else:
3218 else:
3213 log.debug('cache objects marked as invalid for repo %s',
3219 log.debug('cache objects marked as invalid for repo %s',
3214 safe_str(repo_name))
3220 safe_str(repo_name))
3215 qry.update({"cache_active": False})
3221 qry.update({"cache_active": False})
3216
3222
3217 Session().commit()
3223 Session().commit()
3218 except Exception:
3224 except Exception:
3219 log.exception(
3225 log.exception(
3220 'Cache key invalidation failed for repository %s',
3226 'Cache key invalidation failed for repository %s',
3221 safe_str(repo_name))
3227 safe_str(repo_name))
3222 Session().rollback()
3228 Session().rollback()
3223
3229
3224 @classmethod
3230 @classmethod
3225 def get_active_cache(cls, cache_key):
3231 def get_active_cache(cls, cache_key):
3226 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3232 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3227 if inv_obj:
3233 if inv_obj:
3228 return inv_obj
3234 return inv_obj
3229 return None
3235 return None
3230
3236
3231 @classmethod
3237 @classmethod
3232 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3238 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3233 thread_scoped=False):
3239 thread_scoped=False):
3234 """
3240 """
3235 @cache_region('long_term')
3241 @cache_region('long_term')
3236 def _heavy_calculation(cache_key):
3242 def _heavy_calculation(cache_key):
3237 return 'result'
3243 return 'result'
3238
3244
3239 cache_context = CacheKey.repo_context_cache(
3245 cache_context = CacheKey.repo_context_cache(
3240 _heavy_calculation, repo_name, cache_type)
3246 _heavy_calculation, repo_name, cache_type)
3241
3247
3242 with cache_context as context:
3248 with cache_context as context:
3243 context.invalidate()
3249 context.invalidate()
3244 computed = context.compute()
3250 computed = context.compute()
3245
3251
3246 assert computed == 'result'
3252 assert computed == 'result'
3247 """
3253 """
3248 from rhodecode.lib import caches
3254 from rhodecode.lib import caches
3249 return caches.InvalidationContext(
3255 return caches.InvalidationContext(
3250 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3256 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3251
3257
3252
3258
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a commit (``revision``) or to a pull
    request, optionally anchored to a file/line pair (inline comment).
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        # 1-based position of ``pr_version`` within ``versions``;
        # None when the version is not present
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # a versioned comment is always older than "no version"
            return self.pull_request_version_id is not None
        return self.pull_request_version_id < version

    @property
    def resolved(self):
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments always carry both a line number and a file path
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return the comment serialized as a plain dict for the API."""
        comment = self
        return {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on
        }

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3374
3380
3375
3381
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) recorded for a commit revision,
    optionally tied to a pull request; ``version`` tracks re-reviews.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        # translate a raw status value into its human readable label
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return the status serialized as a plain dict for the API."""
        status = self
        return {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3438
3444
3439
3445
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a single colon-separated string column
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    def unicode_to_reference(self, raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    def reference_to_unicode(self, ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Serialize the pull request for the API.

        :param with_merge_state: when True the (potentially expensive)
            merge status is computed; otherwise a 'not_available' stub
            is returned in its place.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
3649
3655
3650
3656
class PullRequest(Base, _PullRequestBase):
    """
    The live (current) pull request record.

    Historic snapshots of its state are stored as
    :class:`PullRequestVersion` rows reachable via the ``versions``
    relationship.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    # child records are fully owned by the pull request and are removed
    # together with it (delete-orphan cascade)
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around `pull_request_obj`.

        :param pull_request_obj: the (possibly versioned) pull request whose
            API data is shown
        :param org_pull_request_obj: the original (live) pull request; a few
            always-current attributes are taken from it
        :param internal_methods: optional list of attribute names resolved on
            the wrapper itself instead of the attrs dict
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always reflect the *current* pull request state
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return the vcs instance of this pull request's shadow repository,
        or None when the shadow repository was not created yet.
        """
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        # shadow repository paths are keyed by the numeric repo id plus the
        # workspace id of this pull request
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken at update time.

    Review-state helpers delegate to the parent (live) pull request, since
    statuses and reviewers are stored only on it.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a reviewer (user) with a pull request, including the
    reasons the reviewer was added and optional review-rule data.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    @hybrid_property
    def reasons(self):
        # normalize NULL/empty storage to an empty list for callers
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, basestring) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of human-readable reasons why this reviewer was added
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON blob with data of the review rule that generated this entry
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
class Notification(Base, BaseModel):
    """
    A notification message, fanned out to its recipients through
    :class:`UserNotification` association rows.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        """All users this notification was delivered to, ordered by user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and attach it to every user in `recipients`.

        :param created_by: User instance that triggered the notification
        :param subject: short subject line
        :param body: notification body text
        :param recipients: iterable of User instances to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new, session-added Notification instance
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True

            u.notifications.append(assoc)
        Session().add(notification)

        return notification
class UserNotification(Base, BaseModel):
    """
    Association between a user and a notification, carrying the per-user
    read flag and sent timestamp.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this notification as read for the associated user."""
        self.read = True
        Session().add(self)
class Gist(Base, BaseModel):
    """
    A code-snippet (gist) record; the snippet contents live in a dedicated
    vcs repository addressed by ``gist_access_id``.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for direct template output
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch a gist by access id or raise HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """Return the vcs instance of the gist's backing repository."""
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False)
class ExternalIdentity(Base, BaseModel):
    """
    Mapping of a local user to an identity at an external auth provider,
    including the provider-issued tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'})

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query
4110 class Integration(Base, BaseModel):
4116 class Integration(Base, BaseModel):
4111 __tablename__ = 'integrations'
4117 __tablename__ = 'integrations'
4112 __table_args__ = (
4118 __table_args__ = (
4113 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4119 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4114 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
4120 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
4115 )
4121 )
4116
4122
4117 integration_id = Column('integration_id', Integer(), primary_key=True)
4123 integration_id = Column('integration_id', Integer(), primary_key=True)
4118 integration_type = Column('integration_type', String(255))
4124 integration_type = Column('integration_type', String(255))
4119 enabled = Column('enabled', Boolean(), nullable=False)
4125 enabled = Column('enabled', Boolean(), nullable=False)
4120 name = Column('name', String(255), nullable=False)
4126 name = Column('name', String(255), nullable=False)
4121 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4127 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4122 default=False)
4128 default=False)
4123
4129
4124 settings = Column(
4130 settings = Column(
4125 'settings_json', MutationObj.as_mutable(
4131 'settings_json', MutationObj.as_mutable(
4126 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4132 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4127 repo_id = Column(
4133 repo_id = Column(
4128 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4134 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4129 nullable=True, unique=None, default=None)
4135 nullable=True, unique=None, default=None)
4130 repo = relationship('Repository', lazy='joined')
4136 repo = relationship('Repository', lazy='joined')
4131
4137
4132 repo_group_id = Column(
4138 repo_group_id = Column(
4133 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4139 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4134 nullable=True, unique=None, default=None)
4140 nullable=True, unique=None, default=None)
4135 repo_group = relationship('RepoGroup', lazy='joined')
4141 repo_group = relationship('RepoGroup', lazy='joined')
4136
4142
4137 @property
4143 @property
4138 def scope(self):
4144 def scope(self):
4139 if self.repo:
4145 if self.repo:
4140 return repr(self.repo)
4146 return repr(self.repo)
4141 if self.repo_group:
4147 if self.repo_group:
4142 if self.child_repos_only:
4148 if self.child_repos_only:
4143 return repr(self.repo_group) + ' (child repos only)'
4149 return repr(self.repo_group) + ' (child repos only)'
4144 else:
4150 else:
4145 return repr(self.repo_group) + ' (recursive)'
4151 return repr(self.repo_group) + ' (recursive)'
4146 if self.child_repos_only:
4152 if self.child_repos_only:
4147 return 'root_repos'
4153 return 'root_repos'
4148 return 'global'
4154 return 'global'
4149
4155
4150 def __repr__(self):
4156 def __repr__(self):
4151 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4157 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4152
4158
4153
4159
4154 class RepoReviewRuleUser(Base, BaseModel):
4160 class RepoReviewRuleUser(Base, BaseModel):
4155 __tablename__ = 'repo_review_rules_users'
4161 __tablename__ = 'repo_review_rules_users'
4156 __table_args__ = (
4162 __table_args__ = (
4157 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4163 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4158 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4164 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4159 )
4165 )
4160
4166
4161 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4167 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4162 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4168 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4163 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4169 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4164 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4170 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4165 user = relationship('User')
4171 user = relationship('User')
4166
4172
4167 def rule_data(self):
4173 def rule_data(self):
4168 return {
4174 return {
4169 'mandatory': self.mandatory
4175 'mandatory': self.mandatory
4170 }
4176 }
4171
4177
4172
4178
4173 class RepoReviewRuleUserGroup(Base, BaseModel):
4179 class RepoReviewRuleUserGroup(Base, BaseModel):
4174 __tablename__ = 'repo_review_rules_users_groups'
4180 __tablename__ = 'repo_review_rules_users_groups'
4175 __table_args__ = (
4181 __table_args__ = (
4176 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4182 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4177 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4183 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4178 )
4184 )
4179 VOTE_RULE_ALL = -1
4185 VOTE_RULE_ALL = -1
4180
4186
4181 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4187 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4182 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4188 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4183 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4189 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4184 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4190 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4185 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4191 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4186 users_group = relationship('UserGroup')
4192 users_group = relationship('UserGroup')
4187
4193
4188 def rule_data(self):
4194 def rule_data(self):
4189 return {
4195 return {
4190 'mandatory': self.mandatory,
4196 'mandatory': self.mandatory,
4191 'vote_rule': self.vote_rule
4197 'vote_rule': self.vote_rule
4192 }
4198 }
4193
4199
4194 @property
4200 @property
4195 def vote_rule_label(self):
4201 def vote_rule_label(self):
4196 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4202 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4197 return 'all must vote'
4203 return 'all must vote'
4198 else:
4204 else:
4199 return 'min. vote {}'.format(self.vote_rule)
4205 return 'min. vote {}'.format(self.vote_rule)
4200
4206
4201
4207
4202 class RepoReviewRule(Base, BaseModel):
4208 class RepoReviewRule(Base, BaseModel):
4203 __tablename__ = 'repo_review_rules'
4209 __tablename__ = 'repo_review_rules'
4204 __table_args__ = (
4210 __table_args__ = (
4205 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4211 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4206 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4212 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4207 )
4213 )
4208
4214
4209 repo_review_rule_id = Column(
4215 repo_review_rule_id = Column(
4210 'repo_review_rule_id', Integer(), primary_key=True)
4216 'repo_review_rule_id', Integer(), primary_key=True)
4211 repo_id = Column(
4217 repo_id = Column(
4212 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4218 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4213 repo = relationship('Repository', backref='review_rules')
4219 repo = relationship('Repository', backref='review_rules')
4214
4220
4215 review_rule_name = Column('review_rule_name', String(255))
4221 review_rule_name = Column('review_rule_name', String(255))
4216 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4222 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4217 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4223 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4218 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4224 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4219
4225
4220 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4226 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4221 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4227 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4222 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4228 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4223 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4229 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4224
4230
4225 rule_users = relationship('RepoReviewRuleUser')
4231 rule_users = relationship('RepoReviewRuleUser')
4226 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4232 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4227
4233
4228 def _validate_glob(self, value):
4234 def _validate_glob(self, value):
4229 re.compile('^' + glob2re(value) + '$')
4235 re.compile('^' + glob2re(value) + '$')
4230
4236
4231 @hybrid_property
4237 @hybrid_property
4232 def source_branch_pattern(self):
4238 def source_branch_pattern(self):
4233 return self._branch_pattern or '*'
4239 return self._branch_pattern or '*'
4234
4240
4235 @source_branch_pattern.setter
4241 @source_branch_pattern.setter
4236 def source_branch_pattern(self, value):
4242 def source_branch_pattern(self, value):
4237 self._validate_glob(value)
4243 self._validate_glob(value)
4238 self._branch_pattern = value or '*'
4244 self._branch_pattern = value or '*'
4239
4245
4240 @hybrid_property
4246 @hybrid_property
4241 def target_branch_pattern(self):
4247 def target_branch_pattern(self):
4242 return self._target_branch_pattern or '*'
4248 return self._target_branch_pattern or '*'
4243
4249
4244 @target_branch_pattern.setter
4250 @target_branch_pattern.setter
4245 def target_branch_pattern(self, value):
4251 def target_branch_pattern(self, value):
4246 self._validate_glob(value)
4252 self._validate_glob(value)
4247 self._target_branch_pattern = value or '*'
4253 self._target_branch_pattern = value or '*'
4248
4254
4249 @hybrid_property
4255 @hybrid_property
4250 def file_pattern(self):
4256 def file_pattern(self):
4251 return self._file_pattern or '*'
4257 return self._file_pattern or '*'
4252
4258
4253 @file_pattern.setter
4259 @file_pattern.setter
4254 def file_pattern(self, value):
4260 def file_pattern(self, value):
4255 self._validate_glob(value)
4261 self._validate_glob(value)
4256 self._file_pattern = value or '*'
4262 self._file_pattern = value or '*'
4257
4263
4258 def matches(self, source_branch, target_branch, files_changed):
4264 def matches(self, source_branch, target_branch, files_changed):
4259 """
4265 """
4260 Check if this review rule matches a branch/files in a pull request
4266 Check if this review rule matches a branch/files in a pull request
4261
4267
4262 :param source_branch: source branch name for the commit
4268 :param source_branch: source branch name for the commit
4263 :param target_branch: target branch name for the commit
4269 :param target_branch: target branch name for the commit
4264 :param files_changed: list of file paths changed in the pull request
4270 :param files_changed: list of file paths changed in the pull request
4265 """
4271 """
4266
4272
4267 source_branch = source_branch or ''
4273 source_branch = source_branch or ''
4268 target_branch = target_branch or ''
4274 target_branch = target_branch or ''
4269 files_changed = files_changed or []
4275 files_changed = files_changed or []
4270
4276
4271 branch_matches = True
4277 branch_matches = True
4272 if source_branch or target_branch:
4278 if source_branch or target_branch:
4273 if self.source_branch_pattern == '*':
4279 if self.source_branch_pattern == '*':
4274 source_branch_match = True
4280 source_branch_match = True
4275 else:
4281 else:
4276 source_branch_regex = re.compile(
4282 source_branch_regex = re.compile(
4277 '^' + glob2re(self.source_branch_pattern) + '$')
4283 '^' + glob2re(self.source_branch_pattern) + '$')
4278 source_branch_match = bool(source_branch_regex.search(source_branch))
4284 source_branch_match = bool(source_branch_regex.search(source_branch))
4279 if self.target_branch_pattern == '*':
4285 if self.target_branch_pattern == '*':
4280 target_branch_match = True
4286 target_branch_match = True
4281 else:
4287 else:
4282 target_branch_regex = re.compile(
4288 target_branch_regex = re.compile(
4283 '^' + glob2re(self.target_branch_pattern) + '$')
4289 '^' + glob2re(self.target_branch_pattern) + '$')
4284 target_branch_match = bool(target_branch_regex.search(target_branch))
4290 target_branch_match = bool(target_branch_regex.search(target_branch))
4285
4291
4286 branch_matches = source_branch_match and target_branch_match
4292 branch_matches = source_branch_match and target_branch_match
4287
4293
4288 files_matches = True
4294 files_matches = True
4289 if self.file_pattern != '*':
4295 if self.file_pattern != '*':
4290 files_matches = False
4296 files_matches = False
4291 file_regex = re.compile(glob2re(self.file_pattern))
4297 file_regex = re.compile(glob2re(self.file_pattern))
4292 for filename in files_changed:
4298 for filename in files_changed:
4293 if file_regex.search(filename):
4299 if file_regex.search(filename):
4294 files_matches = True
4300 files_matches = True
4295 break
4301 break
4296
4302
4297 return branch_matches and files_matches
4303 return branch_matches and files_matches
4298
4304
4299 @property
4305 @property
4300 def review_users(self):
4306 def review_users(self):
4301 """ Returns the users which this rule applies to """
4307 """ Returns the users which this rule applies to """
4302
4308
4303 users = collections.OrderedDict()
4309 users = collections.OrderedDict()
4304
4310
4305 for rule_user in self.rule_users:
4311 for rule_user in self.rule_users:
4306 if rule_user.user.active:
4312 if rule_user.user.active:
4307 if rule_user.user not in users:
4313 if rule_user.user not in users:
4308 users[rule_user.user.username] = {
4314 users[rule_user.user.username] = {
4309 'user': rule_user.user,
4315 'user': rule_user.user,
4310 'source': 'user',
4316 'source': 'user',
4311 'source_data': {},
4317 'source_data': {},
4312 'data': rule_user.rule_data()
4318 'data': rule_user.rule_data()
4313 }
4319 }
4314
4320
4315 for rule_user_group in self.rule_user_groups:
4321 for rule_user_group in self.rule_user_groups:
4316 source_data = {
4322 source_data = {
4317 'user_group_id': rule_user_group.users_group.users_group_id,
4323 'user_group_id': rule_user_group.users_group.users_group_id,
4318 'name': rule_user_group.users_group.users_group_name,
4324 'name': rule_user_group.users_group.users_group_name,
4319 'members': len(rule_user_group.users_group.members)
4325 'members': len(rule_user_group.users_group.members)
4320 }
4326 }
4321 for member in rule_user_group.users_group.members:
4327 for member in rule_user_group.users_group.members:
4322 if member.user.active:
4328 if member.user.active:
4323 key = member.user.username
4329 key = member.user.username
4324 if key in users:
4330 if key in users:
4325 # skip this member as we have him already
4331 # skip this member as we have him already
4326 # this prevents from override the "first" matched
4332 # this prevents from override the "first" matched
4327 # users with duplicates in multiple groups
4333 # users with duplicates in multiple groups
4328 continue
4334 continue
4329
4335
4330 users[key] = {
4336 users[key] = {
4331 'user': member.user,
4337 'user': member.user,
4332 'source': 'user_group',
4338 'source': 'user_group',
4333 'source_data': source_data,
4339 'source_data': source_data,
4334 'data': rule_user_group.rule_data()
4340 'data': rule_user_group.rule_data()
4335 }
4341 }
4336
4342
4337 return users
4343 return users
4338
4344
4339 def user_group_vote_rule(self):
4345 def user_group_vote_rule(self):
4340 rules = []
4346 rules = []
4341 if self.rule_user_groups:
4347 if self.rule_user_groups:
4342 for user_group in self.rule_user_groups:
4348 for user_group in self.rule_user_groups:
4343 rules.append(user_group)
4349 rules.append(user_group)
4344 return rules
4350 return rules
4345
4351
4346 def __repr__(self):
4352 def __repr__(self):
4347 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4353 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4348 self.repo_review_rule_id, self.repo)
4354 self.repo_review_rule_id, self.repo)
4349
4355
4350
4356
4351 class ScheduleEntry(Base, BaseModel):
4357 class ScheduleEntry(Base, BaseModel):
4352 __tablename__ = 'schedule_entries'
4358 __tablename__ = 'schedule_entries'
4353 __table_args__ = (
4359 __table_args__ = (
4354 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4360 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4355 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4361 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4356 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4362 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4357 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4363 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4358 )
4364 )
4359 schedule_types = ['crontab', 'timedelta', 'integer']
4365 schedule_types = ['crontab', 'timedelta', 'integer']
4360 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4366 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4361
4367
4362 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4368 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4363 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4369 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4364 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4370 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4365
4371
4366 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4372 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4367 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4373 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4368
4374
4369 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4375 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4370 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4376 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4371
4377
4372 # task
4378 # task
4373 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4379 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4374 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4380 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4375 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4381 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4376 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4382 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4377
4383
4378 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4384 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4379 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4385 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4380
4386
4381 @hybrid_property
4387 @hybrid_property
4382 def schedule_type(self):
4388 def schedule_type(self):
4383 return self._schedule_type
4389 return self._schedule_type
4384
4390
4385 @schedule_type.setter
4391 @schedule_type.setter
4386 def schedule_type(self, val):
4392 def schedule_type(self, val):
4387 if val not in self.schedule_types:
4393 if val not in self.schedule_types:
4388 raise ValueError('Value must be on of `{}` and got `{}`'.format(
4394 raise ValueError('Value must be on of `{}` and got `{}`'.format(
4389 val, self.schedule_type))
4395 val, self.schedule_type))
4390
4396
4391 self._schedule_type = val
4397 self._schedule_type = val
4392
4398
4393 @classmethod
4399 @classmethod
4394 def get_uid(cls, obj):
4400 def get_uid(cls, obj):
4395 args = obj.task_args
4401 args = obj.task_args
4396 kwargs = obj.task_kwargs
4402 kwargs = obj.task_kwargs
4397 if isinstance(args, JsonRaw):
4403 if isinstance(args, JsonRaw):
4398 try:
4404 try:
4399 args = json.loads(args)
4405 args = json.loads(args)
4400 except ValueError:
4406 except ValueError:
4401 args = tuple()
4407 args = tuple()
4402
4408
4403 if isinstance(kwargs, JsonRaw):
4409 if isinstance(kwargs, JsonRaw):
4404 try:
4410 try:
4405 kwargs = json.loads(kwargs)
4411 kwargs = json.loads(kwargs)
4406 except ValueError:
4412 except ValueError:
4407 kwargs = dict()
4413 kwargs = dict()
4408
4414
4409 dot_notation = obj.task_dot_notation
4415 dot_notation = obj.task_dot_notation
4410 val = '.'.join(map(safe_str, [
4416 val = '.'.join(map(safe_str, [
4411 sorted(dot_notation), args, sorted(kwargs.items())]))
4417 sorted(dot_notation), args, sorted(kwargs.items())]))
4412 return hashlib.sha1(val).hexdigest()
4418 return hashlib.sha1(val).hexdigest()
4413
4419
4414 @classmethod
4420 @classmethod
4415 def get_by_schedule_name(cls, schedule_name):
4421 def get_by_schedule_name(cls, schedule_name):
4416 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4422 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4417
4423
4418 @classmethod
4424 @classmethod
4419 def get_by_schedule_id(cls, schedule_id):
4425 def get_by_schedule_id(cls, schedule_id):
4420 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4426 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4421
4427
4422 @property
4428 @property
4423 def task(self):
4429 def task(self):
4424 return self.task_dot_notation
4430 return self.task_dot_notation
4425
4431
4426 @property
4432 @property
4427 def schedule(self):
4433 def schedule(self):
4428 from rhodecode.lib.celerylib.utils import raw_2_schedule
4434 from rhodecode.lib.celerylib.utils import raw_2_schedule
4429 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4435 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4430 return schedule
4436 return schedule
4431
4437
4432 @property
4438 @property
4433 def args(self):
4439 def args(self):
4434 try:
4440 try:
4435 return list(self.task_args or [])
4441 return list(self.task_args or [])
4436 except ValueError:
4442 except ValueError:
4437 return list()
4443 return list()
4438
4444
4439 @property
4445 @property
4440 def kwargs(self):
4446 def kwargs(self):
4441 try:
4447 try:
4442 return dict(self.task_kwargs or {})
4448 return dict(self.task_kwargs or {})
4443 except ValueError:
4449 except ValueError:
4444 return dict()
4450 return dict()
4445
4451
4446 def _as_raw(self, val):
4452 def _as_raw(self, val):
4447 if hasattr(val, 'de_coerce'):
4453 if hasattr(val, 'de_coerce'):
4448 val = val.de_coerce()
4454 val = val.de_coerce()
4449 if val:
4455 if val:
4450 val = json.dumps(val)
4456 val = json.dumps(val)
4451
4457
4452 return val
4458 return val
4453
4459
4454 @property
4460 @property
4455 def schedule_definition_raw(self):
4461 def schedule_definition_raw(self):
4456 return self._as_raw(self.schedule_definition)
4462 return self._as_raw(self.schedule_definition)
4457
4463
4458 @property
4464 @property
4459 def args_raw(self):
4465 def args_raw(self):
4460 return self._as_raw(self.task_args)
4466 return self._as_raw(self.task_args)
4461
4467
4462 @property
4468 @property
4463 def kwargs_raw(self):
4469 def kwargs_raw(self):
4464 return self._as_raw(self.task_kwargs)
4470 return self._as_raw(self.task_kwargs)
4465
4471
4466 def __repr__(self):
4472 def __repr__(self):
4467 return '<DB:ScheduleEntry({}:{})>'.format(
4473 return '<DB:ScheduleEntry({}:{})>'.format(
4468 self.schedule_entry_id, self.schedule_name)
4474 self.schedule_entry_id, self.schedule_name)
4469
4475
4470
4476
4471 @event.listens_for(ScheduleEntry, 'before_update')
4477 @event.listens_for(ScheduleEntry, 'before_update')
4472 def update_task_uid(mapper, connection, target):
4478 def update_task_uid(mapper, connection, target):
4473 target.task_uid = ScheduleEntry.get_uid(target)
4479 target.task_uid = ScheduleEntry.get_uid(target)
4474
4480
4475
4481
4476 @event.listens_for(ScheduleEntry, 'before_insert')
4482 @event.listens_for(ScheduleEntry, 'before_insert')
4477 def set_task_uid(mapper, connection, target):
4483 def set_task_uid(mapper, connection, target):
4478 target.task_uid = ScheduleEntry.get_uid(target)
4484 target.task_uid = ScheduleEntry.get_uid(target)
4479
4485
4480
4486
4481 class DbMigrateVersion(Base, BaseModel):
4487 class DbMigrateVersion(Base, BaseModel):
4482 __tablename__ = 'db_migrate_version'
4488 __tablename__ = 'db_migrate_version'
4483 __table_args__ = (
4489 __table_args__ = (
4484 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4490 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4485 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4491 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4486 )
4492 )
4487 repository_id = Column('repository_id', String(250), primary_key=True)
4493 repository_id = Column('repository_id', String(250), primary_key=True)
4488 repository_path = Column('repository_path', Text)
4494 repository_path = Column('repository_path', Text)
4489 version = Column('version', Integer)
4495 version = Column('version', Integer)
4490
4496
4491
4497
4492 class DbSession(Base, BaseModel):
4498 class DbSession(Base, BaseModel):
4493 __tablename__ = 'db_session'
4499 __tablename__ = 'db_session'
4494 __table_args__ = (
4500 __table_args__ = (
4495 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4501 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4496 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4502 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4497 )
4503 )
4498
4504
4499 def __repr__(self):
4505 def __repr__(self):
4500 return '<DB:DbSession({})>'.format(self.id)
4506 return '<DB:DbSession({})>'.format(self.id)
4501
4507
4502 id = Column('id', Integer())
4508 id = Column('id', Integer())
4503 namespace = Column('namespace', String(255), primary_key=True)
4509 namespace = Column('namespace', String(255), primary_key=True)
4504 accessed = Column('accessed', DateTime, nullable=False)
4510 accessed = Column('accessed', DateTime, nullable=False)
4505 created = Column('created', DateTime, nullable=False)
4511 created = Column('created', DateTime, nullable=False)
4506 data = Column('data', PickleType, nullable=False)
4512 data = Column('data', PickleType, nullable=False)
4507
4513
4508
4514
4509
4515
4510 class BeakerCache(Base, BaseModel):
4516 class BeakerCache(Base, BaseModel):
4511 __tablename__ = 'beaker_cache'
4517 __tablename__ = 'beaker_cache'
4512 __table_args__ = (
4518 __table_args__ = (
4513 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4519 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4514 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4520 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4515 )
4521 )
4516
4522
4517 def __repr__(self):
4523 def __repr__(self):
4518 return '<DB:DbSession({})>'.format(self.id)
4524 return '<DB:DbSession({})>'.format(self.id)
4519
4525
4520 id = Column('id', Integer())
4526 id = Column('id', Integer())
4521 namespace = Column('namespace', String(255), primary_key=True)
4527 namespace = Column('namespace', String(255), primary_key=True)
4522 accessed = Column('accessed', DateTime, nullable=False)
4528 accessed = Column('accessed', DateTime, nullable=False)
4523 created = Column('created', DateTime, nullable=False)
4529 created = Column('created', DateTime, nullable=False)
4524 data = Column('data', PickleType, nullable=False)
4530 data = Column('data', PickleType, nullable=False)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now