pull-requests: added update pull-requests email+notifications...
marcink - r4120:7cd93c2b default
@@ -0,0 +1,164 b''
1 ## -*- coding: utf-8 -*-
2 <%inherit file="base.mako"/>
3 <%namespace name="base" file="base.mako"/>
4
5 ## EMAIL SUBJECT
6 <%def name="subject()" filter="n,trim,whitespace_filter">
7 <%
8 data = {
9 'updating_user': '@'+h.person(updating_user),
10 'pr_id': pull_request.pull_request_id,
11 'pr_title': pull_request.title,
12 }
13 %>
14
15 ${_('{updating_user} updated pull request. !{pr_id}: "{pr_title}"').format(**data) |n}
16 </%def>
17
18 ## PLAINTEXT VERSION OF BODY
19 <%def name="body_plaintext()" filter="n,trim">
20 <%
21 data = {
22 'updating_user': h.person(updating_user),
23 'pr_id': pull_request.pull_request_id,
24 'pr_title': pull_request.title,
25 'source_ref_type': pull_request.source_ref_parts.type,
26 'source_ref_name': pull_request.source_ref_parts.name,
27 'target_ref_type': pull_request.target_ref_parts.type,
28 'target_ref_name': pull_request.target_ref_parts.name,
29 'repo_url': pull_request_source_repo_url,
30 'source_repo': pull_request_source_repo.repo_name,
31 'target_repo': pull_request_target_repo.repo_name,
32 'source_repo_url': pull_request_source_repo_url,
33 'target_repo_url': pull_request_target_repo_url,
34 }
35 %>
36
37 * ${_('Pull Request link')}: ${pull_request_url}
38
39 * ${h.literal(_('Commit flow: {source_ref_type}:{source_ref_name} of {source_repo_url} into {target_ref_type}:{target_ref_name} of {target_repo_url}').format(**data))}
40
41 * ${_('Title')}: ${pull_request.title}
42
43 * ${_('Description')}:
44
45 ${pull_request.description | trim}
46
47 * Changed commits:
48
49 - Added: ${len(added_commits)}
50 - Removed: ${len(removed_commits)}
51
52 * Changed files:
53
54 %if not changed_files:
55 No file changes found
56 %else:
57 %for file_name in added_files:
58 - A `${file_name}`
59 %endfor
60 %for file_name in modified_files:
61 - M `${file_name}`
62 %endfor
63 %for file_name in removed_files:
64 - R `${file_name}`
65 %endfor
66 %endif
67
68 ---
69 ${self.plaintext_footer()}
70 </%def>
71 <%
72 data = {
73 'updating_user': h.person(updating_user),
74 'pr_id': pull_request.pull_request_id,
75 'pr_title': pull_request.title,
76 'source_ref_type': pull_request.source_ref_parts.type,
77 'source_ref_name': pull_request.source_ref_parts.name,
78 'target_ref_type': pull_request.target_ref_parts.type,
79 'target_ref_name': pull_request.target_ref_parts.name,
80 'repo_url': pull_request_source_repo_url,
81 'source_repo': pull_request_source_repo.repo_name,
82 'target_repo': pull_request_target_repo.repo_name,
83 'source_repo_url': h.link_to(pull_request_source_repo.repo_name, pull_request_source_repo_url),
84 'target_repo_url': h.link_to(pull_request_target_repo.repo_name, pull_request_target_repo_url),
85 }
86 %>
87
88 <table style="text-align:left;vertical-align:middle;width: 100%">
89 <tr>
90 <td style="width:100%;border-bottom:1px solid #dbd9da;">
91
92 <h4 style="margin: 0">
93 <div style="margin-bottom: 4px">
94 <span style="color:#7E7F7F">@${h.person(updating_user.username)}</span>
95 ${_('updated')}
96 <a href="${pull_request_url}" style="${base.link_css()}">
97 ${_('pull request.').format(**data) }
98 </a>
99 </div>
100 <div style="margin-top: 10px"></div>
101 ${_('Pull request')} <code>!${data['pr_id']}: ${data['pr_title']}</code>
102 </h4>
103
104 </td>
105 </tr>
106
107 </table>
108
109 <table style="text-align:left;vertical-align:middle;width: 100%">
110 ## spacing def
111 <tr>
112 <td style="width: 130px"></td>
113 <td></td>
114 </tr>
115
116 <tr>
117 <td style="padding-right:20px;">${_('Pull request')}:</td>
118 <td>
119 <a href="${pull_request_url}" style="${base.link_css()}">
120 !${pull_request.pull_request_id}
121 </a>
122 </td>
123 </tr>
124
125 <tr>
126 <td style="padding-right:20px;line-height:20px;">${_('Commit Flow')}:</td>
127 <td style="line-height:20px;">
128 <code>${'{}:{}'.format(data['source_ref_type'], pull_request.source_ref_parts.name)}</code> ${_('of')} ${data['source_repo_url']}
129 &rarr;
130 <code>${'{}:{}'.format(data['target_ref_type'], pull_request.target_ref_parts.name)}</code> ${_('of')} ${data['target_repo_url']}
131 </td>
132 </tr>
133
134 <tr>
135 <td style="padding-right:20px;">${_('Description')}:</td>
136 <td style="white-space:pre-wrap"><code>${pull_request.description | trim}</code></td>
137 </tr>
138 <tr>
139 <td style="padding-right:20px;">${_('Changes')}:</td>
140 <td style="white-space:pre-line">\
141 <strong>Changed commits:</strong>
142
143 - Added: ${len(added_commits)}
144 - Removed: ${len(removed_commits)}
145
146 <strong>Changed files:</strong>
147
148 %if not changed_files:
149 No file changes found
150 %else:
151 %for file_name in added_files:
152 - A <a href="${pull_request_url + '#a_' + h.FID(ancestor_commit_id, file_name)}">${file_name}</a>
153 %endfor
154 %for file_name in modified_files:
155 - M <a href="${pull_request_url + '#a_' + h.FID(ancestor_commit_id, file_name)}">${file_name}</a>
156 %endfor
157 %for file_name in removed_files:
158 - R <a href="${pull_request_url + '#a_' + h.FID(ancestor_commit_id, file_name)}">${file_name}</a>
159 %endfor
160 %endif
161 </td>
162 </tr>
163
164 </table>
@@ -1,1009 +1,1011 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 @jsonrpc_method()
46 46 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
47 47 merge_state=Optional(False)):
48 48 """
49 49 Get a pull request based on the given ID.
50 50
51 51 :param apiuser: This is filled automatically from the |authtoken|.
52 52 :type apiuser: AuthUser
53 53 :param repoid: Optional, repository name or repository ID from where
54 54 the pull request was opened.
55 55 :type repoid: str or int
56 56 :param pullrequestid: ID of the requested pull request.
57 57 :type pullrequestid: int
58 58 :param merge_state: Optional. Calculate the merge state for the pull request.
59 59 This could result in a longer time to fetch the data.
60 60 :type merge_state: bool
61 61
62 62 Example output:
63 63
64 64 .. code-block:: bash
65 65
66 66 "id": <id_given_in_input>,
67 67 "result":
68 68 {
69 69 "pull_request_id": "<pull_request_id>",
70 70 "url": "<url>",
71 71 "title": "<title>",
72 72 "description": "<description>",
73 73 "status" : "<status>",
74 74 "created_on": "<date_time_created>",
75 75 "updated_on": "<date_time_updated>",
76 76 "commit_ids": [
77 77 ...
78 78 "<commit_id>",
79 79 "<commit_id>",
80 80 ...
81 81 ],
82 82 "review_status": "<review_status>",
83 83 "mergeable": {
84 84 "status": "<bool>",
85 85 "message": "<message>",
86 86 },
87 87 "source": {
88 88 "clone_url": "<clone_url>",
89 89 "repository": "<repository_name>",
90 90 "reference":
91 91 {
92 92 "name": "<name>",
93 93 "type": "<type>",
94 94 "commit_id": "<commit_id>",
95 95 }
96 96 },
97 97 "target": {
98 98 "clone_url": "<clone_url>",
99 99 "repository": "<repository_name>",
100 100 "reference":
101 101 {
102 102 "name": "<name>",
103 103 "type": "<type>",
104 104 "commit_id": "<commit_id>",
105 105 }
106 106 },
107 107 "merge": {
108 108 "clone_url": "<clone_url>",
109 109 "reference":
110 110 {
111 111 "name": "<name>",
112 112 "type": "<type>",
113 113 "commit_id": "<commit_id>",
114 114 }
115 115 },
116 116 "author": <user_obj>,
117 117 "reviewers": [
118 118 ...
119 119 {
120 120 "user": "<user_obj>",
121 121 "review_status": "<review_status>",
122 122 }
123 123 ...
124 124 ]
125 125 },
126 126 "error": null
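
Example usage (a minimal sketch, assuming the standard RhodeCode JSON-RPC
endpoint at ``/_admin/api``; host, auth token and IDs are placeholders):

.. code-block:: python

    import requests

    # payload shape follows the usual RhodeCode JSON-RPC convention (assumed,
    # verify against your instance's API documentation)
    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'get_pull_request',
        'args': {'pullrequestid': 1, 'repoid': 'my-repo', 'merge_state': False},
    }
    response = requests.post('https://code.example.com/_admin/api', json=payload)
    # 'result' carries the pull request data described above
    print(response.json()['result']['title'])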
127 127 """
128 128
129 129 pull_request = get_pull_request_or_error(pullrequestid)
130 130 if Optional.extract(repoid):
131 131 repo = get_repo_or_error(repoid)
132 132 else:
133 133 repo = pull_request.target_repo
134 134
135 135 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
136 136 raise JSONRPCError('repository `%s` or pull request `%s` '
137 137 'does not exist' % (repoid, pullrequestid))
138 138
139 139 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
140 140 # otherwise we could lock the repo while calculating the merge state during an
141 141 # ongoing update/merge.
142 142 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
143 143 merge_state = Optional.extract(merge_state, binary=True) and pr_created
144 144 data = pull_request.get_api_data(with_merge_state=merge_state)
145 145 return data
146 146
147 147
148 148 @jsonrpc_method()
149 149 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
150 150 merge_state=Optional(False)):
151 151 """
152 152 Get all pull requests from the repository specified in `repoid`.
153 153
154 154 :param apiuser: This is filled automatically from the |authtoken|.
155 155 :type apiuser: AuthUser
156 156 :param repoid: Optional repository name or repository ID.
157 157 :type repoid: str or int
158 158 :param status: Only return pull requests with the specified status.
159 159 Valid options are:
160 160 * ``new`` (default)
161 161 * ``open``
162 162 * ``closed``
163 163 :type status: str
164 164 :param merge_state: Optional. Calculate the merge state for each pull request.
165 165 This could result in a longer time to fetch the data.
166 166 :type merge_state: bool
167 167
168 168 Example output:
169 169
170 170 .. code-block:: bash
171 171
172 172 "id": <id_given_in_input>,
173 173 "result":
174 174 [
175 175 ...
176 176 {
177 177 "pull_request_id": "<pull_request_id>",
178 178 "url": "<url>",
179 179 "title" : "<title>",
180 180 "description": "<description>",
181 181 "status": "<status>",
182 182 "created_on": "<date_time_created>",
183 183 "updated_on": "<date_time_updated>",
184 184 "commit_ids": [
185 185 ...
186 186 "<commit_id>",
187 187 "<commit_id>",
188 188 ...
189 189 ],
190 190 "review_status": "<review_status>",
191 191 "mergeable": {
192 192 "status": "<bool>",
193 193 "message": "<message>",
194 194 },
195 195 "source": {
196 196 "clone_url": "<clone_url>",
197 197 "reference":
198 198 {
199 199 "name": "<name>",
200 200 "type": "<type>",
201 201 "commit_id": "<commit_id>",
202 202 }
203 203 },
204 204 "target": {
205 205 "clone_url": "<clone_url>",
206 206 "reference":
207 207 {
208 208 "name": "<name>",
209 209 "type": "<type>",
210 210 "commit_id": "<commit_id>",
211 211 }
212 212 },
213 213 "merge": {
214 214 "clone_url": "<clone_url>",
215 215 "reference":
216 216 {
217 217 "name": "<name>",
218 218 "type": "<type>",
219 219 "commit_id": "<commit_id>",
220 220 }
221 221 },
222 222 "author": <user_obj>,
223 223 "reviewers": [
224 224 ...
225 225 {
226 226 "user": "<user_obj>",
227 227 "review_status": "<review_status>",
228 228 }
229 229 ...
230 230 ]
231 231 }
232 232 ...
233 233 ],
234 234 "error": null
235 235
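Example usage (a hypothetical payload, reusing the posting pattern sketched in
``get_pull_request`` above; repository name and token are placeholders):

.. code-block:: python

    payload = {
        'id': 2,
        'auth_token': '<auth_token>',
        'method': 'get_pull_requests',
        # list only open pull requests and skip the slower merge-state calculation
        'args': {'repoid': 'my-repo', 'status': 'open', 'merge_state': False},
    }
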
236 236 """
237 237 repo = get_repo_or_error(repoid)
238 238 if not has_superadmin_permission(apiuser):
239 239 _perms = (
240 240 'repository.admin', 'repository.write', 'repository.read',)
241 241 validate_repo_permissions(apiuser, repoid, repo, _perms)
242 242
243 243 status = Optional.extract(status)
244 244 merge_state = Optional.extract(merge_state, binary=True)
245 245 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
246 246 order_by='id', order_dir='desc')
247 247 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
248 248 return data
249 249
250 250
251 251 @jsonrpc_method()
252 252 def merge_pull_request(
253 253 request, apiuser, pullrequestid, repoid=Optional(None),
254 254 userid=Optional(OAttr('apiuser'))):
255 255 """
256 256 Merge the pull request specified by `pullrequestid` into its target
257 257 repository.
258 258
259 259 :param apiuser: This is filled automatically from the |authtoken|.
260 260 :type apiuser: AuthUser
261 261 :param repoid: Optional, repository name or repository ID of the
262 262 target repository to which the |pr| is to be merged.
263 263 :type repoid: str or int
264 264 :param pullrequestid: ID of the pull request which shall be merged.
265 265 :type pullrequestid: int
266 266 :param userid: Merge the pull request as this user.
267 267 :type userid: Optional(str or int)
268 268
269 269 Example output:
270 270
271 271 .. code-block:: bash
272 272
273 273 "id": <id_given_in_input>,
274 274 "result": {
275 275 "executed": "<bool>",
276 276 "failure_reason": "<int>",
277 277 "merge_status_message": "<str>",
278 278 "merge_commit_id": "<merge_commit_id>",
279 279 "possible": "<bool>",
280 280 "merge_ref": {
281 281 "commit_id": "<commit_id>",
282 282 "type": "<type>",
283 283 "name": "<name>"
284 284 }
285 285 },
286 286 "error": null
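
Example usage (a hypothetical payload following the same JSON-RPC pattern as
above; pass ``userid`` only when merging on behalf of another user):

.. code-block:: python

    payload = {
        'id': 3,
        'auth_token': '<auth_token>',
        'method': 'merge_pull_request',
        # placeholder values; repoid may be omitted to use the target repository
        'args': {'pullrequestid': 1, 'repoid': 'my-repo'},
    }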
287 287 """
288 288 pull_request = get_pull_request_or_error(pullrequestid)
289 289 if Optional.extract(repoid):
290 290 repo = get_repo_or_error(repoid)
291 291 else:
292 292 repo = pull_request.target_repo
293 293 auth_user = apiuser
294 294 if not isinstance(userid, Optional):
295 295 if (has_superadmin_permission(apiuser) or
296 296 HasRepoPermissionAnyApi('repository.admin')(
297 297 user=apiuser, repo_name=repo.repo_name)):
298 298 apiuser = get_user_or_error(userid)
299 299 auth_user = apiuser.AuthUser()
300 300 else:
301 301 raise JSONRPCError('userid is not the same as your user')
302 302
303 303 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
304 304 raise JSONRPCError(
305 305 'Operation forbidden because pull request is in state {}, '
306 306 'only state {} is allowed.'.format(
307 307 pull_request.pull_request_state, PullRequest.STATE_CREATED))
308 308
309 309 with pull_request.set_state(PullRequest.STATE_UPDATING):
310 310 check = MergeCheck.validate(pull_request, auth_user=auth_user,
311 311 translator=request.translate)
312 312 merge_possible = not check.failed
313 313
314 314 if not merge_possible:
315 315 error_messages = []
316 316 for err_type, error_msg in check.errors:
317 317 error_msg = request.translate(error_msg)
318 318 error_messages.append(error_msg)
319 319
320 320 reasons = ','.join(error_messages)
321 321 raise JSONRPCError(
322 322 'merge not possible for following reasons: {}'.format(reasons))
323 323
324 324 target_repo = pull_request.target_repo
325 325 extras = vcs_operation_context(
326 326 request.environ, repo_name=target_repo.repo_name,
327 327 username=auth_user.username, action='push',
328 328 scm=target_repo.repo_type)
329 329 with pull_request.set_state(PullRequest.STATE_UPDATING):
330 330 merge_response = PullRequestModel().merge_repo(
331 331 pull_request, apiuser, extras=extras)
332 332 if merge_response.executed:
333 333 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
334 334
335 335 Session().commit()
336 336
337 337 # In previous versions the merge response directly contained the merge
338 338 # commit id. It is now contained in the merge reference object. To be
339 339 # backwards compatible we have to extract it again.
340 340 merge_response = merge_response.asdict()
341 341 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
342 342
343 343 return merge_response
344 344
345 345
346 346 @jsonrpc_method()
347 347 def get_pull_request_comments(
348 348 request, apiuser, pullrequestid, repoid=Optional(None)):
349 349 """
350 350 Get all comments of pull request specified with the `pullrequestid`
351 351
352 352 :param apiuser: This is filled automatically from the |authtoken|.
353 353 :type apiuser: AuthUser
354 354 :param repoid: Optional repository name or repository ID.
355 355 :type repoid: str or int
356 356 :param pullrequestid: The pull request ID.
357 357 :type pullrequestid: int
358 358
359 359 Example output:
360 360
361 361 .. code-block:: bash
362 362
363 363 id : <id_given_in_input>
364 364 result : [
365 365 {
366 366 "comment_author": {
367 367 "active": true,
368 368 "full_name_or_username": "Tom Gore",
369 369 "username": "admin"
370 370 },
371 371 "comment_created_on": "2017-01-02T18:43:45.533",
372 372 "comment_f_path": null,
373 373 "comment_id": 25,
374 374 "comment_lineno": null,
375 375 "comment_status": {
376 376 "status": "under_review",
377 377 "status_lbl": "Under Review"
378 378 },
379 379 "comment_text": "Example text",
380 380 "comment_type": null,
381 381 "pull_request_version": null
382 382 }
383 383 ],
384 384 error : null
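
Example usage (a minimal sketch reusing the assumed ``/_admin/api`` endpoint;
values are placeholders):

.. code-block:: python

    import requests

    payload = {
        'id': 4,
        'auth_token': '<auth_token>',
        'method': 'get_pull_request_comments',
        'args': {'pullrequestid': 1},
    }
    response = requests.post('https://code.example.com/_admin/api', json=payload)
    # each entry carries the sanitized author, status and text shown above
    for comment in response.json()['result']:
        print(comment['comment_id'], comment['comment_text'])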
385 385 """
386 386
387 387 pull_request = get_pull_request_or_error(pullrequestid)
388 388 if Optional.extract(repoid):
389 389 repo = get_repo_or_error(repoid)
390 390 else:
391 391 repo = pull_request.target_repo
392 392
393 393 if not PullRequestModel().check_user_read(
394 394 pull_request, apiuser, api=True):
395 395 raise JSONRPCError('repository `%s` or pull request `%s` '
396 396 'does not exist' % (repoid, pullrequestid))
397 397
398 398 (pull_request_latest,
399 399 pull_request_at_ver,
400 400 pull_request_display_obj,
401 401 at_version) = PullRequestModel().get_pr_version(
402 402 pull_request.pull_request_id, version=None)
403 403
404 404 versions = pull_request_display_obj.versions()
405 405 ver_map = {
406 406 ver.pull_request_version_id: cnt
407 407 for cnt, ver in enumerate(versions, 1)
408 408 }
409 409
410 410 # GENERAL COMMENTS with versions #
411 411 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
412 412 q = q.order_by(ChangesetComment.comment_id.asc())
413 413 general_comments = q.all()
414 414
415 415 # INLINE COMMENTS with versions #
416 416 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
417 417 q = q.order_by(ChangesetComment.comment_id.asc())
418 418 inline_comments = q.all()
419 419
420 420 data = []
421 421 for comment in inline_comments + general_comments:
422 422 full_data = comment.get_api_data()
423 423 pr_version_id = None
424 424 if comment.pull_request_version_id:
425 425 pr_version_id = 'v{}'.format(
426 426 ver_map[comment.pull_request_version_id])
427 427
428 428 # sanitize some entries
429 429
430 430 full_data['pull_request_version'] = pr_version_id
431 431 full_data['comment_author'] = {
432 432 'username': full_data['comment_author'].username,
433 433 'full_name_or_username': full_data['comment_author'].full_name_or_username,
434 434 'active': full_data['comment_author'].active,
435 435 }
436 436
437 437 if full_data['comment_status']:
438 438 full_data['comment_status'] = {
439 439 'status': full_data['comment_status'][0].status,
440 440 'status_lbl': full_data['comment_status'][0].status_lbl,
441 441 }
442 442 else:
443 443 full_data['comment_status'] = {}
444 444
445 445 data.append(full_data)
446 446 return data
447 447
448 448
449 449 @jsonrpc_method()
450 450 def comment_pull_request(
451 451 request, apiuser, pullrequestid, repoid=Optional(None),
452 452 message=Optional(None), commit_id=Optional(None), status=Optional(None),
453 453 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
454 454 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
455 455 userid=Optional(OAttr('apiuser'))):
456 456 """
457 457 Comment on the pull request specified with the `pullrequestid`,
458 458 in the |repo| specified by the `repoid`, and optionally change the
459 459 review status.
460 460
461 461 :param apiuser: This is filled automatically from the |authtoken|.
462 462 :type apiuser: AuthUser
463 463 :param repoid: Optional repository name or repository ID.
464 464 :type repoid: str or int
465 465 :param pullrequestid: The pull request ID.
466 466 :type pullrequestid: int
467 467 :param commit_id: Specify the commit_id for which to set a comment. If the
468 468 given commit_id is different than the latest commit in the PR, the status
469 469 change won't be performed.
470 470 :type commit_id: str
471 471 :param message: The text content of the comment.
472 472 :type message: str
473 473 :param status: (**Optional**) Set the approval status of the pull
474 474 request. One of: 'not_reviewed', 'approved', 'rejected',
475 475 'under_review'
476 476 :type status: str
477 477 :param comment_type: Comment type, one of: 'note', 'todo'
478 478 :type comment_type: Optional(str), default: 'note'
479 479 :param resolves_comment_id: id of comment which this one will resolve
480 480 :type resolves_comment_id: Optional(int)
481 481 :param extra_recipients: list of user ids or usernames to add
482 482 notifications for this comment. Acts like a CC for notification
483 483 :type extra_recipients: Optional(list)
484 484 :param userid: Comment on the pull request as this user
485 485 :type userid: Optional(str or int)
486 486
487 487 Example output:
488 488
489 489 .. code-block:: bash
490 490
491 491 id : <id_given_in_input>
492 492 result : {
493 493 "pull_request_id": "<Integer>",
494 494 "comment_id": "<Integer>",
495 495 "status": {"given": <given_status>,
496 496 "was_changed": <bool status_was_actually_changed> },
497 497 },
498 498 error : null
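
Example usage (a hypothetical payload; the status must be one of the values
listed above and is only applied when the caller may change it):

.. code-block:: python

    payload = {
        'id': 5,
        'auth_token': '<auth_token>',
        'method': 'comment_pull_request',
        'args': {
            'pullrequestid': 1,
            'message': 'Looks good to me',
            # optionally change the review status together with the comment
            'status': 'approved',
        },
    }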
499 499 """
500 500 pull_request = get_pull_request_or_error(pullrequestid)
501 501 if Optional.extract(repoid):
502 502 repo = get_repo_or_error(repoid)
503 503 else:
504 504 repo = pull_request.target_repo
505 505
506 506 auth_user = apiuser
507 507 if not isinstance(userid, Optional):
508 508 if (has_superadmin_permission(apiuser) or
509 509 HasRepoPermissionAnyApi('repository.admin')(
510 510 user=apiuser, repo_name=repo.repo_name)):
511 511 apiuser = get_user_or_error(userid)
512 512 auth_user = apiuser.AuthUser()
513 513 else:
514 514 raise JSONRPCError('userid is not the same as your user')
515 515
516 516 if pull_request.is_closed():
517 517 raise JSONRPCError(
518 518 'pull request `%s` comment failed, pull request is closed' % (
519 519 pullrequestid,))
520 520
521 521 if not PullRequestModel().check_user_read(
522 522 pull_request, apiuser, api=True):
523 523 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
524 524 message = Optional.extract(message)
525 525 status = Optional.extract(status)
526 526 commit_id = Optional.extract(commit_id)
527 527 comment_type = Optional.extract(comment_type)
528 528 resolves_comment_id = Optional.extract(resolves_comment_id)
529 529 extra_recipients = Optional.extract(extra_recipients)
530 530
531 531 if not message and not status:
532 532 raise JSONRPCError(
533 533 'Both message and status parameters are missing. '
534 534 'At least one is required.')
535 535
536 536 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
537 537 status is not None):
538 538 raise JSONRPCError('Unknown comment status: `%s`' % status)
539 539
540 540 if commit_id and commit_id not in pull_request.revisions:
541 541 raise JSONRPCError(
542 542 'Invalid commit_id `%s` for this pull request.' % commit_id)
543 543
544 544 allowed_to_change_status = PullRequestModel().check_user_change_status(
545 545 pull_request, apiuser)
546 546
547 547 # if commit_id is passed, re-validate if the user is allowed to change status
548 548 # based on latest commit_id from the PR
549 549 if commit_id:
550 550 commit_idx = pull_request.revisions.index(commit_id)
551 551 if commit_idx != 0:
552 552 allowed_to_change_status = False
553 553
554 554 if resolves_comment_id:
555 555 comment = ChangesetComment.get(resolves_comment_id)
556 556 if not comment:
557 557 raise JSONRPCError(
558 558 'Invalid resolves_comment_id `%s` for this pull request.'
559 559 % resolves_comment_id)
560 560 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
561 561 raise JSONRPCError(
562 562 'Comment `%s` is wrong type for setting status to resolved.'
563 563 % resolves_comment_id)
564 564
565 565 text = message
566 566 status_label = ChangesetStatus.get_status_lbl(status)
567 567 if status and allowed_to_change_status:
568 568 st_message = ('Status change %(transition_icon)s %(status)s'
569 569 % {'transition_icon': '>', 'status': status_label})
570 570 text = message or st_message
571 571
572 572 rc_config = SettingsModel().get_all_settings()
573 573 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
574 574
575 575 status_change = status and allowed_to_change_status
576 576 comment = CommentsModel().create(
577 577 text=text,
578 578 repo=pull_request.target_repo.repo_id,
579 579 user=apiuser.user_id,
580 580 pull_request=pull_request.pull_request_id,
581 581 f_path=None,
582 582 line_no=None,
583 583 status_change=(status_label if status_change else None),
584 584 status_change_type=(status if status_change else None),
585 585 closing_pr=False,
586 586 renderer=renderer,
587 587 comment_type=comment_type,
588 588 resolves_comment_id=resolves_comment_id,
589 589 auth_user=auth_user,
590 590 extra_recipients=extra_recipients
591 591 )
592 592
593 593 if allowed_to_change_status and status:
594 594 old_calculated_status = pull_request.calculated_review_status()
595 595 ChangesetStatusModel().set_status(
596 596 pull_request.target_repo.repo_id,
597 597 status,
598 598 apiuser.user_id,
599 599 comment,
600 600 pull_request=pull_request.pull_request_id
601 601 )
602 602 Session().flush()
603 603
604 604 Session().commit()
605 605
606 606 PullRequestModel().trigger_pull_request_hook(
607 607 pull_request, apiuser, 'comment',
608 608 data={'comment': comment})
609 609
610 610 if allowed_to_change_status and status:
611 611 # we now calculate the status of pull request, and based on that
612 612 # calculation we set the commits status
613 613 calculated_status = pull_request.calculated_review_status()
614 614 if old_calculated_status != calculated_status:
615 615 PullRequestModel().trigger_pull_request_hook(
616 616 pull_request, apiuser, 'review_status_change',
617 617 data={'status': calculated_status})
618 618
619 619 data = {
620 620 'pull_request_id': pull_request.pull_request_id,
621 621 'comment_id': comment.comment_id if comment else None,
622 622 'status': {'given': status, 'was_changed': status_change},
623 623 }
624 624 return data
625 625
626 626
627 627 @jsonrpc_method()
628 628 def create_pull_request(
629 629 request, apiuser, source_repo, target_repo, source_ref, target_ref,
630 630 owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
631 631 description_renderer=Optional(''), reviewers=Optional(None)):
632 632 """
633 633 Creates a new pull request.
634 634
635 635 Accepts refs in the following formats:
636 636
637 637 * branch:<branch_name>:<sha>
638 638 * branch:<branch_name>
639 639 * bookmark:<bookmark_name>:<sha> (Mercurial only)
640 640 * bookmark:<bookmark_name> (Mercurial only)
641 641
642 642 :param apiuser: This is filled automatically from the |authtoken|.
643 643 :type apiuser: AuthUser
644 644 :param source_repo: Set the source repository name.
645 645 :type source_repo: str
646 646 :param target_repo: Set the target repository name.
647 647 :type target_repo: str
648 648 :param source_ref: Set the source ref name.
649 649 :type source_ref: str
650 650 :param target_ref: Set the target ref name.
651 651 :type target_ref: str
652 652 :param owner: user_id or username
653 653 :type owner: Optional(str)
654 654 :param title: Optionally set the pull request title; it is generated otherwise
655 655 :type title: str
656 656 :param description: Set the pull request description.
657 657 :type description: Optional(str)
658 658 :type description_renderer: Optional(str)
659 659 :param description_renderer: Set pull request renderer for the description.
660 660 It should be 'rst', 'markdown' or 'plain'. If not given, the default
661 661 system renderer will be used
662 662 :param reviewers: Set the new pull request reviewers list.
663 663 Reviewers defined by review rules will be added automatically to the
664 664 defined list.
665 665 :type reviewers: Optional(list)
666 666 Accepts username strings or objects of the format:
667 667
668 668 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
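
Example usage (a hypothetical payload; repository names, refs and reviewers are
placeholders and follow the formats described above):

.. code-block:: python

    payload = {
        'id': 6,
        'auth_token': '<auth_token>',
        'method': 'create_pull_request',
        'args': {
            'source_repo': 'my-repo-fork',
            'target_repo': 'my-repo',
            'source_ref': 'branch:feature-x',
            'target_ref': 'branch:master',
            # title is optional and generated when omitted
            'title': 'Add feature X',
            'reviewers': [
                {'username': 'nick', 'reasons': ['original author'], 'mandatory': True},
            ],
        },
    }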
669 669 """
670 670
671 671 source_db_repo = get_repo_or_error(source_repo)
672 672 target_db_repo = get_repo_or_error(target_repo)
673 673 if not has_superadmin_permission(apiuser):
674 674 _perms = ('repository.admin', 'repository.write', 'repository.read',)
675 675 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
676 676
677 677 owner = validate_set_owner_permissions(apiuser, owner)
678 678
679 679 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
680 680 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
681 681
682 682 source_scm = source_db_repo.scm_instance()
683 683 target_scm = target_db_repo.scm_instance()
684 684
685 685 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
686 686 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
687 687
688 688 ancestor = source_scm.get_common_ancestor(
689 689 source_commit.raw_id, target_commit.raw_id, target_scm)
690 690 if not ancestor:
691 691 raise JSONRPCError('no common ancestor found')
692 692
693 693 # recalculate target ref based on ancestor
694 694 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
695 695 full_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
696 696
697 697 commit_ranges = target_scm.compare(
698 698 target_commit.raw_id, source_commit.raw_id, source_scm,
699 699 merge=True, pre_load=[])
700 700
701 701 if not commit_ranges:
702 702 raise JSONRPCError('no commits found')
703 703
704 704 reviewer_objects = Optional.extract(reviewers) or []
705 705
706 706 # serialize and validate the given reviewers
707 707 if reviewer_objects:
708 708 schema = ReviewerListSchema()
709 709 try:
710 710 reviewer_objects = schema.deserialize(reviewer_objects)
711 711 except Invalid as err:
712 712 raise JSONRPCValidationError(colander_exc=err)
713 713
714 714 # validate users
715 715 for reviewer_object in reviewer_objects:
716 716 user = get_user_or_error(reviewer_object['username'])
717 717 reviewer_object['user_id'] = user.user_id
718 718
719 719 get_default_reviewers_data, validate_default_reviewers = \
720 720 PullRequestModel().get_reviewer_functions()
721 721
722 722 # recalculate reviewers logic, to make sure we can validate this
723 723 reviewer_rules = get_default_reviewers_data(
724 724 owner, source_db_repo,
725 725 source_commit, target_db_repo, target_commit)
726 726
727 727 # now MERGE the given reviewers with the calculated ones
728 728 reviewer_objects = reviewer_rules['reviewers'] + reviewer_objects
729 729
730 730 try:
731 731 reviewers = validate_default_reviewers(
732 732 reviewer_objects, reviewer_rules)
733 733 except ValueError as e:
734 734 raise JSONRPCError('Reviewers Validation: {}'.format(e))
735 735
736 736 title = Optional.extract(title)
737 737 if not title:
738 738 title_source_ref = source_ref.split(':', 2)[1]
739 739 title = PullRequestModel().generate_pullrequest_title(
740 740 source=source_repo,
741 741 source_ref=title_source_ref,
742 742 target=target_repo
743 743 )
744 744 # fetch the configured renderer; fall back to 'plain' when none is set
745 745 rc_config = SettingsModel().get_all_settings()
746 746 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
747 747 description = Optional.extract(description)
748 748 description_renderer = Optional.extract(description_renderer) or default_system_renderer
749 749
750 750 pull_request = PullRequestModel().create(
751 751 created_by=owner.user_id,
752 752 source_repo=source_repo,
753 753 source_ref=full_source_ref,
754 754 target_repo=target_repo,
755 755 target_ref=full_target_ref,
756 756 revisions=[commit.raw_id for commit in reversed(commit_ranges)],
757 757 reviewers=reviewers,
758 758 title=title,
759 759 description=description,
760 760 description_renderer=description_renderer,
761 761 reviewer_data=reviewer_rules,
762 762 auth_user=apiuser
763 763 )
764 764
765 765 Session().commit()
766 766 data = {
767 767 'msg': 'Created new pull request `{}`'.format(title),
768 768 'pull_request_id': pull_request.pull_request_id,
769 769 }
770 770 return data
771 771
772 772
773 773 @jsonrpc_method()
774 774 def update_pull_request(
775 775 request, apiuser, pullrequestid, repoid=Optional(None),
776 776 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
777 777 reviewers=Optional(None), update_commits=Optional(None)):
778 778 """
779 779 Updates a pull request.
780 780
781 781 :param apiuser: This is filled automatically from the |authtoken|.
782 782 :type apiuser: AuthUser
783 783 :param repoid: Optional repository name or repository ID.
784 784 :type repoid: str or int
785 785 :param pullrequestid: The pull request ID.
786 786 :type pullrequestid: int
787 787 :param title: Set the pull request title.
788 788 :type title: str
789 789 :param description: Update pull request description.
790 790 :type description: Optional(str)
791 791 :type description_renderer: Optional(str)
792 792 :param description_renderer: Update pull request renderer for the description.
793 793 It should be 'rst', 'markdown' or 'plain'
794 794 :param reviewers: Update pull request reviewers list with new value.
795 795 :type reviewers: Optional(list)
796 796 Accepts username strings or objects of the format:
797 797
798 798 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
799 799
800 800 :param update_commits: Trigger update of commits for this pull request
801 801 :type update_commits: Optional(bool)
802 802
803 803 Example output:
804 804
805 805 .. code-block:: bash
806 806
807 807 id : <id_given_in_input>
808 808 result : {
809 809 "msg": "Updated pull request `63`",
810 810 "pull_request": <pull_request_object>,
811 811 "updated_reviewers": {
812 812 "added": [
813 813 "username"
814 814 ],
815 815 "removed": []
816 816 },
817 817 "updated_commits": {
818 818 "added": [
819 819 "<sha1_hash>"
820 820 ],
821 821 "common": [
822 822 "<sha1_hash>",
823 823 "<sha1_hash>",
824 824 ],
825 825 "removed": []
826 826 }
827 827 }
828 828 error : null
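
Example usage (a hypothetical payload; ``update_commits`` pulls new commits from
the source ref and is only allowed while the pull request is in the 'created'
state):

.. code-block:: python

    payload = {
        'id': 7,
        'auth_token': '<auth_token>',
        'method': 'update_pull_request',
        'args': {
            'pullrequestid': 1,
            'title': 'Updated title',
            # trigger an update of commits from the source repository
            'update_commits': True,
        },
    }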
829 829 """
830 830
831 831 pull_request = get_pull_request_or_error(pullrequestid)
832 832 if Optional.extract(repoid):
833 833 repo = get_repo_or_error(repoid)
834 834 else:
835 835 repo = pull_request.target_repo
836 836
837 837 if not PullRequestModel().check_user_update(
838 838 pull_request, apiuser, api=True):
839 839 raise JSONRPCError(
840 840 'pull request `%s` update failed, no permission to update.' % (
841 841 pullrequestid,))
842 842 if pull_request.is_closed():
843 843 raise JSONRPCError(
844 844 'pull request `%s` update failed, pull request is closed' % (
845 845 pullrequestid,))
846 846
847 847 reviewer_objects = Optional.extract(reviewers) or []
848 848
849 849 if reviewer_objects:
850 850 schema = ReviewerListSchema()
851 851 try:
852 852 reviewer_objects = schema.deserialize(reviewer_objects)
853 853 except Invalid as err:
854 854 raise JSONRPCValidationError(colander_exc=err)
855 855
856 856 # validate users
857 857 for reviewer_object in reviewer_objects:
858 858 user = get_user_or_error(reviewer_object['username'])
859 859 reviewer_object['user_id'] = user.user_id
860 860
861 861 get_default_reviewers_data, get_validated_reviewers = \
862 862 PullRequestModel().get_reviewer_functions()
863 863
864 864 # re-use stored rules
865 865 reviewer_rules = pull_request.reviewer_data
866 866 try:
867 867 reviewers = get_validated_reviewers(
868 868 reviewer_objects, reviewer_rules)
869 869 except ValueError as e:
870 870 raise JSONRPCError('Reviewers Validation: {}'.format(e))
871 871 else:
872 872 reviewers = []
873 873
874 874 title = Optional.extract(title)
875 875 description = Optional.extract(description)
876 876 description_renderer = Optional.extract(description_renderer)
877 877
878 878 if title or description:
879 879 PullRequestModel().edit(
880 880 pull_request,
881 881 title or pull_request.title,
882 882 description or pull_request.description,
883 883 description_renderer or pull_request.description_renderer,
884 884 apiuser)
885 885 Session().commit()
886 886
887 887 commit_changes = {"added": [], "common": [], "removed": []}
888 888 if str2bool(Optional.extract(update_commits)):
889 889
890 890 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
891 891 raise JSONRPCError(
892 892 'Operation forbidden because pull request is in state {}, '
893 893 'only state {} is allowed.'.format(
894 894 pull_request.pull_request_state, PullRequest.STATE_CREATED))
895 895
896 896 with pull_request.set_state(PullRequest.STATE_UPDATING):
897 897 if PullRequestModel().has_valid_update_type(pull_request):
898 update_response = PullRequestModel().update_commits(pull_request)
898 db_user = apiuser.get_instance()
899 update_response = PullRequestModel().update_commits(
900 pull_request, db_user)
899 901 commit_changes = update_response.changes or commit_changes
900 902 Session().commit()
901 903
902 904 reviewers_changes = {"added": [], "removed": []}
903 905 if reviewers:
904 906 old_calculated_status = pull_request.calculated_review_status()
905 907 added_reviewers, removed_reviewers = \
906 908 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
907 909
908 910 reviewers_changes['added'] = sorted(
909 911 [get_user_or_error(n).username for n in added_reviewers])
910 912 reviewers_changes['removed'] = sorted(
911 913 [get_user_or_error(n).username for n in removed_reviewers])
912 914 Session().commit()
913 915
914 916 # trigger a status-change event if the change in reviewers changes the status
915 917 calculated_status = pull_request.calculated_review_status()
916 918 if old_calculated_status != calculated_status:
917 919 PullRequestModel().trigger_pull_request_hook(
918 920 pull_request, apiuser, 'review_status_change',
919 921 data={'status': calculated_status})
920 922
921 923 data = {
922 924 'msg': 'Updated pull request `{}`'.format(
923 925 pull_request.pull_request_id),
924 926 'pull_request': pull_request.get_api_data(),
925 927 'updated_commits': commit_changes,
926 928 'updated_reviewers': reviewers_changes
927 929 }
928 930
929 931 return data
930 932
931 933
932 934 @jsonrpc_method()
933 935 def close_pull_request(
934 936 request, apiuser, pullrequestid, repoid=Optional(None),
935 937 userid=Optional(OAttr('apiuser')), message=Optional('')):
936 938 """
937 939 Close the pull request specified by `pullrequestid`.
938 940
939 941 :param apiuser: This is filled automatically from the |authtoken|.
940 942 :type apiuser: AuthUser
941 943 :param repoid: Optional, repository name or repository ID to which the pull
942 944 request belongs.
943 945 :type repoid: str or int
944 946 :param pullrequestid: ID of the pull request to be closed.
945 947 :type pullrequestid: int
946 948 :param userid: Close the pull request as this user.
947 949 :type userid: Optional(str or int)
948 950 :param message: Optional message to close the Pull Request with. If not
949 951 specified, it will be generated automatically.
950 952 :type message: Optional(str)
951 953
952 954 Example output:
953 955
954 956 .. code-block:: bash
955 957
956 958 "id": <id_given_in_input>,
957 959 "result": {
958 960 "pull_request_id": "<int>",
959 961 "close_status": "<str:status_lbl>",
960 962 "closed": "<bool>"
961 963 },
962 964 "error": null
963 965
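Example usage (a hypothetical payload; the message is optional and generated
automatically when omitted):

.. code-block:: python

    payload = {
        'id': 8,
        'auth_token': '<auth_token>',
        'method': 'close_pull_request',
        'args': {'pullrequestid': 1, 'message': 'Closing, superseded by a newer pull request'},
    }
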
964 966 """
965 967 _ = request.translate
966 968
967 969 pull_request = get_pull_request_or_error(pullrequestid)
968 970 if Optional.extract(repoid):
969 971 repo = get_repo_or_error(repoid)
970 972 else:
971 973 repo = pull_request.target_repo
972 974
973 975 if not isinstance(userid, Optional):
974 976 if (has_superadmin_permission(apiuser) or
975 977 HasRepoPermissionAnyApi('repository.admin')(
976 978 user=apiuser, repo_name=repo.repo_name)):
977 979 apiuser = get_user_or_error(userid)
978 980 else:
979 981 raise JSONRPCError('userid is not the same as your user')
980 982
981 983 if pull_request.is_closed():
982 984 raise JSONRPCError(
983 985 'pull request `%s` is already closed' % (pullrequestid,))
984 986
985 987 # only owner or admin or person with write permissions
986 988 allowed_to_close = PullRequestModel().check_user_update(
987 989 pull_request, apiuser, api=True)
988 990
989 991 if not allowed_to_close:
990 992 raise JSONRPCError(
991 993 'pull request `%s` close failed, no permission to close.' % (
992 994 pullrequestid,))
993 995
994 996 # message we're using to close the PR, else it's automatically generated
995 997 message = Optional.extract(message)
996 998
997 999 # finally close the PR, with proper message comment
998 1000 comment, status = PullRequestModel().close_pull_request_with_comment(
999 1001 pull_request, apiuser, repo, message=message, auth_user=apiuser)
1000 1002 status_lbl = ChangesetStatus.get_status_lbl(status)
1001 1003
1002 1004 Session().commit()
1003 1005
1004 1006 data = {
1005 1007 'pull_request_id': pull_request.pull_request_id,
1006 1008 'close_status': status_lbl,
1007 1009 'closed': True,
1008 1010 }
1009 1011 return data
@@ -1,783 +1,782 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 import datetime
26 26 import formencode
27 27 import formencode.htmlfill
28 28
29 29 import rhodecode
30 30 from pyramid.view import view_config
31 31 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
32 32 from pyramid.renderers import render
33 33 from pyramid.response import Response
34 34
35 35 from rhodecode.apps._base import BaseAppView
36 36 from rhodecode.apps._base.navigation import navigation_list
37 37 from rhodecode.apps.svn_support.config_keys import generate_config
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
41 41 from rhodecode.lib.celerylib import tasks, run_task
42 42 from rhodecode.lib.utils import repo2db_mapper
43 43 from rhodecode.lib.utils2 import str2bool, safe_unicode, AttributeDict
44 44 from rhodecode.lib.index import searcher_from_config
45 45
46 46 from rhodecode.model.db import RhodeCodeUi, Repository
47 47 from rhodecode.model.forms import (ApplicationSettingsForm,
48 48 ApplicationUiSettingsForm, ApplicationVisualisationForm,
49 49 LabsSettingsForm, IssueTrackerPatternsForm)
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51
52 52 from rhodecode.model.scm import ScmModel
53 53 from rhodecode.model.notification import EmailNotificationModel
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.settings import (
56 56 IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound,
57 57 SettingsModel)
58 58
59 59
60 60 log = logging.getLogger(__name__)
61 61
62 62
63 63 class AdminSettingsView(BaseAppView):
64 64
65 65 def load_default_context(self):
66 66 c = self._get_local_tmpl_context()
67 67 c.labs_active = str2bool(
68 68 rhodecode.CONFIG.get('labs_settings_active', 'true'))
69 69 c.navlist = navigation_list(self.request)
70 70
71 71 return c
72 72
73 73 @classmethod
74 74 def _get_ui_settings(cls):
75 75 ret = RhodeCodeUi.query().all()
76 76
77 77 if not ret:
78 78 raise Exception('Could not get application ui settings !')
79 79 settings = {}
80 80 for each in ret:
81 81 k = each.ui_key
82 82 v = each.ui_value
83 83 if k == '/':
84 84 k = 'root_path'
85 85
86 86 if k in ['push_ssl', 'publish', 'enabled']:
87 87 v = str2bool(v)
88 88
89 89 if k.find('.') != -1:
90 90 k = k.replace('.', '_')
91 91
92 92 if each.ui_section in ['hooks', 'extensions']:
93 93 v = each.ui_active
94 94
95 95 settings[each.ui_section + '_' + k] = v
96 96 return settings
97 97
98 98 @classmethod
99 99 def _form_defaults(cls):
100 100 defaults = SettingsModel().get_all_settings()
101 101 defaults.update(cls._get_ui_settings())
102 102
103 103 defaults.update({
104 104 'new_svn_branch': '',
105 105 'new_svn_tag': '',
106 106 })
107 107 return defaults
108 108
109 109 @LoginRequired()
110 110 @HasPermissionAllDecorator('hg.admin')
111 111 @view_config(
112 112 route_name='admin_settings_vcs', request_method='GET',
113 113 renderer='rhodecode:templates/admin/settings/settings.mako')
114 114 def settings_vcs(self):
115 115 c = self.load_default_context()
116 116 c.active = 'vcs'
117 117 model = VcsSettingsModel()
118 118 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
119 119 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
120 120
121 121 settings = self.request.registry.settings
122 122 c.svn_proxy_generate_config = settings[generate_config]
123 123
124 124 defaults = self._form_defaults()
125 125
126 126 model.create_largeobjects_dirs_if_needed(defaults['paths_root_path'])
127 127
128 128 data = render('rhodecode:templates/admin/settings/settings.mako',
129 129 self._get_template_context(c), self.request)
130 130 html = formencode.htmlfill.render(
131 131 data,
132 132 defaults=defaults,
133 133 encoding="UTF-8",
134 134 force_defaults=False
135 135 )
136 136 return Response(html)
137 137
138 138 @LoginRequired()
139 139 @HasPermissionAllDecorator('hg.admin')
140 140 @CSRFRequired()
141 141 @view_config(
142 142 route_name='admin_settings_vcs_update', request_method='POST',
143 143 renderer='rhodecode:templates/admin/settings/settings.mako')
144 144 def settings_vcs_update(self):
145 145 _ = self.request.translate
146 146 c = self.load_default_context()
147 147 c.active = 'vcs'
148 148
149 149 model = VcsSettingsModel()
150 150 c.svn_branch_patterns = model.get_global_svn_branch_patterns()
151 151 c.svn_tag_patterns = model.get_global_svn_tag_patterns()
152 152
153 153 settings = self.request.registry.settings
154 154 c.svn_proxy_generate_config = settings[generate_config]
155 155
156 156 application_form = ApplicationUiSettingsForm(self.request.translate)()
157 157
158 158 try:
159 159 form_result = application_form.to_python(dict(self.request.POST))
160 160 except formencode.Invalid as errors:
161 161 h.flash(
162 162 _("Some form inputs contain invalid data."),
163 163 category='error')
164 164 data = render('rhodecode:templates/admin/settings/settings.mako',
165 165 self._get_template_context(c), self.request)
166 166 html = formencode.htmlfill.render(
167 167 data,
168 168 defaults=errors.value,
169 169 errors=errors.error_dict or {},
170 170 prefix_error=False,
171 171 encoding="UTF-8",
172 172 force_defaults=False
173 173 )
174 174 return Response(html)
175 175
176 176 try:
177 177 if c.visual.allow_repo_location_change:
178 178 model.update_global_path_setting(form_result['paths_root_path'])
179 179
180 180 model.update_global_ssl_setting(form_result['web_push_ssl'])
181 181 model.update_global_hook_settings(form_result)
182 182
183 183 model.create_or_update_global_svn_settings(form_result)
184 184 model.create_or_update_global_hg_settings(form_result)
185 185 model.create_or_update_global_git_settings(form_result)
186 186 model.create_or_update_global_pr_settings(form_result)
187 187 except Exception:
188 188 log.exception("Exception while updating settings")
189 189 h.flash(_('Error occurred during updating '
190 190 'application settings'), category='error')
191 191 else:
192 192 Session().commit()
193 193 h.flash(_('Updated VCS settings'), category='success')
194 194 raise HTTPFound(h.route_path('admin_settings_vcs'))
195 195
196 196 data = render('rhodecode:templates/admin/settings/settings.mako',
197 197 self._get_template_context(c), self.request)
198 198 html = formencode.htmlfill.render(
199 199 data,
200 200 defaults=self._form_defaults(),
201 201 encoding="UTF-8",
202 202 force_defaults=False
203 203 )
204 204 return Response(html)
205 205
206 206 @LoginRequired()
207 207 @HasPermissionAllDecorator('hg.admin')
208 208 @CSRFRequired()
209 209 @view_config(
210 210 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
211 211 renderer='json_ext', xhr=True)
212 212 def settings_vcs_delete_svn_pattern(self):
213 213 delete_pattern_id = self.request.POST.get('delete_svn_pattern')
214 214 model = VcsSettingsModel()
215 215 try:
216 216 model.delete_global_svn_pattern(delete_pattern_id)
217 217 except SettingNotFound:
218 218 log.exception(
219 219 'Failed to delete svn_pattern with id %s', delete_pattern_id)
220 220 raise HTTPNotFound()
221 221
222 222 Session().commit()
223 223 return True
224 224
225 225 @LoginRequired()
226 226 @HasPermissionAllDecorator('hg.admin')
227 227 @view_config(
228 228 route_name='admin_settings_mapping', request_method='GET',
229 229 renderer='rhodecode:templates/admin/settings/settings.mako')
230 230 def settings_mapping(self):
231 231 c = self.load_default_context()
232 232 c.active = 'mapping'
233 233
234 234 data = render('rhodecode:templates/admin/settings/settings.mako',
235 235 self._get_template_context(c), self.request)
236 236 html = formencode.htmlfill.render(
237 237 data,
238 238 defaults=self._form_defaults(),
239 239 encoding="UTF-8",
240 240 force_defaults=False
241 241 )
242 242 return Response(html)
243 243
244 244 @LoginRequired()
245 245 @HasPermissionAllDecorator('hg.admin')
246 246 @CSRFRequired()
247 247 @view_config(
248 248 route_name='admin_settings_mapping_update', request_method='POST',
249 249 renderer='rhodecode:templates/admin/settings/settings.mako')
250 250 def settings_mapping_update(self):
251 251 _ = self.request.translate
252 252 c = self.load_default_context()
253 253 c.active = 'mapping'
254 254 rm_obsolete = self.request.POST.get('destroy', False)
255 255 invalidate_cache = self.request.POST.get('invalidate', False)
256 256 log.debug(
257 257 'rescanning repo location with destroy obsolete=%s', rm_obsolete)
258 258
259 259 if invalidate_cache:
260 260 log.debug('invalidating all repositories cache')
261 261 for repo in Repository.get_all():
262 262 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
263 263
264 264 filesystem_repos = ScmModel().repo_scan()
265 265 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete)
266 266 _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-'
267 267 h.flash(_('Repositories successfully '
268 268 'rescanned added: %s ; removed: %s') %
269 269 (_repr(added), _repr(removed)),
270 270 category='success')
271 271 raise HTTPFound(h.route_path('admin_settings_mapping'))
272 272
273 273 @LoginRequired()
274 274 @HasPermissionAllDecorator('hg.admin')
275 275 @view_config(
276 276 route_name='admin_settings', request_method='GET',
277 277 renderer='rhodecode:templates/admin/settings/settings.mako')
278 278 @view_config(
279 279 route_name='admin_settings_global', request_method='GET',
280 280 renderer='rhodecode:templates/admin/settings/settings.mako')
281 281 def settings_global(self):
282 282 c = self.load_default_context()
283 283 c.active = 'global'
284 284 c.personal_repo_group_default_pattern = RepoGroupModel()\
285 285 .get_personal_group_name_pattern()
286 286
287 287 data = render('rhodecode:templates/admin/settings/settings.mako',
288 288 self._get_template_context(c), self.request)
289 289 html = formencode.htmlfill.render(
290 290 data,
291 291 defaults=self._form_defaults(),
292 292 encoding="UTF-8",
293 293 force_defaults=False
294 294 )
295 295 return Response(html)
296 296
297 297 @LoginRequired()
298 298 @HasPermissionAllDecorator('hg.admin')
299 299 @CSRFRequired()
300 300 @view_config(
301 301 route_name='admin_settings_update', request_method='POST',
302 302 renderer='rhodecode:templates/admin/settings/settings.mako')
303 303 @view_config(
304 304 route_name='admin_settings_global_update', request_method='POST',
305 305 renderer='rhodecode:templates/admin/settings/settings.mako')
306 306 def settings_global_update(self):
307 307 _ = self.request.translate
308 308 c = self.load_default_context()
309 309 c.active = 'global'
310 310 c.personal_repo_group_default_pattern = RepoGroupModel()\
311 311 .get_personal_group_name_pattern()
312 312 application_form = ApplicationSettingsForm(self.request.translate)()
313 313 try:
314 314 form_result = application_form.to_python(dict(self.request.POST))
315 315 except formencode.Invalid as errors:
316 316 h.flash(
317 317 _("Some form inputs contain invalid data."),
318 318 category='error')
319 319 data = render('rhodecode:templates/admin/settings/settings.mako',
320 320 self._get_template_context(c), self.request)
321 321 html = formencode.htmlfill.render(
322 322 data,
323 323 defaults=errors.value,
324 324 errors=errors.error_dict or {},
325 325 prefix_error=False,
326 326 encoding="UTF-8",
327 327 force_defaults=False
328 328 )
329 329 return Response(html)
330 330
331 331 settings = [
332 332 ('title', 'rhodecode_title', 'unicode'),
333 333 ('realm', 'rhodecode_realm', 'unicode'),
334 334 ('pre_code', 'rhodecode_pre_code', 'unicode'),
335 335 ('post_code', 'rhodecode_post_code', 'unicode'),
336 336 ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'),
337 337 ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'),
338 338 ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'),
339 339 ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'),
340 340 ]
341 341 try:
342 342 for setting, form_key, type_ in settings:
343 343 sett = SettingsModel().create_or_update_setting(
344 344 setting, form_result[form_key], type_)
345 345 Session().add(sett)
346 346
347 347 Session().commit()
348 348 SettingsModel().invalidate_settings_cache()
349 349 h.flash(_('Updated application settings'), category='success')
350 350 except Exception:
351 351 log.exception("Exception while updating application settings")
352 352 h.flash(
353 353 _('Error occurred during updating application settings'),
354 354 category='error')
355 355
356 356 raise HTTPFound(h.route_path('admin_settings_global'))
357 357
358 358 @LoginRequired()
359 359 @HasPermissionAllDecorator('hg.admin')
360 360 @view_config(
361 361 route_name='admin_settings_visual', request_method='GET',
362 362 renderer='rhodecode:templates/admin/settings/settings.mako')
363 363 def settings_visual(self):
364 364 c = self.load_default_context()
365 365 c.active = 'visual'
366 366
367 367 data = render('rhodecode:templates/admin/settings/settings.mako',
368 368 self._get_template_context(c), self.request)
369 369 html = formencode.htmlfill.render(
370 370 data,
371 371 defaults=self._form_defaults(),
372 372 encoding="UTF-8",
373 373 force_defaults=False
374 374 )
375 375 return Response(html)
376 376
377 377 @LoginRequired()
378 378 @HasPermissionAllDecorator('hg.admin')
379 379 @CSRFRequired()
380 380 @view_config(
381 381 route_name='admin_settings_visual_update', request_method='POST',
382 382 renderer='rhodecode:templates/admin/settings/settings.mako')
383 383 def settings_visual_update(self):
384 384 _ = self.request.translate
385 385 c = self.load_default_context()
386 386 c.active = 'visual'
387 387 application_form = ApplicationVisualisationForm(self.request.translate)()
388 388 try:
389 389 form_result = application_form.to_python(dict(self.request.POST))
390 390 except formencode.Invalid as errors:
391 391 h.flash(
392 392 _("Some form inputs contain invalid data."),
393 393 category='error')
394 394 data = render('rhodecode:templates/admin/settings/settings.mako',
395 395 self._get_template_context(c), self.request)
396 396 html = formencode.htmlfill.render(
397 397 data,
398 398 defaults=errors.value,
399 399 errors=errors.error_dict or {},
400 400 prefix_error=False,
401 401 encoding="UTF-8",
402 402 force_defaults=False
403 403 )
404 404 return Response(html)
405 405
406 406 try:
407 407 settings = [
408 408 ('show_public_icon', 'rhodecode_show_public_icon', 'bool'),
409 409 ('show_private_icon', 'rhodecode_show_private_icon', 'bool'),
410 410 ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'),
411 411 ('repository_fields', 'rhodecode_repository_fields', 'bool'),
412 412 ('dashboard_items', 'rhodecode_dashboard_items', 'int'),
413 413 ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'),
414 414 ('show_version', 'rhodecode_show_version', 'bool'),
415 415 ('use_gravatar', 'rhodecode_use_gravatar', 'bool'),
416 416 ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'),
417 417 ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'),
418 418 ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'),
419 419 ('clone_uri_ssh_tmpl', 'rhodecode_clone_uri_ssh_tmpl', 'unicode'),
420 420 ('support_url', 'rhodecode_support_url', 'unicode'),
421 421 ('show_revision_number', 'rhodecode_show_revision_number', 'bool'),
422 422 ('show_sha_length', 'rhodecode_show_sha_length', 'int'),
423 423 ]
424 424 for setting, form_key, type_ in settings:
425 425 sett = SettingsModel().create_or_update_setting(
426 426 setting, form_result[form_key], type_)
427 427 Session().add(sett)
428 428
429 429 Session().commit()
430 430 SettingsModel().invalidate_settings_cache()
431 431 h.flash(_('Updated visualisation settings'), category='success')
432 432 except Exception:
433 433 log.exception("Exception updating visualization settings")
434 434 h.flash(_('Error occurred during updating '
435 435 'visualisation settings'),
436 436 category='error')
437 437
438 438 raise HTTPFound(h.route_path('admin_settings_visual'))
439 439
440 440 @LoginRequired()
441 441 @HasPermissionAllDecorator('hg.admin')
442 442 @view_config(
443 443 route_name='admin_settings_issuetracker', request_method='GET',
444 444 renderer='rhodecode:templates/admin/settings/settings.mako')
445 445 def settings_issuetracker(self):
446 446 c = self.load_default_context()
447 447 c.active = 'issuetracker'
448 448 defaults = c.rc_config
449 449
450 450 entry_key = 'rhodecode_issuetracker_pat_'
451 451
452 452 c.issuetracker_entries = {}
453 453 for k, v in defaults.items():
454 454 if k.startswith(entry_key):
455 455 uid = k[len(entry_key):]
456 456 c.issuetracker_entries[uid] = None
457 457
458 458 for uid in c.issuetracker_entries:
459 459 c.issuetracker_entries[uid] = AttributeDict({
460 460 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid),
461 461 'url': defaults.get('rhodecode_issuetracker_url_' + uid),
462 462 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid),
463 463 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid),
464 464 })
465 465
466 466 return self._get_template_context(c)
467 467
468 468 @LoginRequired()
469 469 @HasPermissionAllDecorator('hg.admin')
470 470 @CSRFRequired()
471 471 @view_config(
472 472 route_name='admin_settings_issuetracker_test', request_method='POST',
473 473 renderer='string', xhr=True)
474 474 def settings_issuetracker_test(self):
475 475 return h.urlify_commit_message(
476 476 self.request.POST.get('test_text', ''),
477 477 'repo_group/test_repo1')
478 478
479 479 @LoginRequired()
480 480 @HasPermissionAllDecorator('hg.admin')
481 481 @CSRFRequired()
482 482 @view_config(
483 483 route_name='admin_settings_issuetracker_update', request_method='POST',
484 484 renderer='rhodecode:templates/admin/settings/settings.mako')
485 485 def settings_issuetracker_update(self):
486 486 _ = self.request.translate
487 487 self.load_default_context()
488 488 settings_model = IssueTrackerSettingsModel()
489 489
490 490 try:
491 491 form = IssueTrackerPatternsForm(self.request.translate)()
492 492 data = form.to_python(self.request.POST)
493 493 except formencode.Invalid as errors:
494 494 log.exception('Failed to add new pattern')
495 495 error = errors
496 496 h.flash(_('Invalid issue tracker pattern: {}'.format(error)),
497 497 category='error')
498 498 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
499 499
500 500 if data:
501 501 for uid in data.get('delete_patterns', []):
502 502 settings_model.delete_entries(uid)
503 503
504 504 for pattern in data.get('patterns', []):
505 505 for setting, value, type_ in pattern:
506 506 sett = settings_model.create_or_update_setting(
507 507 setting, value, type_)
508 508 Session().add(sett)
509 509
510 510 Session().commit()
511 511
512 512 SettingsModel().invalidate_settings_cache()
513 513 h.flash(_('Updated issue tracker entries'), category='success')
514 514 raise HTTPFound(h.route_path('admin_settings_issuetracker'))
515 515
516 516 @LoginRequired()
517 517 @HasPermissionAllDecorator('hg.admin')
518 518 @CSRFRequired()
519 519 @view_config(
520 520 route_name='admin_settings_issuetracker_delete', request_method='POST',
521 521 renderer='json_ext', xhr=True)
522 522 def settings_issuetracker_delete(self):
523 523 _ = self.request.translate
524 524 self.load_default_context()
525 525 uid = self.request.POST.get('uid')
526 526 try:
527 527 IssueTrackerSettingsModel().delete_entries(uid)
528 528 except Exception:
529 529 log.exception('Failed to delete issue tracker setting %s', uid)
530 530 raise HTTPNotFound()
531 531
532 532 SettingsModel().invalidate_settings_cache()
533 533 h.flash(_('Removed issue tracker entry.'), category='success')
534 534
535 535 return {'deleted': uid}
536 536
537 537 @LoginRequired()
538 538 @HasPermissionAllDecorator('hg.admin')
539 539 @view_config(
540 540 route_name='admin_settings_email', request_method='GET',
541 541 renderer='rhodecode:templates/admin/settings/settings.mako')
542 542 def settings_email(self):
543 543 c = self.load_default_context()
544 544 c.active = 'email'
545 545 c.rhodecode_ini = rhodecode.CONFIG
546 546
547 547 data = render('rhodecode:templates/admin/settings/settings.mako',
548 548 self._get_template_context(c), self.request)
549 549 html = formencode.htmlfill.render(
550 550 data,
551 551 defaults=self._form_defaults(),
552 552 encoding="UTF-8",
553 553 force_defaults=False
554 554 )
555 555 return Response(html)
556 556
557 557 @LoginRequired()
558 558 @HasPermissionAllDecorator('hg.admin')
559 559 @CSRFRequired()
560 560 @view_config(
561 561 route_name='admin_settings_email_update', request_method='POST',
562 562 renderer='rhodecode:templates/admin/settings/settings.mako')
563 563 def settings_email_update(self):
564 564 _ = self.request.translate
565 565 c = self.load_default_context()
566 566 c.active = 'email'
567 567
568 568 test_email = self.request.POST.get('test_email')
569 569
570 570 if not test_email:
571 571 h.flash(_('Please enter email address'), category='error')
572 572 raise HTTPFound(h.route_path('admin_settings_email'))
573 573
574 574 email_kwargs = {
575 575 'date': datetime.datetime.now(),
576 'user': c.rhodecode_user,
577 'rhodecode_version': c.rhodecode_version
576 'user': c.rhodecode_user
578 577 }
579 578
580 579 (subject, headers, email_body,
581 580 email_body_plaintext) = EmailNotificationModel().render_email(
582 581 EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs)
583 582
584 583 recipients = [test_email] if test_email else None
585 584
586 585 run_task(tasks.send_email, recipients, subject,
587 586 email_body_plaintext, email_body)
588 587
589 588 h.flash(_('Send email task created'), category='success')
590 589 raise HTTPFound(h.route_path('admin_settings_email'))
591 590
592 591 @LoginRequired()
593 592 @HasPermissionAllDecorator('hg.admin')
594 593 @view_config(
595 594 route_name='admin_settings_hooks', request_method='GET',
596 595 renderer='rhodecode:templates/admin/settings/settings.mako')
597 596 def settings_hooks(self):
598 597 c = self.load_default_context()
599 598 c.active = 'hooks'
600 599
601 600 model = SettingsModel()
602 601 c.hooks = model.get_builtin_hooks()
603 602 c.custom_hooks = model.get_custom_hooks()
604 603
605 604 data = render('rhodecode:templates/admin/settings/settings.mako',
606 605 self._get_template_context(c), self.request)
607 606 html = formencode.htmlfill.render(
608 607 data,
609 608 defaults=self._form_defaults(),
610 609 encoding="UTF-8",
611 610 force_defaults=False
612 611 )
613 612 return Response(html)
614 613
615 614 @LoginRequired()
616 615 @HasPermissionAllDecorator('hg.admin')
617 616 @CSRFRequired()
618 617 @view_config(
619 618 route_name='admin_settings_hooks_update', request_method='POST',
620 619 renderer='rhodecode:templates/admin/settings/settings.mako')
621 620 @view_config(
622 621 route_name='admin_settings_hooks_delete', request_method='POST',
623 622 renderer='rhodecode:templates/admin/settings/settings.mako')
624 623 def settings_hooks_update(self):
625 624 _ = self.request.translate
626 625 c = self.load_default_context()
627 626 c.active = 'hooks'
628 627 if c.visual.allow_custom_hooks_settings:
629 628 ui_key = self.request.POST.get('new_hook_ui_key')
630 629 ui_value = self.request.POST.get('new_hook_ui_value')
631 630
632 631 hook_id = self.request.POST.get('hook_id')
633 632 new_hook = False
634 633
635 634 model = SettingsModel()
636 635 try:
637 636 if ui_value and ui_key:
638 637 model.create_or_update_hook(ui_key, ui_value)
639 638 h.flash(_('Added new hook'), category='success')
640 639 new_hook = True
641 640 elif hook_id:
642 641 RhodeCodeUi.delete(hook_id)
643 642 Session().commit()
644 643
645 644 # check for edits
646 645 update = False
647 646 _d = self.request.POST.dict_of_lists()
648 647 for k, v in zip(_d.get('hook_ui_key', []),
649 648 _d.get('hook_ui_value_new', [])):
650 649 model.create_or_update_hook(k, v)
651 650 update = True
652 651
653 652 if update and not new_hook:
654 653 h.flash(_('Updated hooks'), category='success')
655 654 Session().commit()
656 655 except Exception:
657 656 log.exception("Exception during hook creation")
658 657 h.flash(_('Error occurred during hook creation'),
659 658 category='error')
660 659
661 660 raise HTTPFound(h.route_path('admin_settings_hooks'))
662 661
663 662 @LoginRequired()
664 663 @HasPermissionAllDecorator('hg.admin')
665 664 @view_config(
666 665 route_name='admin_settings_search', request_method='GET',
667 666 renderer='rhodecode:templates/admin/settings/settings.mako')
668 667 def settings_search(self):
669 668 c = self.load_default_context()
670 669 c.active = 'search'
671 670
672 671 c.searcher = searcher_from_config(self.request.registry.settings)
673 672 c.statistics = c.searcher.statistics(self.request.translate)
674 673
675 674 return self._get_template_context(c)
676 675
677 676 @LoginRequired()
678 677 @HasPermissionAllDecorator('hg.admin')
679 678 @view_config(
680 679 route_name='admin_settings_automation', request_method='GET',
681 680 renderer='rhodecode:templates/admin/settings/settings.mako')
682 681 def settings_automation(self):
683 682 c = self.load_default_context()
684 683 c.active = 'automation'
685 684
686 685 return self._get_template_context(c)
687 686
688 687 @LoginRequired()
689 688 @HasPermissionAllDecorator('hg.admin')
690 689 @view_config(
691 690 route_name='admin_settings_labs', request_method='GET',
692 691 renderer='rhodecode:templates/admin/settings/settings.mako')
693 692 def settings_labs(self):
694 693 c = self.load_default_context()
695 694 if not c.labs_active:
696 695 raise HTTPFound(h.route_path('admin_settings'))
697 696
698 697 c.active = 'labs'
699 698 c.lab_settings = _LAB_SETTINGS
700 699
701 700 data = render('rhodecode:templates/admin/settings/settings.mako',
702 701 self._get_template_context(c), self.request)
703 702 html = formencode.htmlfill.render(
704 703 data,
705 704 defaults=self._form_defaults(),
706 705 encoding="UTF-8",
707 706 force_defaults=False
708 707 )
709 708 return Response(html)
710 709
711 710 @LoginRequired()
712 711 @HasPermissionAllDecorator('hg.admin')
713 712 @CSRFRequired()
714 713 @view_config(
715 714 route_name='admin_settings_labs_update', request_method='POST',
716 715 renderer='rhodecode:templates/admin/settings/settings.mako')
717 716 def settings_labs_update(self):
718 717 _ = self.request.translate
719 718 c = self.load_default_context()
720 719 c.active = 'labs'
721 720
722 721 application_form = LabsSettingsForm(self.request.translate)()
723 722 try:
724 723 form_result = application_form.to_python(dict(self.request.POST))
725 724 except formencode.Invalid as errors:
726 725 h.flash(
727 726 _("Some form inputs contain invalid data."),
728 727 category='error')
729 728 data = render('rhodecode:templates/admin/settings/settings.mako',
730 729 self._get_template_context(c), self.request)
731 730 html = formencode.htmlfill.render(
732 731 data,
733 732 defaults=errors.value,
734 733 errors=errors.error_dict or {},
735 734 prefix_error=False,
736 735 encoding="UTF-8",
737 736 force_defaults=False
738 737 )
739 738 return Response(html)
740 739
741 740 try:
742 741 session = Session()
743 742 for setting in _LAB_SETTINGS:
744 743 setting_name = setting.key[len('rhodecode_'):]
745 744 sett = SettingsModel().create_or_update_setting(
746 745 setting_name, form_result[setting.key], setting.type)
747 746 session.add(sett)
748 747
749 748 except Exception:
750 749 log.exception('Exception while updating lab settings')
751 750 h.flash(_('Error occurred during updating labs settings'),
752 751 category='error')
753 752 else:
754 753 Session().commit()
755 754 SettingsModel().invalidate_settings_cache()
756 755 h.flash(_('Updated Labs settings'), category='success')
757 756 raise HTTPFound(h.route_path('admin_settings_labs'))
758 757
759 758 data = render('rhodecode:templates/admin/settings/settings.mako',
760 759 self._get_template_context(c), self.request)
761 760 html = formencode.htmlfill.render(
762 761 data,
763 762 defaults=self._form_defaults(),
764 763 encoding="UTF-8",
765 764 force_defaults=False
766 765 )
767 766 return Response(html)
768 767
769 768
770 769 # :param key: name of the setting including the 'rhodecode_' prefix
771 770 # :param type: the RhodeCodeSetting type to use.
772 771 # :param group: the i18ned group in which we should display this setting
773 772 # :param label: the i18ned label we should display for this setting
774 773 # :param help: the i18ned help we should display for this setting
775 774 LabSetting = collections.namedtuple(
776 775 'LabSetting', ('key', 'type', 'group', 'label', 'help'))
777 776
778 777
779 778 # This list has to be kept in sync with the form
780 779 # rhodecode.model.forms.LabsSettingsForm.
781 780 _LAB_SETTINGS = [
782 781
783 782 ]
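For reference, a hypothetical _LAB_SETTINGS entry built from the LabSetting fields documented above; the setting name, label and help text are illustrative assumptions and are not part of this changeset, and a real entry would also need a matching field in rhodecode.model.forms.LabsSettingsForm, as the comment above notes.

    # Hypothetical example only -- not part of this changeset. An entry in the
    # list above would look like this; the key keeps the 'rhodecode_' prefix
    # because settings_labs_update() strips it via setting.key[len('rhodecode_'):]
    # before persisting the value.
    example_lab_setting = LabSetting(
        key='rhodecode_hg_use_rebase_for_merging',  # assumed setting name
        type='bool',
        group='Mercurial',  # the real file would wrap these strings for i18n
        label='Use rebase as merge strategy',
        help='Rebase instead of creating a merge commit when merging.',
    )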
@@ -1,347 +1,386 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23 import datetime
24 24
25 25 from pyramid.view import view_config
26 26 from pyramid.renderers import render_to_response
27 27 from rhodecode.apps._base import BaseAppView
28 28 from rhodecode.lib.celerylib import run_task, tasks
29 29 from rhodecode.lib.utils2 import AttributeDict
30 30 from rhodecode.model.db import User
31 31 from rhodecode.model.notification import EmailNotificationModel
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 class DebugStyleView(BaseAppView):
37 37 def load_default_context(self):
38 38 c = self._get_local_tmpl_context()
39 39
40 40 return c
41 41
42 42 @view_config(
43 43 route_name='debug_style_home', request_method='GET',
44 44 renderer=None)
45 45 def index(self):
46 46 c = self.load_default_context()
47 47 c.active = 'index'
48 48
49 49 return render_to_response(
50 50 'debug_style/index.html', self._get_template_context(c),
51 51 request=self.request)
52 52
53 53 @view_config(
54 54 route_name='debug_style_email', request_method='GET',
55 55 renderer=None)
56 56 @view_config(
57 57 route_name='debug_style_email_plain_rendered', request_method='GET',
58 58 renderer=None)
59 59 def render_email(self):
60 60 c = self.load_default_context()
61 61 email_id = self.request.matchdict['email_id']
62 62 c.active = 'emails'
63 63
64 64 pr = AttributeDict(
65 65 pull_request_id=123,
66 66 title='digital_ocean: fix redis, elastic search start on boot, '
67 67 'fix fd limits on supervisor, set postgres 11 version',
68 68 description='''
69 69 Check if we should use full-topic or mini-topic.
70 70
71 71 - full topic produces some problems with merge states etc
72 72 - server-mini-topic probably needs tweaks.
73 73 ''',
74 74 repo_name='foobar',
75 75 source_ref_parts=AttributeDict(type='branch', name='fix-ticket-2000'),
76 76 target_ref_parts=AttributeDict(type='branch', name='master'),
77 77 )
78 78 target_repo = AttributeDict(repo_name='repo_group/target_repo')
79 79 source_repo = AttributeDict(repo_name='repo_group/source_repo')
80 80 user = User.get_by_username(self.request.GET.get('user')) or self._rhodecode_db_user
81
81 # file/commit changes for PR update
82 commit_changes = AttributeDict({
83 'added': ['aaaaaaabbbbb', 'cccccccddddddd'],
84 'removed': ['eeeeeeeeeee'],
85 })
86 file_changes = AttributeDict({
87 'added': ['a/file1.md', 'file2.py'],
88 'modified': ['b/modified_file.rst'],
89 'removed': ['.idea'],
90 })
82 91 email_kwargs = {
83 92 'test': {},
84 93 'message': {
85 94 'body': 'message body !'
86 95 },
87 96 'email_test': {
88 97 'user': user,
89 98 'date': datetime.datetime.now(),
90 'rhodecode_version': c.rhodecode_version
91 99 },
92 100 'password_reset': {
93 101 'password_reset_url': 'http://example.com/reset-rhodecode-password/token',
94 102
95 103 'user': user,
96 104 'date': datetime.datetime.now(),
97 105 'email': 'test@rhodecode.com',
98 106 'first_admin_email': User.get_first_super_admin().email
99 107 },
100 108 'password_reset_confirmation': {
101 109 'new_password': 'new-password-example',
102 110 'user': user,
103 111 'date': datetime.datetime.now(),
104 112 'email': 'test@rhodecode.com',
105 113 'first_admin_email': User.get_first_super_admin().email
106 114 },
107 115 'registration': {
108 116 'user': user,
109 117 'date': datetime.datetime.now(),
110 118 },
111 119
112 120 'pull_request_comment': {
113 121 'user': user,
114 122
115 123 'status_change': None,
116 124 'status_change_type': None,
117 125
118 126 'pull_request': pr,
119 127 'pull_request_commits': [],
120 128
121 129 'pull_request_target_repo': target_repo,
122 130 'pull_request_target_repo_url': 'http://target-repo/url',
123 131
124 132 'pull_request_source_repo': source_repo,
125 133 'pull_request_source_repo_url': 'http://source-repo/url',
126 134
127 135 'pull_request_url': 'http://localhost/pr1',
128 136 'pr_comment_url': 'http://comment-url',
129 137 'pr_comment_reply_url': 'http://comment-url#reply',
130 138
131 139 'comment_file': None,
132 140 'comment_line': None,
133 141 'comment_type': 'note',
134 142 'comment_body': 'This is my comment body. *I like !*',
135 143 'comment_id': 2048,
136 144 'renderer_type': 'markdown',
137 145 'mention': True,
138 146
139 147 },
140 148 'pull_request_comment+status': {
141 149 'user': user,
142 150
143 151 'status_change': 'approved',
144 152 'status_change_type': 'approved',
145 153
146 154 'pull_request': pr,
147 155 'pull_request_commits': [],
148 156
149 157 'pull_request_target_repo': target_repo,
150 158 'pull_request_target_repo_url': 'http://target-repo/url',
151 159
152 160 'pull_request_source_repo': source_repo,
153 161 'pull_request_source_repo_url': 'http://source-repo/url',
154 162
155 163 'pull_request_url': 'http://localhost/pr1',
156 164 'pr_comment_url': 'http://comment-url',
157 165 'pr_comment_reply_url': 'http://comment-url#reply',
158 166
159 167 'comment_type': 'todo',
160 168 'comment_file': None,
161 169 'comment_line': None,
162 170 'comment_body': '''
163 171 I think something like this would be better
164 172
165 173 ```py
166 174
167 175 def db():
168 176 global connection
169 177 return connection
170 178
171 179 ```
172 180
173 181 ''',
174 182 'comment_id': 2048,
175 183 'renderer_type': 'markdown',
176 184 'mention': True,
177 185
178 186 },
179 187 'pull_request_comment+file': {
180 188 'user': user,
181 189
182 190 'status_change': None,
183 191 'status_change_type': None,
184 192
185 193 'pull_request': pr,
186 194 'pull_request_commits': [],
187 195
188 196 'pull_request_target_repo': target_repo,
189 197 'pull_request_target_repo_url': 'http://target-repo/url',
190 198
191 199 'pull_request_source_repo': source_repo,
192 200 'pull_request_source_repo_url': 'http://source-repo/url',
193 201
194 202 'pull_request_url': 'http://localhost/pr1',
195 203
196 204 'pr_comment_url': 'http://comment-url',
197 205 'pr_comment_reply_url': 'http://comment-url#reply',
198 206
199 207 'comment_file': 'rhodecode/model/db.py',
200 208 'comment_line': 'o1210',
201 209 'comment_type': 'todo',
202 210 'comment_body': '''
203 211 I like this !
204 212
205 213 But please check this code::
206 214
207 215 def main():
208 216 print 'ok'
209 217
210 218 This should work better !
211 219 ''',
212 220 'comment_id': 2048,
213 221 'renderer_type': 'rst',
214 222 'mention': True,
215 223
216 224 },
217 225
226 'pull_request_update': {
227 'updating_user': user,
228
229 'status_change': None,
230 'status_change_type': None,
231
232 'pull_request': pr,
233 'pull_request_commits': [],
234
235 'pull_request_target_repo': target_repo,
236 'pull_request_target_repo_url': 'http://target-repo/url',
237
238 'pull_request_source_repo': source_repo,
239 'pull_request_source_repo_url': 'http://source-repo/url',
240
241 'pull_request_url': 'http://localhost/pr1',
242
243 # update comment links
244 'pr_comment_url': 'http://comment-url',
245 'pr_comment_reply_url': 'http://comment-url#reply',
246 'ancestor_commit_id': 'f39bd443',
247 'added_commits': commit_changes.added,
248 'removed_commits': commit_changes.removed,
249 'changed_files': (file_changes.added + file_changes.modified + file_changes.removed),
250 'added_files': file_changes.added,
251 'modified_files': file_changes.modified,
252 'removed_files': file_changes.removed,
253 },
254
218 255 'cs_comment': {
219 256 'user': user,
220 257 'commit': AttributeDict(idx=123, raw_id='a'*40, message='Commit message'),
221 258 'status_change': None,
222 259 'status_change_type': None,
223 260
224 261 'commit_target_repo_url': 'http://foo.example.com/#comment1',
225 262 'repo_name': 'test-repo',
226 263 'comment_type': 'note',
227 264 'comment_file': None,
228 265 'comment_line': None,
229 266 'commit_comment_url': 'http://comment-url',
230 267 'commit_comment_reply_url': 'http://comment-url#reply',
231 268 'comment_body': 'This is my comment body. *I like !*',
232 269 'comment_id': 2048,
233 270 'renderer_type': 'markdown',
234 271 'mention': True,
235 272 },
236 273 'cs_comment+status': {
237 274 'user': user,
238 275 'commit': AttributeDict(idx=123, raw_id='a' * 40, message='Commit message'),
239 276 'status_change': 'approved',
240 277 'status_change_type': 'approved',
241 278
242 279 'commit_target_repo_url': 'http://foo.example.com/#comment1',
243 280 'repo_name': 'test-repo',
244 281 'comment_type': 'note',
245 282 'comment_file': None,
246 283 'comment_line': None,
247 284 'commit_comment_url': 'http://comment-url',
248 285 'commit_comment_reply_url': 'http://comment-url#reply',
249 286 'comment_body': '''
250 287 Hello **world**
251 288
252 289 This is a multiline comment :)
253 290
254 291 - list
255 292 - list2
256 293 ''',
257 294 'comment_id': 2048,
258 295 'renderer_type': 'markdown',
259 296 'mention': True,
260 297 },
261 298 'cs_comment+file': {
262 299 'user': user,
263 300 'commit': AttributeDict(idx=123, raw_id='a' * 40, message='Commit message'),
264 301 'status_change': None,
265 302 'status_change_type': None,
266 303
267 304 'commit_target_repo_url': 'http://foo.example.com/#comment1',
268 305 'repo_name': 'test-repo',
269 306
270 307 'comment_type': 'note',
271 308 'comment_file': 'test-file.py',
272 309 'comment_line': 'n100',
273 310
274 311 'commit_comment_url': 'http://comment-url',
275 312 'commit_comment_reply_url': 'http://comment-url#reply',
276 313 'comment_body': 'This is my comment body. *I like !*',
277 314 'comment_id': 2048,
278 315 'renderer_type': 'markdown',
279 316 'mention': True,
280 317 },
281 318
282 319 'pull_request': {
283 320 'user': user,
284 321 'pull_request': pr,
285 322 'pull_request_commits': [
286 323 ('472d1df03bf7206e278fcedc6ac92b46b01c4e21', '''\
287 324 my-account: moved email closer to profile as it's similar data just moved outside.
288 325 '''),
289 326 ('cbfa3061b6de2696c7161ed15ba5c6a0045f90a7', '''\
290 327 users: description edit fixes
291 328
292 329 - tests
293 330 - added metatags info
294 331 '''),
295 332 ],
296 333
297 334 'pull_request_target_repo': target_repo,
298 335 'pull_request_target_repo_url': 'http://target-repo/url',
299 336
300 337 'pull_request_source_repo': source_repo,
301 338 'pull_request_source_repo_url': 'http://source-repo/url',
302 339
303 340 'pull_request_url': 'http://code.rhodecode.com/_pull-request/123',
304 341 }
305 342
306 343 }
307 344
308 345 template_type = email_id.split('+')[0]
309 346 (c.subject, c.headers, c.email_body,
310 347 c.email_body_plaintext) = EmailNotificationModel().render_email(
311 348 template_type, **email_kwargs.get(email_id, {}))
312 349
313 350 test_email = self.request.GET.get('email')
314 351 if test_email:
315 352 recipients = [test_email]
316 353 run_task(tasks.send_email, recipients, c.subject,
317 354 c.email_body_plaintext, c.email_body)
318 355
319 356 if self.request.matched_route.name == 'debug_style_email_plain_rendered':
320 357 template = 'debug_style/email_plain_rendered.mako'
321 358 else:
322 359 template = 'debug_style/email.mako'
323 360 return render_to_response(
324 361 template, self._get_template_context(c),
325 362 request=self.request)
326 363
327 364 @view_config(
328 365 route_name='debug_style_template', request_method='GET',
329 366 renderer=None)
330 367 def template(self):
331 368 t_path = self.request.matchdict['t_path']
332 369 c = self.load_default_context()
333 370 c.active = os.path.splitext(t_path)[0]
334 371 c.came_from = ''
335 372 c.email_types = {
336 373 'cs_comment+file': {},
337 374 'cs_comment+status': {},
338 375
339 376 'pull_request_comment+file': {},
340 377 'pull_request_comment+status': {},
378
379 'pull_request_update': {},
341 380 }
342 381 c.email_types.update(EmailNotificationModel.email_types)
343 382
344 383 return render_to_response(
345 384 'debug_style/' + t_path, self._get_template_context(c),
346 385 request=self.request)
347 386
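For reference, a minimal sketch (not part of this changeset) of rendering the new 'pull_request_update' notification with the kwargs assembled in render_email() above; the call shape follows the render_email()/run_task() usage already visible in this view, while the recipient address is an illustrative assumption.

    # Illustrative sketch: render the pull_request_update email the same way
    # the debug view does, by passing the template type string plus the
    # assembled kwargs, then hand it to the send_email task.
    update_kwargs = email_kwargs['pull_request_update']
    (subject, headers, email_body,
     email_body_plaintext) = EmailNotificationModel().render_email(
        'pull_request_update', **update_kwargs)
    run_task(tasks.send_email, ['dev@example.com'], subject,  # sample recipient
             email_body_plaintext, email_body)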
@@ -1,1476 +1,1477 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 44 RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 48 ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat., we use for OLD PRs a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.pull_request_state,
112 112 pr.work_in_progress, pr.target_repo.repo_name),
113 113 'name_raw': pr.pull_request_id,
114 114 'status': _render('pullrequest_status',
115 115 pr.calculated_review_status()),
116 116 'title': _render('pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'state': pr.pull_request_state,
125 125 'author': _render('pullrequest_author',
126 126 pr.author.full_contact, ),
127 127 'author_raw': pr.author.full_name,
128 128 'comments': _render('pullrequest_comments', len(comments)),
129 129 'comments_raw': len(comments),
130 130 'closed': pr.is_closed(),
131 131 })
132 132
133 133 data = ({
134 134 'draw': draw,
135 135 'data': data,
136 136 'recordsTotal': pull_requests_total_count,
137 137 'recordsFiltered': pull_requests_total_count,
138 138 })
139 139 return data
140 140
141 141 @LoginRequired()
142 142 @HasRepoPermissionAnyDecorator(
143 143 'repository.read', 'repository.write', 'repository.admin')
144 144 @view_config(
145 145 route_name='pullrequest_show_all', request_method='GET',
146 146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 147 def pull_request_list(self):
148 148 c = self.load_default_context()
149 149
150 150 req_get = self.request.GET
151 151 c.source = str2bool(req_get.get('source'))
152 152 c.closed = str2bool(req_get.get('closed'))
153 153 c.my = str2bool(req_get.get('my'))
154 154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156 156
157 157 c.active = 'open'
158 158 if c.my:
159 159 c.active = 'my'
160 160 if c.closed:
161 161 c.active = 'closed'
162 162 if c.awaiting_review and not c.source:
163 163 c.active = 'awaiting'
164 164 if c.source and not c.awaiting_review:
165 165 c.active = 'source'
166 166 if c.awaiting_my_review:
167 167 c.active = 'awaiting_my'
168 168
169 169 return self._get_template_context(c)
170 170
171 171 @LoginRequired()
172 172 @HasRepoPermissionAnyDecorator(
173 173 'repository.read', 'repository.write', 'repository.admin')
174 174 @view_config(
175 175 route_name='pullrequest_show_all_data', request_method='GET',
176 176 renderer='json_ext', xhr=True)
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 213 source_ref_id, target_ref_id,
214 214 target_commit, source_commit, diff_limit, file_limit,
215 215 fulldiff, hide_whitespace_changes, diff_context):
216 216
217 217 vcs_diff = PullRequestModel().get_diff(
218 218 source_repo, source_ref_id, target_ref_id,
219 219 hide_whitespace_changes, diff_context)
220 220
221 221 diff_processor = diffs.DiffProcessor(
222 222 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 223 file_limit=file_limit, show_full_diff=fulldiff)
224 224
225 225 _parsed = diff_processor.prepare()
226 226
227 227 diffset = codeblocks.DiffSet(
228 228 repo_name=self.db_repo_name,
229 229 source_repo_name=source_repo_name,
230 230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 232 )
233 233 diffset = self.path_filter.render_patchset_filtered(
234 234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235 235
236 236 return diffset
237 237
238 238 def _get_range_diffset(self, source_scm, source_repo,
239 239 commit1, commit2, diff_limit, file_limit,
240 240 fulldiff, hide_whitespace_changes, diff_context):
241 241 vcs_diff = source_scm.get_diff(
242 242 commit1, commit2,
243 243 ignore_whitespace=hide_whitespace_changes,
244 244 context=diff_context)
245 245
246 246 diff_processor = diffs.DiffProcessor(
247 247 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 248 file_limit=file_limit, show_full_diff=fulldiff)
249 249
250 250 _parsed = diff_processor.prepare()
251 251
252 252 diffset = codeblocks.DiffSet(
253 253 repo_name=source_repo.repo_name,
254 254 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 255 target_node_getter=codeblocks.diffset_node_getter(commit2))
256 256
257 257 diffset = self.path_filter.render_patchset_filtered(
258 258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259 259
260 260 return diffset
261 261
262 262 @LoginRequired()
263 263 @HasRepoPermissionAnyDecorator(
264 264 'repository.read', 'repository.write', 'repository.admin')
265 265 @view_config(
266 266 route_name='pullrequest_show', request_method='GET',
267 267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 268 def pull_request_show(self):
269 269 _ = self.request.translate
270 270 c = self.load_default_context()
271 271
272 272 pull_request = PullRequest.get_or_404(
273 273 self.request.matchdict['pull_request_id'])
274 274 pull_request_id = pull_request.pull_request_id
275 275
276 276 c.state_progressing = pull_request.is_state_changing()
277 277
278 278 version = self.request.GET.get('version')
279 279 from_version = self.request.GET.get('from_version') or version
280 280 merge_checks = self.request.GET.get('merge_checks')
281 281 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
282 282
283 283 # fetch global flags of ignore ws or context lines
284 284 diff_context = diffs.get_diff_context(self.request)
285 285 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
286 286
287 287 force_refresh = str2bool(self.request.GET.get('force_refresh'))
288 288
289 289 (pull_request_latest,
290 290 pull_request_at_ver,
291 291 pull_request_display_obj,
292 292 at_version) = PullRequestModel().get_pr_version(
293 293 pull_request_id, version=version)
294 294 pr_closed = pull_request_latest.is_closed()
295 295
296 296 if pr_closed and (version or from_version):
297 297 # not allow to browse versions
298 298 raise HTTPFound(h.route_path(
299 299 'pullrequest_show', repo_name=self.db_repo_name,
300 300 pull_request_id=pull_request_id))
301 301
302 302 versions = pull_request_display_obj.versions()
303 303 # used to store per-commit range diffs
304 304 c.changes = collections.OrderedDict()
305 305 c.range_diff_on = self.request.GET.get('range-diff') == "1"
306 306
307 307 c.at_version = at_version
308 308 c.at_version_num = (at_version
309 309 if at_version and at_version != 'latest'
310 310 else None)
311 311 c.at_version_pos = ChangesetComment.get_index_from_version(
312 312 c.at_version_num, versions)
313 313
314 314 (prev_pull_request_latest,
315 315 prev_pull_request_at_ver,
316 316 prev_pull_request_display_obj,
317 317 prev_at_version) = PullRequestModel().get_pr_version(
318 318 pull_request_id, version=from_version)
319 319
320 320 c.from_version = prev_at_version
321 321 c.from_version_num = (prev_at_version
322 322 if prev_at_version and prev_at_version != 'latest'
323 323 else None)
324 324 c.from_version_pos = ChangesetComment.get_index_from_version(
325 325 c.from_version_num, versions)
326 326
327 327 # define if we're in COMPARE mode or VIEW at version mode
328 328 compare = at_version != prev_at_version
329 329
330 330 # pull_requests repo_name we opened it against
331 331 # ie. target_repo must match
332 332 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
333 333 raise HTTPNotFound()
334 334
335 335 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
336 336 pull_request_at_ver)
337 337
338 338 c.pull_request = pull_request_display_obj
339 339 c.renderer = pull_request_at_ver.description_renderer or c.renderer
340 340 c.pull_request_latest = pull_request_latest
341 341
342 342 if compare or (at_version and not at_version == 'latest'):
343 343 c.allowed_to_change_status = False
344 344 c.allowed_to_update = False
345 345 c.allowed_to_merge = False
346 346 c.allowed_to_delete = False
347 347 c.allowed_to_comment = False
348 348 c.allowed_to_close = False
349 349 else:
350 350 can_change_status = PullRequestModel().check_user_change_status(
351 351 pull_request_at_ver, self._rhodecode_user)
352 352 c.allowed_to_change_status = can_change_status and not pr_closed
353 353
354 354 c.allowed_to_update = PullRequestModel().check_user_update(
355 355 pull_request_latest, self._rhodecode_user) and not pr_closed
356 356 c.allowed_to_merge = PullRequestModel().check_user_merge(
357 357 pull_request_latest, self._rhodecode_user) and not pr_closed
358 358 c.allowed_to_delete = PullRequestModel().check_user_delete(
359 359 pull_request_latest, self._rhodecode_user) and not pr_closed
360 360 c.allowed_to_comment = not pr_closed
361 361 c.allowed_to_close = c.allowed_to_merge and not pr_closed
362 362
363 363 c.forbid_adding_reviewers = False
364 364 c.forbid_author_to_review = False
365 365 c.forbid_commit_author_to_review = False
366 366
367 367 if pull_request_latest.reviewer_data and \
368 368 'rules' in pull_request_latest.reviewer_data:
369 369 rules = pull_request_latest.reviewer_data['rules'] or {}
370 370 try:
371 371 c.forbid_adding_reviewers = rules.get(
372 372 'forbid_adding_reviewers')
373 373 c.forbid_author_to_review = rules.get(
374 374 'forbid_author_to_review')
375 375 c.forbid_commit_author_to_review = rules.get(
376 376 'forbid_commit_author_to_review')
377 377 except Exception:
378 378 pass
379 379
380 380 # check merge capabilities
381 381 _merge_check = MergeCheck.validate(
382 382 pull_request_latest, auth_user=self._rhodecode_user,
383 383 translator=self.request.translate,
384 384 force_shadow_repo_refresh=force_refresh)
385 385 c.pr_merge_errors = _merge_check.error_details
386 386 c.pr_merge_possible = not _merge_check.failed
387 387 c.pr_merge_message = _merge_check.merge_msg
388 388
389 389 c.pr_merge_info = MergeCheck.get_merge_conditions(
390 390 pull_request_latest, translator=self.request.translate)
391 391
392 392 c.pull_request_review_status = _merge_check.review_status
393 393 if merge_checks:
394 394 self.request.override_renderer = \
395 395 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
396 396 return self._get_template_context(c)
397 397
398 398 comments_model = CommentsModel()
399 399
400 400 # reviewers and statuses
401 401 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
402 402 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
403 403
404 404 # GENERAL COMMENTS with versions #
405 405 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
406 406 q = q.order_by(ChangesetComment.comment_id.asc())
407 407 general_comments = q
408 408
409 409 # pick comments we want to render at current version
410 410 c.comment_versions = comments_model.aggregate_comments(
411 411 general_comments, versions, c.at_version_num)
412 412 c.comments = c.comment_versions[c.at_version_num]['until']
413 413
414 414 # INLINE COMMENTS with versions #
415 415 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
416 416 q = q.order_by(ChangesetComment.comment_id.asc())
417 417 inline_comments = q
418 418
419 419 c.inline_versions = comments_model.aggregate_comments(
420 420 inline_comments, versions, c.at_version_num, inline=True)
421 421
422 422 # TODOs
423 423 c.unresolved_comments = CommentsModel() \
424 424 .get_pull_request_unresolved_todos(pull_request)
425 425 c.resolved_comments = CommentsModel() \
426 426 .get_pull_request_resolved_todos(pull_request)
427 427
428 428 # inject latest version
429 429 latest_ver = PullRequest.get_pr_display_object(
430 430 pull_request_latest, pull_request_latest)
431 431
432 432 c.versions = versions + [latest_ver]
433 433
434 434 # if we use version, then do not show later comments
435 435 # than current version
436 436 display_inline_comments = collections.defaultdict(
437 437 lambda: collections.defaultdict(list))
438 438 for co in inline_comments:
439 439 if c.at_version_num:
440 440 # pick comments that are at least UPTO given version, so we
441 441 # don't render comments for higher version
442 442 should_render = co.pull_request_version_id and \
443 443 co.pull_request_version_id <= c.at_version_num
444 444 else:
445 445 # showing all, for 'latest'
446 446 should_render = True
447 447
448 448 if should_render:
449 449 display_inline_comments[co.f_path][co.line_no].append(co)
450 450
451 451 # load diff data into template context, if we use compare mode then
452 452 # diff is calculated based on changes between versions of PR
453 453
454 454 source_repo = pull_request_at_ver.source_repo
455 455 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
456 456
457 457 target_repo = pull_request_at_ver.target_repo
458 458 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
459 459
460 460 if compare:
461 461 # in compare switch the diff base to latest commit from prev version
462 462 target_ref_id = prev_pull_request_display_obj.revisions[0]
463 463
464 464 # despite opening commits for bookmarks/branches/tags, we always
465 465 # convert this to rev to prevent changes after bookmark or branch change
466 466 c.source_ref_type = 'rev'
467 467 c.source_ref = source_ref_id
468 468
469 469 c.target_ref_type = 'rev'
470 470 c.target_ref = target_ref_id
471 471
472 472 c.source_repo = source_repo
473 473 c.target_repo = target_repo
474 474
475 475 c.commit_ranges = []
476 476 source_commit = EmptyCommit()
477 477 target_commit = EmptyCommit()
478 478 c.missing_requirements = False
479 479
480 480 source_scm = source_repo.scm_instance()
481 481 target_scm = target_repo.scm_instance()
482 482
483 483 shadow_scm = None
484 484 try:
485 485 shadow_scm = pull_request_latest.get_shadow_repo()
486 486 except Exception:
487 487 log.debug('Failed to get shadow repo', exc_info=True)
488 488 # try first the existing source_repo, and then shadow
489 489 # repo if we can obtain one
490 490 commits_source_repo = source_scm or shadow_scm
491 491
492 492 c.commits_source_repo = commits_source_repo
493 493 c.ancestor = None # set it to None, to hide it from PR view
494 494
495 495 # empty version means latest, so we keep this to prevent
496 496 # double caching
497 497 version_normalized = version or 'latest'
498 498 from_version_normalized = from_version or 'latest'
499 499
500 500 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
501 501 cache_file_path = diff_cache_exist(
502 502 cache_path, 'pull_request', pull_request_id, version_normalized,
503 503 from_version_normalized, source_ref_id, target_ref_id,
504 504 hide_whitespace_changes, diff_context, c.fulldiff)
505 505
506 506 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
507 507 force_recache = self.get_recache_flag()
508 508
509 509 cached_diff = None
510 510 if caching_enabled:
511 511 cached_diff = load_cached_diff(cache_file_path)
512 512
513 513 has_proper_commit_cache = (
514 514 cached_diff and cached_diff.get('commits')
515 515 and len(cached_diff.get('commits', [])) == 5
516 516 and cached_diff.get('commits')[0]
517 517 and cached_diff.get('commits')[3])
518 518
519 519 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
520 520 diff_commit_cache = \
521 521 (ancestor_commit, commit_cache, missing_requirements,
522 522 source_commit, target_commit) = cached_diff['commits']
523 523 else:
524 524 diff_commit_cache = \
525 525 (ancestor_commit, commit_cache, missing_requirements,
526 526 source_commit, target_commit) = self.get_commits(
527 527 commits_source_repo,
528 528 pull_request_at_ver,
529 529 source_commit,
530 530 source_ref_id,
531 531 source_scm,
532 532 target_commit,
533 533 target_ref_id,
534 534 target_scm)
535 535
536 536 # register our commit range
537 537 for comm in commit_cache.values():
538 538 c.commit_ranges.append(comm)
539 539
540 540 c.missing_requirements = missing_requirements
541 541 c.ancestor_commit = ancestor_commit
542 542 c.statuses = source_repo.statuses(
543 543 [x.raw_id for x in c.commit_ranges])
544 544
545 545 # auto collapse if we have more than limit
546 546 collapse_limit = diffs.DiffProcessor._collapse_commits_over
547 547 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
548 548 c.compare_mode = compare
549 549
550 550 # diff_limit is the old behavior, will cut off the whole diff
551 551 # if the limit is applied otherwise will just hide the
552 552 # big files from the front-end
553 553 diff_limit = c.visual.cut_off_limit_diff
554 554 file_limit = c.visual.cut_off_limit_file
555 555
556 556 c.missing_commits = False
557 557 if (c.missing_requirements
558 558 or isinstance(source_commit, EmptyCommit)
559 559 or source_commit == target_commit):
560 560
561 561 c.missing_commits = True
562 562 else:
563 563 c.inline_comments = display_inline_comments
564 564
565 565 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
566 566 if not force_recache and has_proper_diff_cache:
567 567 c.diffset = cached_diff['diff']
568 568 (ancestor_commit, commit_cache, missing_requirements,
569 569 source_commit, target_commit) = cached_diff['commits']
570 570 else:
571 571 c.diffset = self._get_diffset(
572 572 c.source_repo.repo_name, commits_source_repo,
573 573 source_ref_id, target_ref_id,
574 574 target_commit, source_commit,
575 575 diff_limit, file_limit, c.fulldiff,
576 576 hide_whitespace_changes, diff_context)
577 577
578 578 # save cached diff
579 579 if caching_enabled:
580 580 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
581 581
582 582 c.limited_diff = c.diffset.limited_diff
583 583
584 584 # calculate removed files that are bound to comments
585 585 comment_deleted_files = [
586 586 fname for fname in display_inline_comments
587 587 if fname not in c.diffset.file_stats]
588 588
589 589 c.deleted_files_comments = collections.defaultdict(dict)
590 590 for fname, per_line_comments in display_inline_comments.items():
591 591 if fname in comment_deleted_files:
592 592 c.deleted_files_comments[fname]['stats'] = 0
593 593 c.deleted_files_comments[fname]['comments'] = list()
594 594 for lno, comments in per_line_comments.items():
595 595 c.deleted_files_comments[fname]['comments'].extend(comments)
596 596
597 597 # maybe calculate the range diff
598 598 if c.range_diff_on:
599 599 # TODO(marcink): set whitespace/context
600 600 context_lcl = 3
601 601 ign_whitespace_lcl = False
602 602
603 603 for commit in c.commit_ranges:
604 604 commit2 = commit
605 605 commit1 = commit.first_parent
606 606
607 607 range_diff_cache_file_path = diff_cache_exist(
608 608 cache_path, 'diff', commit.raw_id,
609 609 ign_whitespace_lcl, context_lcl, c.fulldiff)
610 610
611 611 cached_diff = None
612 612 if caching_enabled:
613 613 cached_diff = load_cached_diff(range_diff_cache_file_path)
614 614
615 615 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
616 616 if not force_recache and has_proper_diff_cache:
617 617 diffset = cached_diff['diff']
618 618 else:
619 619 diffset = self._get_range_diffset(
620 620 source_scm, source_repo,
621 621 commit1, commit2, diff_limit, file_limit,
622 622 c.fulldiff, ign_whitespace_lcl, context_lcl
623 623 )
624 624
625 625 # save cached diff
626 626 if caching_enabled:
627 627 cache_diff(range_diff_cache_file_path, diffset, None)
628 628
629 629 c.changes[commit.raw_id] = diffset
630 630
631 631 # this is a hack to properly display links, when creating PR, the
632 632 # compare view and others use different notation, and
633 633 # compare_commits.mako renders links based on the target_repo.
634 634 # We need to swap that here to generate it properly on the html side
635 635 c.target_repo = c.source_repo
636 636
637 637 c.commit_statuses = ChangesetStatus.STATUSES
638 638
639 639 c.show_version_changes = not pr_closed
640 640 if c.show_version_changes:
641 641 cur_obj = pull_request_at_ver
642 642 prev_obj = prev_pull_request_at_ver
643 643
644 644 old_commit_ids = prev_obj.revisions
645 645 new_commit_ids = cur_obj.revisions
646 646 commit_changes = PullRequestModel()._calculate_commit_id_changes(
647 647 old_commit_ids, new_commit_ids)
648 648 c.commit_changes_summary = commit_changes
649 649
650 650 # calculate the diff for commits between versions
651 651 c.commit_changes = []
652 652 mark = lambda cs, fw: list(
653 653 h.itertools.izip_longest([], cs, fillvalue=fw))
654 654 for c_type, raw_id in mark(commit_changes.added, 'a') \
655 655 + mark(commit_changes.removed, 'r') \
656 656 + mark(commit_changes.common, 'c'):
657 657
658 658 if raw_id in commit_cache:
659 659 commit = commit_cache[raw_id]
660 660 else:
661 661 try:
662 662 commit = commits_source_repo.get_commit(raw_id)
663 663 except CommitDoesNotExistError:
664 664 # in case we fail extracting still use "dummy" commit
665 665 # for display in commit diff
666 666 commit = h.AttributeDict(
667 667 {'raw_id': raw_id,
668 668 'message': 'EMPTY or MISSING COMMIT'})
669 669 c.commit_changes.append([c_type, commit])
670 670
671 671 # current user review statuses for each version
672 672 c.review_versions = {}
673 673 if self._rhodecode_user.user_id in allowed_reviewers:
674 674 for co in general_comments:
675 675 if co.author.user_id == self._rhodecode_user.user_id:
676 676 status = co.status_change
677 677 if status:
678 678 _ver_pr = status[0].comment.pull_request_version_id
679 679 c.review_versions[_ver_pr] = status[0]
680 680
681 681 return self._get_template_context(c)
682 682
683 683 def get_commits(
684 684 self, commits_source_repo, pull_request_at_ver, source_commit,
685 685 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
686 686 commit_cache = collections.OrderedDict()
687 687 missing_requirements = False
688 688 try:
689 689 pre_load = ["author", "date", "message", "branch", "parents"]
690 690 show_revs = pull_request_at_ver.revisions
691 691 for rev in show_revs:
692 692 comm = commits_source_repo.get_commit(
693 693 commit_id=rev, pre_load=pre_load)
694 694 commit_cache[comm.raw_id] = comm
695 695
696 696 # Order here matters: we first need to get the target, and then
697 697 # the source
698 698 target_commit = commits_source_repo.get_commit(
699 699 commit_id=safe_str(target_ref_id))
700 700
701 701 source_commit = commits_source_repo.get_commit(
702 702 commit_id=safe_str(source_ref_id))
703 703 except CommitDoesNotExistError:
704 704 log.warning(
705 705 'Failed to get commit from `{}` repo'.format(
706 706 commits_source_repo), exc_info=True)
707 707 except RepositoryRequirementError:
708 708 log.warning(
709 709 'Failed to get all required data from repo', exc_info=True)
710 710 missing_requirements = True
711 711 ancestor_commit = None
712 712 try:
713 713 ancestor_id = source_scm.get_common_ancestor(
714 714 source_commit.raw_id, target_commit.raw_id, target_scm)
715 715 ancestor_commit = source_scm.get_commit(ancestor_id)
716 716 except Exception:
717 717 ancestor_commit = None
718 718 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
719 719
720 720 def assure_not_empty_repo(self):
721 721 _ = self.request.translate
722 722
723 723 try:
724 724 self.db_repo.scm_instance().get_commit()
725 725 except EmptyRepositoryError:
726 726 h.flash(h.literal(_('There are no commits yet')),
727 727 category='warning')
728 728 raise HTTPFound(
729 729 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
730 730
731 731 @LoginRequired()
732 732 @NotAnonymous()
733 733 @HasRepoPermissionAnyDecorator(
734 734 'repository.read', 'repository.write', 'repository.admin')
735 735 @view_config(
736 736 route_name='pullrequest_new', request_method='GET',
737 737 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
738 738 def pull_request_new(self):
739 739 _ = self.request.translate
740 740 c = self.load_default_context()
741 741
742 742 self.assure_not_empty_repo()
743 743 source_repo = self.db_repo
744 744
745 745 commit_id = self.request.GET.get('commit')
746 746 branch_ref = self.request.GET.get('branch')
747 747 bookmark_ref = self.request.GET.get('bookmark')
748 748
749 749 try:
750 750 source_repo_data = PullRequestModel().generate_repo_data(
751 751 source_repo, commit_id=commit_id,
752 752 branch=branch_ref, bookmark=bookmark_ref,
753 753 translator=self.request.translate)
754 754 except CommitDoesNotExistError as e:
755 755 log.exception(e)
756 756 h.flash(_('Commit does not exist'), 'error')
757 757 raise HTTPFound(
758 758 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
759 759
760 760 default_target_repo = source_repo
761 761
762 762 if source_repo.parent and c.has_origin_repo_read_perm:
763 763 parent_vcs_obj = source_repo.parent.scm_instance()
764 764 if parent_vcs_obj and not parent_vcs_obj.is_empty():
765 765 # change default if we have a parent repo
766 766 default_target_repo = source_repo.parent
767 767
768 768 target_repo_data = PullRequestModel().generate_repo_data(
769 769 default_target_repo, translator=self.request.translate)
770 770
771 771 selected_source_ref = source_repo_data['refs']['selected_ref']
772 772 title_source_ref = ''
773 773 if selected_source_ref:
774 774 title_source_ref = selected_source_ref.split(':', 2)[1]
775 775 c.default_title = PullRequestModel().generate_pullrequest_title(
776 776 source=source_repo.repo_name,
777 777 source_ref=title_source_ref,
778 778 target=default_target_repo.repo_name
779 779 )
780 780
781 781 c.default_repo_data = {
782 782 'source_repo_name': source_repo.repo_name,
783 783 'source_refs_json': json.dumps(source_repo_data),
784 784 'target_repo_name': default_target_repo.repo_name,
785 785 'target_refs_json': json.dumps(target_repo_data),
786 786 }
787 787 c.default_source_ref = selected_source_ref
788 788
789 789 return self._get_template_context(c)
790 790
791 791 @LoginRequired()
792 792 @NotAnonymous()
793 793 @HasRepoPermissionAnyDecorator(
794 794 'repository.read', 'repository.write', 'repository.admin')
795 795 @view_config(
796 796 route_name='pullrequest_repo_refs', request_method='GET',
797 797 renderer='json_ext', xhr=True)
798 798 def pull_request_repo_refs(self):
799 799 self.load_default_context()
800 800 target_repo_name = self.request.matchdict['target_repo_name']
801 801 repo = Repository.get_by_repo_name(target_repo_name)
802 802 if not repo:
803 803 raise HTTPNotFound()
804 804
805 805 target_perm = HasRepoPermissionAny(
806 806 'repository.read', 'repository.write', 'repository.admin')(
807 807 target_repo_name)
808 808 if not target_perm:
809 809 raise HTTPNotFound()
810 810
811 811 return PullRequestModel().generate_repo_data(
812 812 repo, translator=self.request.translate)
813 813
814 814 @LoginRequired()
815 815 @NotAnonymous()
816 816 @HasRepoPermissionAnyDecorator(
817 817 'repository.read', 'repository.write', 'repository.admin')
818 818 @view_config(
819 819 route_name='pullrequest_repo_targets', request_method='GET',
820 820 renderer='json_ext', xhr=True)
821 821 def pullrequest_repo_targets(self):
822 822 _ = self.request.translate
823 823 filter_query = self.request.GET.get('query')
824 824
825 825 # get the parents
826 826 parent_target_repos = []
827 827 if self.db_repo.parent:
828 828 parents_query = Repository.query() \
829 829 .order_by(func.length(Repository.repo_name)) \
830 830 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
831 831
832 832 if filter_query:
833 833 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
834 834 parents_query = parents_query.filter(
835 835 Repository.repo_name.ilike(ilike_expression))
836 836 parents = parents_query.limit(20).all()
837 837
838 838 for parent in parents:
839 839 parent_vcs_obj = parent.scm_instance()
840 840 if parent_vcs_obj and not parent_vcs_obj.is_empty():
841 841 parent_target_repos.append(parent)
842 842
843 843 # get other forks, and repo itself
844 844 query = Repository.query() \
845 845 .order_by(func.length(Repository.repo_name)) \
846 846 .filter(
847 847 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
848 848 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
849 849 ) \
850 850 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
851 851
852 852 if filter_query:
853 853 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
854 854 query = query.filter(Repository.repo_name.ilike(ilike_expression))
855 855
856 856 limit = max(20 - len(parent_target_repos), 5) # not less than 5
857 857 target_repos = query.limit(limit).all()
858 858
859 859 all_target_repos = target_repos + parent_target_repos
860 860
861 861 repos = []
862 862 # This checks permissions on the repositories
863 863 for obj in ScmModel().get_repos(all_target_repos):
864 864 repos.append({
865 865 'id': obj['name'],
866 866 'text': obj['name'],
867 867 'type': 'repo',
868 868 'repo_id': obj['dbrepo']['repo_id'],
869 869 'repo_type': obj['dbrepo']['repo_type'],
870 870 'private': obj['dbrepo']['private'],
871 871
872 872 })
873 873
874 874 data = {
875 875 'more': False,
876 876 'results': [{
877 877 'text': _('Repositories'),
878 878 'children': repos
879 879 }] if repos else []
880 880 }
881 881 return data
882 882
883 883 @LoginRequired()
884 884 @NotAnonymous()
885 885 @HasRepoPermissionAnyDecorator(
886 886 'repository.read', 'repository.write', 'repository.admin')
887 887 @CSRFRequired()
888 888 @view_config(
889 889 route_name='pullrequest_create', request_method='POST',
890 890 renderer=None)
891 891 def pull_request_create(self):
892 892 _ = self.request.translate
893 893 self.assure_not_empty_repo()
894 894 self.load_default_context()
895 895
896 896 controls = peppercorn.parse(self.request.POST.items())
897 897
898 898 try:
899 899 form = PullRequestForm(
900 900 self.request.translate, self.db_repo.repo_id)()
901 901 _form = form.to_python(controls)
902 902 except formencode.Invalid as errors:
903 903 if errors.error_dict.get('revisions'):
904 904 msg = 'Revisions: %s' % errors.error_dict['revisions']
905 905 elif errors.error_dict.get('pullrequest_title'):
906 906 msg = errors.error_dict.get('pullrequest_title')
907 907 else:
908 908 msg = _('Error creating pull request: {}').format(errors)
909 909 log.exception(msg)
910 910 h.flash(msg, 'error')
911 911
912 912 # would rather just go back to form ...
913 913 raise HTTPFound(
914 914 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
915 915
916 916 source_repo = _form['source_repo']
917 917 source_ref = _form['source_ref']
918 918 target_repo = _form['target_repo']
919 919 target_ref = _form['target_ref']
920 920 commit_ids = _form['revisions'][::-1]
921 921
922 922 # find the ancestor for this pr
923 923 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
924 924 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
925 925
926 926 if not (source_db_repo and target_db_repo):
927 927 h.flash(_('Source repo or target repo not found'), category='error')
928 928 raise HTTPFound(
929 929 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
930 930
931 931 # re-check permissions here
932 932 # for the source repo we must have read permissions
933 933
934 934 source_perm = HasRepoPermissionAny(
935 935 'repository.read', 'repository.write', 'repository.admin')(
936 936 source_db_repo.repo_name)
937 937 if not source_perm:
938 938 msg = _('Not enough permissions to source repo `{}`.').format(
939 939 source_db_repo.repo_name)
940 940 h.flash(msg, category='error')
941 941 # copy the args back to redirect
942 942 org_query = self.request.GET.mixed()
943 943 raise HTTPFound(
944 944 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
945 945 _query=org_query))
946 946
947 947 # for the target repo we must have read permissions, and later on
948 948 # we also want to check branch permissions here
949 949 target_perm = HasRepoPermissionAny(
950 950 'repository.read', 'repository.write', 'repository.admin')(
951 951 target_db_repo.repo_name)
952 952 if not target_perm:
953 953 msg = _('Not enough permissions to target repo `{}`.').format(
954 954 target_db_repo.repo_name)
955 955 h.flash(msg, category='error')
956 956 # copy the args back to redirect
957 957 org_query = self.request.GET.mixed()
958 958 raise HTTPFound(
959 959 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
960 960 _query=org_query))
961 961
962 962 source_scm = source_db_repo.scm_instance()
963 963 target_scm = target_db_repo.scm_instance()
964 964
965 965 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
966 966 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
967 967
968 968 ancestor = source_scm.get_common_ancestor(
969 969 source_commit.raw_id, target_commit.raw_id, target_scm)
970 970
971 971 # recalculate target ref based on ancestor
972 972 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
973 973 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
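# For example (values are hypothetical), a target ref of 'branch:default:<old_commit_id>'
# becomes 'branch:default:<ancestor_id>' once the common ancestor has been resolved above.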
974 974
975 975 get_default_reviewers_data, validate_default_reviewers = \
976 976 PullRequestModel().get_reviewer_functions()
977 977
978 978 # recalculate reviewers logic, to make sure we can validate this
979 979 reviewer_rules = get_default_reviewers_data(
980 980 self._rhodecode_db_user, source_db_repo,
981 981 source_commit, target_db_repo, target_commit)
982 982
983 983 given_reviewers = _form['review_members']
984 984 reviewers = validate_default_reviewers(
985 985 given_reviewers, reviewer_rules)
986 986
987 987 pullrequest_title = _form['pullrequest_title']
988 988 title_source_ref = source_ref.split(':', 2)[1]
989 989 if not pullrequest_title:
990 990 pullrequest_title = PullRequestModel().generate_pullrequest_title(
991 991 source=source_repo,
992 992 source_ref=title_source_ref,
993 993 target=target_repo
994 994 )
995 995
996 996 description = _form['pullrequest_desc']
997 997 description_renderer = _form['description_renderer']
998 998
999 999 try:
1000 1000 pull_request = PullRequestModel().create(
1001 1001 created_by=self._rhodecode_user.user_id,
1002 1002 source_repo=source_repo,
1003 1003 source_ref=source_ref,
1004 1004 target_repo=target_repo,
1005 1005 target_ref=target_ref,
1006 1006 revisions=commit_ids,
1007 1007 reviewers=reviewers,
1008 1008 title=pullrequest_title,
1009 1009 description=description,
1010 1010 description_renderer=description_renderer,
1011 1011 reviewer_data=reviewer_rules,
1012 1012 auth_user=self._rhodecode_user
1013 1013 )
1014 1014 Session().commit()
1015 1015
1016 1016 h.flash(_('Successfully opened new pull request'),
1017 1017 category='success')
1018 1018 except Exception:
1019 1019 msg = _('Error occurred during creation of this pull request.')
1020 1020 log.exception(msg)
1021 1021 h.flash(msg, category='error')
1022 1022
1023 1023 # copy the args back to redirect
1024 1024 org_query = self.request.GET.mixed()
1025 1025 raise HTTPFound(
1026 1026 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1027 1027 _query=org_query))
1028 1028
1029 1029 raise HTTPFound(
1030 1030 h.route_path('pullrequest_show', repo_name=target_repo,
1031 1031 pull_request_id=pull_request.pull_request_id))
1032 1032
1033 1033 @LoginRequired()
1034 1034 @NotAnonymous()
1035 1035 @HasRepoPermissionAnyDecorator(
1036 1036 'repository.read', 'repository.write', 'repository.admin')
1037 1037 @CSRFRequired()
1038 1038 @view_config(
1039 1039 route_name='pullrequest_update', request_method='POST',
1040 1040 renderer='json_ext')
1041 1041 def pull_request_update(self):
1042 1042 pull_request = PullRequest.get_or_404(
1043 1043 self.request.matchdict['pull_request_id'])
1044 1044 _ = self.request.translate
1045 1045
1046 1046 self.load_default_context()
1047 1047 redirect_url = None
1048 1048
1049 1049 if pull_request.is_closed():
1050 1050 log.debug('update: forbidden because pull request is closed')
1051 1051 msg = _(u'Cannot update closed pull requests.')
1052 1052 h.flash(msg, category='error')
1053 1053 return {'response': True,
1054 1054 'redirect_url': redirect_url}
1055 1055
1056 1056 is_state_changing = pull_request.is_state_changing()
1057 1057
1058 1058 # only owner or admin can update it
1059 1059 allowed_to_update = PullRequestModel().check_user_update(
1060 1060 pull_request, self._rhodecode_user)
1061 1061 if allowed_to_update:
1062 1062 controls = peppercorn.parse(self.request.POST.items())
1063 1063 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1064 1064
1065 1065 if 'review_members' in controls:
1066 1066 self._update_reviewers(
1067 1067 pull_request, controls['review_members'],
1068 1068 pull_request.reviewer_data)
1069 1069 elif str2bool(self.request.POST.get('update_commits', 'false')):
1070 1070 if is_state_changing:
1071 1071 log.debug('commits update: forbidden because pull request is in state %s',
1072 1072 pull_request.pull_request_state)
1073 1073 msg = _(u'Cannot update pull request commits in state other than `{}`. '
1074 1074 u'Current state is: `{}`').format(
1075 1075 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1076 1076 h.flash(msg, category='error')
1077 1077 return {'response': True,
1078 1078 'redirect_url': redirect_url}
1079 1079
1080 1080 self._update_commits(pull_request)
1081 1081 if force_refresh:
1082 1082 redirect_url = h.route_path(
1083 1083 'pullrequest_show', repo_name=self.db_repo_name,
1084 1084 pull_request_id=pull_request.pull_request_id,
1085 1085 _query={"force_refresh": 1})
1086 1086 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1087 1087 self._edit_pull_request(pull_request)
1088 1088 else:
1089 1089 raise HTTPBadRequest()
1090 1090
1091 1091 return {'response': True,
1092 1092 'redirect_url': redirect_url}
1093 1093 raise HTTPForbidden()
1094 1094
1095 1095 def _edit_pull_request(self, pull_request):
1096 1096 _ = self.request.translate
1097 1097
1098 1098 try:
1099 1099 PullRequestModel().edit(
1100 1100 pull_request,
1101 1101 self.request.POST.get('title'),
1102 1102 self.request.POST.get('description'),
1103 1103 self.request.POST.get('description_renderer'),
1104 1104 self._rhodecode_user)
1105 1105 except ValueError:
1106 1106 msg = _(u'Cannot update closed pull requests.')
1107 1107 h.flash(msg, category='error')
1108 1108 return
1109 1109 else:
1110 1110 Session().commit()
1111 1111
1112 1112 msg = _(u'Pull request title & description updated.')
1113 1113 h.flash(msg, category='success')
1114 1114 return
1115 1115
1116 1116 def _update_commits(self, pull_request):
1117 1117 _ = self.request.translate
1118 1118
1119 1119 with pull_request.set_state(PullRequest.STATE_UPDATING):
1120 resp = PullRequestModel().update_commits(pull_request)
1120 resp = PullRequestModel().update_commits(
1121 pull_request, self._rhodecode_db_user)
1121 1122
1122 1123 if resp.executed:
1123 1124
1124 1125 if resp.target_changed and resp.source_changed:
1125 1126 changed = 'target and source repositories'
1126 1127 elif resp.target_changed and not resp.source_changed:
1127 1128 changed = 'target repository'
1128 1129 elif not resp.target_changed and resp.source_changed:
1129 1130 changed = 'source repository'
1130 1131 else:
1131 1132 changed = 'nothing'
1132 1133
1133 1134 msg = _(u'Pull request updated to "{source_commit_id}" with '
1134 1135 u'{count_added} added, {count_removed} removed commits. '
1135 1136 u'Source of changes: {change_source}')
1136 1137 msg = msg.format(
1137 1138 source_commit_id=pull_request.source_ref_parts.commit_id,
1138 1139 count_added=len(resp.changes.added),
1139 1140 count_removed=len(resp.changes.removed),
1140 1141 change_source=changed)
1141 1142 h.flash(msg, category='success')
1142 1143
1143 1144 channel = '/repo${}$/pr/{}'.format(
1144 1145 pull_request.target_repo.repo_name, pull_request.pull_request_id)
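# The resulting channelstream channel follows the '/repo$<repo_name>$/pr/<id>' pattern,
# e.g. (illustrative names only) '/repo$acme/backend$/pr/42'.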
1145 1146 message = msg + (
1146 1147 ' - <a onclick="window.location.reload()">'
1147 1148 '<strong>{}</strong></a>'.format(_('Reload page')))
1148 1149 channelstream.post_message(
1149 1150 channel, message, self._rhodecode_user.username,
1150 1151 registry=self.request.registry)
1151 1152 else:
1152 1153 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1153 1154 warning_reasons = [
1154 1155 UpdateFailureReason.NO_CHANGE,
1155 1156 UpdateFailureReason.WRONG_REF_TYPE,
1156 1157 ]
1157 1158 category = 'warning' if resp.reason in warning_reasons else 'error'
1158 1159 h.flash(msg, category=category)
1159 1160
1160 1161 @LoginRequired()
1161 1162 @NotAnonymous()
1162 1163 @HasRepoPermissionAnyDecorator(
1163 1164 'repository.read', 'repository.write', 'repository.admin')
1164 1165 @CSRFRequired()
1165 1166 @view_config(
1166 1167 route_name='pullrequest_merge', request_method='POST',
1167 1168 renderer='json_ext')
1168 1169 def pull_request_merge(self):
1169 1170 """
1170 1171 Merge will perform a server-side merge of the specified
1171 1172 pull request, if the pull request is approved and mergeable.
1172 1173 After successful merging, the pull request is automatically
1173 1174 closed, with a relevant comment.
1174 1175 """
1175 1176 pull_request = PullRequest.get_or_404(
1176 1177 self.request.matchdict['pull_request_id'])
1177 1178 _ = self.request.translate
1178 1179
1179 1180 if pull_request.is_state_changing():
1180 1181 log.debug('show: forbidden because pull request is in state %s',
1181 1182 pull_request.pull_request_state)
1182 1183 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1183 1184 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1184 1185 pull_request.pull_request_state)
1185 1186 h.flash(msg, category='error')
1186 1187 raise HTTPFound(
1187 1188 h.route_path('pullrequest_show',
1188 1189 repo_name=pull_request.target_repo.repo_name,
1189 1190 pull_request_id=pull_request.pull_request_id))
1190 1191
1191 1192 self.load_default_context()
1192 1193
1193 1194 with pull_request.set_state(PullRequest.STATE_UPDATING):
1194 1195 check = MergeCheck.validate(
1195 1196 pull_request, auth_user=self._rhodecode_user,
1196 1197 translator=self.request.translate)
1197 1198 merge_possible = not check.failed
1198 1199
1199 1200 for err_type, error_msg in check.errors:
1200 1201 h.flash(error_msg, category=err_type)
1201 1202
1202 1203 if merge_possible:
1203 1204 log.debug("Pre-conditions checked, trying to merge.")
1204 1205 extras = vcs_operation_context(
1205 1206 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1206 1207 username=self._rhodecode_db_user.username, action='push',
1207 1208 scm=pull_request.target_repo.repo_type)
1208 1209 with pull_request.set_state(PullRequest.STATE_UPDATING):
1209 1210 self._merge_pull_request(
1210 1211 pull_request, self._rhodecode_db_user, extras)
1211 1212 else:
1212 1213 log.debug("Pre-conditions failed, NOT merging.")
1213 1214
1214 1215 raise HTTPFound(
1215 1216 h.route_path('pullrequest_show',
1216 1217 repo_name=pull_request.target_repo.repo_name,
1217 1218 pull_request_id=pull_request.pull_request_id))
1218 1219
1219 1220 def _merge_pull_request(self, pull_request, user, extras):
1220 1221 _ = self.request.translate
1221 1222 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1222 1223
1223 1224 if merge_resp.executed:
1224 1225 log.debug("The merge was successful, closing the pull request.")
1225 1226 PullRequestModel().close_pull_request(
1226 1227 pull_request.pull_request_id, user)
1227 1228 Session().commit()
1228 1229 msg = _('Pull request was successfully merged and closed.')
1229 1230 h.flash(msg, category='success')
1230 1231 else:
1231 1232 log.debug(
1232 1233 "The merge was not successful. Merge response: %s", merge_resp)
1233 1234 msg = merge_resp.merge_status_message
1234 1235 h.flash(msg, category='error')
1235 1236
1236 1237 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1237 1238 _ = self.request.translate
1238 1239
1239 1240 get_default_reviewers_data, validate_default_reviewers = \
1240 1241 PullRequestModel().get_reviewer_functions()
1241 1242
1242 1243 try:
1243 1244 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1244 1245 except ValueError as e:
1245 1246 log.error('Reviewers Validation: {}'.format(e))
1246 1247 h.flash(e, category='error')
1247 1248 return
1248 1249
1249 1250 old_calculated_status = pull_request.calculated_review_status()
1250 1251 PullRequestModel().update_reviewers(
1251 1252 pull_request, reviewers, self._rhodecode_user)
1252 1253 h.flash(_('Pull request reviewers updated.'), category='success')
1253 1254 Session().commit()
1254 1255
1255 1256 # trigger status changed if change in reviewers changes the status
1256 1257 calculated_status = pull_request.calculated_review_status()
1257 1258 if old_calculated_status != calculated_status:
1258 1259 PullRequestModel().trigger_pull_request_hook(
1259 1260 pull_request, self._rhodecode_user, 'review_status_change',
1260 1261 data={'status': calculated_status})
1261 1262
1262 1263 @LoginRequired()
1263 1264 @NotAnonymous()
1264 1265 @HasRepoPermissionAnyDecorator(
1265 1266 'repository.read', 'repository.write', 'repository.admin')
1266 1267 @CSRFRequired()
1267 1268 @view_config(
1268 1269 route_name='pullrequest_delete', request_method='POST',
1269 1270 renderer='json_ext')
1270 1271 def pull_request_delete(self):
1271 1272 _ = self.request.translate
1272 1273
1273 1274 pull_request = PullRequest.get_or_404(
1274 1275 self.request.matchdict['pull_request_id'])
1275 1276 self.load_default_context()
1276 1277
1277 1278 pr_closed = pull_request.is_closed()
1278 1279 allowed_to_delete = PullRequestModel().check_user_delete(
1279 1280 pull_request, self._rhodecode_user) and not pr_closed
1280 1281
1281 1282 # only the owner can delete it!
1282 1283 if allowed_to_delete:
1283 1284 PullRequestModel().delete(pull_request, self._rhodecode_user)
1284 1285 Session().commit()
1285 1286 h.flash(_('Successfully deleted pull request'),
1286 1287 category='success')
1287 1288 raise HTTPFound(h.route_path('pullrequest_show_all',
1288 1289 repo_name=self.db_repo_name))
1289 1290
1290 1291 log.warning('user %s tried to delete pull request without access',
1291 1292 self._rhodecode_user)
1292 1293 raise HTTPNotFound()
1293 1294
1294 1295 @LoginRequired()
1295 1296 @NotAnonymous()
1296 1297 @HasRepoPermissionAnyDecorator(
1297 1298 'repository.read', 'repository.write', 'repository.admin')
1298 1299 @CSRFRequired()
1299 1300 @view_config(
1300 1301 route_name='pullrequest_comment_create', request_method='POST',
1301 1302 renderer='json_ext')
1302 1303 def pull_request_comment_create(self):
1303 1304 _ = self.request.translate
1304 1305
1305 1306 pull_request = PullRequest.get_or_404(
1306 1307 self.request.matchdict['pull_request_id'])
1307 1308 pull_request_id = pull_request.pull_request_id
1308 1309
1309 1310 if pull_request.is_closed():
1310 1311 log.debug('comment: forbidden because pull request is closed')
1311 1312 raise HTTPForbidden()
1312 1313
1313 1314 allowed_to_comment = PullRequestModel().check_user_comment(
1314 1315 pull_request, self._rhodecode_user)
1315 1316 if not allowed_to_comment:
1316 1317 log.debug(
1317 1318 'comment: forbidden because pull request is from forbidden repo')
1318 1319 raise HTTPForbidden()
1319 1320
1320 1321 c = self.load_default_context()
1321 1322
1322 1323 status = self.request.POST.get('changeset_status', None)
1323 1324 text = self.request.POST.get('text')
1324 1325 comment_type = self.request.POST.get('comment_type')
1325 1326 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1326 1327 close_pull_request = self.request.POST.get('close_pull_request')
1327 1328
1328 1329 # The logic here works as follows: if we submit a close-PR
1329 1330 # comment, use the `close_pull_request_with_comment` function,
1330 1331 # otherwise handle the regular comment logic
1331 1332
1332 1333 if close_pull_request:
1333 1334 # only owner or admin or person with write permissions
1334 1335 allowed_to_close = PullRequestModel().check_user_update(
1335 1336 pull_request, self._rhodecode_user)
1336 1337 if not allowed_to_close:
1337 1338 log.debug('comment: forbidden because not allowed to close '
1338 1339 'pull request %s', pull_request_id)
1339 1340 raise HTTPForbidden()
1340 1341
1341 1342 # This also triggers `review_status_change`
1342 1343 comment, status = PullRequestModel().close_pull_request_with_comment(
1343 1344 pull_request, self._rhodecode_user, self.db_repo, message=text,
1344 1345 auth_user=self._rhodecode_user)
1345 1346 Session().flush()
1346 1347
1347 1348 PullRequestModel().trigger_pull_request_hook(
1348 1349 pull_request, self._rhodecode_user, 'comment',
1349 1350 data={'comment': comment})
1350 1351
1351 1352 else:
1352 1353 # regular comment case: it can be inline, or one with a status change.
1353 1354 # for the latter we also check permissions
1354 1355
1355 1356 allowed_to_change_status = PullRequestModel().check_user_change_status(
1356 1357 pull_request, self._rhodecode_user)
1357 1358
1358 1359 if status and allowed_to_change_status:
1359 1360 message = (_('Status change %(transition_icon)s %(status)s')
1360 1361 % {'transition_icon': '>',
1361 1362 'status': ChangesetStatus.get_status_lbl(status)})
1362 1363 text = text or message
1363 1364
1364 1365 comment = CommentsModel().create(
1365 1366 text=text,
1366 1367 repo=self.db_repo.repo_id,
1367 1368 user=self._rhodecode_user.user_id,
1368 1369 pull_request=pull_request,
1369 1370 f_path=self.request.POST.get('f_path'),
1370 1371 line_no=self.request.POST.get('line'),
1371 1372 status_change=(ChangesetStatus.get_status_lbl(status)
1372 1373 if status and allowed_to_change_status else None),
1373 1374 status_change_type=(status
1374 1375 if status and allowed_to_change_status else None),
1375 1376 comment_type=comment_type,
1376 1377 resolves_comment_id=resolves_comment_id,
1377 1378 auth_user=self._rhodecode_user
1378 1379 )
1379 1380
1380 1381 if allowed_to_change_status:
1381 1382 # calculate old status before we change it
1382 1383 old_calculated_status = pull_request.calculated_review_status()
1383 1384
1384 1385 # get status if set !
1385 1386 if status:
1386 1387 ChangesetStatusModel().set_status(
1387 1388 self.db_repo.repo_id,
1388 1389 status,
1389 1390 self._rhodecode_user.user_id,
1390 1391 comment,
1391 1392 pull_request=pull_request
1392 1393 )
1393 1394
1394 1395 Session().flush()
1395 1396 # the refresh is required to get access to relationships
1396 1397 # loaded on the comment
1397 1398 Session().refresh(comment)
1398 1399
1399 1400 PullRequestModel().trigger_pull_request_hook(
1400 1401 pull_request, self._rhodecode_user, 'comment',
1401 1402 data={'comment': comment})
1402 1403
1403 1404 # we now calculate the status of the pull request, and based on that
1404 1405 # calculation we set the commit statuses
1405 1406 calculated_status = pull_request.calculated_review_status()
1406 1407 if old_calculated_status != calculated_status:
1407 1408 PullRequestModel().trigger_pull_request_hook(
1408 1409 pull_request, self._rhodecode_user, 'review_status_change',
1409 1410 data={'status': calculated_status})
1410 1411
1411 1412 Session().commit()
1412 1413
1413 1414 data = {
1414 1415 'target_id': h.safeid(h.safe_unicode(
1415 1416 self.request.POST.get('f_path'))),
1416 1417 }
1417 1418 if comment:
1418 1419 c.co = comment
1419 1420 rendered_comment = render(
1420 1421 'rhodecode:templates/changeset/changeset_comment_block.mako',
1421 1422 self._get_template_context(c), self.request)
1422 1423
1423 1424 data.update(comment.get_dict())
1424 1425 data.update({'rendered_text': rendered_comment})
1425 1426
1426 1427 return data
1427 1428
1428 1429 @LoginRequired()
1429 1430 @NotAnonymous()
1430 1431 @HasRepoPermissionAnyDecorator(
1431 1432 'repository.read', 'repository.write', 'repository.admin')
1432 1433 @CSRFRequired()
1433 1434 @view_config(
1434 1435 route_name='pullrequest_comment_delete', request_method='POST',
1435 1436 renderer='json_ext')
1436 1437 def pull_request_comment_delete(self):
1437 1438 pull_request = PullRequest.get_or_404(
1438 1439 self.request.matchdict['pull_request_id'])
1439 1440
1440 1441 comment = ChangesetComment.get_or_404(
1441 1442 self.request.matchdict['comment_id'])
1442 1443 comment_id = comment.comment_id
1443 1444
1444 1445 if pull_request.is_closed():
1445 1446 log.debug('comment: forbidden because pull request is closed')
1446 1447 raise HTTPForbidden()
1447 1448
1448 1449 if not comment:
1449 1450 log.debug('Comment with id:%s not found, skipping', comment_id)
1450 1451 # the comment was probably already deleted in another call
1451 1452 return True
1452 1453
1453 1454 if comment.pull_request.is_closed():
1454 1455 # don't allow deleting comments on closed pull request
1455 1456 raise HTTPForbidden()
1456 1457
1457 1458 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1458 1459 super_admin = h.HasPermissionAny('hg.admin')()
1459 1460 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1460 1461 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1461 1462 comment_repo_admin = is_repo_admin and is_repo_comment
1462 1463
1463 1464 if super_admin or comment_owner or comment_repo_admin:
1464 1465 old_calculated_status = comment.pull_request.calculated_review_status()
1465 1466 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1466 1467 Session().commit()
1467 1468 calculated_status = comment.pull_request.calculated_review_status()
1468 1469 if old_calculated_status != calculated_status:
1469 1470 PullRequestModel().trigger_pull_request_hook(
1470 1471 comment.pull_request, self._rhodecode_user, 'review_status_change',
1471 1472 data={'status': calculated_status})
1472 1473 return True
1473 1474 else:
1474 1475 log.warning('No permissions for user %s to delete comment_id: %s',
1475 1476 self._rhodecode_db_user, comment_id)
1476 1477 raise HTTPNotFound()
@@ -1,1942 +1,1943 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions typically used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import os
29 29 import random
30 30 import hashlib
31 31 import StringIO
32 32 import textwrap
33 33 import urllib
34 34 import math
35 35 import logging
36 36 import re
37 37 import time
38 38 import string
39 39 import hashlib
40 40 from collections import OrderedDict
41 41
42 42 import pygments
43 43 import itertools
44 44 import fnmatch
45 45 import bleach
46 46
47 47 from pyramid import compat
48 48 from datetime import datetime
49 49 from functools import partial
50 50 from pygments.formatters.html import HtmlFormatter
51 51 from pygments.lexers import (
52 52 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
53 53
54 54 from pyramid.threadlocal import get_current_request
55 55
56 56 from webhelpers2.html import literal, HTML, escape
57 57 from webhelpers2.html._autolink import _auto_link_urls
58 58 from webhelpers2.html.tools import (
59 59 button_to, highlight, js_obfuscate, strip_links, strip_tags)
60 60
61 61 from webhelpers2.text import (
62 62 chop_at, collapse, convert_accented_entities,
63 63 convert_misc_entities, lchop, plural, rchop, remove_formatting,
64 64 replace_whitespace, urlify, truncate, wrap_paragraphs)
65 65 from webhelpers2.date import time_ago_in_words
66 66
67 67 from webhelpers2.html.tags import (
68 68 _input, NotGiven, _make_safe_id_component as safeid,
69 69 form as insecure_form,
70 70 auto_discovery_link, checkbox, end_form, file,
71 71 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
72 72 select as raw_select, stylesheet_link, submit, text, password, textarea,
73 73 ul, radio, Options)
74 74
75 75 from webhelpers2.number import format_byte_size
76 76
77 77 from rhodecode.lib.action_parser import action_parser
78 78 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
79 79 from rhodecode.lib.ext_json import json
80 80 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
81 81 from rhodecode.lib.utils2 import (
82 82 str2bool, safe_unicode, safe_str,
83 83 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
84 84 AttributeDict, safe_int, md5, md5_safe, get_host_info)
85 85 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
86 86 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
87 87 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
88 88 from rhodecode.lib.index.search_utils import get_matching_line_offsets
89 89 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
90 90 from rhodecode.model.changeset_status import ChangesetStatusModel
91 91 from rhodecode.model.db import Permission, User, Repository
92 92 from rhodecode.model.repo_group import RepoGroupModel
93 93 from rhodecode.model.settings import IssueTrackerSettingsModel
94 94
95 95
96 96 log = logging.getLogger(__name__)
97 97
98 98
99 99 DEFAULT_USER = User.DEFAULT_USER
100 100 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
101 101
102 102
103 103 def asset(path, ver=None, **kwargs):
104 104 """
105 105 Helper to generate a static asset file path for rhodecode assets
106 106
107 107 eg. h.asset('images/image.png', ver='3923')
108 108
109 109 :param path: path of asset
110 110 :param ver: optional version query param to append as ?ver=
111 111 """
112 112 request = get_current_request()
113 113 query = {}
114 114 query.update(kwargs)
115 115 if ver:
116 116 query = {'ver': ver}
117 117 return request.static_path(
118 118 'rhodecode:public/{}'.format(path), _query=query)
119 119
120 120
121 121 default_html_escape_table = {
122 122 ord('&'): u'&amp;',
123 123 ord('<'): u'&lt;',
124 124 ord('>'): u'&gt;',
125 125 ord('"'): u'&quot;',
126 126 ord("'"): u'&#39;',
127 127 }
128 128
129 129
130 130 def html_escape(text, html_escape_table=default_html_escape_table):
131 131 """Produce entities within text."""
132 132 return text.translate(html_escape_table)
133 133
134 134
135 135 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
136 136 """
137 137 Truncate string ``s`` at the first occurrence of ``sub``.
138 138
139 139 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
140 140 """
141 141 suffix_if_chopped = suffix_if_chopped or ''
142 142 pos = s.find(sub)
143 143 if pos == -1:
144 144 return s
145 145
146 146 if inclusive:
147 147 pos += len(sub)
148 148
149 149 chopped = s[:pos]
150 150 left = s[pos:].strip()
151 151
152 152 if left and suffix_if_chopped:
153 153 chopped += suffix_if_chopped
154 154
155 155 return chopped
156 156
157 157
158 158 def shorter(text, size=20, prefix=False):
159 159 postfix = '...'
160 160 if len(text) > size:
161 161 if prefix:
162 162 # shorten in front
163 163 return postfix + text[-(size - len(postfix)):]
164 164 else:
165 165 return text[:size - len(postfix)] + postfix
166 166 return text
167 167
168 168
169 169 def reset(name, value=None, id=NotGiven, type="reset", **attrs):
170 170 """
171 171 Reset button
172 172 """
173 173 return _input(type, name, value, id, attrs)
174 174
175 175
176 176 def select(name, selected_values, options, id=NotGiven, **attrs):
177 177
178 178 if isinstance(options, (list, tuple)):
179 179 options_iter = options
180 180 # Handle old (value, label) lists, where value can itself be a list of (value, label) pairs
181 181 options = Options()
182 182 for opt in options_iter:
183 183 if isinstance(opt, tuple) and len(opt) == 2:
184 184 value, label = opt
185 185 elif isinstance(opt, basestring):
186 186 value = label = opt
187 187 else:
188 188 raise ValueError('invalid select option type %r' % type(opt))
189 189
190 190 if isinstance(value, (list, tuple)):
191 191 option_group = options.add_optgroup(label)
192 192 for opt2 in value:
193 193 if isinstance(opt2, tuple) and len(opt2) == 2:
194 194 group_value, group_label = opt2
195 195 elif isinstance(opt2, basestring):
196 196 group_value = group_label = opt2
197 197 else:
198 198 raise ValueError('invalid select option type %r' % type(opt2))
199 199
200 200 option_group.add_option(group_label, group_value)
201 201 else:
202 202 options.add_option(label, value)
203 203
204 204 return raw_select(name, selected_values, options, id=id, **attrs)
205 205
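# Illustrative call (hypothetical data) showing the nested value/label form handled above:
#   select('branch', ['default'],
#          [('default', 'default'), (['stable', 'develop'], 'other branches')])
# renders a plain 'default' option plus an 'other branches' optgroup.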
206 206
207 207 def branding(name, length=40):
208 208 return truncate(name, length, indicator="")
209 209
210 210
211 211 def FID(raw_id, path):
212 212 """
213 213 Creates a unique ID for a filenode based on a hash of its path and commit;
214 214 it's safe to use in URLs
215 215
216 216 :param raw_id:
217 217 :param path:
218 218 """
219 219
220 220 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
221 221
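# A rough sketch of the shape FID produces (the hash values below are made up):
#   FID('9fe7a1b2c3d4e5f6a7b8', 'docs/index.rst')
#   # -> 'c-9fe7a1b2c3d4-<first 12 hex chars of md5_safe(path)>'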
222 222
223 223 class _GetError(object):
224 224 """Get error from form_errors, and represent it as a span-wrapped error
225 225 message
226 226
227 227 :param field_name: field to fetch errors for
228 228 :param form_errors: form errors dict
229 229 """
230 230
231 231 def __call__(self, field_name, form_errors):
232 232 tmpl = """<span class="error_msg">%s</span>"""
233 233 if form_errors and field_name in form_errors:
234 234 return literal(tmpl % form_errors.get(field_name))
235 235
236 236
237 237 get_error = _GetError()
238 238
239 239
240 240 class _ToolTip(object):
241 241
242 242 def __call__(self, tooltip_title, trim_at=50):
243 243 """
244 244 Special function just to wrap our text into nice formatted
245 245 autowrapped text
246 246
247 247 :param tooltip_title:
248 248 """
249 249 tooltip_title = escape(tooltip_title)
250 250 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
251 251 return tooltip_title
252 252
253 253
254 254 tooltip = _ToolTip()
255 255
256 256 files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy the full path"></i>'
257 257
258 258
259 259 def files_breadcrumbs(repo_name, commit_id, file_path, at_ref=None, limit_items=False, linkify_last_item=False):
260 260 if isinstance(file_path, str):
261 261 file_path = safe_unicode(file_path)
262 262
263 263 route_qry = {'at': at_ref} if at_ref else None
264 264
265 265 # first segment is a `..` link to repo files
266 266 root_name = literal(u'<i class="icon-home"></i>')
267 267 url_segments = [
268 268 link_to(
269 269 root_name,
270 270 route_path(
271 271 'repo_files',
272 272 repo_name=repo_name,
273 273 commit_id=commit_id,
274 274 f_path='',
275 275 _query=route_qry),
276 276 )]
277 277
278 278 path_segments = file_path.split('/')
279 279 last_cnt = len(path_segments) - 1
280 280 for cnt, segment in enumerate(path_segments):
281 281 if not segment:
282 282 continue
283 283 segment_html = escape(segment)
284 284
285 285 last_item = cnt == last_cnt
286 286
287 287 if last_item and linkify_last_item is False:
288 288 # plain version
289 289 url_segments.append(segment_html)
290 290 else:
291 291 url_segments.append(
292 292 link_to(
293 293 segment_html,
294 294 route_path(
295 295 'repo_files',
296 296 repo_name=repo_name,
297 297 commit_id=commit_id,
298 298 f_path='/'.join(path_segments[:cnt + 1]),
299 299 _query=route_qry),
300 300 ))
301 301
302 302 limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
303 303 if limit_items and len(limited_url_segments) < len(url_segments):
304 304 url_segments = limited_url_segments
305 305
306 306 full_path = file_path
307 307 icon = files_icon.format(escape(full_path))
308 308 if file_path == '':
309 309 return root_name
310 310 else:
311 311 return literal(' / '.join(url_segments) + icon)
312 312
313 313
314 314 def files_url_data(request):
315 315 matchdict = request.matchdict
316 316
317 317 if 'f_path' not in matchdict:
318 318 matchdict['f_path'] = ''
319 319
320 320 if 'commit_id' not in matchdict:
321 321 matchdict['commit_id'] = 'tip'
322 322
323 323 return json.dumps(matchdict)
324 324
325 325
326 326 def code_highlight(code, lexer, formatter, use_hl_filter=False):
327 327 """
328 328 Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
329 329
330 330 If ``outfile`` is given and a valid file object (an object
331 331 with a ``write`` method), the result will be written to it, otherwise
332 332 it is returned as a string.
333 333 """
334 334 if use_hl_filter:
335 335 # add HL filter
336 336 from rhodecode.lib.index import search_utils
337 337 lexer.add_filter(search_utils.ElasticSearchHLFilter())
338 338 return pygments.format(pygments.lex(code, lexer), formatter)
339 339
340 340
341 341 class CodeHtmlFormatter(HtmlFormatter):
342 342 """
343 343 My code Html Formatter for source codes
344 344 """
345 345
346 346 def wrap(self, source, outfile):
347 347 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
348 348
349 349 def _wrap_code(self, source):
350 350 for cnt, it in enumerate(source):
351 351 i, t = it
352 352 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
353 353 yield i, t
354 354
355 355 def _wrap_tablelinenos(self, inner):
356 356 dummyoutfile = StringIO.StringIO()
357 357 lncount = 0
358 358 for t, line in inner:
359 359 if t:
360 360 lncount += 1
361 361 dummyoutfile.write(line)
362 362
363 363 fl = self.linenostart
364 364 mw = len(str(lncount + fl - 1))
365 365 sp = self.linenospecial
366 366 st = self.linenostep
367 367 la = self.lineanchors
368 368 aln = self.anchorlinenos
369 369 nocls = self.noclasses
370 370 if sp:
371 371 lines = []
372 372
373 373 for i in range(fl, fl + lncount):
374 374 if i % st == 0:
375 375 if i % sp == 0:
376 376 if aln:
377 377 lines.append('<a href="#%s%d" class="special">%*d</a>' %
378 378 (la, i, mw, i))
379 379 else:
380 380 lines.append('<span class="special">%*d</span>' % (mw, i))
381 381 else:
382 382 if aln:
383 383 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
384 384 else:
385 385 lines.append('%*d' % (mw, i))
386 386 else:
387 387 lines.append('')
388 388 ls = '\n'.join(lines)
389 389 else:
390 390 lines = []
391 391 for i in range(fl, fl + lncount):
392 392 if i % st == 0:
393 393 if aln:
394 394 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
395 395 else:
396 396 lines.append('%*d' % (mw, i))
397 397 else:
398 398 lines.append('')
399 399 ls = '\n'.join(lines)
400 400
401 401 # in case you wonder about the seemingly redundant <div> here: since the
402 402 # content in the other cell also is wrapped in a div, some browsers in
403 403 # some configurations seem to mess up the formatting...
404 404 if nocls:
405 405 yield 0, ('<table class="%stable">' % self.cssclass +
406 406 '<tr><td><div class="linenodiv" '
407 407 'style="background-color: #f0f0f0; padding-right: 10px">'
408 408 '<pre style="line-height: 125%">' +
409 409 ls + '</pre></div></td><td id="hlcode" class="code">')
410 410 else:
411 411 yield 0, ('<table class="%stable">' % self.cssclass +
412 412 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
413 413 ls + '</pre></div></td><td id="hlcode" class="code">')
414 414 yield 0, dummyoutfile.getvalue()
415 415 yield 0, '</td></tr></table>'
416 416
417 417
418 418 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
419 419 def __init__(self, **kw):
420 420 # only show these line numbers if set
421 421 self.only_lines = kw.pop('only_line_numbers', [])
422 422 self.query_terms = kw.pop('query_terms', [])
423 423 self.max_lines = kw.pop('max_lines', 5)
424 424 self.line_context = kw.pop('line_context', 3)
425 425 self.url = kw.pop('url', None)
426 426
427 427 super(CodeHtmlFormatter, self).__init__(**kw)
428 428
429 429 def _wrap_code(self, source):
430 430 for cnt, it in enumerate(source):
431 431 i, t = it
432 432 t = '<pre>%s</pre>' % t
433 433 yield i, t
434 434
435 435 def _wrap_tablelinenos(self, inner):
436 436 yield 0, '<table class="code-highlight %stable">' % self.cssclass
437 437
438 438 last_shown_line_number = 0
439 439 current_line_number = 1
440 440
441 441 for t, line in inner:
442 442 if not t:
443 443 yield t, line
444 444 continue
445 445
446 446 if current_line_number in self.only_lines:
447 447 if last_shown_line_number + 1 != current_line_number:
448 448 yield 0, '<tr>'
449 449 yield 0, '<td class="line">...</td>'
450 450 yield 0, '<td id="hlcode" class="code"></td>'
451 451 yield 0, '</tr>'
452 452
453 453 yield 0, '<tr>'
454 454 if self.url:
455 455 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
456 456 self.url, current_line_number, current_line_number)
457 457 else:
458 458 yield 0, '<td class="line"><a href="">%i</a></td>' % (
459 459 current_line_number)
460 460 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
461 461 yield 0, '</tr>'
462 462
463 463 last_shown_line_number = current_line_number
464 464
465 465 current_line_number += 1
466 466
467 467 yield 0, '</table>'
468 468
469 469
470 470 def hsv_to_rgb(h, s, v):
471 471 """ Convert hsv color values to rgb """
472 472
473 473 if s == 0.0:
474 474 return v, v, v
475 475 i = int(h * 6.0) # XXX assume int() truncates!
476 476 f = (h * 6.0) - i
477 477 p = v * (1.0 - s)
478 478 q = v * (1.0 - s * f)
479 479 t = v * (1.0 - s * (1.0 - f))
480 480 i = i % 6
481 481 if i == 0:
482 482 return v, t, p
483 483 if i == 1:
484 484 return q, v, p
485 485 if i == 2:
486 486 return p, v, t
487 487 if i == 3:
488 488 return p, q, v
489 489 if i == 4:
490 490 return t, p, v
491 491 if i == 5:
492 492 return v, p, q
493 493
494 494
495 495 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
496 496 """
497 497 Generator for getting n of evenly distributed colors using
498 498 hsv color and the golden ratio. It always returns the same order of colors
499 499
500 500 :param n: number of colors to generate
501 501 :param saturation: saturation of returned colors
502 502 :param lightness: lightness of returned colors
503 503 :returns: RGB tuple
504 504 """
505 505
506 506 golden_ratio = 0.618033988749895
507 507 h = 0.22717784590367374
508 508
509 509 for _ in xrange(n):
510 510 h += golden_ratio
511 511 h %= 1
512 512 HSV_tuple = [h, saturation, lightness]
513 513 RGB_tuple = hsv_to_rgb(*HSV_tuple)
514 514 yield map(lambda x: str(int(x * 256)), RGB_tuple)
515 515
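# Minimal usage sketch (the output values are illustrative, not fixed):
#   gen = unique_color_generator(saturation=0.10, lightness=0.95)
#   gen.next()  # -> e.g. ['244', '243', '230']; the sequence order is always the same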
516 516
517 517 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
518 518 """
519 519 Returns a function which, when called with an argument, returns a unique
520 520 color for that argument, e.g.
521 521
522 522 :param n: number of colors to generate
523 523 :param saturation: saturation of returned colors
524 524 :param lightness: lightness of returned colors
525 525 :returns: css RGB string
526 526
527 527 >>> color_hash = color_hasher()
528 528 >>> color_hash('hello')
529 529 'rgb(34, 12, 59)'
530 530 >>> color_hash('hello')
531 531 'rgb(34, 12, 59)'
532 532 >>> color_hash('other')
533 533 'rgb(90, 224, 159)'
534 534 """
535 535
536 536 color_dict = {}
537 537 cgenerator = unique_color_generator(
538 538 saturation=saturation, lightness=lightness)
539 539
540 540 def get_color_string(thing):
541 541 if thing in color_dict:
542 542 col = color_dict[thing]
543 543 else:
544 544 col = color_dict[thing] = cgenerator.next()
545 545 return "rgb(%s)" % (', '.join(col))
546 546
547 547 return get_color_string
548 548
549 549
550 550 def get_lexer_safe(mimetype=None, filepath=None):
551 551 """
552 552 Tries to return a relevant pygments lexer using mimetype/filepath name,
553 553 defaulting to plain text if none could be found
554 554 """
555 555 lexer = None
556 556 try:
557 557 if mimetype:
558 558 lexer = get_lexer_for_mimetype(mimetype)
559 559 if not lexer:
560 560 lexer = get_lexer_for_filename(filepath)
561 561 except pygments.util.ClassNotFound:
562 562 pass
563 563
564 564 if not lexer:
565 565 lexer = get_lexer_by_name('text')
566 566
567 567 return lexer
568 568
569 569
570 570 def get_lexer_for_filenode(filenode):
571 571 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
572 572 return lexer
573 573
574 574
575 575 def pygmentize(filenode, **kwargs):
576 576 """
577 577 pygmentize function using pygments
578 578
579 579 :param filenode:
580 580 """
581 581 lexer = get_lexer_for_filenode(filenode)
582 582 return literal(code_highlight(filenode.content, lexer,
583 583 CodeHtmlFormatter(**kwargs)))
584 584
585 585
586 586 def is_following_repo(repo_name, user_id):
587 587 from rhodecode.model.scm import ScmModel
588 588 return ScmModel().is_following_repo(repo_name, user_id)
589 589
590 590
591 591 class _Message(object):
592 592 """A message returned by ``Flash.pop_messages()``.
593 593
594 594 Converting the message to a string returns the message text. Instances
595 595 also have the following attributes:
596 596
597 597 * ``message``: the message text.
598 598 * ``category``: the category specified when the message was created.
599 599 """
600 600
601 601 def __init__(self, category, message):
602 602 self.category = category
603 603 self.message = message
604 604
605 605 def __str__(self):
606 606 return self.message
607 607
608 608 __unicode__ = __str__
609 609
610 610 def __html__(self):
611 611 return escape(safe_unicode(self.message))
612 612
613 613
614 614 class Flash(object):
615 615 # List of allowed categories. If None, allow any category.
616 616 categories = ["warning", "notice", "error", "success"]
617 617
618 618 # Default category if none is specified.
619 619 default_category = "notice"
620 620
621 621 def __init__(self, session_key="flash", categories=None,
622 622 default_category=None):
623 623 """
624 624 Instantiate a ``Flash`` object.
625 625
626 626 ``session_key`` is the key to save the messages under in the user's
627 627 session.
628 628
629 629 ``categories`` is an optional list which overrides the default list
630 630 of categories.
631 631
632 632 ``default_category`` overrides the default category used for messages
633 633 when none is specified.
634 634 """
635 635 self.session_key = session_key
636 636 if categories is not None:
637 637 self.categories = categories
638 638 if default_category is not None:
639 639 self.default_category = default_category
640 640 if self.categories and self.default_category not in self.categories:
641 641 raise ValueError(
642 642 "unrecognized default category %r" % (self.default_category,))
643 643
644 644 def pop_messages(self, session=None, request=None):
645 645 """
646 646 Return all accumulated messages and delete them from the session.
647 647
648 648 The return value is a list of ``Message`` objects.
649 649 """
650 650 messages = []
651 651
652 652 if not session:
653 653 if not request:
654 654 request = get_current_request()
655 655 session = request.session
656 656
657 657 # Pop the 'old' pylons flash messages. They are tuples of the form
658 658 # (category, message)
659 659 for cat, msg in session.pop(self.session_key, []):
660 660 messages.append(_Message(cat, msg))
661 661
662 662 # Pop the 'new' pyramid flash messages for each category as list
663 663 # of strings.
664 664 for cat in self.categories:
665 665 for msg in session.pop_flash(queue=cat):
666 666 messages.append(_Message(cat, msg))
667 667 # Map messages from the default queue to the 'notice' category.
668 668 for msg in session.pop_flash():
669 669 messages.append(_Message('notice', msg))
670 670
671 671 session.save()
672 672 return messages
673 673
674 674 def json_alerts(self, session=None, request=None):
675 675 payloads = []
676 676 messages = flash.pop_messages(session=session, request=request)
677 677 if messages:
678 678 for message in messages:
679 679 subdata = {}
680 680 if hasattr(message.message, 'rsplit'):
681 681 flash_data = message.message.rsplit('|DELIM|', 1)
682 682 org_message = flash_data[0]
683 683 if len(flash_data) > 1:
684 684 subdata = json.loads(flash_data[1])
685 685 else:
686 686 org_message = message.message
687 687 payloads.append({
688 688 'message': {
689 689 'message': u'{}'.format(org_message),
690 690 'level': message.category,
691 691 'force': True,
692 692 'subdata': subdata
693 693 }
694 694 })
695 695 return json.dumps(payloads)
696 696
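# Sketch of the '|DELIM|' convention consumed above (payload values are hypothetical):
#   flash('Saved|DELIM|{"changed": 1}', category='success')
# json_alerts() would then emit 'Saved' as the message text with subdata {'changed': 1}.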
697 697 def __call__(self, message, category=None, ignore_duplicate=True,
698 698 session=None, request=None):
699 699
700 700 if not session:
701 701 if not request:
702 702 request = get_current_request()
703 703 session = request.session
704 704
705 705 session.flash(
706 706 message, queue=category, allow_duplicate=not ignore_duplicate)
707 707
708 708
709 709 flash = Flash()
710 710
711 711 #==============================================================================
712 712 # SCM FILTERS available via h.
713 713 #==============================================================================
714 714 from rhodecode.lib.vcs.utils import author_name, author_email
715 715 from rhodecode.lib.utils2 import credentials_filter, age, age_from_seconds
716 716 from rhodecode.model.db import User, ChangesetStatus
717 717
718 718 capitalize = lambda x: x.capitalize()
719 719 email = author_email
720 720 short_id = lambda x: x[:12]
721 721 hide_credentials = lambda x: ''.join(credentials_filter(x))
722 722
723 723
724 724 import pytz
725 725 import tzlocal
726 726 local_timezone = tzlocal.get_localzone()
727 727
728 728
729 729 def age_component(datetime_iso, value=None, time_is_local=False):
730 730 title = value or format_date(datetime_iso)
731 731 tzinfo = '+00:00'
732 732
733 733 # detect if we have timezone info, otherwise add it
734 734 if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
735 735 force_timezone = os.environ.get('RC_TIMEZONE', '')
736 736 if force_timezone:
737 737 force_timezone = pytz.timezone(force_timezone)
738 738 timezone = force_timezone or local_timezone
739 739 offset = timezone.localize(datetime_iso).strftime('%z')
740 740 tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
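# e.g. an strftime('%z') offset of '+0100' is rewritten as '+01:00' here (example value only)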
741 741
742 742 return literal(
743 743 '<time class="timeago tooltip" '
744 744 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
745 745 datetime_iso, title, tzinfo))
746 746
747 747
748 748 def _shorten_commit_id(commit_id, commit_len=None):
749 749 if commit_len is None:
750 750 request = get_current_request()
751 751 commit_len = request.call_context.visual.show_sha_length
752 752 return commit_id[:commit_len]
753 753
754 754
755 755 def show_id(commit, show_idx=None, commit_len=None):
756 756 """
757 757 Configurable function that shows the commit ID;
758 758 by default it's r123:fffeeefffeee
759 759
760 760 :param commit: commit instance
761 761 """
762 762 if show_idx is None:
763 763 request = get_current_request()
764 764 show_idx = request.call_context.visual.show_revision_number
765 765
766 766 raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
767 767 if show_idx:
768 768 return 'r%s:%s' % (commit.idx, raw_id)
769 769 else:
770 770 return '%s' % (raw_id, )
771 771
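A sketch of the two output forms of `show_id`, assuming a commit with idx 123 and the default sha length of 12 (values are illustrative):

    # show_revision_number enabled:   show_id(commit) -> 'r123:fffeeefffeee'
    # show_revision_number disabled:  show_id(commit) -> 'fffeeefffeee'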
772 772
773 773 def format_date(date):
774 774 """
775 775 use a standardized formatting for dates used in RhodeCode
776 776
777 777 :param date: date/datetime object
778 778 :return: formatted date
779 779 """
780 780
781 781 if date:
782 782 _fmt = "%a, %d %b %Y %H:%M:%S"
783 783 return safe_unicode(date.strftime(_fmt))
784 784
785 785 return u""
786 786
787 787
788 788 class _RepoChecker(object):
789 789
790 790 def __init__(self, backend_alias):
791 791 self._backend_alias = backend_alias
792 792
793 793 def __call__(self, repository):
794 794 if hasattr(repository, 'alias'):
795 795 _type = repository.alias
796 796 elif hasattr(repository, 'repo_type'):
797 797 _type = repository.repo_type
798 798 else:
799 799 _type = repository
800 800 return _type == self._backend_alias
801 801
802 802
803 803 is_git = _RepoChecker('git')
804 804 is_hg = _RepoChecker('hg')
805 805 is_svn = _RepoChecker('svn')
806 806
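The checkers above accept either a repository object (matched via `.alias` or `.repo_type`) or a plain backend alias string; a quick sketch:

    assert is_git('git')
    assert not is_hg('git')
    assert is_svn('svn')
    # Repository instances work the same way, matched on repo.alias/repo_type.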
807 807
808 808 def get_repo_type_by_name(repo_name):
809 809 repo = Repository.get_by_repo_name(repo_name)
810 810 if repo:
811 811 return repo.repo_type
812 812
813 813
814 814 def is_svn_without_proxy(repository):
815 815 if is_svn(repository):
816 816 from rhodecode.model.settings import VcsSettingsModel
817 817 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
818 818 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
819 819 return False
820 820
821 821
822 822 def discover_user(author):
823 823 """
824 Tries to discover RhodeCode User based on the autho string. Author string
824 Tries to discover RhodeCode User based on the author string. Author string
825 825 is typically `FirstName LastName <email@address.com>`
826 826 """
827 827
828 828 # if author is already an instance use it for extraction
829 829 if isinstance(author, User):
830 830 return author
831 831
832 832 # Valid email in the attribute passed, see if they're in the system
833 833 _email = author_email(author)
834 834 if _email != '':
835 835 user = User.get_by_email(_email, case_insensitive=True, cache=True)
836 836 if user is not None:
837 837 return user
838 838
839 839 # Maybe it's a username, we try to extract it and fetch by username ?
840 840 _author = author_name(author)
841 841 user = User.get_by_username(_author, case_insensitive=True, cache=True)
842 842 if user is not None:
843 843 return user
844 844
845 845 return None
846 846
847 847
848 848 def email_or_none(author):
849 849 # extract email from the commit string
850 850 _email = author_email(author)
851 851
852 852 # If we have an email, use it, otherwise
853 853 # see if it contains a username we can get an email from
854 854 if _email != '':
855 855 return _email
856 856 else:
857 857 user = User.get_by_username(
858 858 author_name(author), case_insensitive=True, cache=True)
859 859
860 860 if user is not None:
861 861 return user.email
862 862
863 863 # No valid email, not a valid user in the system, none!
864 864 return None
865 865
866 866
867 867 def link_to_user(author, length=0, **kwargs):
868 868 user = discover_user(author)
869 869 # user can be None, but if we have it already it means we can re-use it
870 870 # in the person() function, so we save one intensive query
871 871 if user:
872 872 author = user
873 873
874 874 display_person = person(author, 'username_or_name_or_email')
875 875 if length:
876 876 display_person = shorter(display_person, length)
877 877
878 878 if user:
879 879 return link_to(
880 880 escape(display_person),
881 881 route_path('user_profile', username=user.username),
882 882 **kwargs)
883 883 else:
884 884 return escape(display_person)
885 885
886 886
887 887 def link_to_group(users_group_name, **kwargs):
888 888 return link_to(
889 889 escape(users_group_name),
890 890 route_path('user_group_profile', user_group_name=users_group_name),
891 891 **kwargs)
892 892
893 893
894 894 def person(author, show_attr="username_and_name"):
895 895 user = discover_user(author)
896 896 if user:
897 897 return getattr(user, show_attr)
898 898 else:
899 899 _author = author_name(author)
900 900 _email = email(author)
901 901 return _author or _email
902 902
903 903
904 904 def author_string(email):
905 905 if email:
906 906 user = User.get_by_email(email, case_insensitive=True, cache=True)
907 907 if user:
908 908 if user.first_name or user.last_name:
909 909 return '%s %s &lt;%s&gt;' % (
910 910 user.first_name, user.last_name, email)
911 911 else:
912 912 return email
913 913 else:
914 914 return email
915 915 else:
916 916 return None
917 917
918 918
919 919 def person_by_id(id_, show_attr="username_and_name"):
920 920 # attr to return from fetched user
921 921 person_getter = lambda usr: getattr(usr, show_attr)
922 922
923 923 # maybe it's an ID?
924 924 if str(id_).isdigit() or isinstance(id_, int):
925 925 id_ = int(id_)
926 926 user = User.get(id_)
927 927 if user is not None:
928 928 return person_getter(user)
929 929 return id_
930 930
931 931
932 932 def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
933 933 _render = request.get_partial_renderer('rhodecode:templates/base/base.mako')
934 934 return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip)
935 935
936 936
937 937 tags_paterns = OrderedDict((
938 938 ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
939 939 '<div class="metatag" tag="lang">\\2</div>')),
940 940
941 941 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
942 942 '<div class="metatag" tag="see">see: \\1 </div>')),
943 943
944 944 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
945 945 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),
946 946
947 947 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
948 948 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),
949 949
950 950 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
951 951 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
952 952
953 953 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
954 954 '<div class="metatag" tag="state \\1">\\1</div>')),
955 955
956 956 # label in grey
957 957 ('label', (re.compile(r'\[([a-z]+)\]'),
958 958 '<div class="metatag" tag="label">\\1</div>')),
959 959
960 960 # generic catch all in grey
961 961 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
962 962 '<div class="metatag" tag="generic">\\1</div>')),
963 963 ))
964 964
965 965
966 966 def extract_metatags(value):
967 967 """
968 968 Extract supported meta-tags from given text value
969 969 """
970 970 tags = []
971 971 if not value:
972 972 return tags, ''
973 973
974 974 for key, val in tags_paterns.items():
975 975 pat, replace_html = val
976 976 tags.extend([(key, x.group()) for x in pat.finditer(value)])
977 977 value = pat.sub('', value)
978 978
979 979 return tags, value
980 980
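A small illustration of `extract_metatags`; note the patterns above expect html-escaped input, so `=>` appears as `=&gt;` (values are made up and output shapes are approximate):

    # tags, remaining = extract_metatags('my repo [stable] [lang =&gt; python]')
    # tags      -> [('lang', '[lang =&gt; python]'), ('state', '[stable]')]
    # remaining -> 'my repo  '   (matched tags are stripped from the text)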
981 981
982 982 def style_metatag(tag_type, value):
983 983 """
984 984 converts tags from value into html equivalent
985 985 """
986 986 if not value:
987 987 return ''
988 988
989 989 html_value = value
990 990 tag_data = tags_paterns.get(tag_type)
991 991 if tag_data:
992 992 pat, replace_html = tag_data
993 993 # convert to plain `unicode` instead of a markup tag to be used in
994 994 # regex expressions. safe_unicode doesn't work here
995 995 html_value = pat.sub(replace_html, unicode(value))
996 996
997 997 return html_value
998 998
999 999
1000 1000 def bool2icon(value, show_at_false=True):
1001 1001 """
1002 1002 Returns boolean value of a given value, represented as html element with
1003 1003 classes that will represent icons
1004 1004
1005 1005 :param value: given value to convert to html node
1006 1006 """
1007 1007
1008 1008 if value: # does bool conversion
1009 1009 return HTML.tag('i', class_="icon-true", title='True')
1010 1010 else: # not true as bool
1011 1011 if show_at_false:
1012 1012 return HTML.tag('i', class_="icon-false", title='False')
1013 1013 return HTML.tag('i')
1014 1014
1015 1015 #==============================================================================
1016 1016 # PERMS
1017 1017 #==============================================================================
1018 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1019 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1020 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1021 csrf_token_key
1018 from rhodecode.lib.auth import (
1019 HasPermissionAny, HasPermissionAll,
1020 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1021 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1022 csrf_token_key, AuthUser)
1022 1023
1023 1024
1024 1025 #==============================================================================
1025 1026 # GRAVATAR URL
1026 1027 #==============================================================================
1027 1028 class InitialsGravatar(object):
1028 1029 def __init__(self, email_address, first_name, last_name, size=30,
1029 1030 background=None, text_color='#fff'):
1030 1031 self.size = size
1031 1032 self.first_name = first_name
1032 1033 self.last_name = last_name
1033 1034 self.email_address = email_address
1034 1035 self.background = background or self.str2color(email_address)
1035 1036 self.text_color = text_color
1036 1037
1037 1038 def get_color_bank(self):
1038 1039 """
1039 1040 returns a predefined list of colors that gravatars can use.
1040 1041 Those are randomized distinct colors that guarantee readability and
1041 1042 uniqueness.
1042 1043
1043 1044 generated with: http://phrogz.net/css/distinct-colors.html
1044 1045 """
1045 1046 return [
1046 1047 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1047 1048 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1048 1049 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1049 1050 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1050 1051 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1051 1052 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1052 1053 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1053 1054 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1054 1055 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1055 1056 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1056 1057 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1057 1058 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1058 1059 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1059 1060 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1060 1061 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1061 1062 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1062 1063 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1063 1064 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1064 1065 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1065 1066 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1066 1067 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1067 1068 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1068 1069 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1069 1070 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1070 1071 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1071 1072 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1072 1073 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1073 1074 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1074 1075 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1075 1076 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1076 1077 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1077 1078 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1078 1079 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1079 1080 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1080 1081 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1081 1082 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1082 1083 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1083 1084 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1084 1085 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1085 1086 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1086 1087 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1087 1088 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1088 1089 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1089 1090 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1090 1091 '#4f8c46', '#368dd9', '#5c0073'
1091 1092 ]
1092 1093
1093 1094 def rgb_to_hex_color(self, rgb_tuple):
1094 1095 """
1095 1096 Converts a passed rgb_tuple to a hex color.
1096 1097
1097 1098 :param rgb_tuple: tuple with 3 ints represents rgb color space
1098 1099 """
1099 1100 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1100 1101
1101 1102 def email_to_int_list(self, email_str):
1102 1103 """
1103 1104 Get every byte of the hex digest value of the email and turn it into an integer.
1104 1105 It's always going to be between 0-255
1105 1106 """
1106 1107 digest = md5_safe(email_str.lower())
1107 1108 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1108 1109
1109 1110 def pick_color_bank_index(self, email_str, color_bank):
1110 1111 return self.email_to_int_list(email_str)[0] % len(color_bank)
1111 1112
1112 1113 def str2color(self, email_str):
1113 1114 """
1114 1115 Tries to map an email to a color using a stable algorithm
1115 1116
1116 1117 :param email_str:
1117 1118 """
1118 1119 color_bank = self.get_color_bank()
1119 1120 # pick position (modulo its length) so we always find it in the
1120 1121 # bank even if it's smaller than 256 values
1121 1122 pos = self.pick_color_bank_index(email_str, color_bank)
1122 1123 return color_bank[pos]
1123 1124
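A brief sketch of the stable email-to-color mapping above: the md5 digest picks a deterministic index into the bank, so the same address always gets the same color (the email address is made up):

    ig = InitialsGravatar('jane.doe@example.com', 'Jane', 'Doe')
    color = ig.str2color('jane.doe@example.com')
    assert color in ig.get_color_bank()
    assert color == ig.str2color('jane.doe@example.com')  # deterministic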
1124 1125 def normalize_email(self, email_address):
1125 1126 import unicodedata
1126 1127 # default host used to fill in the fake/missing email
1127 1128 default_host = u'localhost'
1128 1129
1129 1130 if not email_address:
1130 1131 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1131 1132
1132 1133 email_address = safe_unicode(email_address)
1133 1134
1134 1135 if u'@' not in email_address:
1135 1136 email_address = u'%s@%s' % (email_address, default_host)
1136 1137
1137 1138 if email_address.endswith(u'@'):
1138 1139 email_address = u'%s%s' % (email_address, default_host)
1139 1140
1140 1141 email_address = unicodedata.normalize('NFKD', email_address)\
1141 1142 .encode('ascii', 'ignore')
1142 1143 return email_address
1143 1144
1144 1145 def get_initials(self):
1145 1146 """
1146 1147 Returns 2 letter initials calculated based on the input.
1147 1148 The algorithm picks the first given email address and takes the first letter
1148 1149 of the part before @, and then the first letter of the server name. In case
1149 1150 the part before @ is in the format `somestring.somestring2`, it replaces
1150 1151 the server letter with the first letter of somestring2.
1151 1152
1152 1153 In case the function was initialized with both first and last name, this
1153 1154 overrides the extraction from email and uses the first letters of the
1154 1155 first and last name instead. Special logic applies when the full name
1155 1156 is compound, like Guido Von Rossum: we use the last part of the last name
1156 1157 (Von Rossum), picking `R`.
1157 1158
1158 1159 The function also normalizes non-ascii characters to their ascii
1159 1160 representation, eg Ą => A
1160 1161 """
1161 1162 import unicodedata
1162 1163 # replace non-ascii to ascii
1163 1164 first_name = unicodedata.normalize(
1164 1165 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1165 1166 last_name = unicodedata.normalize(
1166 1167 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1167 1168
1168 1169 # do NFKD encoding, and also make sure email has proper format
1169 1170 email_address = self.normalize_email(self.email_address)
1170 1171
1171 1172 # first push the email initials
1172 1173 prefix, server = email_address.split('@', 1)
1173 1174
1174 1175 # check if prefix is maybe a 'first_name.last_name' syntax
1175 1176 _dot_split = prefix.rsplit('.', 1)
1176 1177 if len(_dot_split) == 2 and _dot_split[1]:
1177 1178 initials = [_dot_split[0][0], _dot_split[1][0]]
1178 1179 else:
1179 1180 initials = [prefix[0], server[0]]
1180 1181
1181 1182 # then try to replace either first_name or last_name
1182 1183 fn_letter = (first_name or " ")[0].strip()
1183 1184 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1184 1185
1185 1186 if fn_letter:
1186 1187 initials[0] = fn_letter
1187 1188
1188 1189 if ln_letter:
1189 1190 initials[1] = ln_letter
1190 1191
1191 1192 return ''.join(initials).upper()
1192 1193
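A few illustrative inputs and the initials the algorithm above derives (all values are made up):

    # 'first.last' email prefix -> first letters of both parts
    InitialsGravatar('john.smith@corp.io', '', '').get_initials()     # 'JS'
    # plain prefix -> prefix letter + server letter
    InitialsGravatar('root@corp.io', '', '').get_initials()           # 'RC'
    # explicit names override the email; a compound last name uses its last part
    InitialsGravatar('g@x.io', 'Guido', 'Von Rossum').get_initials()  # 'GR'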
1193 1194 def get_img_data_by_type(self, font_family, img_type):
1194 1195 default_user = """
1195 1196 <svg xmlns="http://www.w3.org/2000/svg"
1196 1197 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1197 1198 viewBox="-15 -10 439.165 429.164"
1198 1199
1199 1200 xml:space="preserve"
1200 1201 style="background:{background};" >
1201 1202
1202 1203 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1203 1204 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1204 1205 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1205 1206 168.596,153.916,216.671,
1206 1207 204.583,216.671z" fill="{text_color}"/>
1207 1208 <path d="M407.164,374.717L360.88,
1208 1209 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1209 1210 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1210 1211 15.366-44.203,23.488-69.076,23.488c-24.877,
1211 1212 0-48.762-8.122-69.078-23.488
1212 1213 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1213 1214 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1214 1215 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1215 1216 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1216 1217 19.402-10.527 C409.699,390.129,
1217 1218 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1218 1219 </svg>""".format(
1219 1220 size=self.size,
1220 1221 background='#979797', # @grey4
1221 1222 text_color=self.text_color,
1222 1223 font_family=font_family)
1223 1224
1224 1225 return {
1225 1226 "default_user": default_user
1226 1227 }[img_type]
1227 1228
1228 1229 def get_img_data(self, svg_type=None):
1229 1230 """
1230 1231 generates the svg metadata for image
1231 1232 """
1232 1233 fonts = [
1233 1234 '-apple-system',
1234 1235 'BlinkMacSystemFont',
1235 1236 'Segoe UI',
1236 1237 'Roboto',
1237 1238 'Oxygen-Sans',
1238 1239 'Ubuntu',
1239 1240 'Cantarell',
1240 1241 'Helvetica Neue',
1241 1242 'sans-serif'
1242 1243 ]
1243 1244 font_family = ','.join(fonts)
1244 1245 if svg_type:
1245 1246 return self.get_img_data_by_type(font_family, svg_type)
1246 1247
1247 1248 initials = self.get_initials()
1248 1249 img_data = """
1249 1250 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1250 1251 width="{size}" height="{size}"
1251 1252 style="width: 100%; height: 100%; background-color: {background}"
1252 1253 viewBox="0 0 {size} {size}">
1253 1254 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1254 1255 pointer-events="auto" fill="{text_color}"
1255 1256 font-family="{font_family}"
1256 1257 style="font-weight: 400; font-size: {f_size}px;">{text}
1257 1258 </text>
1258 1259 </svg>""".format(
1259 1260 size=self.size,
1260 1261 f_size=self.size/2.05, # scale the text inside the box nicely
1261 1262 background=self.background,
1262 1263 text_color=self.text_color,
1263 1264 text=initials.upper(),
1264 1265 font_family=font_family)
1265 1266
1266 1267 return img_data
1267 1268
1268 1269 def generate_svg(self, svg_type=None):
1269 1270 img_data = self.get_img_data(svg_type)
1270 1271 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1271 1272
1272 1273
1273 1274 def initials_gravatar(email_address, first_name, last_name, size=30):
1274 1275 svg_type = None
1275 1276 if email_address == User.DEFAULT_USER_EMAIL:
1276 1277 svg_type = 'default_user'
1277 1278 klass = InitialsGravatar(email_address, first_name, last_name, size)
1278 1279 return klass.generate_svg(svg_type=svg_type)
1279 1280
1280 1281
1281 1282 def gravatar_url(email_address, size=30, request=None):
1282 1283 request = get_current_request()
1283 1284 _use_gravatar = request.call_context.visual.use_gravatar
1284 1285 _gravatar_url = request.call_context.visual.gravatar_url
1285 1286
1286 1287 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1287 1288
1288 1289 email_address = email_address or User.DEFAULT_USER_EMAIL
1289 1290 if isinstance(email_address, unicode):
1290 1291 # hashlib crashes on unicode items
1291 1292 email_address = safe_str(email_address)
1292 1293
1293 1294 # empty email or default user
1294 1295 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1295 1296 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1296 1297
1297 1298 if _use_gravatar:
1298 1299 # TODO: Disuse pyramid thread locals. Think about another solution to
1299 1300 # get the host and schema here.
1300 1301 request = get_current_request()
1301 1302 tmpl = safe_str(_gravatar_url)
1302 1303 tmpl = tmpl.replace('{email}', email_address)\
1303 1304 .replace('{md5email}', md5_safe(email_address.lower())) \
1304 1305 .replace('{netloc}', request.host)\
1305 1306 .replace('{scheme}', request.scheme)\
1306 1307 .replace('{size}', safe_str(size))
1307 1308 return tmpl
1308 1309 else:
1309 1310 return initials_gravatar(email_address, '', '', size=size)
1310 1311
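An illustrative expansion of the gravatar URL template; the template itself is configurable, the one below is only an assumed example:

    # template: 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    # {email}    -> 'jane@example.com'
    # {md5email} -> md5_safe('jane@example.com')   # address is lowercased first
    # {netloc}   -> request.host, {scheme} -> request.scheme
    # {size}     -> '30'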
1311 1312
1312 1313 def breadcrumb_repo_link(repo):
1313 1314 """
1314 1315 Makes a breadcrumbs path link to repo
1315 1316
1316 1317 ex::
1317 1318 group >> subgroup >> repo
1318 1319
1319 1320 :param repo: a Repository instance
1320 1321 """
1321 1322
1322 1323 path = [
1323 1324 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name),
1324 1325 title='last change:{}'.format(format_date(group.last_commit_change)))
1325 1326 for group in repo.groups_with_parents
1326 1327 ] + [
1327 1328 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name),
1328 1329 title='last change:{}'.format(format_date(repo.last_commit_change)))
1329 1330 ]
1330 1331
1331 1332 return literal(' &raquo; '.join(path))
1332 1333
1333 1334
1334 1335 def breadcrumb_repo_group_link(repo_group):
1335 1336 """
1336 1337 Makes a breadcrumbs path link to a repository group
1337 1338
1338 1339 ex::
1339 1340 group >> subgroup
1340 1341
1341 1342 :param repo_group: a Repository Group instance
1342 1343 """
1343 1344
1344 1345 path = [
1345 1346 link_to(group.name,
1346 1347 route_path('repo_group_home', repo_group_name=group.group_name),
1347 1348 title='last change:{}'.format(format_date(group.last_commit_change)))
1348 1349 for group in repo_group.parents
1349 1350 ] + [
1350 1351 link_to(repo_group.name,
1351 1352 route_path('repo_group_home', repo_group_name=repo_group.group_name),
1352 1353 title='last change:{}'.format(format_date(repo_group.last_commit_change)))
1353 1354 ]
1354 1355
1355 1356 return literal(' &raquo; '.join(path))
1356 1357
1357 1358
1358 1359 def format_byte_size_binary(file_size):
1359 1360 """
1360 1361 Formats file/folder sizes to standard.
1361 1362 """
1362 1363 if file_size is None:
1363 1364 file_size = 0
1364 1365
1365 1366 formatted_size = format_byte_size(file_size, binary=True)
1366 1367 return formatted_size
1367 1368
1368 1369
1369 1370 def urlify_text(text_, safe=True, **href_attrs):
1370 1371 """
1371 1372 Extract urls from text and make html links out of them
1372 1373 """
1373 1374
1374 1375 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1375 1376 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1376 1377
1377 1378 def url_func(match_obj):
1378 1379 url_full = match_obj.groups()[0]
1379 1380 a_options = dict(href_attrs)
1380 1381 a_options['href'] = url_full
1381 1382 a_text = url_full
1382 1383 return HTML.tag("a", a_text, **a_options)
1383 1384
1384 1385 _new_text = url_pat.sub(url_func, text_)
1385 1386
1386 1387 if safe:
1387 1388 return literal(_new_text)
1388 1389 return _new_text
1389 1390
1390 1391
1391 1392 def urlify_commits(text_, repo_name):
1392 1393 """
1393 1394 Extract commit ids from text and make links from them
1394 1395
1395 1396 :param text_:
1396 1397 :param repo_name: repo name to build the URL with
1397 1398 """
1398 1399
1399 1400 url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1400 1401
1401 1402 def url_func(match_obj):
1402 1403 commit_id = match_obj.groups()[1]
1403 1404 pref = match_obj.groups()[0]
1404 1405 suf = match_obj.groups()[2]
1405 1406
1406 1407 tmpl = (
1407 1408 '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
1408 1409 '%(commit_id)s</a>%(suf)s'
1409 1410 )
1410 1411 return tmpl % {
1411 1412 'pref': pref,
1412 1413 'cls': 'revision-link',
1413 1414 'url': route_url(
1414 1415 'repo_commit', repo_name=repo_name, commit_id=commit_id),
1415 1416 'commit_id': commit_id,
1416 1417 'suf': suf,
1417 1418 'hovercard_alt': 'Commit: {}'.format(commit_id),
1418 1419 'hovercard_url': route_url(
1419 1420 'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
1420 1421 }
1421 1422
1422 1423 new_text = url_pat.sub(url_func, text_)
1423 1424
1424 1425 return new_text
1425 1426
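For illustration, the pattern above only matches whitespace-delimited hashes of 12-40 hex characters; a rough sketch of the transformation:

    # 'fixes deadbeefcafe in parser' becomes, roughly:
    #   'fixes <a class="tooltip-hovercard revision-link" href="<repo_commit url>"
    #       data-hovercard-url="<hovercard url>">deadbeefcafe</a> in parser'
    # shorter ids such as 'abc123' are left untouched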
1426 1427
1427 1428 def _process_url_func(match_obj, repo_name, uid, entry,
1428 1429 return_raw_data=False, link_format='html'):
1429 1430 pref = ''
1430 1431 if match_obj.group().startswith(' '):
1431 1432 pref = ' '
1432 1433
1433 1434 issue_id = ''.join(match_obj.groups())
1434 1435
1435 1436 if link_format == 'html':
1436 1437 tmpl = (
1437 1438 '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
1438 1439 '%(issue-prefix)s%(id-repr)s'
1439 1440 '</a>')
1440 1441 elif link_format == 'html+hovercard':
1441 1442 tmpl = (
1442 1443 '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
1443 1444 '%(issue-prefix)s%(id-repr)s'
1444 1445 '</a>')
1445 1446 elif link_format in ['rst', 'rst+hovercard']:
1446 1447 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1447 1448 elif link_format in ['markdown', 'markdown+hovercard']:
1448 1449 tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
1449 1450 else:
1450 1451 raise ValueError('Bad link_format:{}'.format(link_format))
1451 1452
1452 1453 (repo_name_cleaned,
1453 1454 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)
1454 1455
1455 1456 # variables replacement
1456 1457 named_vars = {
1457 1458 'id': issue_id,
1458 1459 'repo': repo_name,
1459 1460 'repo_name': repo_name_cleaned,
1460 1461 'group_name': parent_group_name,
1461 1462 # set dummy keys so we always have them
1462 1463 'hostname': '',
1463 1464 'netloc': '',
1464 1465 'scheme': ''
1465 1466 }
1466 1467
1467 1468 request = get_current_request()
1468 1469 if request:
1469 1470 # exposes, hostname, netloc, scheme
1470 1471 host_data = get_host_info(request)
1471 1472 named_vars.update(host_data)
1472 1473
1473 1474 # named regex variables
1474 1475 named_vars.update(match_obj.groupdict())
1475 1476 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1476 1477 desc = string.Template(entry['desc']).safe_substitute(**named_vars)
1477 1478 hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)
1478 1479
1479 1480 def quote_cleaner(input_str):
1480 1481 """Remove quotes as it's HTML"""
1481 1482 return input_str.replace('"', '')
1482 1483
1483 1484 data = {
1484 1485 'pref': pref,
1485 1486 'cls': quote_cleaner('issue-tracker-link'),
1486 1487 'url': quote_cleaner(_url),
1487 1488 'id-repr': issue_id,
1488 1489 'issue-prefix': entry['pref'],
1489 1490 'serv': entry['url'],
1490 1491 'title': desc,
1491 1492 'hovercard_url': hovercard_url
1492 1493 }
1493 1494
1494 1495 if return_raw_data:
1495 1496 return {
1496 1497 'id': issue_id,
1497 1498 'url': _url
1498 1499 }
1499 1500 return tmpl % data
1500 1501
1501 1502
1502 1503 def get_active_pattern_entries(repo_name):
1503 1504 repo = None
1504 1505 if repo_name:
1505 1506 # Retrieve the repo to avoid an invalid repo_name exploding on
1506 1507 # IssueTrackerSettingsModel, while still passing the invalid name further down
1507 1508 repo = Repository.get_by_repo_name(repo_name, cache=True)
1508 1509
1509 1510 settings_model = IssueTrackerSettingsModel(repo=repo)
1510 1511 active_entries = settings_model.get_settings(cache=True)
1511 1512 return active_entries
1512 1513
1513 1514
1514 1515 def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
1515 1516
1516 1517 allowed_formats = ['html', 'rst', 'markdown',
1517 1518 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']
1518 1519 if link_format not in allowed_formats:
1519 1520 raise ValueError('Link format can be only one of:{} got {}'.format(
1520 1521 allowed_formats, link_format))
1521 1522
1522 1523 active_entries = active_entries or get_active_pattern_entries(repo_name)
1523 1524 issues_data = []
1524 1525 new_text = text_string
1525 1526
1526 1527 log.debug('Got %s entries to process', len(active_entries))
1527 1528 for uid, entry in active_entries.items():
1528 1529 log.debug('found issue tracker entry with uid %s', uid)
1529 1530
1530 1531 if not (entry['pat'] and entry['url']):
1531 1532 log.debug('skipping due to missing data')
1532 1533 continue
1533 1534
1534 1535 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
1535 1536 uid, entry['pat'], entry['url'], entry['pref'])
1536 1537
1537 1538 try:
1538 1539 pattern = re.compile(r'%s' % entry['pat'])
1539 1540 except re.error:
1540 1541 log.exception('issue tracker pattern: `%s` failed to compile', entry['pat'])
1541 1542 continue
1542 1543
1543 1544 data_func = partial(
1544 1545 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1545 1546 return_raw_data=True)
1546 1547
1547 1548 for match_obj in pattern.finditer(text_string):
1548 1549 issues_data.append(data_func(match_obj))
1549 1550
1550 1551 url_func = partial(
1551 1552 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1552 1553 link_format=link_format)
1553 1554
1554 1555 new_text = pattern.sub(url_func, new_text)
1555 1556 log.debug('processed prefix:uid `%s`', uid)
1556 1557
1557 1558 # finally use a global replace, eg !123 -> pr-link; those will not be caught
1558 1559 # if a similar pattern already exists
1559 1560 server_url = '${scheme}://${netloc}'
1560 1561 pr_entry = {
1561 1562 'pref': '!',
1562 1563 'url': server_url + '/_admin/pull-requests/${id}',
1563 1564 'desc': 'Pull Request !${id}',
1564 1565 'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
1565 1566 }
1566 1567 pr_url_func = partial(
1567 1568 _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
1568 1569 link_format=link_format+'+hovercard')
1569 1570 new_text = re.compile(r'(?:(?:^!)|(?: !))(\d+)').sub(pr_url_func, new_text)
1570 1571 log.debug('processed !pr pattern')
1571 1572
1572 1573 return new_text, issues_data
1573 1574
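A sketch of an issue-tracker entry consumed by `process_patterns`; the field names follow the code above, while the pattern and URL values are made up:

    # example_entry = {
    #     'pat': r'(?:^|\s)#(?P<issue_id>\d+)',          # regex, may use named groups
    #     'url': '${scheme}://${netloc}/issues/${id}',   # string.Template variables
    #     'pref': '#',
    #     'desc': 'Issue ${id}',
    #     'hovercard_url': '',
    # }
    # new_text, issues = process_patterns(
    #     'fixes #42', 'my-repo', active_entries={'uid-1': example_entry})
    # 'fixes #42' -> 'fixes <a class="tooltip issue-tracker-link" ...>#42</a>'
    # issues      -> [{'id': '42', 'url': 'https://<host>/issues/42'}]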
1574 1575
1575 1576 def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None):
1576 1577 """
1577 1578 Parses the given text message and makes proper links.
1578 1579 Issues are linked to the given issue server, and the rest become commit links
1579 1580 """
1580 1581 def escaper(_text):
1581 1582 return _text.replace('<', '&lt;').replace('>', '&gt;')
1582 1583
1583 1584 new_text = escaper(commit_text)
1584 1585
1585 1586 # extract http/https links and make them real urls
1586 1587 new_text = urlify_text(new_text, safe=False)
1587 1588
1588 1589 # urlify commits - extract commit ids and make link out of them, if we have
1589 1590 # the scope of repository present.
1590 1591 if repository:
1591 1592 new_text = urlify_commits(new_text, repository)
1592 1593
1593 1594 # process issue tracker patterns
1594 1595 new_text, issues = process_patterns(new_text, repository or '',
1595 1596 active_entries=active_pattern_entries)
1596 1597
1597 1598 return literal(new_text)
1598 1599
1599 1600
1600 1601 def render_binary(repo_name, file_obj):
1601 1602 """
1602 1603 Choose how to render a binary file
1603 1604 """
1604 1605
1605 1606 filename = file_obj.name
1606 1607
1607 1608 # images
1608 1609 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1609 1610 if fnmatch.fnmatch(filename, pat=ext):
1610 1611 alt = escape(filename)
1611 1612 src = route_path(
1612 1613 'repo_file_raw', repo_name=repo_name,
1613 1614 commit_id=file_obj.commit.raw_id,
1614 1615 f_path=file_obj.path)
1615 1616 return literal(
1616 1617 '<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1617 1618
1618 1619
1619 1620 def renderer_from_filename(filename, exclude=None):
1620 1621 """
1621 1622 choose a renderer based on filename; this works only for text-based files
1622 1623 """
1623 1624
1624 1625 # ipython
1625 1626 for ext in ['*.ipynb']:
1626 1627 if fnmatch.fnmatch(filename, pat=ext):
1627 1628 return 'jupyter'
1628 1629
1629 1630 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1630 1631 if is_markup:
1631 1632 return is_markup
1632 1633 return None
1633 1634
1634 1635
1635 1636 def render(source, renderer='rst', mentions=False, relative_urls=None,
1636 1637 repo_name=None):
1637 1638
1638 1639 def maybe_convert_relative_links(html_source):
1639 1640 if relative_urls:
1640 1641 return relative_links(html_source, relative_urls)
1641 1642 return html_source
1642 1643
1643 1644 if renderer == 'plain':
1644 1645 return literal(
1645 1646 MarkupRenderer.plain(source, leading_newline=False))
1646 1647
1647 1648 elif renderer == 'rst':
1648 1649 if repo_name:
1649 1650 # process patterns on comments if we pass in repo name
1650 1651 source, issues = process_patterns(
1651 1652 source, repo_name, link_format='rst')
1652 1653
1653 1654 return literal(
1654 1655 '<div class="rst-block">%s</div>' %
1655 1656 maybe_convert_relative_links(
1656 1657 MarkupRenderer.rst(source, mentions=mentions)))
1657 1658
1658 1659 elif renderer == 'markdown':
1659 1660 if repo_name:
1660 1661 # process patterns on comments if we pass in repo name
1661 1662 source, issues = process_patterns(
1662 1663 source, repo_name, link_format='markdown')
1663 1664
1664 1665 return literal(
1665 1666 '<div class="markdown-block">%s</div>' %
1666 1667 maybe_convert_relative_links(
1667 1668 MarkupRenderer.markdown(source, flavored=True,
1668 1669 mentions=mentions)))
1669 1670
1670 1671 elif renderer == 'jupyter':
1671 1672 return literal(
1672 1673 '<div class="ipynb">%s</div>' %
1673 1674 maybe_convert_relative_links(
1674 1675 MarkupRenderer.jupyter(source)))
1675 1676
1676 1677 # None means just show the file-source
1677 1678 return None
1678 1679
1679 1680
1680 1681 def commit_status(repo, commit_id):
1681 1682 return ChangesetStatusModel().get_status(repo, commit_id)
1682 1683
1683 1684
1684 1685 def commit_status_lbl(commit_status):
1685 1686 return dict(ChangesetStatus.STATUSES).get(commit_status)
1686 1687
1687 1688
1688 1689 def commit_time(repo_name, commit_id):
1689 1690 repo = Repository.get_by_repo_name(repo_name)
1690 1691 commit = repo.get_commit(commit_id=commit_id)
1691 1692 return commit.date
1692 1693
1693 1694
1694 1695 def get_permission_name(key):
1695 1696 return dict(Permission.PERMS).get(key)
1696 1697
1697 1698
1698 1699 def journal_filter_help(request):
1699 1700 _ = request.translate
1700 1701 from rhodecode.lib.audit_logger import ACTIONS
1701 1702 actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))
1702 1703
1703 1704 return _(
1704 1705 'Example filter terms:\n' +
1705 1706 ' repository:vcs\n' +
1706 1707 ' username:marcin\n' +
1707 1708 ' username:(NOT marcin)\n' +
1708 1709 ' action:*push*\n' +
1709 1710 ' ip:127.0.0.1\n' +
1710 1711 ' date:20120101\n' +
1711 1712 ' date:[20120101100000 TO 20120102]\n' +
1712 1713 '\n' +
1713 1714 'Actions: {actions}\n' +
1714 1715 '\n' +
1715 1716 'Generate wildcards using \'*\' character:\n' +
1716 1717 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1717 1718 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1718 1719 '\n' +
1719 1720 'Optional AND / OR operators in queries\n' +
1720 1721 ' "repository:vcs OR repository:test"\n' +
1721 1722 ' "username:test AND repository:test*"\n'
1722 1723 ).format(actions=actions)
1723 1724
1724 1725
1725 1726 def not_mapped_error(repo_name):
1726 1727 from rhodecode.translation import _
1727 1728 flash(_('%s repository is not mapped to db perhaps'
1728 1729 ' it was created or renamed from the filesystem'
1729 1730 ' please run the application again'
1730 1731 ' in order to rescan repositories') % repo_name, category='error')
1731 1732
1732 1733
1733 1734 def ip_range(ip_addr):
1734 1735 from rhodecode.model.db import UserIpMap
1735 1736 s, e = UserIpMap._get_ip_range(ip_addr)
1736 1737 return '%s - %s' % (s, e)
1737 1738
1738 1739
1739 1740 def form(url, method='post', needs_csrf_token=True, **attrs):
1740 1741 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1741 1742 if method.lower() != 'get' and needs_csrf_token:
1742 1743 raise Exception(
1743 1744 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1744 1745 'CSRF token. If the endpoint does not require such token you can ' +
1745 1746 'explicitly set the parameter needs_csrf_token to false.')
1746 1747
1747 1748 return insecure_form(url, method=method, **attrs)
1748 1749
1749 1750
1750 1751 def secure_form(form_url, method="POST", multipart=False, **attrs):
1751 1752 """Start a form tag that points the action to a url. This
1752 1753 form tag will also include the hidden field containing
1753 1754 the auth token.
1754 1755
1755 1756 The url options should be given either as a string, or as a
1756 1757 ``url()`` function. The method for the form defaults to POST.
1757 1758
1758 1759 Options:
1759 1760
1760 1761 ``multipart``
1761 1762 If set to True, the enctype is set to "multipart/form-data".
1762 1763 ``method``
1763 1764 The method to use when submitting the form, usually either
1764 1765 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1765 1766 hidden input with name _method is added to simulate the verb
1766 1767 over POST.
1767 1768
1768 1769 """
1769 1770
1770 1771 if 'request' in attrs:
1771 1772 session = attrs['request'].session
1772 1773 del attrs['request']
1773 1774 else:
1774 1775 raise ValueError(
1775 1776 'Calling this form requires request= to be passed as argument')
1776 1777
1777 1778 _form = insecure_form(form_url, method, multipart, **attrs)
1778 1779 token = literal(
1779 1780 '<input type="hidden" name="{}" value="{}">'.format(
1780 1781 csrf_token_key, get_csrf_token(session)))
1781 1782
1782 1783 return literal("%s\n%s" % (_form, token))
1783 1784
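A minimal usage sketch, assuming a Mako template where `request` is available (the route name and `end_form` helper are illustrative):

    # In a Mako template:
    #   ${h.secure_form(h.route_path('my_account_password_update'), request=request)}
    #     ... form fields ...
    #   ${h.end_form()}
    # The hidden csrf_token input is appended automatically; omitting request=
    # raises a ValueError, as enforced above.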
1784 1785
1785 1786 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1786 1787 select_html = select(name, selected, options, **attrs)
1787 1788
1788 1789 select2 = """
1789 1790 <script>
1790 1791 $(document).ready(function() {
1791 1792 $('#%s').select2({
1792 1793 containerCssClass: 'drop-menu %s',
1793 1794 dropdownCssClass: 'drop-menu-dropdown',
1794 1795 dropdownAutoWidth: true%s
1795 1796 });
1796 1797 });
1797 1798 </script>
1798 1799 """
1799 1800
1800 1801 filter_option = """,
1801 1802 minimumResultsForSearch: -1
1802 1803 """
1803 1804 input_id = attrs.get('id') or name
1804 1805 extra_classes = ' '.join(attrs.pop('extra_classes', []))
1805 1806 filter_enabled = "" if enable_filter else filter_option
1806 1807 select_script = literal(select2 % (input_id, extra_classes, filter_enabled))
1807 1808
1808 1809 return literal(select_html+select_script)
1809 1810
1810 1811
1811 1812 def get_visual_attr(tmpl_context_var, attr_name):
1812 1813 """
1813 1814 A safe way to get an attribute from the visual variable of the template context
1814 1815
1815 1816 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1816 1817 :param attr_name: name of the attribute we fetch from the c.visual
1817 1818 """
1818 1819 visual = getattr(tmpl_context_var, 'visual', None)
1819 1820 if not visual:
1820 1821 return
1821 1822 else:
1822 1823 return getattr(visual, attr_name, None)
1823 1824
1824 1825
1825 1826 def get_last_path_part(file_node):
1826 1827 if not file_node.path:
1827 1828 return u'/'
1828 1829
1829 1830 path = safe_unicode(file_node.path.split('/')[-1])
1830 1831 return u'../' + path
1831 1832
1832 1833
1833 1834 def route_url(*args, **kwargs):
1834 1835 """
1835 1836 Wrapper around pyramids `route_url` (fully qualified url) function.
1836 1837 """
1837 1838 req = get_current_request()
1838 1839 return req.route_url(*args, **kwargs)
1839 1840
1840 1841
1841 1842 def route_path(*args, **kwargs):
1842 1843 """
1843 1844 Wrapper around pyramids `route_path` function.
1844 1845 """
1845 1846 req = get_current_request()
1846 1847 return req.route_path(*args, **kwargs)
1847 1848
1848 1849
1849 1850 def route_path_or_none(*args, **kwargs):
1850 1851 try:
1851 1852 return route_path(*args, **kwargs)
1852 1853 except KeyError:
1853 1854 return None
1854 1855
1855 1856
1856 1857 def current_route_path(request, **kw):
1857 1858 new_args = request.GET.mixed()
1858 1859 new_args.update(kw)
1859 1860 return request.current_route_path(_query=new_args)
1860 1861
1861 1862
1862 1863 def curl_api_example(method, args):
1863 1864 args_json = json.dumps(OrderedDict([
1864 1865 ('id', 1),
1865 1866 ('auth_token', 'SECRET'),
1866 1867 ('method', method),
1867 1868 ('args', args)
1868 1869 ]))
1869 1870
1870 1871 return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
1871 1872 api_url=route_url('apiv2'),
1872 1873 args_json=args_json
1873 1874 )
1874 1875
1875 1876
1876 1877 def api_call_example(method, args):
1877 1878 """
1878 1879 Generates an API call example via CURL
1879 1880 """
1880 1881 curl_call = curl_api_example(method, args)
1881 1882
1882 1883 return literal(
1883 1884 curl_call +
1884 1885 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
1885 1886 "and needs to be of `api calls` role."
1886 1887 .format(token_url=route_url('my_account_auth_tokens')))
1887 1888
1888 1889
1889 1890 def notification_description(notification, request):
1890 1891 """
1891 1892 Generate a human-readable notification description based on the notification type
1892 1893 """
1893 1894 from rhodecode.model.notification import NotificationModel
1894 1895 return NotificationModel().make_description(
1895 1896 notification, translate=request.translate)
1896 1897
1897 1898
1898 1899 def go_import_header(request, db_repo=None):
1899 1900 """
1900 1901 Creates a header for go-import functionality in Go Lang
1901 1902 """
1902 1903
1903 1904 if not db_repo:
1904 1905 return
1905 1906 if 'go-get' not in request.GET:
1906 1907 return
1907 1908
1908 1909 clone_url = db_repo.clone_url()
1909 1910 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
1910 1911 # we have a repo and go-get flag,
1911 1912 return literal('<meta name="go-import" content="{} {} {}">'.format(
1912 1913 prefix, db_repo.repo_type, clone_url))
1913 1914
1914 1915
1915 1916 def reviewer_as_json(*args, **kwargs):
1916 1917 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
1917 1918 return _reviewer_as_json(*args, **kwargs)
1918 1919
1919 1920
1920 1921 def get_repo_view_type(request):
1921 1922 route_name = request.matched_route.name
1922 1923 route_to_view_type = {
1923 1924 'repo_changelog': 'commits',
1924 1925 'repo_commits': 'commits',
1925 1926 'repo_files': 'files',
1926 1927 'repo_summary': 'summary',
1927 1928 'repo_commit': 'commit'
1928 1929 }
1929 1930
1930 1931 return route_to_view_type.get(route_name)
1931 1932
1932 1933
1933 1934 def is_active(menu_entry, selected):
1934 1935 """
1935 1936 Returns active class for selecting menus in templates
1936 1937 <li class=${h.is_active('settings', current_active)}></li>
1937 1938 """
1938 1939 if not isinstance(menu_entry, list):
1939 1940 menu_entry = [menu_entry]
1940 1941
1941 1942 if selected in menu_entry:
1942 1943 return "active"
@@ -1,5463 +1,5464 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
74 74 URL_SEP = '/'
75 75 log = logging.getLogger(__name__)
76 76
77 77 # =============================================================================
78 78 # BASE CLASSES
79 79 # =============================================================================
80 80
81 81 # this is propagated from the .ini file rhodecode.encrypted_values.secret or
82 82 # beaker.session.secret if the first is not set,
83 83 # and initialized at environment.py
84 84 ENCRYPTION_KEY = None
85 85
86 86 # used to sort permissions by types, '#' used here is not allowed to be in
87 87 # usernames, and it's very early in sorted string.printable table.
88 88 PERMISSION_TYPE_SORT = {
89 89 'admin': '####',
90 90 'write': '###',
91 91 'read': '##',
92 92 'none': '#',
93 93 }
94 94
95 95
96 96 def display_user_sort(obj):
97 97 """
98 98 Sort function used to sort permissions in .permissions() function of
99 99 Repository, RepoGroup, UserGroup. Also it put the default user in front
100 100 of all other resources
101 101 """
102 102
103 103 if obj.username == User.DEFAULT_USER:
104 104 return '#####'
105 105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 106 return prefix + obj.username
107 107
108 108
109 109 def display_user_group_sort(obj):
110 110 """
111 111 Sort function used to sort permissions in .permissions() function of
112 112 Repository, RepoGroup, UserGroup. Also it puts the default user in front
113 113 of all other resources
114 114 """
115 115
116 116 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
117 117 return prefix + obj.users_group_name
118 118
119 119
120 120 def _hash_key(k):
121 121 return sha1_safe(k)
122 122
123 123
124 124 def in_filter_generator(qry, items, limit=500):
125 125 """
126 126 Splits IN() into multiple with OR
127 127 e.g.::
128 128 cnt = Repository.query().filter(
129 129 or_(
130 130 *in_filter_generator(Repository.repo_id, range(100000))
131 131 )).count()
132 132 """
133 133 if not items:
134 134 # empty list will cause empty query which might cause security issues
135 135 # this can lead to hidden unpleasant results
136 136 items = [-1]
137 137
138 138 parts = []
139 139 for chunk in xrange(0, len(items), limit):
140 140 parts.append(
141 141 qry.in_(items[chunk: chunk + limit])
142 142 )
143 143
144 144 return parts
145 145
146 146
147 147 base_table_args = {
148 148 'extend_existing': True,
149 149 'mysql_engine': 'InnoDB',
150 150 'mysql_charset': 'utf8',
151 151 'sqlite_autoincrement': True
152 152 }
153 153
154 154
155 155 class EncryptedTextValue(TypeDecorator):
156 156 """
157 157 Special column for encrypted long text data, use like::
158 158
159 159 value = Column("encrypted_value", EncryptedValue(), nullable=False)
160 160
161 161 This column is intelligent so if the value is in unencrypted form it returns
162 162 the unencrypted form, but on save it always encrypts
163 163 """
164 164 impl = Text
165 165
166 166 def process_bind_param(self, value, dialect):
167 167 """
168 168 Setter for storing value
169 169 """
170 170 import rhodecode
171 171 if not value:
172 172 return value
173 173
174 174 # protect against double encrypting if value is already encrypted
175 175 if value.startswith('enc$aes$') \
176 176 or value.startswith('enc$aes_hmac$') \
177 177 or value.startswith('enc2$'):
178 178 raise ValueError('value needs to be in unencrypted format, '
179 179 'ie. not starting with enc$ or enc2$')
180 180
181 181 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
182 182 if algo == 'aes':
183 183 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
184 184 elif algo == 'fernet':
185 185 return Encryptor(ENCRYPTION_KEY).encrypt(value)
186 186 else:
187 187 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
188 188
189 189 def process_result_value(self, value, dialect):
190 190 """
191 191 Getter for retrieving value
192 192 """
193 193
194 194 import rhodecode
195 195 if not value:
196 196 return value
197 197
198 198 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
199 199 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
200 200 if algo == 'aes':
201 201 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
202 202 elif algo == 'fernet':
203 203 return Encryptor(ENCRYPTION_KEY).decrypt(value)
204 204 else:
205 205 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
206 206 return decrypted_data
207 207
208 208
209 209 class BaseModel(object):
210 210 """
211 211 Base Model for all classes
212 212 """
213 213
214 214 @classmethod
215 215 def _get_keys(cls):
216 216 """return column names for this model """
217 217 return class_mapper(cls).c.keys()
218 218
219 219 def get_dict(self):
220 220 """
221 221 return dict with keys and values corresponding
222 222 to this model data """
223 223
224 224 d = {}
225 225 for k in self._get_keys():
226 226 d[k] = getattr(self, k)
227 227
228 228 # also use __json__() if present to get additional fields
229 229 _json_attr = getattr(self, '__json__', None)
230 230 if _json_attr:
231 231 # update with attributes from __json__
232 232 if callable(_json_attr):
233 233 _json_attr = _json_attr()
234 234 for k, val in _json_attr.iteritems():
235 235 d[k] = val
236 236 return d
237 237
238 238 def get_appstruct(self):
239 239 """return list with keys and values tuples corresponding
240 240 to this model data """
241 241
242 242 lst = []
243 243 for k in self._get_keys():
244 244 lst.append((k, getattr(self, k),))
245 245 return lst
246 246
247 247 def populate_obj(self, populate_dict):
248 248 """populate model with data from given populate_dict"""
249 249
250 250 for k in self._get_keys():
251 251 if k in populate_dict:
252 252 setattr(self, k, populate_dict[k])
253 253
254 254 @classmethod
255 255 def query(cls):
256 256 return Session().query(cls)
257 257
258 258 @classmethod
259 259 def get(cls, id_):
260 260 if id_:
261 261 return cls.query().get(id_)
262 262
263 263 @classmethod
264 264 def get_or_404(cls, id_):
265 265 from pyramid.httpexceptions import HTTPNotFound
266 266
267 267 try:
268 268 id_ = int(id_)
269 269 except (TypeError, ValueError):
270 270 raise HTTPNotFound()
271 271
272 272 res = cls.query().get(id_)
273 273 if not res:
274 274 raise HTTPNotFound()
275 275 return res
276 276
277 277 @classmethod
278 278 def getAll(cls):
279 279 # deprecated and left for backward compatibility
280 280 return cls.get_all()
281 281
282 282 @classmethod
283 283 def get_all(cls):
284 284 return cls.query().all()
285 285
286 286 @classmethod
287 287 def delete(cls, id_):
288 288 obj = cls.query().get(id_)
289 289 Session().delete(obj)
290 290
291 291 @classmethod
292 292 def identity_cache(cls, session, attr_name, value):
293 293 exist_in_session = []
294 294 for (item_cls, pkey), instance in session.identity_map.items():
295 295 if cls == item_cls and getattr(instance, attr_name) == value:
296 296 exist_in_session.append(instance)
297 297 if exist_in_session:
298 298 if len(exist_in_session) == 1:
299 299 return exist_in_session[0]
300 300 log.exception(
301 301 'multiple objects with attr %s and '
302 302 'value %s found with same name: %r',
303 303 attr_name, value, exist_in_session)
304 304
305 305 def __repr__(self):
306 306 if hasattr(self, '__unicode__'):
307 307 # python repr needs to return str
308 308 try:
309 309 return safe_str(self.__unicode__())
310 310 except UnicodeDecodeError:
311 311 pass
312 312 return '<DB:%s>' % (self.__class__.__name__)
313 313
314 314
315 315 class RhodeCodeSetting(Base, BaseModel):
316 316 __tablename__ = 'rhodecode_settings'
317 317 __table_args__ = (
318 318 UniqueConstraint('app_settings_name'),
319 319 base_table_args
320 320 )
321 321
322 322 SETTINGS_TYPES = {
323 323 'str': safe_str,
324 324 'int': safe_int,
325 325 'unicode': safe_unicode,
326 326 'bool': str2bool,
327 327 'list': functools.partial(aslist, sep=',')
328 328 }
329 329 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
330 330 GLOBAL_CONF_KEY = 'app_settings'
331 331
332 332 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
333 333 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
334 334 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
335 335 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
336 336
337 337 def __init__(self, key='', val='', type='unicode'):
338 338 self.app_settings_name = key
339 339 self.app_settings_type = type
340 340 self.app_settings_value = val
341 341
342 342 @validates('_app_settings_value')
343 343 def validate_settings_value(self, key, val):
344 344 assert type(val) == unicode
345 345 return val
346 346
347 347 @hybrid_property
348 348 def app_settings_value(self):
349 349 v = self._app_settings_value
350 350 _type = self.app_settings_type
351 351 if _type:
352 352 _type = self.app_settings_type.split('.')[0]
353 353 # decode the encrypted value
354 354 if 'encrypted' in self.app_settings_type:
355 355 cipher = EncryptedTextValue()
356 356 v = safe_unicode(cipher.process_result_value(v, None))
357 357
358 358 converter = self.SETTINGS_TYPES.get(_type) or \
359 359 self.SETTINGS_TYPES['unicode']
360 360 return converter(v)
361 361
362 362 @app_settings_value.setter
363 363 def app_settings_value(self, val):
364 364 """
365 365 Setter that will always make sure we use unicode in app_settings_value
366 366
367 367 :param val:
368 368 """
369 369 val = safe_unicode(val)
370 370 # encode the encrypted value
371 371 if 'encrypted' in self.app_settings_type:
372 372 cipher = EncryptedTextValue()
373 373 val = safe_unicode(cipher.process_bind_param(val, None))
374 374 self._app_settings_value = val
375 375
376 376 @hybrid_property
377 377 def app_settings_type(self):
378 378 return self._app_settings_type
379 379
380 380 @app_settings_type.setter
381 381 def app_settings_type(self, val):
382 382 if val.split('.')[0] not in self.SETTINGS_TYPES:
383 383 raise Exception('type must be one of %s got %s'
384 384 % (self.SETTINGS_TYPES.keys(), val))
385 385 self._app_settings_type = val
386 386
387 387 @classmethod
388 388 def get_by_prefix(cls, prefix):
389 389 return RhodeCodeSetting.query()\
390 390 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
391 391 .all()
392 392
393 393 def __unicode__(self):
394 394 return u"<%s('%s:%s[%s]')>" % (
395 395 self.__class__.__name__,
396 396 self.app_settings_name, self.app_settings_value,
397 397 self.app_settings_type
398 398 )
399 399
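# Sketch of the typed-setting machinery above (illustrative key names; assumes
# an initialized database). The stored type drives conversion on read, and a
# '.encrypted' suffix adds transparent encryption of the stored value:
#
#     secret = RhodeCodeSetting('some_private_key', 'secret', 'unicode.encrypted')
#     Session().add(secret)
#     secret.app_settings_value        # decrypted and returned as unicode
#
#     flag = RhodeCodeSetting('some_feature_flag', 'True', 'bool')
#     flag.app_settings_value          # str2bool('True') -> True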
400 400
401 401 class RhodeCodeUi(Base, BaseModel):
402 402 __tablename__ = 'rhodecode_ui'
403 403 __table_args__ = (
404 404 UniqueConstraint('ui_key'),
405 405 base_table_args
406 406 )
407 407
408 408 HOOK_REPO_SIZE = 'changegroup.repo_size'
409 409 # HG
410 410 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
411 411 HOOK_PULL = 'outgoing.pull_logger'
412 412 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
413 413 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
414 414 HOOK_PUSH = 'changegroup.push_logger'
415 415 HOOK_PUSH_KEY = 'pushkey.key_push'
416 416
417 417 HOOKS_BUILTIN = [
418 418 HOOK_PRE_PULL,
419 419 HOOK_PULL,
420 420 HOOK_PRE_PUSH,
421 421 HOOK_PRETX_PUSH,
422 422 HOOK_PUSH,
423 423 HOOK_PUSH_KEY,
424 424 ]
425 425
426 426 # TODO: johbo: Unify way how hooks are configured for git and hg,
427 427 # git part is currently hardcoded.
428 428
429 429 # SVN PATTERNS
430 430 SVN_BRANCH_ID = 'vcs_svn_branch'
431 431 SVN_TAG_ID = 'vcs_svn_tag'
432 432
433 433 ui_id = Column(
434 434 "ui_id", Integer(), nullable=False, unique=True, default=None,
435 435 primary_key=True)
436 436 ui_section = Column(
437 437 "ui_section", String(255), nullable=True, unique=None, default=None)
438 438 ui_key = Column(
439 439 "ui_key", String(255), nullable=True, unique=None, default=None)
440 440 ui_value = Column(
441 441 "ui_value", String(255), nullable=True, unique=None, default=None)
442 442 ui_active = Column(
443 443 "ui_active", Boolean(), nullable=True, unique=None, default=True)
444 444
445 445 def __repr__(self):
446 446 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
447 447 self.ui_key, self.ui_value)
448 448
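# Sketch: listing the active built-in hooks registered in the rhodecode_ui
# table, using only the HOOKS_BUILTIN keys defined above (ui_section values are
# not shown in this class, so they are not filtered on here):
#
#     q = RhodeCodeUi.query()\
#         .filter(RhodeCodeUi.ui_key.in_(RhodeCodeUi.HOOKS_BUILTIN))\
#         .filter(RhodeCodeUi.ui_active == true())
#     active_hooks = {row.ui_key: row.ui_value for row in q}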
449 449
450 450 class RepoRhodeCodeSetting(Base, BaseModel):
451 451 __tablename__ = 'repo_rhodecode_settings'
452 452 __table_args__ = (
453 453 UniqueConstraint(
454 454 'app_settings_name', 'repository_id',
455 455 name='uq_repo_rhodecode_setting_name_repo_id'),
456 456 base_table_args
457 457 )
458 458
459 459 repository_id = Column(
460 460 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
461 461 nullable=False)
462 462 app_settings_id = Column(
463 463 "app_settings_id", Integer(), nullable=False, unique=True,
464 464 default=None, primary_key=True)
465 465 app_settings_name = Column(
466 466 "app_settings_name", String(255), nullable=True, unique=None,
467 467 default=None)
468 468 _app_settings_value = Column(
469 469 "app_settings_value", String(4096), nullable=True, unique=None,
470 470 default=None)
471 471 _app_settings_type = Column(
472 472 "app_settings_type", String(255), nullable=True, unique=None,
473 473 default=None)
474 474
475 475 repository = relationship('Repository')
476 476
477 477 def __init__(self, repository_id, key='', val='', type='unicode'):
478 478 self.repository_id = repository_id
479 479 self.app_settings_name = key
480 480 self.app_settings_type = type
481 481 self.app_settings_value = val
482 482
483 483 @validates('_app_settings_value')
484 484 def validate_settings_value(self, key, val):
485 485 assert type(val) == unicode
486 486 return val
487 487
488 488 @hybrid_property
489 489 def app_settings_value(self):
490 490 v = self._app_settings_value
491 491 type_ = self.app_settings_type
492 492 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
493 493 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
494 494 return converter(v)
495 495
496 496 @app_settings_value.setter
497 497 def app_settings_value(self, val):
498 498 """
499 499 Setter that will always make sure we use unicode in app_settings_value
500 500
501 501 :param val:
502 502 """
503 503 self._app_settings_value = safe_unicode(val)
504 504
505 505 @hybrid_property
506 506 def app_settings_type(self):
507 507 return self._app_settings_type
508 508
509 509 @app_settings_type.setter
510 510 def app_settings_type(self, val):
511 511 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
512 512 if val not in SETTINGS_TYPES:
513 513 raise Exception('type must be one of %s got %s'
514 514 % (SETTINGS_TYPES.keys(), val))
515 515 self._app_settings_type = val
516 516
517 517 def __unicode__(self):
518 518 return u"<%s('%s:%s:%s[%s]')>" % (
519 519 self.__class__.__name__, self.repository.repo_name,
520 520 self.app_settings_name, self.app_settings_value,
521 521 self.app_settings_type
522 522 )
523 523
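# Sketch of a per-repository settings override using the model above
# (hypothetical key/value; assumes an existing repository row). Note that the
# app_settings_type setter here is stricter than RhodeCodeSetting's: it only
# accepts plain SETTINGS_TYPES keys, so dotted '.encrypted' variants are rejected.
#
#     repo = Repository.get_by_repo_name('some/repo')
#     override = RepoRhodeCodeSetting(
#         repo.repo_id, key='some_setting', val='some value', type='unicode')
#     Session().add(override)
#     Session().commit()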
524 524
525 525 class RepoRhodeCodeUi(Base, BaseModel):
526 526 __tablename__ = 'repo_rhodecode_ui'
527 527 __table_args__ = (
528 528 UniqueConstraint(
529 529 'repository_id', 'ui_section', 'ui_key',
530 530 name='uq_repo_rhodecode_ui_repository_id_section_key'),
531 531 base_table_args
532 532 )
533 533
534 534 repository_id = Column(
535 535 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
536 536 nullable=False)
537 537 ui_id = Column(
538 538 "ui_id", Integer(), nullable=False, unique=True, default=None,
539 539 primary_key=True)
540 540 ui_section = Column(
541 541 "ui_section", String(255), nullable=True, unique=None, default=None)
542 542 ui_key = Column(
543 543 "ui_key", String(255), nullable=True, unique=None, default=None)
544 544 ui_value = Column(
545 545 "ui_value", String(255), nullable=True, unique=None, default=None)
546 546 ui_active = Column(
547 547 "ui_active", Boolean(), nullable=True, unique=None, default=True)
548 548
549 549 repository = relationship('Repository')
550 550
551 551 def __repr__(self):
552 552 return '<%s[%s:%s]%s=>%s>' % (
553 553 self.__class__.__name__, self.repository.repo_name,
554 554 self.ui_section, self.ui_key, self.ui_value)
555 555
556 556
557 557 class User(Base, BaseModel):
558 558 __tablename__ = 'users'
559 559 __table_args__ = (
560 560 UniqueConstraint('username'), UniqueConstraint('email'),
561 561 Index('u_username_idx', 'username'),
562 562 Index('u_email_idx', 'email'),
563 563 base_table_args
564 564 )
565 565
566 566 DEFAULT_USER = 'default'
567 567 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
568 568 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
569 569
570 570 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
571 571 username = Column("username", String(255), nullable=True, unique=None, default=None)
572 572 password = Column("password", String(255), nullable=True, unique=None, default=None)
573 573 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
574 574 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
575 575 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
576 576 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
577 577 _email = Column("email", String(255), nullable=True, unique=None, default=None)
578 578 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
579 579 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
580 580 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
581 581
582 582 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
583 583 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
584 584 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
585 585 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
586 586 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
587 587 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
588 588
589 589 user_log = relationship('UserLog')
590 590 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
591 591
592 592 repositories = relationship('Repository')
593 593 repository_groups = relationship('RepoGroup')
594 594 user_groups = relationship('UserGroup')
595 595
596 596 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
597 597 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
598 598
599 599 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
600 600 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
601 601 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
602 602
603 603 group_member = relationship('UserGroupMember', cascade='all')
604 604
605 605 notifications = relationship('UserNotification', cascade='all')
606 606 # notifications assigned to this user
607 607 user_created_notifications = relationship('Notification', cascade='all')
608 608 # comments created by this user
609 609 user_comments = relationship('ChangesetComment', cascade='all')
610 610 # user profile extra info
611 611 user_emails = relationship('UserEmailMap', cascade='all')
612 612 user_ip_map = relationship('UserIpMap', cascade='all')
613 613 user_auth_tokens = relationship('UserApiKeys', cascade='all')
614 614 user_ssh_keys = relationship('UserSshKeys', cascade='all')
615 615
616 616 # gists
617 617 user_gists = relationship('Gist', cascade='all')
618 618 # user pull requests
619 619 user_pull_requests = relationship('PullRequest', cascade='all')
620 620 # external identities
621 621 external_identities = relationship(
622 622 'ExternalIdentity',
623 623 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
624 624 cascade='all')
625 625 # review rules
626 626 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
627 627
628 628 # artifacts owned
629 629 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
630 630
631 631 # no cascade, set NULL
632 632 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
633 633
634 634 def __unicode__(self):
635 635 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
636 636 self.user_id, self.username)
637 637
638 638 @hybrid_property
639 639 def email(self):
640 640 return self._email
641 641
642 642 @email.setter
643 643 def email(self, val):
644 644 self._email = val.lower() if val else None
645 645
646 646 @hybrid_property
647 647 def first_name(self):
648 648 from rhodecode.lib import helpers as h
649 649 if self.name:
650 650 return h.escape(self.name)
651 651 return self.name
652 652
653 653 @hybrid_property
654 654 def last_name(self):
655 655 from rhodecode.lib import helpers as h
656 656 if self.lastname:
657 657 return h.escape(self.lastname)
658 658 return self.lastname
659 659
660 660 @hybrid_property
661 661 def api_key(self):
662 662 """
663 663 Fetch an auth-token with role ALL connected to this user, if one exists
664 664 """
665 665 user_auth_token = UserApiKeys.query()\
666 666 .filter(UserApiKeys.user_id == self.user_id)\
667 667 .filter(or_(UserApiKeys.expires == -1,
668 668 UserApiKeys.expires >= time.time()))\
669 669 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
670 670 if user_auth_token:
671 671 user_auth_token = user_auth_token.api_key
672 672
673 673 return user_auth_token
674 674
675 675 @api_key.setter
676 676 def api_key(self, val):
677 677 # don't allow setting the API key; this is deprecated for now
678 678 self._api_key = None
679 679
680 680 @property
681 681 def reviewer_pull_requests(self):
682 682 return PullRequestReviewers.query() \
683 683 .options(joinedload(PullRequestReviewers.pull_request)) \
684 684 .filter(PullRequestReviewers.user_id == self.user_id) \
685 685 .all()
686 686
687 687 @property
688 688 def firstname(self):
689 689 # alias for future
690 690 return self.name
691 691
692 692 @property
693 693 def emails(self):
694 694 other = UserEmailMap.query()\
695 695 .filter(UserEmailMap.user == self) \
696 696 .order_by(UserEmailMap.email_id.asc()) \
697 697 .all()
698 698 return [self.email] + [x.email for x in other]
699 699
700 700 def emails_cached(self):
701 701 emails = UserEmailMap.query()\
702 702 .filter(UserEmailMap.user == self) \
703 703 .order_by(UserEmailMap.email_id.asc())
704 704
705 705 emails = emails.options(
706 706 FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
707 707 )
708 708
709 709 return [self.email] + [x.email for x in emails]
710 710
711 711 @property
712 712 def auth_tokens(self):
713 713 auth_tokens = self.get_auth_tokens()
714 714 return [x.api_key for x in auth_tokens]
715 715
716 716 def get_auth_tokens(self):
717 717 return UserApiKeys.query()\
718 718 .filter(UserApiKeys.user == self)\
719 719 .order_by(UserApiKeys.user_api_key_id.asc())\
720 720 .all()
721 721
722 722 @LazyProperty
723 723 def feed_token(self):
724 724 return self.get_feed_token()
725 725
726 726 def get_feed_token(self, cache=True):
727 727 feed_tokens = UserApiKeys.query()\
728 728 .filter(UserApiKeys.user == self)\
729 729 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
730 730 if cache:
731 731 feed_tokens = feed_tokens.options(
732 732 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
733 733
734 734 feed_tokens = feed_tokens.all()
735 735 if feed_tokens:
736 736 return feed_tokens[0].api_key
737 737 return 'NO_FEED_TOKEN_AVAILABLE'
738 738
739 739 @LazyProperty
740 740 def artifact_token(self):
741 741 return self.get_artifact_token()
742 742
743 743 def get_artifact_token(self, cache=True):
744 744 artifacts_tokens = UserApiKeys.query()\
745 745 .filter(UserApiKeys.user == self)\
746 746 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
747 747 if cache:
748 748 artifacts_tokens = artifacts_tokens.options(
749 749 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
750 750
751 751 artifacts_tokens = artifacts_tokens.all()
752 752 if artifacts_tokens:
753 753 return artifacts_tokens[0].api_key
754 754 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
755 755
756 756 @classmethod
757 757 def get(cls, user_id, cache=False):
758 758 if not user_id:
759 759 return
760 760
761 761 user = cls.query()
762 762 if cache:
763 763 user = user.options(
764 764 FromCache("sql_cache_short", "get_users_%s" % user_id))
765 765 return user.get(user_id)
766 766
767 767 @classmethod
768 768 def extra_valid_auth_tokens(cls, user, role=None):
769 769 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
770 770 .filter(or_(UserApiKeys.expires == -1,
771 771 UserApiKeys.expires >= time.time()))
772 772 if role:
773 773 tokens = tokens.filter(or_(UserApiKeys.role == role,
774 774 UserApiKeys.role == UserApiKeys.ROLE_ALL))
775 775 return tokens.all()
776 776
777 777 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
778 778 from rhodecode.lib import auth
779 779
780 780 log.debug('Trying to authenticate user: %s via auth-token, '
781 781 'and roles: %s', self, roles)
782 782
783 783 if not auth_token:
784 784 return False
785 785
786 786 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
787 787 tokens_q = UserApiKeys.query()\
788 788 .filter(UserApiKeys.user_id == self.user_id)\
789 789 .filter(or_(UserApiKeys.expires == -1,
790 790 UserApiKeys.expires >= time.time()))
791 791
792 792 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
793 793
794 794 crypto_backend = auth.crypto_backend()
795 795 enc_token_map = {}
796 796 plain_token_map = {}
797 797 for token in tokens_q:
798 798 if token.api_key.startswith(crypto_backend.ENC_PREF):
799 799 enc_token_map[token.api_key] = token
800 800 else:
801 801 plain_token_map[token.api_key] = token
802 802 log.debug(
803 803 'Found %s plain and %s encrypted tokens to check for authentication for this user',
804 804 len(plain_token_map), len(enc_token_map))
805 805
806 806 # plain token match comes first
807 807 match = plain_token_map.get(auth_token)
808 808
809 809 # check encrypted tokens now
810 810 if not match:
811 811 for token_hash, token in enc_token_map.items():
812 812 # NOTE(marcink): this is expensive to calculate, but most secure
813 813 if crypto_backend.hash_check(auth_token, token_hash):
814 814 match = token
815 815 break
816 816
817 817 if match:
818 818 log.debug('Found matching token %s', match)
819 819 if match.repo_id:
820 820 log.debug('Found scope, checking for scope match of token %s', match)
821 821 if match.repo_id == scope_repo_id:
822 822 return True
823 823 else:
824 824 log.debug(
825 825 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
826 826 'and calling scope is:%s, skipping further checks',
827 827 match.repo, scope_repo_id)
828 828 return False
829 829 else:
830 830 return True
831 831
832 832 return False
833 833
834 834 @property
835 835 def ip_addresses(self):
836 836 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
837 837 return [x.ip_addr for x in ret]
838 838
839 839 @property
840 840 def username_and_name(self):
841 841 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
842 842
843 843 @property
844 844 def username_or_name_or_email(self):
845 845 full_name = self.full_name if self.full_name != ' ' else None
846 846 return self.username or full_name or self.email
847 847
848 848 @property
849 849 def full_name(self):
850 850 return '%s %s' % (self.first_name, self.last_name)
851 851
852 852 @property
853 853 def full_name_or_username(self):
854 854 return ('%s %s' % (self.first_name, self.last_name)
855 855 if (self.first_name and self.last_name) else self.username)
856 856
857 857 @property
858 858 def full_contact(self):
859 859 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
860 860
861 861 @property
862 862 def short_contact(self):
863 863 return '%s %s' % (self.first_name, self.last_name)
864 864
865 865 @property
866 866 def is_admin(self):
867 867 return self.admin
868 868
869 869 @property
870 870 def language(self):
871 871 return self.user_data.get('language')
872 872
873 873 def AuthUser(self, **kwargs):
874 874 """
875 875 Returns instance of AuthUser for this user
876 876 """
877 877 from rhodecode.lib.auth import AuthUser
878 878 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
879 879
880 880 @hybrid_property
881 881 def user_data(self):
882 882 if not self._user_data:
883 883 return {}
884 884
885 885 try:
886 886 return json.loads(self._user_data)
887 887 except TypeError:
888 888 return {}
889 889
890 890 @user_data.setter
891 891 def user_data(self, val):
892 892 if not isinstance(val, dict):
893 893 raise Exception('user_data must be dict, got %s' % type(val))
894 894 try:
895 895 self._user_data = json.dumps(val)
896 896 except Exception:
897 897 log.error(traceback.format_exc())
898 898
899 899 @classmethod
900 900 def get_by_username(cls, username, case_insensitive=False,
901 901 cache=False, identity_cache=False):
902 902 session = Session()
903 903
904 904 if case_insensitive:
905 905 q = cls.query().filter(
906 906 func.lower(cls.username) == func.lower(username))
907 907 else:
908 908 q = cls.query().filter(cls.username == username)
909 909
910 910 if cache:
911 911 if identity_cache:
912 912 val = cls.identity_cache(session, 'username', username)
913 913 if val:
914 914 return val
915 915 else:
916 916 cache_key = "get_user_by_name_%s" % _hash_key(username)
917 917 q = q.options(
918 918 FromCache("sql_cache_short", cache_key))
919 919
920 920 return q.scalar()
921 921
922 922 @classmethod
923 923 def get_by_auth_token(cls, auth_token, cache=False):
924 924 q = UserApiKeys.query()\
925 925 .filter(UserApiKeys.api_key == auth_token)\
926 926 .filter(or_(UserApiKeys.expires == -1,
927 927 UserApiKeys.expires >= time.time()))
928 928 if cache:
929 929 q = q.options(
930 930 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
931 931
932 932 match = q.first()
933 933 if match:
934 934 return match.user
935 935
936 936 @classmethod
937 937 def get_by_email(cls, email, case_insensitive=False, cache=False):
938 938
939 939 if case_insensitive:
940 940 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
941 941
942 942 else:
943 943 q = cls.query().filter(cls.email == email)
944 944
945 945 email_key = _hash_key(email)
946 946 if cache:
947 947 q = q.options(
948 948 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
949 949
950 950 ret = q.scalar()
951 951 if ret is None:
952 952 q = UserEmailMap.query()
953 953 # try fetching in alternate email map
954 954 if case_insensitive:
955 955 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
956 956 else:
957 957 q = q.filter(UserEmailMap.email == email)
958 958 q = q.options(joinedload(UserEmailMap.user))
959 959 if cache:
960 960 q = q.options(
961 961 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
962 962 ret = getattr(q.scalar(), 'user', None)
963 963
964 964 return ret
965 965
966 966 @classmethod
967 967 def get_from_cs_author(cls, author):
968 968 """
969 969 Tries to get User objects out of commit author string
970 970
971 971 :param author:
972 972 """
973 973 from rhodecode.lib.helpers import email, author_name
974 974 # If a valid email is present in the passed author string, see if it's in the system
975 975 _email = email(author)
976 976 if _email:
977 977 user = cls.get_by_email(_email, case_insensitive=True)
978 978 if user:
979 979 return user
980 980 # Maybe we can match by username?
981 981 _author = author_name(author)
982 982 user = cls.get_by_username(_author, case_insensitive=True)
983 983 if user:
984 984 return user
985 985
986 986 def update_userdata(self, **kwargs):
987 987 usr = self
988 988 old = usr.user_data
989 989 old.update(**kwargs)
990 990 usr.user_data = old
991 991 Session().add(usr)
992 992 log.debug('updated userdata with %s', kwargs)
993 993
994 994 def update_lastlogin(self):
995 995 """Update user lastlogin"""
996 996 self.last_login = datetime.datetime.now()
997 997 Session().add(self)
998 998 log.debug('updated user %s lastlogin', self.username)
999 999
1000 1000 def update_password(self, new_password):
1001 1001 from rhodecode.lib.auth import get_crypt_password
1002 1002
1003 1003 self.password = get_crypt_password(new_password)
1004 1004 Session().add(self)
1005 1005
1006 1006 @classmethod
1007 1007 def get_first_super_admin(cls):
1008 1008 user = User.query()\
1009 1009 .filter(User.admin == true()) \
1010 1010 .order_by(User.user_id.asc()) \
1011 1011 .first()
1012 1012
1013 1013 if user is None:
1014 1014 raise Exception('FATAL: Missing administrative account!')
1015 1015 return user
1016 1016
1017 1017 @classmethod
1018 1018 def get_all_super_admins(cls, only_active=False):
1019 1019 """
1020 1020 Returns all admin accounts sorted by username
1021 1021 """
1022 1022 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1023 1023 if only_active:
1024 1024 qry = qry.filter(User.active == true())
1025 1025 return qry.all()
1026 1026
1027 1027 @classmethod
1028 1028 def get_default_user(cls, cache=False, refresh=False):
1029 1029 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1030 1030 if user is None:
1031 1031 raise Exception('FATAL: Missing default account!')
1032 1032 if refresh:
1033 1033 # The default user might be based on outdated state which
1034 1034 # has been loaded from the cache.
1035 1035 # A call to refresh() ensures that the
1036 1036 # latest state from the database is used.
1037 1037 Session().refresh(user)
1038 1038 return user
1039 1039
1040 1040 def _get_default_perms(self, user, suffix=''):
1041 1041 from rhodecode.model.permission import PermissionModel
1042 1042 return PermissionModel().get_default_perms(user.user_perms, suffix)
1043 1043
1044 1044 def get_default_perms(self, suffix=''):
1045 1045 return self._get_default_perms(self, suffix)
1046 1046
1047 1047 def get_api_data(self, include_secrets=False, details='full'):
1048 1048 """
1049 1049 Common function for generating user related data for API
1050 1050
1051 1051 :param include_secrets: By default secrets in the API data will be replaced
1052 1052 by a placeholder value to prevent exposing this data by accident. In case
1053 1053 this data shall be exposed, set this flag to ``True``.
1054 1054
1055 1055 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
1056 1056 the available user information, limited to user_id, name and emails.
1057 1057 """
1058 1058 user = self
1059 1059 user_data = self.user_data
1060 1060 data = {
1061 1061 'user_id': user.user_id,
1062 1062 'username': user.username,
1063 1063 'firstname': user.name,
1064 1064 'lastname': user.lastname,
1065 1065 'description': user.description,
1066 1066 'email': user.email,
1067 1067 'emails': user.emails,
1068 1068 }
1069 1069 if details == 'basic':
1070 1070 return data
1071 1071
1072 1072 auth_token_length = 40
1073 1073 auth_token_replacement = '*' * auth_token_length
1074 1074
1075 1075 extras = {
1076 1076 'auth_tokens': [auth_token_replacement],
1077 1077 'active': user.active,
1078 1078 'admin': user.admin,
1079 1079 'extern_type': user.extern_type,
1080 1080 'extern_name': user.extern_name,
1081 1081 'last_login': user.last_login,
1082 1082 'last_activity': user.last_activity,
1083 1083 'ip_addresses': user.ip_addresses,
1084 1084 'language': user_data.get('language')
1085 1085 }
1086 1086 data.update(extras)
1087 1087
1088 1088 if include_secrets:
1089 1089 data['auth_tokens'] = user.auth_tokens
1090 1090 return data
1091 1091
1092 1092 def __json__(self):
1093 1093 data = {
1094 1094 'full_name': self.full_name,
1095 1095 'full_name_or_username': self.full_name_or_username,
1096 1096 'short_contact': self.short_contact,
1097 1097 'full_contact': self.full_contact,
1098 1098 }
1099 1099 data.update(self.get_api_data())
1100 1100 return data
1101 1101
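# Usage sketch for the User model above (hypothetical data; assumes a running
# instance with a configured Session):
#
#     user = User.get_by_username('Admin', case_insensitive=True)
#     user.emails          # primary email plus UserEmailMap entries
#     user.api_key         # first non-expired ROLE_ALL auth token, if any
#
#     # token authentication checks plain tokens first, then hash-checks the
#     # encrypted ones, and finally enforces the optional per-repository scope
#     user.authenticate_by_token('<auth token>', roles=[UserApiKeys.ROLE_VCS],
#                                scope_repo_id=some_repo.repo_id)
#
#     # AuthUser() wraps the row for request handling; extra kwargs are
#     # forwarded to rhodecode.lib.auth.AuthUser
#     auth_user = user.AuthUser(ip_addr='127.0.0.1')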
1102 1102
1103 1103 class UserApiKeys(Base, BaseModel):
1104 1104 __tablename__ = 'user_api_keys'
1105 1105 __table_args__ = (
1106 1106 Index('uak_api_key_idx', 'api_key'),
1107 1107 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1108 1108 base_table_args
1109 1109 )
1110 1110 __mapper_args__ = {}
1111 1111
1112 1112 # ApiKey role
1113 1113 ROLE_ALL = 'token_role_all'
1114 1114 ROLE_HTTP = 'token_role_http'
1115 1115 ROLE_VCS = 'token_role_vcs'
1116 1116 ROLE_API = 'token_role_api'
1117 1117 ROLE_FEED = 'token_role_feed'
1118 1118 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1119 1119 ROLE_PASSWORD_RESET = 'token_password_reset'
1120 1120
1121 1121 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1122 1122
1123 1123 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1124 1124 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1125 1125 api_key = Column("api_key", String(255), nullable=False, unique=True)
1126 1126 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1127 1127 expires = Column('expires', Float(53), nullable=False)
1128 1128 role = Column('role', String(255), nullable=True)
1129 1129 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1130 1130
1131 1131 # scope columns
1132 1132 repo_id = Column(
1133 1133 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1134 1134 nullable=True, unique=None, default=None)
1135 1135 repo = relationship('Repository', lazy='joined')
1136 1136
1137 1137 repo_group_id = Column(
1138 1138 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1139 1139 nullable=True, unique=None, default=None)
1140 1140 repo_group = relationship('RepoGroup', lazy='joined')
1141 1141
1142 1142 user = relationship('User', lazy='joined')
1143 1143
1144 1144 def __unicode__(self):
1145 1145 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1146 1146
1147 1147 def __json__(self):
1148 1148 data = {
1149 1149 'auth_token': self.api_key,
1150 1150 'role': self.role,
1151 1151 'scope': self.scope_humanized,
1152 1152 'expired': self.expired
1153 1153 }
1154 1154 return data
1155 1155
1156 1156 def get_api_data(self, include_secrets=False):
1157 1157 data = self.__json__()
1158 1158 if include_secrets:
1159 1159 return data
1160 1160 else:
1161 1161 data['auth_token'] = self.token_obfuscated
1162 1162 return data
1163 1163
1164 1164 @hybrid_property
1165 1165 def description_safe(self):
1166 1166 from rhodecode.lib import helpers as h
1167 1167 return h.escape(self.description)
1168 1168
1169 1169 @property
1170 1170 def expired(self):
1171 1171 if self.expires == -1:
1172 1172 return False
1173 1173 return time.time() > self.expires
1174 1174
1175 1175 @classmethod
1176 1176 def _get_role_name(cls, role):
1177 1177 return {
1178 1178 cls.ROLE_ALL: _('all'),
1179 1179 cls.ROLE_HTTP: _('http/web interface'),
1180 1180 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1181 1181 cls.ROLE_API: _('api calls'),
1182 1182 cls.ROLE_FEED: _('feed access'),
1183 1183 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1184 1184 }.get(role, role)
1185 1185
1186 1186 @property
1187 1187 def role_humanized(self):
1188 1188 return self._get_role_name(self.role)
1189 1189
1190 1190 def _get_scope(self):
1191 1191 if self.repo:
1192 1192 return 'Repository: {}'.format(self.repo.repo_name)
1193 1193 if self.repo_group:
1194 1194 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1195 1195 return 'Global'
1196 1196
1197 1197 @property
1198 1198 def scope_humanized(self):
1199 1199 return self._get_scope()
1200 1200
1201 1201 @property
1202 1202 def token_obfuscated(self):
1203 1203 if self.api_key:
1204 1204 return self.api_key[:4] + "****"
1205 1205
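# Sketch of the token lifetime/scope rules encoded above (illustrative values):
# expires == -1 means "never expires", anything else is a unix timestamp compared
# against time.time(); repo_id / repo_group_id narrow the token to a single
# repository or to a repository group (recursive).
#
#     token = UserApiKeys()
#     token.user_id = user.user_id
#     token.api_key = 'some-generated-token'          # generation helper not shown here
#     token.role = UserApiKeys.ROLE_FEED
#     token.expires = time.time() + 3600              # valid for one hour
#     Session().add(token)
#     Session().commit()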
1206 1206
1207 1207 class UserEmailMap(Base, BaseModel):
1208 1208 __tablename__ = 'user_email_map'
1209 1209 __table_args__ = (
1210 1210 Index('uem_email_idx', 'email'),
1211 1211 UniqueConstraint('email'),
1212 1212 base_table_args
1213 1213 )
1214 1214 __mapper_args__ = {}
1215 1215
1216 1216 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 1217 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1218 1218 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1219 1219 user = relationship('User', lazy='joined')
1220 1220
1221 1221 @validates('_email')
1222 1222 def validate_email(self, key, email):
1223 1223 # check if this email is not the main one
1224 1224 main_email = Session().query(User).filter(User.email == email).scalar()
1225 1225 if main_email is not None:
1226 1226 raise AttributeError('email %s is already present in the user table' % email)
1227 1227 return email
1228 1228
1229 1229 @hybrid_property
1230 1230 def email(self):
1231 1231 return self._email
1232 1232
1233 1233 @email.setter
1234 1234 def email(self, val):
1235 1235 self._email = val.lower() if val else None
1236 1236
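# Sketch: attaching an alternate e-mail to a user (hypothetical address). The
# validator above rejects addresses that already exist as a primary User.email,
# and User.get_by_email() falls back to this map when the primary lookup misses.
#
#     extra = UserEmailMap()
#     extra.user = user
#     extra.email = 'Alias@Example.COM'    # stored lower-cased by the setter
#     Session().add(extra)
#     Session().commit()
#     User.get_by_email('alias@example.com')    # resolves to the same user via the map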
1237 1237
1238 1238 class UserIpMap(Base, BaseModel):
1239 1239 __tablename__ = 'user_ip_map'
1240 1240 __table_args__ = (
1241 1241 UniqueConstraint('user_id', 'ip_addr'),
1242 1242 base_table_args
1243 1243 )
1244 1244 __mapper_args__ = {}
1245 1245
1246 1246 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1247 1247 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1248 1248 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1249 1249 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1250 1250 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1251 1251 user = relationship('User', lazy='joined')
1252 1252
1253 1253 @hybrid_property
1254 1254 def description_safe(self):
1255 1255 from rhodecode.lib import helpers as h
1256 1256 return h.escape(self.description)
1257 1257
1258 1258 @classmethod
1259 1259 def _get_ip_range(cls, ip_addr):
1260 1260 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1261 1261 return [str(net.network_address), str(net.broadcast_address)]
1262 1262
1263 1263 def __json__(self):
1264 1264 return {
1265 1265 'ip_addr': self.ip_addr,
1266 1266 'ip_range': self._get_ip_range(self.ip_addr),
1267 1267 }
1268 1268
1269 1269 def __unicode__(self):
1270 1270 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1271 1271 self.user_id, self.ip_addr)
1272 1272
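# Sketch of the whitelist range expansion above: a CIDR entry is expanded to its
# first and last address via the ipaddress module (illustrative values):
#
#     UserIpMap._get_ip_range('192.168.1.0/24')
#     # -> ['192.168.1.0', '192.168.1.255']
#
#     UserIpMap._get_ip_range('10.0.0.5')    # a single host behaves as a /32 network
#     # -> ['10.0.0.5', '10.0.0.5']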
1273 1273
1274 1274 class UserSshKeys(Base, BaseModel):
1275 1275 __tablename__ = 'user_ssh_keys'
1276 1276 __table_args__ = (
1277 1277 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1278 1278
1279 1279 UniqueConstraint('ssh_key_fingerprint'),
1280 1280
1281 1281 base_table_args
1282 1282 )
1283 1283 __mapper_args__ = {}
1284 1284
1285 1285 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1286 1286 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1287 1287 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1288 1288
1289 1289 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1290 1290
1291 1291 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1292 1292 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1293 1293 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1294 1294
1295 1295 user = relationship('User', lazy='joined')
1296 1296
1297 1297 def __json__(self):
1298 1298 data = {
1299 1299 'ssh_fingerprint': self.ssh_key_fingerprint,
1300 1300 'description': self.description,
1301 1301 'created_on': self.created_on
1302 1302 }
1303 1303 return data
1304 1304
1305 1305 def get_api_data(self):
1306 1306 data = self.__json__()
1307 1307 return data
1308 1308
1309 1309
1310 1310 class UserLog(Base, BaseModel):
1311 1311 __tablename__ = 'user_logs'
1312 1312 __table_args__ = (
1313 1313 base_table_args,
1314 1314 )
1315 1315
1316 1316 VERSION_1 = 'v1'
1317 1317 VERSION_2 = 'v2'
1318 1318 VERSIONS = [VERSION_1, VERSION_2]
1319 1319
1320 1320 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1321 1321 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1322 1322 username = Column("username", String(255), nullable=True, unique=None, default=None)
1323 1323 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1324 1324 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1325 1325 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1326 1326 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1327 1327 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1328 1328
1329 1329 version = Column("version", String(255), nullable=True, default=VERSION_1)
1330 1330 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1331 1331 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1332 1332
1333 1333 def __unicode__(self):
1334 1334 return u"<%s('id:%s:%s')>" % (
1335 1335 self.__class__.__name__, self.repository_name, self.action)
1336 1336
1337 1337 def __json__(self):
1338 1338 return {
1339 1339 'user_id': self.user_id,
1340 1340 'username': self.username,
1341 1341 'repository_id': self.repository_id,
1342 1342 'repository_name': self.repository_name,
1343 1343 'user_ip': self.user_ip,
1344 1344 'action_date': self.action_date,
1345 1345 'action': self.action,
1346 1346 }
1347 1347
1348 1348 @hybrid_property
1349 1349 def entry_id(self):
1350 1350 return self.user_log_id
1351 1351
1352 1352 @property
1353 1353 def action_as_day(self):
1354 1354 return datetime.date(*self.action_date.timetuple()[:3])
1355 1355
1356 1356 user = relationship('User')
1357 1357 repository = relationship('Repository', cascade='')
1358 1358
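# Sketch of the audit-log layout above: VERSION_1 rows carry only the legacy
# string ``action``, while VERSION_2 rows additionally store structured JSON in
# ``action_data`` / ``user_data`` (illustrative entry, field values assumed):
#
#     entry = UserLog()
#     entry.version = UserLog.VERSION_2
#     entry.username = user.username
#     entry.action = 'repo.edit'                        # hypothetical action name
#     entry.action_data = {'old_data': {}, 'new_data': {}}
#     entry.action_date = datetime.datetime.now()
#     Session().add(entry)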
1359 1359
1360 1360 class UserGroup(Base, BaseModel):
1361 1361 __tablename__ = 'users_groups'
1362 1362 __table_args__ = (
1363 1363 base_table_args,
1364 1364 )
1365 1365
1366 1366 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1367 1367 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1368 1368 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1369 1369 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1370 1370 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1371 1371 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1372 1372 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1373 1373 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1374 1374
1375 1375 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1376 1376 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1377 1377 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1378 1378 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1379 1379 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1380 1380 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1381 1381
1382 1382 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1383 1383 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1384 1384
1385 1385 @classmethod
1386 1386 def _load_group_data(cls, column):
1387 1387 if not column:
1388 1388 return {}
1389 1389
1390 1390 try:
1391 1391 return json.loads(column) or {}
1392 1392 except TypeError:
1393 1393 return {}
1394 1394
1395 1395 @hybrid_property
1396 1396 def description_safe(self):
1397 1397 from rhodecode.lib import helpers as h
1398 1398 return h.escape(self.user_group_description)
1399 1399
1400 1400 @hybrid_property
1401 1401 def group_data(self):
1402 1402 return self._load_group_data(self._group_data)
1403 1403
1404 1404 @group_data.expression
1405 1405 def group_data(self, **kwargs):
1406 1406 return self._group_data
1407 1407
1408 1408 @group_data.setter
1409 1409 def group_data(self, val):
1410 1410 try:
1411 1411 self._group_data = json.dumps(val)
1412 1412 except Exception:
1413 1413 log.error(traceback.format_exc())
1414 1414
1415 1415 @classmethod
1416 1416 def _load_sync(cls, group_data):
1417 1417 if group_data:
1418 1418 return group_data.get('extern_type')
1419 1419
1420 1420 @property
1421 1421 def sync(self):
1422 1422 return self._load_sync(self.group_data)
1423 1423
1424 1424 def __unicode__(self):
1425 1425 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1426 1426 self.users_group_id,
1427 1427 self.users_group_name)
1428 1428
1429 1429 @classmethod
1430 1430 def get_by_group_name(cls, group_name, cache=False,
1431 1431 case_insensitive=False):
1432 1432 if case_insensitive:
1433 1433 q = cls.query().filter(func.lower(cls.users_group_name) ==
1434 1434 func.lower(group_name))
1435 1435
1436 1436 else:
1437 1437 q = cls.query().filter(cls.users_group_name == group_name)
1438 1438 if cache:
1439 1439 q = q.options(
1440 1440 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1441 1441 return q.scalar()
1442 1442
1443 1443 @classmethod
1444 1444 def get(cls, user_group_id, cache=False):
1445 1445 if not user_group_id:
1446 1446 return
1447 1447
1448 1448 user_group = cls.query()
1449 1449 if cache:
1450 1450 user_group = user_group.options(
1451 1451 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1452 1452 return user_group.get(user_group_id)
1453 1453
1454 1454 def permissions(self, with_admins=True, with_owner=True,
1455 1455 expand_from_user_groups=False):
1456 1456 """
1457 1457 Permissions for user groups
1458 1458 """
1459 1459 _admin_perm = 'usergroup.admin'
1460 1460
1461 1461 owner_row = []
1462 1462 if with_owner:
1463 1463 usr = AttributeDict(self.user.get_dict())
1464 1464 usr.owner_row = True
1465 1465 usr.permission = _admin_perm
1466 1466 owner_row.append(usr)
1467 1467
1468 1468 super_admin_ids = []
1469 1469 super_admin_rows = []
1470 1470 if with_admins:
1471 1471 for usr in User.get_all_super_admins():
1472 1472 super_admin_ids.append(usr.user_id)
1473 1473 # if this admin is also owner, don't double the record
1474 1474 if usr.user_id == owner_row[0].user_id:
1475 1475 owner_row[0].admin_row = True
1476 1476 else:
1477 1477 usr = AttributeDict(usr.get_dict())
1478 1478 usr.admin_row = True
1479 1479 usr.permission = _admin_perm
1480 1480 super_admin_rows.append(usr)
1481 1481
1482 1482 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1483 1483 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1484 1484 joinedload(UserUserGroupToPerm.user),
1485 1485 joinedload(UserUserGroupToPerm.permission),)
1486 1486
1487 1487 # get owners, admins and their permissions. We do a trick of re-writing
1488 1488 # sqlalchemy objects into plain AttributeDict rows because the sqlalchemy
1489 1489 # session holds a global reference, and changing one object propagates to
1490 1490 # all others. Without this, if an admin is also an owner, setting admin_row
1491 1491 # on one record would change both objects.
1492 1492 perm_rows = []
1493 1493 for _usr in q.all():
1494 1494 usr = AttributeDict(_usr.user.get_dict())
1495 1495 # if this user is also owner/admin, mark as duplicate record
1496 1496 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1497 1497 usr.duplicate_perm = True
1498 1498 usr.permission = _usr.permission.permission_name
1499 1499 perm_rows.append(usr)
1500 1500
1501 1501 # filter the perm rows by 'default' first and then sort them by
1502 1502 # admin,write,read,none permissions sorted again alphabetically in
1503 1503 # each group
1504 1504 perm_rows = sorted(perm_rows, key=display_user_sort)
1505 1505
1506 1506 user_groups_rows = []
1507 1507 if expand_from_user_groups:
1508 1508 for ug in self.permission_user_groups(with_members=True):
1509 1509 for user_data in ug.members:
1510 1510 user_groups_rows.append(user_data)
1511 1511
1512 1512 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1513 1513
1514 1514 def permission_user_groups(self, with_members=False):
1515 1515 q = UserGroupUserGroupToPerm.query()\
1516 1516 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1517 1517 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1518 1518 joinedload(UserGroupUserGroupToPerm.target_user_group),
1519 1519 joinedload(UserGroupUserGroupToPerm.permission),)
1520 1520
1521 1521 perm_rows = []
1522 1522 for _user_group in q.all():
1523 1523 entry = AttributeDict(_user_group.user_group.get_dict())
1524 1524 entry.permission = _user_group.permission.permission_name
1525 1525 if with_members:
1526 1526 entry.members = [x.user.get_dict()
1527 1527 for x in _user_group.user_group.members]
1528 1528 perm_rows.append(entry)
1529 1529
1530 1530 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1531 1531 return perm_rows
1532 1532
1533 1533 def _get_default_perms(self, user_group, suffix=''):
1534 1534 from rhodecode.model.permission import PermissionModel
1535 1535 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1536 1536
1537 1537 def get_default_perms(self, suffix=''):
1538 1538 return self._get_default_perms(self, suffix)
1539 1539
1540 1540 def get_api_data(self, with_group_members=True, include_secrets=False):
1541 1541 """
1542 1542 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1543 1543 basically forwarded.
1544 1544
1545 1545 """
1546 1546 user_group = self
1547 1547 data = {
1548 1548 'users_group_id': user_group.users_group_id,
1549 1549 'group_name': user_group.users_group_name,
1550 1550 'group_description': user_group.user_group_description,
1551 1551 'active': user_group.users_group_active,
1552 1552 'owner': user_group.user.username,
1553 1553 'sync': user_group.sync,
1554 1554 'owner_email': user_group.user.email,
1555 1555 }
1556 1556
1557 1557 if with_group_members:
1558 1558 users = []
1559 1559 for user in user_group.members:
1560 1560 user = user.user
1561 1561 users.append(user.get_api_data(include_secrets=include_secrets))
1562 1562 data['users'] = users
1563 1563
1564 1564 return data
1565 1565
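# Sketch of reading the merged permission rows computed by permissions() above
# (assumes existing data; the attribute names follow the AttributeDict rows
# built in that method):
#
#     group = UserGroup.get_by_group_name('devs', case_insensitive=True)
#     for row in group.permissions(with_admins=True, with_owner=True):
#         # each row exposes the user columns plus row.permission, and may carry
#         # owner_row / admin_row / duplicate_perm marker flags
#         log.debug('%s -> %s', row.username, row.permission)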
1566 1566
1567 1567 class UserGroupMember(Base, BaseModel):
1568 1568 __tablename__ = 'users_groups_members'
1569 1569 __table_args__ = (
1570 1570 base_table_args,
1571 1571 )
1572 1572
1573 1573 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1574 1574 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1575 1575 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1576 1576
1577 1577 user = relationship('User', lazy='joined')
1578 1578 users_group = relationship('UserGroup')
1579 1579
1580 1580 def __init__(self, gr_id='', u_id=''):
1581 1581 self.users_group_id = gr_id
1582 1582 self.user_id = u_id
1583 1583
1584 1584
1585 1585 class RepositoryField(Base, BaseModel):
1586 1586 __tablename__ = 'repositories_fields'
1587 1587 __table_args__ = (
1588 1588 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1589 1589 base_table_args,
1590 1590 )
1591 1591
1592 1592 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1593 1593
1594 1594 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1595 1595 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1596 1596 field_key = Column("field_key", String(250))
1597 1597 field_label = Column("field_label", String(1024), nullable=False)
1598 1598 field_value = Column("field_value", String(10000), nullable=False)
1599 1599 field_desc = Column("field_desc", String(1024), nullable=False)
1600 1600 field_type = Column("field_type", String(255), nullable=False, unique=None)
1601 1601 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1602 1602
1603 1603 repository = relationship('Repository')
1604 1604
1605 1605 @property
1606 1606 def field_key_prefixed(self):
1607 1607 return 'ex_%s' % self.field_key
1608 1608
1609 1609 @classmethod
1610 1610 def un_prefix_key(cls, key):
1611 1611 if key.startswith(cls.PREFIX):
1612 1612 return key[len(cls.PREFIX):]
1613 1613 return key
1614 1614
1615 1615 @classmethod
1616 1616 def get_by_key_name(cls, key, repo):
1617 1617 row = cls.query()\
1618 1618 .filter(cls.repository == repo)\
1619 1619 .filter(cls.field_key == key).scalar()
1620 1620 return row
1621 1621
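# Sketch of the extra-field key prefixing above: form fields carry the 'ex_'
# prefix so they cannot collide with real repository attributes (hypothetical
# key name):
#
#     field = RepositoryField.get_by_key_name('ticket_url', repo)
#     field.field_key_prefixed                          # 'ex_ticket_url'
#     RepositoryField.un_prefix_key('ex_ticket_url')    # 'ticket_url'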
1622 1622
1623 1623 class Repository(Base, BaseModel):
1624 1624 __tablename__ = 'repositories'
1625 1625 __table_args__ = (
1626 1626 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1627 1627 base_table_args,
1628 1628 )
1629 1629 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1630 1630 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1631 1631 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1632 1632
1633 1633 STATE_CREATED = 'repo_state_created'
1634 1634 STATE_PENDING = 'repo_state_pending'
1635 1635 STATE_ERROR = 'repo_state_error'
1636 1636
1637 1637 LOCK_AUTOMATIC = 'lock_auto'
1638 1638 LOCK_API = 'lock_api'
1639 1639 LOCK_WEB = 'lock_web'
1640 1640 LOCK_PULL = 'lock_pull'
1641 1641
1642 1642 NAME_SEP = URL_SEP
1643 1643
1644 1644 repo_id = Column(
1645 1645 "repo_id", Integer(), nullable=False, unique=True, default=None,
1646 1646 primary_key=True)
1647 1647 _repo_name = Column(
1648 1648 "repo_name", Text(), nullable=False, default=None)
1649 1649 _repo_name_hash = Column(
1650 1650 "repo_name_hash", String(255), nullable=False, unique=True)
1651 1651 repo_state = Column("repo_state", String(255), nullable=True)
1652 1652
1653 1653 clone_uri = Column(
1654 1654 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1655 1655 default=None)
1656 1656 push_uri = Column(
1657 1657 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1658 1658 default=None)
1659 1659 repo_type = Column(
1660 1660 "repo_type", String(255), nullable=False, unique=False, default=None)
1661 1661 user_id = Column(
1662 1662 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1663 1663 unique=False, default=None)
1664 1664 private = Column(
1665 1665 "private", Boolean(), nullable=True, unique=None, default=None)
1666 1666 archived = Column(
1667 1667 "archived", Boolean(), nullable=True, unique=None, default=None)
1668 1668 enable_statistics = Column(
1669 1669 "statistics", Boolean(), nullable=True, unique=None, default=True)
1670 1670 enable_downloads = Column(
1671 1671 "downloads", Boolean(), nullable=True, unique=None, default=True)
1672 1672 description = Column(
1673 1673 "description", String(10000), nullable=True, unique=None, default=None)
1674 1674 created_on = Column(
1675 1675 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1676 1676 default=datetime.datetime.now)
1677 1677 updated_on = Column(
1678 1678 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1679 1679 default=datetime.datetime.now)
1680 1680 _landing_revision = Column(
1681 1681 "landing_revision", String(255), nullable=False, unique=False,
1682 1682 default=None)
1683 1683 enable_locking = Column(
1684 1684 "enable_locking", Boolean(), nullable=False, unique=None,
1685 1685 default=False)
1686 1686 _locked = Column(
1687 1687 "locked", String(255), nullable=True, unique=False, default=None)
1688 1688 _changeset_cache = Column(
1689 1689 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1690 1690
1691 1691 fork_id = Column(
1692 1692 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1693 1693 nullable=True, unique=False, default=None)
1694 1694 group_id = Column(
1695 1695 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1696 1696 unique=False, default=None)
1697 1697
1698 1698 user = relationship('User', lazy='joined')
1699 1699 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1700 1700 group = relationship('RepoGroup', lazy='joined')
1701 1701 repo_to_perm = relationship(
1702 1702 'UserRepoToPerm', cascade='all',
1703 1703 order_by='UserRepoToPerm.repo_to_perm_id')
1704 1704 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1705 1705 stats = relationship('Statistics', cascade='all', uselist=False)
1706 1706
1707 1707 followers = relationship(
1708 1708 'UserFollowing',
1709 1709 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1710 1710 cascade='all')
1711 1711 extra_fields = relationship(
1712 1712 'RepositoryField', cascade="all, delete-orphan")
1713 1713 logs = relationship('UserLog')
1714 1714 comments = relationship(
1715 1715 'ChangesetComment', cascade="all, delete-orphan")
1716 1716 pull_requests_source = relationship(
1717 1717 'PullRequest',
1718 1718 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1719 1719 cascade="all, delete-orphan")
1720 1720 pull_requests_target = relationship(
1721 1721 'PullRequest',
1722 1722 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1723 1723 cascade="all, delete-orphan")
1724 1724 ui = relationship('RepoRhodeCodeUi', cascade="all")
1725 1725 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1726 1726 integrations = relationship('Integration', cascade="all, delete-orphan")
1727 1727
1728 1728 scoped_tokens = relationship('UserApiKeys', cascade="all")
1729 1729
1730 1730 # no cascade, set NULL
1731 1731 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1732 1732
1733 1733 def __unicode__(self):
1734 1734 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1735 1735 safe_unicode(self.repo_name))
1736 1736
1737 1737 @hybrid_property
1738 1738 def description_safe(self):
1739 1739 from rhodecode.lib import helpers as h
1740 1740 return h.escape(self.description)
1741 1741
1742 1742 @hybrid_property
1743 1743 def landing_rev(self):
1744 1744 # always should return [rev_type, rev]
1745 1745 if self._landing_revision:
1746 1746 _rev_info = self._landing_revision.split(':')
1747 1747 if len(_rev_info) < 2:
1748 1748 _rev_info.insert(0, 'rev')
1749 1749 return [_rev_info[0], _rev_info[1]]
1750 1750 return [None, None]
1751 1751
1752 1752 @landing_rev.setter
1753 1753 def landing_rev(self, val):
1754 1754 if ':' not in val:
1755 1755 raise ValueError('value must be delimited with `:` and consist '
1756 1756 'of <rev_type>:<rev>, got %s instead' % val)
1757 1757 self._landing_revision = val
1758 1758
1759 1759 @hybrid_property
1760 1760 def locked(self):
1761 1761 if self._locked:
1762 1762 user_id, timelocked, reason = self._locked.split(':')
1763 1763 lock_values = int(user_id), timelocked, reason
1764 1764 else:
1765 1765 lock_values = [None, None, None]
1766 1766 return lock_values
1767 1767
1768 1768 @locked.setter
1769 1769 def locked(self, val):
1770 1770 if val and isinstance(val, (list, tuple)):
1771 1771 self._locked = ':'.join(map(str, val))
1772 1772 else:
1773 1773 self._locked = None
1774 1774
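# Illustrative usage of the `locked` property above (not part of this diff;
# assumes a `Repository` instance named `repo`):
repo.locked = [2, 1577836800, 'lock_auto']     # persisted as "2:1577836800:lock_auto"
user_id, locked_since, reason = repo.locked    # -> (2, '1577836800', 'lock_auto')
repo.locked = None                             # clears the lock; reads back as [None, None, None]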
1775 1775 @hybrid_property
1776 1776 def changeset_cache(self):
1777 1777 from rhodecode.lib.vcs.backends.base import EmptyCommit
1778 1778 dummy = EmptyCommit().__json__()
1779 1779 if not self._changeset_cache:
1780 1780 dummy['source_repo_id'] = self.repo_id
1781 1781 return json.loads(json.dumps(dummy))
1782 1782
1783 1783 try:
1784 1784 return json.loads(self._changeset_cache)
1785 1785 except TypeError:
1786 1786 return dummy
1787 1787 except Exception:
1788 1788 log.error(traceback.format_exc())
1789 1789 return dummy
1790 1790
1791 1791 @changeset_cache.setter
1792 1792 def changeset_cache(self, val):
1793 1793 try:
1794 1794 self._changeset_cache = json.dumps(val)
1795 1795 except Exception:
1796 1796 log.error(traceback.format_exc())
1797 1797
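# Illustrative round-trip of the `changeset_cache` JSON property above (not part
# of this diff; assumes a `Repository` instance named `repo`):
repo.changeset_cache = {'raw_id': 'deadbeef', 'revision': 7}   # json.dumps() into the blob column
repo.changeset_cache['raw_id']                                 # -> 'deadbeef'
# with an empty column the getter falls back to EmptyCommit().__json__(),
# with 'source_repo_id' set to this repository's id.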
1798 1798 @hybrid_property
1799 1799 def repo_name(self):
1800 1800 return self._repo_name
1801 1801
1802 1802 @repo_name.setter
1803 1803 def repo_name(self, value):
1804 1804 self._repo_name = value
1805 1805 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1806 1806
1807 1807 @classmethod
1808 1808 def normalize_repo_name(cls, repo_name):
1809 1809 """
1810 1810 Normalizes OS-specific repo_name to the format stored internally in the
1811 1811 database, using URL_SEP
1812 1812
1813 1813 :param cls:
1814 1814 :param repo_name:
1815 1815 """
1816 1816 return cls.NAME_SEP.join(repo_name.split(os.sep))
1817 1817
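# Illustrative example for `normalize_repo_name` above (not part of this diff;
# assumes NAME_SEP is the URL separator '/'):
import os
Repository.normalize_repo_name(os.sep.join(['group', 'sub', 'repo']))   # -> 'group/sub/repo'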
1818 1818 @classmethod
1819 1819 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1820 1820 session = Session()
1821 1821 q = session.query(cls).filter(cls.repo_name == repo_name)
1822 1822
1823 1823 if cache:
1824 1824 if identity_cache:
1825 1825 val = cls.identity_cache(session, 'repo_name', repo_name)
1826 1826 if val:
1827 1827 return val
1828 1828 else:
1829 1829 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1830 1830 q = q.options(
1831 1831 FromCache("sql_cache_short", cache_key))
1832 1832
1833 1833 return q.scalar()
1834 1834
1835 1835 @classmethod
1836 1836 def get_by_id_or_repo_name(cls, repoid):
1837 1837 if isinstance(repoid, (int, long)):
1838 1838 try:
1839 1839 repo = cls.get(repoid)
1840 1840 except ValueError:
1841 1841 repo = None
1842 1842 else:
1843 1843 repo = cls.get_by_repo_name(repoid)
1844 1844 return repo
1845 1845
1846 1846 @classmethod
1847 1847 def get_by_full_path(cls, repo_full_path):
1848 1848 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1849 1849 repo_name = cls.normalize_repo_name(repo_name)
1850 1850 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1851 1851
1852 1852 @classmethod
1853 1853 def get_repo_forks(cls, repo_id):
1854 1854 return cls.query().filter(Repository.fork_id == repo_id)
1855 1855
1856 1856 @classmethod
1857 1857 def base_path(cls):
1858 1858 """
1859 1859 Returns base path where all repos are stored
1860 1860
1861 1861 :param cls:
1862 1862 """
1863 1863 q = Session().query(RhodeCodeUi)\
1864 1864 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1865 1865 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1866 1866 return q.one().ui_value
1867 1867
1868 1868 @classmethod
1869 1869 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1870 1870 case_insensitive=True, archived=False):
1871 1871 q = Repository.query()
1872 1872
1873 1873 if not archived:
1874 1874 q = q.filter(Repository.archived.isnot(true()))
1875 1875
1876 1876 if not isinstance(user_id, Optional):
1877 1877 q = q.filter(Repository.user_id == user_id)
1878 1878
1879 1879 if not isinstance(group_id, Optional):
1880 1880 q = q.filter(Repository.group_id == group_id)
1881 1881
1882 1882 if case_insensitive:
1883 1883 q = q.order_by(func.lower(Repository.repo_name))
1884 1884 else:
1885 1885 q = q.order_by(Repository.repo_name)
1886 1886
1887 1887 return q.all()
1888 1888
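# Illustrative usage of `get_all_repos` above (not part of this diff): `Optional`
# acts as an "argument not given" sentinel, so any real value switches the filter on.
Repository.get_all_repos()                           # all non-archived repos, sorted case-insensitively
Repository.get_all_repos(user_id=2)                  # only repos owned by user id 2
Repository.get_all_repos(group_id=5, archived=True)  # repos in group 5, including archived ones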
1889 1889 @property
1890 1890 def repo_uid(self):
1891 1891 return '_{}'.format(self.repo_id)
1892 1892
1893 1893 @property
1894 1894 def forks(self):
1895 1895 """
1896 1896 Return forks of this repo
1897 1897 """
1898 1898 return Repository.get_repo_forks(self.repo_id)
1899 1899
1900 1900 @property
1901 1901 def parent(self):
1902 1902 """
1903 1903 Returns fork parent
1904 1904 """
1905 1905 return self.fork
1906 1906
1907 1907 @property
1908 1908 def just_name(self):
1909 1909 return self.repo_name.split(self.NAME_SEP)[-1]
1910 1910
1911 1911 @property
1912 1912 def groups_with_parents(self):
1913 1913 groups = []
1914 1914 if self.group is None:
1915 1915 return groups
1916 1916
1917 1917 cur_gr = self.group
1918 1918 groups.insert(0, cur_gr)
1919 1919 while 1:
1920 1920 gr = getattr(cur_gr, 'parent_group', None)
1921 1921 cur_gr = cur_gr.parent_group
1922 1922 if gr is None:
1923 1923 break
1924 1924 groups.insert(0, gr)
1925 1925
1926 1926 return groups
1927 1927
1928 1928 @property
1929 1929 def groups_and_repo(self):
1930 1930 return self.groups_with_parents, self
1931 1931
1932 1932 @LazyProperty
1933 1933 def repo_path(self):
1934 1934 """
1935 1935 Returns the base full path for this repository, i.e. where it actually
1936 1936 exists on the filesystem
1937 1937 """
1938 1938 q = Session().query(RhodeCodeUi).filter(
1939 1939 RhodeCodeUi.ui_key == self.NAME_SEP)
1940 1940 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1941 1941 return q.one().ui_value
1942 1942
1943 1943 @property
1944 1944 def repo_full_path(self):
1945 1945 p = [self.repo_path]
1946 1946 # we need to split the name by / since this is how we store the
1947 1947 # names in the database, but that eventually needs to be converted
1948 1948 # into a valid system path
1949 1949 p += self.repo_name.split(self.NAME_SEP)
1950 1950 return os.path.join(*map(safe_unicode, p))
1951 1951
1952 1952 @property
1953 1953 def cache_keys(self):
1954 1954 """
1955 1955 Returns associated cache keys for that repo
1956 1956 """
1957 1957 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1958 1958 repo_id=self.repo_id)
1959 1959 return CacheKey.query()\
1960 1960 .filter(CacheKey.cache_args == invalidation_namespace)\
1961 1961 .order_by(CacheKey.cache_key)\
1962 1962 .all()
1963 1963
1964 1964 @property
1965 1965 def cached_diffs_relative_dir(self):
1966 1966 """
1967 1967 Return the cached diffs directory as a path relative to the repository
1968 1968 store, safe to display to users who shouldn't know the absolute store
1969 1969 path
1970 1970 """
1971 1971 return os.path.join(
1972 1972 os.path.dirname(self.repo_name),
1973 1973 self.cached_diffs_dir.split(os.path.sep)[-1])
1974 1974
1975 1975 @property
1976 1976 def cached_diffs_dir(self):
1977 1977 path = self.repo_full_path
1978 1978 return os.path.join(
1979 1979 os.path.dirname(path),
1980 1980 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1981 1981
1982 1982 def cached_diffs(self):
1983 1983 diff_cache_dir = self.cached_diffs_dir
1984 1984 if os.path.isdir(diff_cache_dir):
1985 1985 return os.listdir(diff_cache_dir)
1986 1986 return []
1987 1987
1988 1988 def shadow_repos(self):
1989 1989 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1990 1990 return [
1991 1991 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1992 1992 if x.startswith(shadow_repos_pattern)]
1993 1993
1994 1994 def get_new_name(self, repo_name):
1995 1995 """
1996 1996 returns new full repository name based on assigned group and new repo name
1997 1997
1998 1998 :param repo_name:
1999 1999 """
2000 2000 path_prefix = self.group.full_path_splitted if self.group else []
2001 2001 return self.NAME_SEP.join(path_prefix + [repo_name])
2002 2002
2003 2003 @property
2004 2004 def _config(self):
2005 2005 """
2006 2006 Returns db based config object.
2007 2007 """
2008 2008 from rhodecode.lib.utils import make_db_config
2009 2009 return make_db_config(clear_session=False, repo=self)
2010 2010
2011 2011 def permissions(self, with_admins=True, with_owner=True,
2012 2012 expand_from_user_groups=False):
2013 2013 """
2014 2014 Permissions for repositories
2015 2015 """
2016 2016 _admin_perm = 'repository.admin'
2017 2017
2018 2018 owner_row = []
2019 2019 if with_owner:
2020 2020 usr = AttributeDict(self.user.get_dict())
2021 2021 usr.owner_row = True
2022 2022 usr.permission = _admin_perm
2023 2023 usr.permission_id = None
2024 2024 owner_row.append(usr)
2025 2025
2026 2026 super_admin_ids = []
2027 2027 super_admin_rows = []
2028 2028 if with_admins:
2029 2029 for usr in User.get_all_super_admins():
2030 2030 super_admin_ids.append(usr.user_id)
2031 2031 # if this admin is also owner, don't double the record
2032 2032 if usr.user_id == owner_row[0].user_id:
2033 2033 owner_row[0].admin_row = True
2034 2034 else:
2035 2035 usr = AttributeDict(usr.get_dict())
2036 2036 usr.admin_row = True
2037 2037 usr.permission = _admin_perm
2038 2038 usr.permission_id = None
2039 2039 super_admin_rows.append(usr)
2040 2040
2041 2041 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2042 2042 q = q.options(joinedload(UserRepoToPerm.repository),
2043 2043 joinedload(UserRepoToPerm.user),
2044 2044 joinedload(UserRepoToPerm.permission),)
2045 2045
2046 2046 # get owners, admins and their permissions. We re-write the sqlalchemy
2047 2047 # objects into plain AttributeDicts because the sqlalchemy session holds
2048 2048 # a global reference, and changing one object would propagate to all
2049 2049 # others. That means if the admin is also the owner, setting admin_row
2050 2050 # on one record would otherwise change both objects.
2051 2051 perm_rows = []
2052 2052 for _usr in q.all():
2053 2053 usr = AttributeDict(_usr.user.get_dict())
2054 2054 # if this user is also owner/admin, mark as duplicate record
2055 2055 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2056 2056 usr.duplicate_perm = True
2057 2057 # also check if this permission is maybe used by branch_permissions
2058 2058 if _usr.branch_perm_entry:
2059 2059 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2060 2060
2061 2061 usr.permission = _usr.permission.permission_name
2062 2062 usr.permission_id = _usr.repo_to_perm_id
2063 2063 perm_rows.append(usr)
2064 2064
2065 2065 # filter the perm rows by 'default' first and then sort them by
2066 2066 # admin,write,read,none permissions sorted again alphabetically in
2067 2067 # each group
2068 2068 perm_rows = sorted(perm_rows, key=display_user_sort)
2069 2069
2070 2070 user_groups_rows = []
2071 2071 if expand_from_user_groups:
2072 2072 for ug in self.permission_user_groups(with_members=True):
2073 2073 for user_data in ug.members:
2074 2074 user_groups_rows.append(user_data)
2075 2075
2076 2076 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2077 2077
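# Illustrative shape of the rows returned by `permissions()` above (not part of
# this diff; `repo` is an assumed `Repository` instance):
for row in repo.permissions(with_admins=True, with_owner=True):
    # owner entry:       row.owner_row is True, row.permission == 'repository.admin'
    # super-admin entry: row.admin_row is True, row.permission == 'repository.admin'
    # explicit grant:    row.permission_id is the UserRepoToPerm id; row.duplicate_perm
    #                    marks users already covered by the owner/admin rows
    print(row.permission)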
2078 2078 def permission_user_groups(self, with_members=True):
2079 2079 q = UserGroupRepoToPerm.query()\
2080 2080 .filter(UserGroupRepoToPerm.repository == self)
2081 2081 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2082 2082 joinedload(UserGroupRepoToPerm.users_group),
2083 2083 joinedload(UserGroupRepoToPerm.permission),)
2084 2084
2085 2085 perm_rows = []
2086 2086 for _user_group in q.all():
2087 2087 entry = AttributeDict(_user_group.users_group.get_dict())
2088 2088 entry.permission = _user_group.permission.permission_name
2089 2089 if with_members:
2090 2090 entry.members = [x.user.get_dict()
2091 2091 for x in _user_group.users_group.members]
2092 2092 perm_rows.append(entry)
2093 2093
2094 2094 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2095 2095 return perm_rows
2096 2096
2097 2097 def get_api_data(self, include_secrets=False):
2098 2098 """
2099 2099 Common function for generating repo api data
2100 2100
2101 2101 :param include_secrets: See :meth:`User.get_api_data`.
2102 2102
2103 2103 """
2104 2104 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2105 2105 # move these methods to the model level.
2106 2106 from rhodecode.model.settings import SettingsModel
2107 2107 from rhodecode.model.repo import RepoModel
2108 2108
2109 2109 repo = self
2110 2110 _user_id, _time, _reason = self.locked
2111 2111
2112 2112 data = {
2113 2113 'repo_id': repo.repo_id,
2114 2114 'repo_name': repo.repo_name,
2115 2115 'repo_type': repo.repo_type,
2116 2116 'clone_uri': repo.clone_uri or '',
2117 2117 'push_uri': repo.push_uri or '',
2118 2118 'url': RepoModel().get_url(self),
2119 2119 'private': repo.private,
2120 2120 'created_on': repo.created_on,
2121 2121 'description': repo.description_safe,
2122 2122 'landing_rev': repo.landing_rev,
2123 2123 'owner': repo.user.username,
2124 2124 'fork_of': repo.fork.repo_name if repo.fork else None,
2125 2125 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2126 2126 'enable_statistics': repo.enable_statistics,
2127 2127 'enable_locking': repo.enable_locking,
2128 2128 'enable_downloads': repo.enable_downloads,
2129 2129 'last_changeset': repo.changeset_cache,
2130 2130 'locked_by': User.get(_user_id).get_api_data(
2131 2131 include_secrets=include_secrets) if _user_id else None,
2132 2132 'locked_date': time_to_datetime(_time) if _time else None,
2133 2133 'lock_reason': _reason if _reason else None,
2134 2134 }
2135 2135
2136 2136 # TODO: mikhail: should be per-repo settings here
2137 2137 rc_config = SettingsModel().get_all_settings()
2138 2138 repository_fields = str2bool(
2139 2139 rc_config.get('rhodecode_repository_fields'))
2140 2140 if repository_fields:
2141 2141 for f in self.extra_fields:
2142 2142 data[f.field_key_prefixed] = f.field_value
2143 2143
2144 2144 return data
2145 2145
2146 2146 @classmethod
2147 2147 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2148 2148 if not lock_time:
2149 2149 lock_time = time.time()
2150 2150 if not lock_reason:
2151 2151 lock_reason = cls.LOCK_AUTOMATIC
2152 2152 repo.locked = [user_id, lock_time, lock_reason]
2153 2153 Session().add(repo)
2154 2154 Session().commit()
2155 2155
2156 2156 @classmethod
2157 2157 def unlock(cls, repo):
2158 2158 repo.locked = None
2159 2159 Session().add(repo)
2160 2160 Session().commit()
2161 2161
2162 2162 @classmethod
2163 2163 def getlock(cls, repo):
2164 2164 return repo.locked
2165 2165
2166 2166 def is_user_lock(self, user_id):
2167 2167 if self.locked[0]:
2168 2168 lock_user_id = safe_int(self.locked[0])
2169 2169 user_id = safe_int(user_id)
2170 2170 # both are ints, and they are equal
2171 2171 return all([lock_user_id, user_id]) and lock_user_id == user_id
2172 2172
2173 2173 return False
2174 2174
2175 2175 def get_locking_state(self, action, user_id, only_when_enabled=True):
2176 2176 """
2177 2177 Checks locking on this repository. If locking is enabled and a lock is
2178 2178 present, returns a tuple of (make_lock, locked, locked_by).
2179 2179 make_lock has 3 states: None (do nothing), True (make a lock) and
2180 2180 False (release the lock). The value is later propagated to hooks, which
2181 2181 do the actual locking. Think of it as a signal telling the hooks what to do.
2182 2182
2183 2183 """
2184 2184 # TODO: johbo: This is part of the business logic and should be moved
2185 2185 # into the RepositoryModel.
2186 2186
2187 2187 if action not in ('push', 'pull'):
2188 2188 raise ValueError("Invalid action value: %s" % repr(action))
2189 2189
2190 2190 # defines if locked error should be thrown to user
2191 2191 currently_locked = False
2192 2192 # defines if new lock should be made, tri-state
2193 2193 make_lock = None
2194 2194 repo = self
2195 2195 user = User.get(user_id)
2196 2196
2197 2197 lock_info = repo.locked
2198 2198
2199 2199 if repo and (repo.enable_locking or not only_when_enabled):
2200 2200 if action == 'push':
2201 2201 # check if it's already locked !, if it is compare users
2202 2202 locked_by_user_id = lock_info[0]
2203 2203 if user.user_id == locked_by_user_id:
2204 2204 log.debug(
2205 2205 'Got `push` action from user %s, now unlocking', user)
2206 2206 # unlock if we have push from user who locked
2207 2207 make_lock = False
2208 2208 else:
2209 2209 # we're not the same user who locked, ban with
2210 2210 # code defined in settings (default is 423 HTTP Locked) !
2211 2211 log.debug('Repo %s is currently locked by %s', repo, user)
2212 2212 currently_locked = True
2213 2213 elif action == 'pull':
2214 2214 # [0] user [1] date
2215 2215 if lock_info[0] and lock_info[1]:
2216 2216 log.debug('Repo %s is currently locked by %s', repo, user)
2217 2217 currently_locked = True
2218 2218 else:
2219 2219 log.debug('Setting lock on repo %s by %s', repo, user)
2220 2220 make_lock = True
2221 2221
2222 2222 else:
2223 2223 log.debug('Repository %s does not have locking enabled', repo)
2224 2224
2225 2225 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2226 2226 make_lock, currently_locked, lock_info)
2227 2227
2228 2228 from rhodecode.lib.auth import HasRepoPermissionAny
2229 2229 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2230 2230 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2231 2231 # if we don't have at least write permission we cannot make a lock
2232 2232 log.debug('lock state reset back to FALSE due to lack '
2233 2233 'of at least write permission')
2234 2234 make_lock = False
2235 2235
2236 2236 return make_lock, currently_locked, lock_info
2237 2237
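# Illustrative usage of `get_locking_state` above (not part of this diff; `repo`
# and `user_id` are assumed to exist):
make_lock, currently_locked, lock_info = repo.get_locking_state('push', user_id)
# make_lock is tri-state: None = leave the lock as-is, True = set a new lock,
# False = release it; currently_locked tells the caller to reject the operation,
# and lock_info is the (user_id, timestamp, reason) tuple from `repo.locked`.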
2238 2238 @property
2239 2239 def last_commit_cache_update_diff(self):
2240 2240 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2241 2241
2242 2242 @property
2243 2243 def last_commit_change(self):
2244 2244 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2245 2245 empty_date = datetime.datetime.fromtimestamp(0)
2246 2246 date_latest = self.changeset_cache.get('date', empty_date)
2247 2247 try:
2248 2248 return parse_datetime(date_latest)
2249 2249 except Exception:
2250 2250 return empty_date
2251 2251
2252 2252 @property
2253 2253 def last_db_change(self):
2254 2254 return self.updated_on
2255 2255
2256 2256 @property
2257 2257 def clone_uri_hidden(self):
2258 2258 clone_uri = self.clone_uri
2259 2259 if clone_uri:
2260 2260 import urlobject
2261 2261 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2262 2262 if url_obj.password:
2263 2263 clone_uri = url_obj.with_password('*****')
2264 2264 return clone_uri
2265 2265
2266 2266 @property
2267 2267 def push_uri_hidden(self):
2268 2268 push_uri = self.push_uri
2269 2269 if push_uri:
2270 2270 import urlobject
2271 2271 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2272 2272 if url_obj.password:
2273 2273 push_uri = url_obj.with_password('*****')
2274 2274 return push_uri
2275 2275
2276 2276 def clone_url(self, **override):
2277 2277 from rhodecode.model.settings import SettingsModel
2278 2278
2279 2279 uri_tmpl = None
2280 2280 if 'with_id' in override:
2281 2281 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2282 2282 del override['with_id']
2283 2283
2284 2284 if 'uri_tmpl' in override:
2285 2285 uri_tmpl = override['uri_tmpl']
2286 2286 del override['uri_tmpl']
2287 2287
2288 2288 ssh = False
2289 2289 if 'ssh' in override:
2290 2290 ssh = True
2291 2291 del override['ssh']
2292 2292
2293 2293 # we didn't override our tmpl from **overrides
2294 2294 request = get_current_request()
2295 2295 if not uri_tmpl:
2296 2296 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2297 2297 rc_config = request.call_context.rc_config
2298 2298 else:
2299 2299 rc_config = SettingsModel().get_all_settings(cache=True)
2300 2300 if ssh:
2301 2301 uri_tmpl = rc_config.get(
2302 2302 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2303 2303 else:
2304 2304 uri_tmpl = rc_config.get(
2305 2305 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2306 2306
2307 2307 return get_clone_url(request=request,
2308 2308 uri_tmpl=uri_tmpl,
2309 2309 repo_name=self.repo_name,
2310 2310 repo_id=self.repo_id, **override)
2311 2311
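# Illustrative usage of `clone_url` above (not part of this diff; the custom
# template string below is hypothetical, not a RhodeCode default):
repo.clone_url()                    # rendered from the configured HTTP clone template
repo.clone_url(with_id=True)        # uses DEFAULT_CLONE_URI_ID (URL by repository id)
repo.clone_url(ssh=True)            # uses the SSH clone template
repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')   # explicit template override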
2312 2312 def set_state(self, state):
2313 2313 self.repo_state = state
2314 2314 Session().add(self)
2315 2315 #==========================================================================
2316 2316 # SCM PROPERTIES
2317 2317 #==========================================================================
2318 2318
2319 2319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2320 2320 return get_commit_safe(
2321 2321 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2322 2322
2323 2323 def get_changeset(self, rev=None, pre_load=None):
2324 2324 warnings.warn("Use get_commit", DeprecationWarning)
2325 2325 commit_id = None
2326 2326 commit_idx = None
2327 2327 if isinstance(rev, compat.string_types):
2328 2328 commit_id = rev
2329 2329 else:
2330 2330 commit_idx = rev
2331 2331 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2332 2332 pre_load=pre_load)
2333 2333
2334 2334 def get_landing_commit(self):
2335 2335 """
2336 2336 Returns landing commit, or if that doesn't exist returns the tip
2337 2337 """
2338 2338 _rev_type, _rev = self.landing_rev
2339 2339 commit = self.get_commit(_rev)
2340 2340 if isinstance(commit, EmptyCommit):
2341 2341 return self.get_commit()
2342 2342 return commit
2343 2343
2344 2344 def flush_commit_cache(self):
2345 2345 self.update_commit_cache(cs_cache={'raw_id':'0'})
2346 2346 self.update_commit_cache()
2347 2347
2348 2348 def update_commit_cache(self, cs_cache=None, config=None):
2349 2349 """
2350 2350 Update cache of last commit for repository, keys should be::
2351 2351
2352 2352 source_repo_id
2353 2353 short_id
2354 2354 raw_id
2355 2355 revision
2356 2356 parents
2357 2357 message
2358 2358 date
2359 2359 author
2360 2360 updated_on
2361 2361
2362 2362 """
2363 2363 from rhodecode.lib.vcs.backends.base import BaseChangeset
2364 2364 if cs_cache is None:
2365 2365 # use no-cache version here
2366 2366 scm_repo = self.scm_instance(cache=False, config=config)
2367 2367
2368 2368 empty = scm_repo is None or scm_repo.is_empty()
2369 2369 if not empty:
2370 2370 cs_cache = scm_repo.get_commit(
2371 2371 pre_load=["author", "date", "message", "parents", "branch"])
2372 2372 else:
2373 2373 cs_cache = EmptyCommit()
2374 2374
2375 2375 if isinstance(cs_cache, BaseChangeset):
2376 2376 cs_cache = cs_cache.__json__()
2377 2377
2378 2378 def is_outdated(new_cs_cache):
2379 2379 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2380 2380 new_cs_cache['revision'] != self.changeset_cache['revision']):
2381 2381 return True
2382 2382 return False
2383 2383
2384 2384 # check if we have maybe already latest cached revision
2385 2385 if is_outdated(cs_cache) or not self.changeset_cache:
2386 2386 _default = datetime.datetime.utcnow()
2387 2387 last_change = cs_cache.get('date') or _default
2388 2388 # we check if last update is newer than the new value
2389 2389 # if yes, we use the current timestamp instead. Imagine you get
2390 2390 # an old commit pushed 1y ago; we'd set the last update to 1y ago.
2391 2391 last_change_timestamp = datetime_to_time(last_change)
2392 2392 current_timestamp = datetime_to_time(last_change)
2393 2393 if last_change_timestamp > current_timestamp:
2394 2394 cs_cache['date'] = _default
2395 2395
2396 2396 cs_cache['updated_on'] = time.time()
2397 2397 self.changeset_cache = cs_cache
2398 2398 self.updated_on = last_change
2399 2399 Session().add(self)
2400 2400 Session().commit()
2401 2401
2402 2402 log.debug('updated repo `%s` with new commit cache %s',
2403 2403 self.repo_name, cs_cache)
2404 2404 else:
2405 2405 cs_cache = self.changeset_cache
2406 2406 cs_cache['updated_on'] = time.time()
2407 2407 self.changeset_cache = cs_cache
2408 2408 Session().add(self)
2409 2409 Session().commit()
2410 2410
2411 2411 log.debug('Skipping update_commit_cache for repo:`%s` '
2412 2412 'commit already with latest changes', self.repo_name)
2413 2413
2414 2414 @property
2415 2415 def tip(self):
2416 2416 return self.get_commit('tip')
2417 2417
2418 2418 @property
2419 2419 def author(self):
2420 2420 return self.tip.author
2421 2421
2422 2422 @property
2423 2423 def last_change(self):
2424 2424 return self.scm_instance().last_change
2425 2425
2426 2426 def get_comments(self, revisions=None):
2427 2427 """
2428 2428 Returns comments for this repository grouped by revisions
2429 2429
2430 2430 :param revisions: filter query by revisions only
2431 2431 """
2432 2432 cmts = ChangesetComment.query()\
2433 2433 .filter(ChangesetComment.repo == self)
2434 2434 if revisions:
2435 2435 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2436 2436 grouped = collections.defaultdict(list)
2437 2437 for cmt in cmts.all():
2438 2438 grouped[cmt.revision].append(cmt)
2439 2439 return grouped
2440 2440
2441 2441 def statuses(self, revisions=None):
2442 2442 """
2443 2443 Returns statuses for this repository
2444 2444
2445 2445 :param revisions: list of revisions to get statuses for
2446 2446 """
2447 2447 statuses = ChangesetStatus.query()\
2448 2448 .filter(ChangesetStatus.repo == self)\
2449 2449 .filter(ChangesetStatus.version == 0)
2450 2450
2451 2451 if revisions:
2452 2452 # Try doing the filtering in chunks to avoid hitting limits
2453 2453 size = 500
2454 2454 status_results = []
2455 2455 for chunk in xrange(0, len(revisions), size):
2456 2456 status_results += statuses.filter(
2457 2457 ChangesetStatus.revision.in_(
2458 2458 revisions[chunk: chunk+size])
2459 2459 ).all()
2460 2460 else:
2461 2461 status_results = statuses.all()
2462 2462
2463 2463 grouped = {}
2464 2464
2465 2465 # maybe we have open new pullrequest without a status?
2466 2466 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2467 2467 status_lbl = ChangesetStatus.get_status_lbl(stat)
2468 2468 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2469 2469 for rev in pr.revisions:
2470 2470 pr_id = pr.pull_request_id
2471 2471 pr_repo = pr.target_repo.repo_name
2472 2472 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2473 2473
2474 2474 for stat in status_results:
2475 2475 pr_id = pr_repo = None
2476 2476 if stat.pull_request:
2477 2477 pr_id = stat.pull_request.pull_request_id
2478 2478 pr_repo = stat.pull_request.target_repo.repo_name
2479 2479 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2480 2480 pr_id, pr_repo]
2481 2481 return grouped
2482 2482
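# Illustrative sketch of the chunking pattern used in `statuses()` above (not
# part of this diff): large IN() lists are split into slices of 500 so a single
# query never exceeds database expression limits.
def in_chunks(items, size=500):
    for start in range(0, len(items), size):
        yield items[start:start + size]
# e.g. for chunk in in_chunks(revisions): query.filter(column.in_(chunk)).all()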
2483 2483 # ==========================================================================
2484 2484 # SCM CACHE INSTANCE
2485 2485 # ==========================================================================
2486 2486
2487 2487 def scm_instance(self, **kwargs):
2488 2488 import rhodecode
2489 2489
2490 2490 # Passing a config will not hit the cache; currently this is only
2491 2491 # used for repo2dbmapper
2492 2492 config = kwargs.pop('config', None)
2493 2493 cache = kwargs.pop('cache', None)
2494 2494 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2495 2495 if vcs_full_cache is not None:
2496 2496 # allows overriding the global config
2497 2497 full_cache = vcs_full_cache
2498 2498 else:
2499 2499 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2500 2500 # if cache is NOT defined use default global, else we have a full
2501 2501 # control over cache behaviour
2502 2502 if cache is None and full_cache and not config:
2503 2503 log.debug('Initializing pure cached instance for %s', self.repo_path)
2504 2504 return self._get_instance_cached()
2505 2505
2506 2506 # cache here is sent to the "vcs server"
2507 2507 return self._get_instance(cache=bool(cache), config=config)
2508 2508
2509 2509 def _get_instance_cached(self):
2510 2510 from rhodecode.lib import rc_cache
2511 2511
2512 2512 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2513 2513 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2514 2514 repo_id=self.repo_id)
2515 2515 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2516 2516
2517 2517 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2518 2518 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2519 2519 return self._get_instance(repo_state_uid=_cache_state_uid)
2520 2520
2521 2521 # we must use a thread-scoped cache here, because each gevent green
2522 2522 # thread needs its own, non-shared connection and cache.
2523 2523 # we also alter `args` so the cache key is individual for every green thread.
2524 2524 inv_context_manager = rc_cache.InvalidationContext(
2525 2525 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2526 2526 thread_scoped=True)
2527 2527 with inv_context_manager as invalidation_context:
2528 2528 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2529 2529 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2530 2530
2531 2531 # re-compute and store cache if we get invalidate signal
2532 2532 if invalidation_context.should_invalidate():
2533 2533 instance = get_instance_cached.refresh(*args)
2534 2534 else:
2535 2535 instance = get_instance_cached(*args)
2536 2536
2537 2537 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2538 2538 return instance
2539 2539
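# Illustrative sketch of the cache/invalidation flow in `_get_instance_cached`
# above (not part of this diff), with a plain dict standing in for the dogpile
# region and a boolean standing in for the invalidation context:
_region = {}

def cached_instance(key, compute, should_invalidate):
    if should_invalidate or key not in _region:
        _region[key] = compute()      # mirrors get_instance_cached.refresh(*args)
    return _region[key]               # mirrors get_instance_cached(*args)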
2540 2540 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2541 2541 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2542 2542 self.repo_type, self.repo_path, cache)
2543 2543 config = config or self._config
2544 2544 custom_wire = {
2545 2545 'cache': cache, # controls the vcs.remote cache
2546 2546 'repo_state_uid': repo_state_uid
2547 2547 }
2548 2548 repo = get_vcs_instance(
2549 2549 repo_path=safe_str(self.repo_full_path),
2550 2550 config=config,
2551 2551 with_wire=custom_wire,
2552 2552 create=False,
2553 2553 _vcs_alias=self.repo_type)
2554 2554 if repo is not None:
2555 2555 repo.count() # cache rebuild
2556 2556 return repo
2557 2557
2558 2558 def get_shadow_repository_path(self, workspace_id):
2559 2559 from rhodecode.lib.vcs.backends.base import BaseRepository
2560 2560 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2561 2561 self.repo_full_path, self.repo_id, workspace_id)
2562 2562 return shadow_repo_path
2563 2563
2564 2564 def __json__(self):
2565 2565 return {'landing_rev': self.landing_rev}
2566 2566
2567 2567 def get_dict(self):
2568 2568
2569 2569 # Since we transformed `repo_name` to a hybrid property, we need to
2570 2570 # keep compatibility with the code which uses `repo_name` field.
2571 2571
2572 2572 result = super(Repository, self).get_dict()
2573 2573 result['repo_name'] = result.pop('_repo_name', None)
2574 2574 return result
2575 2575
2576 2576
2577 2577 class RepoGroup(Base, BaseModel):
2578 2578 __tablename__ = 'groups'
2579 2579 __table_args__ = (
2580 2580 UniqueConstraint('group_name', 'group_parent_id'),
2581 2581 base_table_args,
2582 2582 )
2583 2583 __mapper_args__ = {'order_by': 'group_name'}
2584 2584
2585 2585 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2586 2586
2587 2587 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2588 2588 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2589 2589 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2590 2590 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2591 2591 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2592 2592 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2593 2593 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2594 2594 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2595 2595 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2596 2596 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2597 2597 _changeset_cache = Column(
2598 2598 "changeset_cache", LargeBinary(), nullable=True) # JSON data
2599 2599
2600 2600 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2601 2601 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2602 2602 parent_group = relationship('RepoGroup', remote_side=group_id)
2603 2603 user = relationship('User')
2604 2604 integrations = relationship('Integration', cascade="all, delete-orphan")
2605 2605
2606 2606 # no cascade, set NULL
2607 2607 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2608 2608
2609 2609 def __init__(self, group_name='', parent_group=None):
2610 2610 self.group_name = group_name
2611 2611 self.parent_group = parent_group
2612 2612
2613 2613 def __unicode__(self):
2614 2614 return u"<%s('id:%s:%s')>" % (
2615 2615 self.__class__.__name__, self.group_id, self.group_name)
2616 2616
2617 2617 @hybrid_property
2618 2618 def group_name(self):
2619 2619 return self._group_name
2620 2620
2621 2621 @group_name.setter
2622 2622 def group_name(self, value):
2623 2623 self._group_name = value
2624 2624 self.group_name_hash = self.hash_repo_group_name(value)
2625 2625
2626 2626 @hybrid_property
2627 2627 def changeset_cache(self):
2628 2628 from rhodecode.lib.vcs.backends.base import EmptyCommit
2629 2629 dummy = EmptyCommit().__json__()
2630 2630 if not self._changeset_cache:
2631 2631 dummy['source_repo_id'] = ''
2632 2632 return json.loads(json.dumps(dummy))
2633 2633
2634 2634 try:
2635 2635 return json.loads(self._changeset_cache)
2636 2636 except TypeError:
2637 2637 return dummy
2638 2638 except Exception:
2639 2639 log.error(traceback.format_exc())
2640 2640 return dummy
2641 2641
2642 2642 @changeset_cache.setter
2643 2643 def changeset_cache(self, val):
2644 2644 try:
2645 2645 self._changeset_cache = json.dumps(val)
2646 2646 except Exception:
2647 2647 log.error(traceback.format_exc())
2648 2648
2649 2649 @validates('group_parent_id')
2650 2650 def validate_group_parent_id(self, key, val):
2651 2651 """
2652 2652 Check cycle references for a parent group to self
2653 2653 """
2654 2654 if self.group_id and val:
2655 2655 assert val != self.group_id
2656 2656
2657 2657 return val
2658 2658
2659 2659 @hybrid_property
2660 2660 def description_safe(self):
2661 2661 from rhodecode.lib import helpers as h
2662 2662 return h.escape(self.group_description)
2663 2663
2664 2664 @classmethod
2665 2665 def hash_repo_group_name(cls, repo_group_name):
2666 2666 val = remove_formatting(repo_group_name)
2667 2667 val = safe_str(val).lower()
2668 2668 chars = []
2669 2669 for c in val:
2670 2670 if c not in string.ascii_letters:
2671 2671 c = str(ord(c))
2672 2672 chars.append(c)
2673 2673
2674 2674 return ''.join(chars)
2675 2675
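# Illustrative result of `hash_repo_group_name` above (not part of this diff;
# assumes remove_formatting() leaves plain ASCII untouched): every character
# outside a-z/A-Z is replaced by its ordinal after lower-casing.
RepoGroup.hash_repo_group_name('My Group-1')   # -> 'my32group4549'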
2676 2676 @classmethod
2677 2677 def _generate_choice(cls, repo_group):
2678 2678 from webhelpers2.html import literal as _literal
2679 2679 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2680 2680 return repo_group.group_id, _name(repo_group.full_path_splitted)
2681 2681
2682 2682 @classmethod
2683 2683 def groups_choices(cls, groups=None, show_empty_group=True):
2684 2684 if not groups:
2685 2685 groups = cls.query().all()
2686 2686
2687 2687 repo_groups = []
2688 2688 if show_empty_group:
2689 2689 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2690 2690
2691 2691 repo_groups.extend([cls._generate_choice(x) for x in groups])
2692 2692
2693 2693 repo_groups = sorted(
2694 2694 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2695 2695 return repo_groups
2696 2696
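# Illustrative output of `groups_choices` above (not part of this diff; ids and
# names are made up): each choice is a (group_id, 'full/path') pair, optionally
# preceded by the "no parent" sentinel.
# RepoGroup.groups_choices() -> [(-1, u'-- No parent --'), (3, u'docs'), (7, u'docs/api')]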
2697 2697 @classmethod
2698 2698 def url_sep(cls):
2699 2699 return URL_SEP
2700 2700
2701 2701 @classmethod
2702 2702 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2703 2703 if case_insensitive:
2704 2704 gr = cls.query().filter(func.lower(cls.group_name)
2705 2705 == func.lower(group_name))
2706 2706 else:
2707 2707 gr = cls.query().filter(cls.group_name == group_name)
2708 2708 if cache:
2709 2709 name_key = _hash_key(group_name)
2710 2710 gr = gr.options(
2711 2711 FromCache("sql_cache_short", "get_group_%s" % name_key))
2712 2712 return gr.scalar()
2713 2713
2714 2714 @classmethod
2715 2715 def get_user_personal_repo_group(cls, user_id):
2716 2716 user = User.get(user_id)
2717 2717 if user.username == User.DEFAULT_USER:
2718 2718 return None
2719 2719
2720 2720 return cls.query()\
2721 2721 .filter(cls.personal == true()) \
2722 2722 .filter(cls.user == user) \
2723 2723 .order_by(cls.group_id.asc()) \
2724 2724 .first()
2725 2725
2726 2726 @classmethod
2727 2727 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2728 2728 case_insensitive=True):
2729 2729 q = RepoGroup.query()
2730 2730
2731 2731 if not isinstance(user_id, Optional):
2732 2732 q = q.filter(RepoGroup.user_id == user_id)
2733 2733
2734 2734 if not isinstance(group_id, Optional):
2735 2735 q = q.filter(RepoGroup.group_parent_id == group_id)
2736 2736
2737 2737 if case_insensitive:
2738 2738 q = q.order_by(func.lower(RepoGroup.group_name))
2739 2739 else:
2740 2740 q = q.order_by(RepoGroup.group_name)
2741 2741 return q.all()
2742 2742
2743 2743 @property
2744 2744 def parents(self, parents_recursion_limit=10):
2745 2745 groups = []
2746 2746 if self.parent_group is None:
2747 2747 return groups
2748 2748 cur_gr = self.parent_group
2749 2749 groups.insert(0, cur_gr)
2750 2750 cnt = 0
2751 2751 while 1:
2752 2752 cnt += 1
2753 2753 gr = getattr(cur_gr, 'parent_group', None)
2754 2754 cur_gr = cur_gr.parent_group
2755 2755 if gr is None:
2756 2756 break
2757 2757 if cnt == parents_recursion_limit:
2758 2758 # this will prevent accidental infinite loops
2759 2759 log.error('more than %s parents found for group %s, stopping '
2760 2760 'recursive parent fetching', parents_recursion_limit, self)
2761 2761 break
2762 2762
2763 2763 groups.insert(0, gr)
2764 2764 return groups
2765 2765
2766 2766 @property
2767 2767 def last_commit_cache_update_diff(self):
2768 2768 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2769 2769
2770 2770 @property
2771 2771 def last_commit_change(self):
2772 2772 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2773 2773 empty_date = datetime.datetime.fromtimestamp(0)
2774 2774 date_latest = self.changeset_cache.get('date', empty_date)
2775 2775 try:
2776 2776 return parse_datetime(date_latest)
2777 2777 except Exception:
2778 2778 return empty_date
2779 2779
2780 2780 @property
2781 2781 def last_db_change(self):
2782 2782 return self.updated_on
2783 2783
2784 2784 @property
2785 2785 def children(self):
2786 2786 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2787 2787
2788 2788 @property
2789 2789 def name(self):
2790 2790 return self.group_name.split(RepoGroup.url_sep())[-1]
2791 2791
2792 2792 @property
2793 2793 def full_path(self):
2794 2794 return self.group_name
2795 2795
2796 2796 @property
2797 2797 def full_path_splitted(self):
2798 2798 return self.group_name.split(RepoGroup.url_sep())
2799 2799
2800 2800 @property
2801 2801 def repositories(self):
2802 2802 return Repository.query()\
2803 2803 .filter(Repository.group == self)\
2804 2804 .order_by(Repository.repo_name)
2805 2805
2806 2806 @property
2807 2807 def repositories_recursive_count(self):
2808 2808 cnt = self.repositories.count()
2809 2809
2810 2810 def children_count(group):
2811 2811 cnt = 0
2812 2812 for child in group.children:
2813 2813 cnt += child.repositories.count()
2814 2814 cnt += children_count(child)
2815 2815 return cnt
2816 2816
2817 2817 return cnt + children_count(self)
2818 2818
2819 2819 def _recursive_objects(self, include_repos=True, include_groups=True):
2820 2820 all_ = []
2821 2821
2822 2822 def _get_members(root_gr):
2823 2823 if include_repos:
2824 2824 for r in root_gr.repositories:
2825 2825 all_.append(r)
2826 2826 childs = root_gr.children.all()
2827 2827 if childs:
2828 2828 for gr in childs:
2829 2829 if include_groups:
2830 2830 all_.append(gr)
2831 2831 _get_members(gr)
2832 2832
2833 2833 root_group = []
2834 2834 if include_groups:
2835 2835 root_group = [self]
2836 2836
2837 2837 _get_members(self)
2838 2838 return root_group + all_
2839 2839
2840 2840 def recursive_groups_and_repos(self):
2841 2841 """
2842 2842 Recursive return all groups, with repositories in those groups
2843 2843 """
2844 2844 return self._recursive_objects()
2845 2845
2846 2846 def recursive_groups(self):
2847 2847 """
2848 2848 Returns all children groups for this group including children of children
2849 2849 """
2850 2850 return self._recursive_objects(include_repos=False)
2851 2851
2852 2852 def recursive_repos(self):
2853 2853 """
2854 2854 Returns all children repositories for this group
2855 2855 """
2856 2856 return self._recursive_objects(include_groups=False)
2857 2857
2858 2858 def get_new_name(self, group_name):
2859 2859 """
2860 2860 returns new full group name based on parent and new name
2861 2861
2862 2862 :param group_name:
2863 2863 """
2864 2864 path_prefix = (self.parent_group.full_path_splitted if
2865 2865 self.parent_group else [])
2866 2866 return RepoGroup.url_sep().join(path_prefix + [group_name])
2867 2867
2868 2868 def update_commit_cache(self, config=None):
2869 2869 """
2870 2870 Update cache of last changeset for newest repository inside this group, keys should be::
2871 2871
2872 2872 source_repo_id
2873 2873 short_id
2874 2874 raw_id
2875 2875 revision
2876 2876 parents
2877 2877 message
2878 2878 date
2879 2879 author
2880 2880
2881 2881 """
2882 2882 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2883 2883
2884 2884 def repo_groups_and_repos():
2885 2885 all_entries = OrderedDefaultDict(list)
2886 2886
2887 2887 def _get_members(root_gr, pos=0):
2888 2888
2889 2889 for repo in root_gr.repositories:
2890 2890 all_entries[root_gr].append(repo)
2891 2891
2892 2892 # fill in all parent positions
2893 2893 for parent_group in root_gr.parents:
2894 2894 all_entries[parent_group].extend(all_entries[root_gr])
2895 2895
2896 2896 children_groups = root_gr.children.all()
2897 2897 if children_groups:
2898 2898 for cnt, gr in enumerate(children_groups, 1):
2899 2899 _get_members(gr, pos=pos+cnt)
2900 2900
2901 2901 _get_members(root_gr=self)
2902 2902 return all_entries
2903 2903
2904 2904 empty_date = datetime.datetime.fromtimestamp(0)
2905 2905 for repo_group, repos in repo_groups_and_repos().items():
2906 2906
2907 2907 latest_repo_cs_cache = {}
2908 2908 _date_latest = empty_date
2909 2909 for repo in repos:
2910 2910 repo_cs_cache = repo.changeset_cache
2911 2911 date_latest = latest_repo_cs_cache.get('date', empty_date)
2912 2912 date_current = repo_cs_cache.get('date', empty_date)
2913 2913 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2914 2914 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2915 2915 latest_repo_cs_cache = repo_cs_cache
2916 2916 latest_repo_cs_cache['source_repo_id'] = repo.repo_id
2917 2917 _date_latest = parse_datetime(latest_repo_cs_cache['date'])
2918 2918
2919 2919 latest_repo_cs_cache['updated_on'] = time.time()
2920 2920 repo_group.changeset_cache = latest_repo_cs_cache
2921 2921 repo_group.updated_on = _date_latest
2922 2922 Session().add(repo_group)
2923 2923 Session().commit()
2924 2924
2925 2925 log.debug('updated repo group `%s` with new commit cache %s',
2926 2926 repo_group.group_name, latest_repo_cs_cache)
2927 2927
2928 2928 def permissions(self, with_admins=True, with_owner=True,
2929 2929 expand_from_user_groups=False):
2930 2930 """
2931 2931 Permissions for repository groups
2932 2932 """
2933 2933 _admin_perm = 'group.admin'
2934 2934
2935 2935 owner_row = []
2936 2936 if with_owner:
2937 2937 usr = AttributeDict(self.user.get_dict())
2938 2938 usr.owner_row = True
2939 2939 usr.permission = _admin_perm
2940 2940 owner_row.append(usr)
2941 2941
2942 2942 super_admin_ids = []
2943 2943 super_admin_rows = []
2944 2944 if with_admins:
2945 2945 for usr in User.get_all_super_admins():
2946 2946 super_admin_ids.append(usr.user_id)
2947 2947 # if this admin is also owner, don't double the record
2948 2948 if usr.user_id == owner_row[0].user_id:
2949 2949 owner_row[0].admin_row = True
2950 2950 else:
2951 2951 usr = AttributeDict(usr.get_dict())
2952 2952 usr.admin_row = True
2953 2953 usr.permission = _admin_perm
2954 2954 super_admin_rows.append(usr)
2955 2955
2956 2956 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2957 2957 q = q.options(joinedload(UserRepoGroupToPerm.group),
2958 2958 joinedload(UserRepoGroupToPerm.user),
2959 2959 joinedload(UserRepoGroupToPerm.permission),)
2960 2960
2961 2961 # get owners, admins and their permissions. We re-write the sqlalchemy
2962 2962 # objects into plain AttributeDicts because the sqlalchemy session holds
2963 2963 # a global reference, and changing one object would propagate to all
2964 2964 # others. That means if the admin is also the owner, setting admin_row
2965 2965 # on one record would otherwise change both objects.
2966 2966 perm_rows = []
2967 2967 for _usr in q.all():
2968 2968 usr = AttributeDict(_usr.user.get_dict())
2969 2969 # if this user is also owner/admin, mark as duplicate record
2970 2970 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2971 2971 usr.duplicate_perm = True
2972 2972 usr.permission = _usr.permission.permission_name
2973 2973 perm_rows.append(usr)
2974 2974
2975 2975 # filter the perm rows by 'default' first and then sort them by
2976 2976 # admin,write,read,none permissions sorted again alphabetically in
2977 2977 # each group
2978 2978 perm_rows = sorted(perm_rows, key=display_user_sort)
2979 2979
2980 2980 user_groups_rows = []
2981 2981 if expand_from_user_groups:
2982 2982 for ug in self.permission_user_groups(with_members=True):
2983 2983 for user_data in ug.members:
2984 2984 user_groups_rows.append(user_data)
2985 2985
2986 2986 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2987 2987
2988 2988 def permission_user_groups(self, with_members=False):
2989 2989 q = UserGroupRepoGroupToPerm.query()\
2990 2990 .filter(UserGroupRepoGroupToPerm.group == self)
2991 2991 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2992 2992 joinedload(UserGroupRepoGroupToPerm.users_group),
2993 2993 joinedload(UserGroupRepoGroupToPerm.permission),)
2994 2994
2995 2995 perm_rows = []
2996 2996 for _user_group in q.all():
2997 2997 entry = AttributeDict(_user_group.users_group.get_dict())
2998 2998 entry.permission = _user_group.permission.permission_name
2999 2999 if with_members:
3000 3000 entry.members = [x.user.get_dict()
3001 3001 for x in _user_group.users_group.members]
3002 3002 perm_rows.append(entry)
3003 3003
3004 3004 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3005 3005 return perm_rows
3006 3006
3007 3007 def get_api_data(self):
3008 3008 """
3009 3009 Common function for generating api data
3010 3010
3011 3011 """
3012 3012 group = self
3013 3013 data = {
3014 3014 'group_id': group.group_id,
3015 3015 'group_name': group.group_name,
3016 3016 'group_description': group.description_safe,
3017 3017 'parent_group': group.parent_group.group_name if group.parent_group else None,
3018 3018 'repositories': [x.repo_name for x in group.repositories],
3019 3019 'owner': group.user.username,
3020 3020 }
3021 3021 return data
3022 3022
3023 3023 def get_dict(self):
3024 3024 # Since we transformed `group_name` to a hybrid property, we need to
3025 3025 # keep compatibility with the code which uses `group_name` field.
3026 3026 result = super(RepoGroup, self).get_dict()
3027 3027 result['group_name'] = result.pop('_group_name', None)
3028 3028 return result
3029 3029
3030 3030
3031 3031 class Permission(Base, BaseModel):
3032 3032 __tablename__ = 'permissions'
3033 3033 __table_args__ = (
3034 3034 Index('p_perm_name_idx', 'permission_name'),
3035 3035 base_table_args,
3036 3036 )
3037 3037
3038 3038 PERMS = [
3039 3039 ('hg.admin', _('RhodeCode Super Administrator')),
3040 3040
3041 3041 ('repository.none', _('Repository no access')),
3042 3042 ('repository.read', _('Repository read access')),
3043 3043 ('repository.write', _('Repository write access')),
3044 3044 ('repository.admin', _('Repository admin access')),
3045 3045
3046 3046 ('group.none', _('Repository group no access')),
3047 3047 ('group.read', _('Repository group read access')),
3048 3048 ('group.write', _('Repository group write access')),
3049 3049 ('group.admin', _('Repository group admin access')),
3050 3050
3051 3051 ('usergroup.none', _('User group no access')),
3052 3052 ('usergroup.read', _('User group read access')),
3053 3053 ('usergroup.write', _('User group write access')),
3054 3054 ('usergroup.admin', _('User group admin access')),
3055 3055
3056 3056 ('branch.none', _('Branch no permissions')),
3057 3057 ('branch.merge', _('Branch access by web merge')),
3058 3058 ('branch.push', _('Branch access by push')),
3059 3059 ('branch.push_force', _('Branch access by push with force')),
3060 3060
3061 3061 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3062 3062 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3063 3063
3064 3064 ('hg.usergroup.create.false', _('User Group creation disabled')),
3065 3065 ('hg.usergroup.create.true', _('User Group creation enabled')),
3066 3066
3067 3067 ('hg.create.none', _('Repository creation disabled')),
3068 3068 ('hg.create.repository', _('Repository creation enabled')),
3069 3069 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3070 3070 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3071 3071
3072 3072 ('hg.fork.none', _('Repository forking disabled')),
3073 3073 ('hg.fork.repository', _('Repository forking enabled')),
3074 3074
3075 3075 ('hg.register.none', _('Registration disabled')),
3076 3076 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3077 3077 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3078 3078
3079 3079 ('hg.password_reset.enabled', _('Password reset enabled')),
3080 3080 ('hg.password_reset.hidden', _('Password reset hidden')),
3081 3081 ('hg.password_reset.disabled', _('Password reset disabled')),
3082 3082
3083 3083 ('hg.extern_activate.manual', _('Manual activation of external account')),
3084 3084 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3085 3085
3086 3086 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3087 3087 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3088 3088 ]
3089 3089
3090 3090 # definition of system default permissions for DEFAULT user, created on
3091 3091 # system setup
3092 3092 DEFAULT_USER_PERMISSIONS = [
3093 3093 # object perms
3094 3094 'repository.read',
3095 3095 'group.read',
3096 3096 'usergroup.read',
3097 3097 # branch; for backward compat we need the same value as before, so forced push is allowed
3098 3098 'branch.push_force',
3099 3099 # global
3100 3100 'hg.create.repository',
3101 3101 'hg.repogroup.create.false',
3102 3102 'hg.usergroup.create.false',
3103 3103 'hg.create.write_on_repogroup.true',
3104 3104 'hg.fork.repository',
3105 3105 'hg.register.manual_activate',
3106 3106 'hg.password_reset.enabled',
3107 3107 'hg.extern_activate.auto',
3108 3108 'hg.inherit_default_perms.true',
3109 3109 ]
3110 3110
3111 3111 # Weight defines which permissions are more important.
3112 3112 # The higher the number,
3113 3113 # the more important the permission.
3114 3114 PERM_WEIGHTS = {
3115 3115 'repository.none': 0,
3116 3116 'repository.read': 1,
3117 3117 'repository.write': 3,
3118 3118 'repository.admin': 4,
3119 3119
3120 3120 'group.none': 0,
3121 3121 'group.read': 1,
3122 3122 'group.write': 3,
3123 3123 'group.admin': 4,
3124 3124
3125 3125 'usergroup.none': 0,
3126 3126 'usergroup.read': 1,
3127 3127 'usergroup.write': 3,
3128 3128 'usergroup.admin': 4,
3129 3129
3130 3130 'branch.none': 0,
3131 3131 'branch.merge': 1,
3132 3132 'branch.push': 3,
3133 3133 'branch.push_force': 4,
3134 3134
3135 3135 'hg.repogroup.create.false': 0,
3136 3136 'hg.repogroup.create.true': 1,
3137 3137
3138 3138 'hg.usergroup.create.false': 0,
3139 3139 'hg.usergroup.create.true': 1,
3140 3140
3141 3141 'hg.fork.none': 0,
3142 3142 'hg.fork.repository': 1,
3143 3143 'hg.create.none': 0,
3144 3144 'hg.create.repository': 1
3145 3145 }
3146 3146
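# Illustrative use of PERM_WEIGHTS above (not part of this diff): when the same
# object is granted several permissions, the heaviest one can be picked like so:
candidates = ['repository.read', 'repository.write']
strongest = max(candidates, key=Permission.PERM_WEIGHTS.get)   # -> 'repository.write'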
3147 3147 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3148 3148 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3149 3149 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3150 3150
3151 3151 def __unicode__(self):
3152 3152 return u"<%s('%s:%s')>" % (
3153 3153 self.__class__.__name__, self.permission_id, self.permission_name
3154 3154 )
3155 3155
3156 3156 @classmethod
3157 3157 def get_by_key(cls, key):
3158 3158 return cls.query().filter(cls.permission_name == key).scalar()
3159 3159
3160 3160 @classmethod
3161 3161 def get_default_repo_perms(cls, user_id, repo_id=None):
3162 3162 q = Session().query(UserRepoToPerm, Repository, Permission)\
3163 3163 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3164 3164 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3165 3165 .filter(UserRepoToPerm.user_id == user_id)
3166 3166 if repo_id:
3167 3167 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3168 3168 return q.all()
3169 3169
3170 3170 @classmethod
3171 3171 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3172 3172 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3173 3173 .join(
3174 3174 Permission,
3175 3175 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3176 3176 .join(
3177 3177 UserRepoToPerm,
3178 3178 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3179 3179 .filter(UserRepoToPerm.user_id == user_id)
3180 3180
3181 3181 if repo_id:
3182 3182 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3183 3183 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3184 3184
3185 3185 @classmethod
3186 3186 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3187 3187 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3188 3188 .join(
3189 3189 Permission,
3190 3190 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3191 3191 .join(
3192 3192 Repository,
3193 3193 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3194 3194 .join(
3195 3195 UserGroup,
3196 3196 UserGroupRepoToPerm.users_group_id ==
3197 3197 UserGroup.users_group_id)\
3198 3198 .join(
3199 3199 UserGroupMember,
3200 3200 UserGroupRepoToPerm.users_group_id ==
3201 3201 UserGroupMember.users_group_id)\
3202 3202 .filter(
3203 3203 UserGroupMember.user_id == user_id,
3204 3204 UserGroup.users_group_active == true())
3205 3205 if repo_id:
3206 3206 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3207 3207 return q.all()
3208 3208
3209 3209 @classmethod
3210 3210 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3211 3211 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3212 3212 .join(
3213 3213 Permission,
3214 3214 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3215 3215 .join(
3216 3216 UserGroupRepoToPerm,
3217 3217 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3218 3218 .join(
3219 3219 UserGroup,
3220 3220 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3221 3221 .join(
3222 3222 UserGroupMember,
3223 3223 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3224 3224 .filter(
3225 3225 UserGroupMember.user_id == user_id,
3226 3226 UserGroup.users_group_active == true())
3227 3227
3228 3228 if repo_id:
3229 3229 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3230 3230 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3231 3231
3232 3232 @classmethod
3233 3233 def get_default_group_perms(cls, user_id, repo_group_id=None):
3234 3234 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3235 3235 .join(
3236 3236 Permission,
3237 3237 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3238 3238 .join(
3239 3239 RepoGroup,
3240 3240 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3241 3241 .filter(UserRepoGroupToPerm.user_id == user_id)
3242 3242 if repo_group_id:
3243 3243 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3244 3244 return q.all()
3245 3245
3246 3246 @classmethod
3247 3247 def get_default_group_perms_from_user_group(
3248 3248 cls, user_id, repo_group_id=None):
3249 3249 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3250 3250 .join(
3251 3251 Permission,
3252 3252 UserGroupRepoGroupToPerm.permission_id ==
3253 3253 Permission.permission_id)\
3254 3254 .join(
3255 3255 RepoGroup,
3256 3256 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3257 3257 .join(
3258 3258 UserGroup,
3259 3259 UserGroupRepoGroupToPerm.users_group_id ==
3260 3260 UserGroup.users_group_id)\
3261 3261 .join(
3262 3262 UserGroupMember,
3263 3263 UserGroupRepoGroupToPerm.users_group_id ==
3264 3264 UserGroupMember.users_group_id)\
3265 3265 .filter(
3266 3266 UserGroupMember.user_id == user_id,
3267 3267 UserGroup.users_group_active == true())
3268 3268 if repo_group_id:
3269 3269 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3270 3270 return q.all()
3271 3271
3272 3272 @classmethod
3273 3273 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3274 3274 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3275 3275 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3276 3276 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3277 3277 .filter(UserUserGroupToPerm.user_id == user_id)
3278 3278 if user_group_id:
3279 3279 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3280 3280 return q.all()
3281 3281
3282 3282 @classmethod
3283 3283 def get_default_user_group_perms_from_user_group(
3284 3284 cls, user_id, user_group_id=None):
3285 3285 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3286 3286 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3287 3287 .join(
3288 3288 Permission,
3289 3289 UserGroupUserGroupToPerm.permission_id ==
3290 3290 Permission.permission_id)\
3291 3291 .join(
3292 3292 TargetUserGroup,
3293 3293 UserGroupUserGroupToPerm.target_user_group_id ==
3294 3294 TargetUserGroup.users_group_id)\
3295 3295 .join(
3296 3296 UserGroup,
3297 3297 UserGroupUserGroupToPerm.user_group_id ==
3298 3298 UserGroup.users_group_id)\
3299 3299 .join(
3300 3300 UserGroupMember,
3301 3301 UserGroupUserGroupToPerm.user_group_id ==
3302 3302 UserGroupMember.users_group_id)\
3303 3303 .filter(
3304 3304 UserGroupMember.user_id == user_id,
3305 3305 UserGroup.users_group_active == true())
3306 3306 if user_group_id:
3307 3307 q = q.filter(
3308 3308 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3309 3309
3310 3310 return q.all()
3311 3311
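# --- illustrative usage sketch (editor's addition, not part of db.py) ---
# Minimal example of driving the permission query helpers defined above. It
# assumes an initialized RhodeCode Session; `User.get_by_username` and the
# 'repository.write' permission key are assumptions based on stock RhodeCode.
from rhodecode.model.db import Permission, User

# look up a single permission definition by its key
write_perm = Permission.get_by_key('repository.write')

# direct repository permissions of a user; every row is a
# (UserRepoToPerm, Repository, Permission) tuple
user = User.get_by_username('someuser')  # hypothetical username
for user_to_perm, repo, perm in Permission.get_default_repo_perms(user.user_id):
    print('%s -> %s' % (repo.repo_name, perm.permission_name))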
3312 3312
3313 3313 class UserRepoToPerm(Base, BaseModel):
3314 3314 __tablename__ = 'repo_to_perm'
3315 3315 __table_args__ = (
3316 3316 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3317 3317 base_table_args
3318 3318 )
3319 3319
3320 3320 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3321 3321 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3322 3322 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3323 3323 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3324 3324
3325 3325 user = relationship('User')
3326 3326 repository = relationship('Repository')
3327 3327 permission = relationship('Permission')
3328 3328
3329 3329 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3330 3330
3331 3331 @classmethod
3332 3332 def create(cls, user, repository, permission):
3333 3333 n = cls()
3334 3334 n.user = user
3335 3335 n.repository = repository
3336 3336 n.permission = permission
3337 3337 Session().add(n)
3338 3338 return n
3339 3339
3340 3340 def __unicode__(self):
3341 3341 return u'<%s => %s >' % (self.user, self.repository)
3342 3342
3343 3343
3344 3344 class UserUserGroupToPerm(Base, BaseModel):
3345 3345 __tablename__ = 'user_user_group_to_perm'
3346 3346 __table_args__ = (
3347 3347 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3348 3348 base_table_args
3349 3349 )
3350 3350
3351 3351 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3352 3352 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3353 3353 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3354 3354 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3355 3355
3356 3356 user = relationship('User')
3357 3357 user_group = relationship('UserGroup')
3358 3358 permission = relationship('Permission')
3359 3359
3360 3360 @classmethod
3361 3361 def create(cls, user, user_group, permission):
3362 3362 n = cls()
3363 3363 n.user = user
3364 3364 n.user_group = user_group
3365 3365 n.permission = permission
3366 3366 Session().add(n)
3367 3367 return n
3368 3368
3369 3369 def __unicode__(self):
3370 3370 return u'<%s => %s >' % (self.user, self.user_group)
3371 3371
3372 3372
3373 3373 class UserToPerm(Base, BaseModel):
3374 3374 __tablename__ = 'user_to_perm'
3375 3375 __table_args__ = (
3376 3376 UniqueConstraint('user_id', 'permission_id'),
3377 3377 base_table_args
3378 3378 )
3379 3379
3380 3380 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3381 3381 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3382 3382 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3383 3383
3384 3384 user = relationship('User')
3385 3385 permission = relationship('Permission', lazy='joined')
3386 3386
3387 3387 def __unicode__(self):
3388 3388 return u'<%s => %s >' % (self.user, self.permission)
3389 3389
3390 3390
3391 3391 class UserGroupRepoToPerm(Base, BaseModel):
3392 3392 __tablename__ = 'users_group_repo_to_perm'
3393 3393 __table_args__ = (
3394 3394 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3395 3395 base_table_args
3396 3396 )
3397 3397
3398 3398 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3399 3399 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3400 3400 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3401 3401 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3402 3402
3403 3403 users_group = relationship('UserGroup')
3404 3404 permission = relationship('Permission')
3405 3405 repository = relationship('Repository')
3406 3406 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3407 3407
3408 3408 @classmethod
3409 3409 def create(cls, users_group, repository, permission):
3410 3410 n = cls()
3411 3411 n.users_group = users_group
3412 3412 n.repository = repository
3413 3413 n.permission = permission
3414 3414 Session().add(n)
3415 3415 return n
3416 3416
3417 3417 def __unicode__(self):
3418 3418 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3419 3419
3420 3420
3421 3421 class UserGroupUserGroupToPerm(Base, BaseModel):
3422 3422 __tablename__ = 'user_group_user_group_to_perm'
3423 3423 __table_args__ = (
3424 3424 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3425 3425 CheckConstraint('target_user_group_id != user_group_id'),
3426 3426 base_table_args
3427 3427 )
3428 3428
3429 3429 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3430 3430 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3431 3431 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3432 3432 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3433 3433
3434 3434 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3435 3435 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3436 3436 permission = relationship('Permission')
3437 3437
3438 3438 @classmethod
3439 3439 def create(cls, target_user_group, user_group, permission):
3440 3440 n = cls()
3441 3441 n.target_user_group = target_user_group
3442 3442 n.user_group = user_group
3443 3443 n.permission = permission
3444 3444 Session().add(n)
3445 3445 return n
3446 3446
3447 3447 def __unicode__(self):
3448 3448 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3449 3449
3450 3450
3451 3451 class UserGroupToPerm(Base, BaseModel):
3452 3452 __tablename__ = 'users_group_to_perm'
3453 3453 __table_args__ = (
3454 3454 UniqueConstraint('users_group_id', 'permission_id',),
3455 3455 base_table_args
3456 3456 )
3457 3457
3458 3458 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3459 3459 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3460 3460 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3461 3461
3462 3462 users_group = relationship('UserGroup')
3463 3463 permission = relationship('Permission')
3464 3464
3465 3465
3466 3466 class UserRepoGroupToPerm(Base, BaseModel):
3467 3467 __tablename__ = 'user_repo_group_to_perm'
3468 3468 __table_args__ = (
3469 3469 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3470 3470 base_table_args
3471 3471 )
3472 3472
3473 3473 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3474 3474 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3475 3475 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3476 3476 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3477 3477
3478 3478 user = relationship('User')
3479 3479 group = relationship('RepoGroup')
3480 3480 permission = relationship('Permission')
3481 3481
3482 3482 @classmethod
3483 3483 def create(cls, user, repository_group, permission):
3484 3484 n = cls()
3485 3485 n.user = user
3486 3486 n.group = repository_group
3487 3487 n.permission = permission
3488 3488 Session().add(n)
3489 3489 return n
3490 3490
3491 3491
3492 3492 class UserGroupRepoGroupToPerm(Base, BaseModel):
3493 3493 __tablename__ = 'users_group_repo_group_to_perm'
3494 3494 __table_args__ = (
3495 3495 UniqueConstraint('users_group_id', 'group_id'),
3496 3496 base_table_args
3497 3497 )
3498 3498
3499 3499 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3500 3500 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3501 3501 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3502 3502 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3503 3503
3504 3504 users_group = relationship('UserGroup')
3505 3505 permission = relationship('Permission')
3506 3506 group = relationship('RepoGroup')
3507 3507
3508 3508 @classmethod
3509 3509 def create(cls, user_group, repository_group, permission):
3510 3510 n = cls()
3511 3511 n.users_group = user_group
3512 3512 n.group = repository_group
3513 3513 n.permission = permission
3514 3514 Session().add(n)
3515 3515 return n
3516 3516
3517 3517 def __unicode__(self):
3518 3518 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3519 3519
3520 3520
3521 3521 class Statistics(Base, BaseModel):
3522 3522 __tablename__ = 'statistics'
3523 3523 __table_args__ = (
3524 3524 base_table_args
3525 3525 )
3526 3526
3527 3527 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3528 3528 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3529 3529 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3530 3530 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3531 3531 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3532 3532 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3533 3533
3534 3534 repository = relationship('Repository', single_parent=True)
3535 3535
3536 3536
3537 3537 class UserFollowing(Base, BaseModel):
3538 3538 __tablename__ = 'user_followings'
3539 3539 __table_args__ = (
3540 3540 UniqueConstraint('user_id', 'follows_repository_id'),
3541 3541 UniqueConstraint('user_id', 'follows_user_id'),
3542 3542 base_table_args
3543 3543 )
3544 3544
3545 3545 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3546 3546 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3547 3547 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3548 3548 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3549 3549 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3550 3550
3551 3551 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3552 3552
3553 3553 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3554 3554 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3555 3555
3556 3556 @classmethod
3557 3557 def get_repo_followers(cls, repo_id):
3558 3558 return cls.query().filter(cls.follows_repo_id == repo_id)
3559 3559
3560 3560
3561 3561 class CacheKey(Base, BaseModel):
3562 3562 __tablename__ = 'cache_invalidation'
3563 3563 __table_args__ = (
3564 3564 UniqueConstraint('cache_key'),
3565 3565 Index('key_idx', 'cache_key'),
3566 3566 base_table_args,
3567 3567 )
3568 3568
3569 3569 CACHE_TYPE_FEED = 'FEED'
3570 3570
3571 3571 # namespaces used to register process/thread aware caches
3572 3572 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3573 3573 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3574 3574
3575 3575 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3576 3576 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3577 3577 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3578 3578 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3579 3579 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3580 3580
3581 3581 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3582 3582 self.cache_key = cache_key
3583 3583 self.cache_args = cache_args
3584 3584 self.cache_active = False
3585 3585 # the initial state uid should be the same for all entries, since all workers should share it
3586 3586 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3587 3587
3588 3588 def __unicode__(self):
3589 3589 return u"<%s('%s:%s[%s]')>" % (
3590 3590 self.__class__.__name__,
3591 3591 self.cache_id, self.cache_key, self.cache_active)
3592 3592
3593 3593 def _cache_key_partition(self):
3594 3594 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3595 3595 return prefix, repo_name, suffix
3596 3596
3597 3597 def get_prefix(self):
3598 3598 """
3599 3599 Try to extract prefix from existing cache key. The key could consist
3600 3600 of prefix, repo_name, suffix
3601 3601 """
3602 3602 # this returns prefix, repo_name, suffix
3603 3603 return self._cache_key_partition()[0]
3604 3604
3605 3605 def get_suffix(self):
3606 3606 """
3607 3607 get suffix that might have been used in _get_cache_key to
3608 3608 generate self.cache_key. Only used for informational purposes
3609 3609 in repo_edit.mako.
3610 3610 """
3611 3611 # prefix, repo_name, suffix
3612 3612 return self._cache_key_partition()[2]
3613 3613
3614 3614 @classmethod
3615 3615 def generate_new_state_uid(cls, based_on=None):
3616 3616 if based_on:
3617 3617 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3618 3618 else:
3619 3619 return str(uuid.uuid4())
3620 3620
3621 3621 @classmethod
3622 3622 def delete_all_cache(cls):
3623 3623 """
3624 3624 Delete all cache keys from database.
3625 3625 Should only be run when all instances are down and all entries
3626 3626 thus stale.
3627 3627 """
3628 3628 cls.query().delete()
3629 3629 Session().commit()
3630 3630
3631 3631 @classmethod
3632 3632 def set_invalidate(cls, cache_uid, delete=False):
3633 3633 """
3634 3634 Mark all caches of a repo as invalid in the database.
3635 3635 """
3636 3636
3637 3637 try:
3638 3638 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3639 3639 if delete:
3640 3640 qry.delete()
3641 3641 log.debug('cache objects deleted for cache args %s',
3642 3642 safe_str(cache_uid))
3643 3643 else:
3644 3644 qry.update({"cache_active": False,
3645 3645 "cache_state_uid": cls.generate_new_state_uid()})
3646 3646 log.debug('cache objects marked as invalid for cache args %s',
3647 3647 safe_str(cache_uid))
3648 3648
3649 3649 Session().commit()
3650 3650 except Exception:
3651 3651 log.exception(
3652 3652 'Cache key invalidation failed for cache args %s',
3653 3653 safe_str(cache_uid))
3654 3654 Session().rollback()
3655 3655
3656 3656 @classmethod
3657 3657 def get_active_cache(cls, cache_key):
3658 3658 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3659 3659 if inv_obj:
3660 3660 return inv_obj
3661 3661 return None
3662 3662
3663 3663 @classmethod
3664 3664 def get_namespace_map(cls, namespace):
3665 3665 return {
3666 3666 x.cache_key: x
3667 3667 for x in cls.query().filter(cls.cache_args == namespace)}
3668 3668
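# --- illustrative usage sketch (editor's addition, not part of db.py) ---
# How the invalidation helpers above are typically driven; assumes an active
# Session, and the repo_id value is hypothetical.
from rhodecode.model.db import CacheKey

# namespace for a repository cache, built from the template defined above
namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=42)

# mark every entry registered under that namespace as invalid; workers notice
# the new cache_state_uid and rebuild their caches lazily
CacheKey.set_invalidate(namespace)

# or drop the entries from the database entirely
CacheKey.set_invalidate(namespace, delete=True)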
3669 3669
3670 3670 class ChangesetComment(Base, BaseModel):
3671 3671 __tablename__ = 'changeset_comments'
3672 3672 __table_args__ = (
3673 3673 Index('cc_revision_idx', 'revision'),
3674 3674 base_table_args,
3675 3675 )
3676 3676
3677 3677 COMMENT_OUTDATED = u'comment_outdated'
3678 3678 COMMENT_TYPE_NOTE = u'note'
3679 3679 COMMENT_TYPE_TODO = u'todo'
3680 3680 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3681 3681
3682 3682 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3683 3683 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3684 3684 revision = Column('revision', String(40), nullable=True)
3685 3685 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3686 3686 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3687 3687 line_no = Column('line_no', Unicode(10), nullable=True)
3688 3688 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3689 3689 f_path = Column('f_path', Unicode(1000), nullable=True)
3690 3690 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3691 3691 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3692 3692 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3693 3693 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3694 3694 renderer = Column('renderer', Unicode(64), nullable=True)
3695 3695 display_state = Column('display_state', Unicode(128), nullable=True)
3696 3696
3697 3697 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3698 3698 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3699 3699
3700 3700 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3701 3701 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3702 3702
3703 3703 author = relationship('User', lazy='joined')
3704 3704 repo = relationship('Repository')
3705 3705 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
3706 3706 pull_request = relationship('PullRequest', lazy='joined')
3707 3707 pull_request_version = relationship('PullRequestVersion')
3708 3708
3709 3709 @classmethod
3710 3710 def get_users(cls, revision=None, pull_request_id=None):
3711 3711 """
3712 3712 Returns users associated with this ChangesetComment, i.e. those
3713 3713 who actually commented
3714 3714
3715 3715 :param cls:
3716 3716 :param revision:
3717 3717 """
3718 3718 q = Session().query(User)\
3719 3719 .join(ChangesetComment.author)
3720 3720 if revision:
3721 3721 q = q.filter(cls.revision == revision)
3722 3722 elif pull_request_id:
3723 3723 q = q.filter(cls.pull_request_id == pull_request_id)
3724 3724 return q.all()
3725 3725
3726 3726 @classmethod
3727 3727 def get_index_from_version(cls, pr_version, versions):
3728 3728 num_versions = [x.pull_request_version_id for x in versions]
3729 3729 try:
3730 3730 return num_versions.index(pr_version) + 1
3731 3731 except (IndexError, ValueError):
3732 3732 return
3733 3733
3734 3734 @property
3735 3735 def outdated(self):
3736 3736 return self.display_state == self.COMMENT_OUTDATED
3737 3737
3738 3738 def outdated_at_version(self, version):
3739 3739 """
3740 3740 Checks if comment is outdated for given pull request version
3741 3741 """
3742 3742 return self.outdated and self.pull_request_version_id != version
3743 3743
3744 3744 def older_than_version(self, version):
3745 3745 """
3746 3746 Checks if the comment was made in an earlier version than the given one
3747 3747 """
3748 3748 if version is None:
3749 3749 return self.pull_request_version_id is not None
3750 3750
3751 3751 return self.pull_request_version_id < version
3752 3752
3753 3753 @property
3754 3754 def resolved(self):
3755 3755 return self.resolved_by[0] if self.resolved_by else None
3756 3756
3757 3757 @property
3758 3758 def is_todo(self):
3759 3759 return self.comment_type == self.COMMENT_TYPE_TODO
3760 3760
3761 3761 @property
3762 3762 def is_inline(self):
3763 3763 return self.line_no and self.f_path
3764 3764
3765 3765 def get_index_version(self, versions):
3766 3766 return self.get_index_from_version(
3767 3767 self.pull_request_version_id, versions)
3768 3768
3769 3769 def __repr__(self):
3770 3770 if self.comment_id:
3771 3771 return '<DB:Comment #%s>' % self.comment_id
3772 3772 else:
3773 3773 return '<DB:Comment at %#x>' % id(self)
3774 3774
3775 3775 def get_api_data(self):
3776 3776 comment = self
3777 3777 data = {
3778 3778 'comment_id': comment.comment_id,
3779 3779 'comment_type': comment.comment_type,
3780 3780 'comment_text': comment.text,
3781 3781 'comment_status': comment.status_change,
3782 3782 'comment_f_path': comment.f_path,
3783 3783 'comment_lineno': comment.line_no,
3784 3784 'comment_author': comment.author,
3785 3785 'comment_created_on': comment.created_on,
3786 3786 'comment_resolved_by': self.resolved
3787 3787 }
3788 3788 return data
3789 3789
3790 3790 def __json__(self):
3791 3791 data = dict()
3792 3792 data.update(self.get_api_data())
3793 3793 return data
3794 3794
3795 3795
3796 3796 class ChangesetStatus(Base, BaseModel):
3797 3797 __tablename__ = 'changeset_statuses'
3798 3798 __table_args__ = (
3799 3799 Index('cs_revision_idx', 'revision'),
3800 3800 Index('cs_version_idx', 'version'),
3801 3801 UniqueConstraint('repo_id', 'revision', 'version'),
3802 3802 base_table_args
3803 3803 )
3804 3804
3805 3805 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3806 3806 STATUS_APPROVED = 'approved'
3807 3807 STATUS_REJECTED = 'rejected'
3808 3808 STATUS_UNDER_REVIEW = 'under_review'
3809 3809
3810 3810 STATUSES = [
3811 3811 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3812 3812 (STATUS_APPROVED, _("Approved")),
3813 3813 (STATUS_REJECTED, _("Rejected")),
3814 3814 (STATUS_UNDER_REVIEW, _("Under Review")),
3815 3815 ]
3816 3816
3817 3817 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3818 3818 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3819 3819 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3820 3820 revision = Column('revision', String(40), nullable=False)
3821 3821 status = Column('status', String(128), nullable=False, default=DEFAULT)
3822 3822 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3823 3823 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3824 3824 version = Column('version', Integer(), nullable=False, default=0)
3825 3825 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3826 3826
3827 3827 author = relationship('User', lazy='joined')
3828 3828 repo = relationship('Repository')
3829 3829 comment = relationship('ChangesetComment', lazy='joined')
3830 3830 pull_request = relationship('PullRequest', lazy='joined')
3831 3831
3832 3832 def __unicode__(self):
3833 3833 return u"<%s('%s[v%s]:%s')>" % (
3834 3834 self.__class__.__name__,
3835 3835 self.status, self.version, self.author
3836 3836 )
3837 3837
3838 3838 @classmethod
3839 3839 def get_status_lbl(cls, value):
3840 3840 return dict(cls.STATUSES).get(value)
3841 3841
3842 3842 @property
3843 3843 def status_lbl(self):
3844 3844 return ChangesetStatus.get_status_lbl(self.status)
3845 3845
3846 3846 def get_api_data(self):
3847 3847 status = self
3848 3848 data = {
3849 3849 'status_id': status.changeset_status_id,
3850 3850 'status': status.status,
3851 3851 }
3852 3852 return data
3853 3853
3854 3854 def __json__(self):
3855 3855 data = dict()
3856 3856 data.update(self.get_api_data())
3857 3857 return data
3858 3858
3859 3859
3860 3860 class _SetState(object):
3861 3861 """
3862 3862 Context manager allowing state changes for sensitive operations such as
3863 3863 pull request update or merge
3864 3864 """
3865 3865
3866 3866 def __init__(self, pull_request, pr_state, back_state=None):
3867 3867 self._pr = pull_request
3868 3868 self._org_state = back_state or pull_request.pull_request_state
3869 3869 self._pr_state = pr_state
3870 3870 self._current_state = None
3871 3871
3872 3872 def __enter__(self):
3873 3873 log.debug('StateLock: entering set state context, setting state to: `%s`',
3874 3874 self._pr_state)
3875 3875 self.set_pr_state(self._pr_state)
3876 3876 return self
3877 3877
3878 3878 def __exit__(self, exc_type, exc_val, exc_tb):
3879 3879 if exc_val is not None:
3880 3880 log.error(''.join(traceback.format_exception(exc_type, exc_val, exc_tb)))
3881 3881 return None
3882 3882
3883 3883 self.set_pr_state(self._org_state)
3884 3884 log.debug('StateLock: exiting set state context, setting state to: `%s`',
3885 3885 self._org_state)
3886 3886 @property
3887 3887 def state(self):
3888 3888 return self._current_state
3889 3889
3890 3890 def set_pr_state(self, pr_state):
3891 3891 try:
3892 3892 self._pr.pull_request_state = pr_state
3893 3893 Session().add(self._pr)
3894 3894 Session().commit()
3895 3895 self._current_state = pr_state
3896 3896 except Exception:
3897 3897 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3898 3898 raise
3899 3899
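# --- illustrative usage sketch (editor's addition, not part of db.py) ---
# How the state guard above behaves; `PullRequest.get()` usage and the
# update function are assumptions/placeholders, not confirmed RhodeCode API.
from rhodecode.model.db import PullRequest, _SetState

pull_request = PullRequest.get(1)  # hypothetical pull request id
with _SetState(pull_request, PullRequest.STATE_UPDATING):
    # the state is persisted as 'updating' while this block runs
    perform_sensitive_update(pull_request)  # placeholder for the real operation
# on a clean exit the original state is restored and committed; if the block
# raises, __exit__ only logs the traceback and the 'updating' state is kept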
3900 3900
3901 3901 class _PullRequestBase(BaseModel):
3902 3902 """
3903 3903 Common attributes of pull request and version entries.
3904 3904 """
3905 3905
3906 3906 # .status values
3907 3907 STATUS_NEW = u'new'
3908 3908 STATUS_OPEN = u'open'
3909 3909 STATUS_CLOSED = u'closed'
3910 3910
3911 3911 # available states
3912 3912 STATE_CREATING = u'creating'
3913 3913 STATE_UPDATING = u'updating'
3914 3914 STATE_MERGING = u'merging'
3915 3915 STATE_CREATED = u'created'
3916 3916
3917 3917 title = Column('title', Unicode(255), nullable=True)
3918 3918 description = Column(
3919 3919 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3920 3920 nullable=True)
3921 3921 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3922 3922
3923 3923 # new/open/closed status of pull request (not approve/reject/etc)
3924 3924 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3925 3925 created_on = Column(
3926 3926 'created_on', DateTime(timezone=False), nullable=False,
3927 3927 default=datetime.datetime.now)
3928 3928 updated_on = Column(
3929 3929 'updated_on', DateTime(timezone=False), nullable=False,
3930 3930 default=datetime.datetime.now)
3931 3931
3932 3932 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3933 3933
3934 3934 @declared_attr
3935 3935 def user_id(cls):
3936 3936 return Column(
3937 3937 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3938 3938 unique=None)
3939 3939
3940 3940 # 500 revisions max
3941 3941 _revisions = Column(
3942 3942 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3943 3943
3944 3944 @declared_attr
3945 3945 def source_repo_id(cls):
3946 3946 # TODO: dan: rename column to source_repo_id
3947 3947 return Column(
3948 3948 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3949 3949 nullable=False)
3950 3950
3951 3951 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3952 3952
3953 3953 @hybrid_property
3954 3954 def source_ref(self):
3955 3955 return self._source_ref
3956 3956
3957 3957 @source_ref.setter
3958 3958 def source_ref(self, val):
3959 3959 parts = (val or '').split(':')
3960 3960 if len(parts) != 3:
3961 3961 raise ValueError(
3962 3962 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3963 3963 self._source_ref = safe_unicode(val)
3964 3964
3965 3965 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3966 3966
3967 3967 @hybrid_property
3968 3968 def target_ref(self):
3969 3969 return self._target_ref
3970 3970
3971 3971 @target_ref.setter
3972 3972 def target_ref(self, val):
3973 3973 parts = (val or '').split(':')
3974 3974 if len(parts) != 3:
3975 3975 raise ValueError(
3976 3976 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3977 3977 self._target_ref = safe_unicode(val)
3978 3978
3979 3979 @declared_attr
3980 3980 def target_repo_id(cls):
3981 3981 # TODO: dan: rename column to target_repo_id
3982 3982 return Column(
3983 3983 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3984 3984 nullable=False)
3985 3985
3986 3986 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3987 3987
3988 3988 # TODO: dan: rename column to last_merge_source_rev
3989 3989 _last_merge_source_rev = Column(
3990 3990 'last_merge_org_rev', String(40), nullable=True)
3991 3991 # TODO: dan: rename column to last_merge_target_rev
3992 3992 _last_merge_target_rev = Column(
3993 3993 'last_merge_other_rev', String(40), nullable=True)
3994 3994 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3995 3995 merge_rev = Column('merge_rev', String(40), nullable=True)
3996 3996
3997 3997 reviewer_data = Column(
3998 3998 'reviewer_data_json', MutationObj.as_mutable(
3999 3999 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4000 4000
4001 4001 @property
4002 4002 def reviewer_data_json(self):
4003 4003 return json.dumps(self.reviewer_data)
4004 4004
4005 4005 @property
4006 4006 def work_in_progress(self):
4007 4007 """checks if pull request is work in progress by checking the title"""
4008 4008 title = self.title.upper()
4009 4009 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4010 4010 return True
4011 4011 return False
4012 4012
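# --- illustrative check (editor's addition, not part of db.py) ---
# Which titles the work_in_progress property above treats as WIP; the
# pattern is copied from the property and, like it, applied to title.upper().
import re

_WIP_RE = re.compile(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)')

assert _WIP_RE.match('[WIP] add feature'.upper())    # leading [WIP] marker
assert _WIP_RE.match('wip: add feature'.upper())     # case-insensitive via .upper()
assert _WIP_RE.match('WIP add feature'.upper())      # bare WIP followed by whitespace
assert not _WIP_RE.match('add WIP feature'.upper())  # marker must be at the start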
4013 4013 @hybrid_property
4014 4014 def description_safe(self):
4015 4015 from rhodecode.lib import helpers as h
4016 4016 return h.escape(self.description)
4017 4017
4018 4018 @hybrid_property
4019 4019 def revisions(self):
4020 4020 return self._revisions.split(':') if self._revisions else []
4021 4021
4022 4022 @revisions.setter
4023 4023 def revisions(self, val):
4024 4024 self._revisions = u':'.join(val)
4025 4025
4026 4026 @hybrid_property
4027 4027 def last_merge_status(self):
4028 4028 return safe_int(self._last_merge_status)
4029 4029
4030 4030 @last_merge_status.setter
4031 4031 def last_merge_status(self, val):
4032 4032 self._last_merge_status = val
4033 4033
4034 4034 @declared_attr
4035 4035 def author(cls):
4036 4036 return relationship('User', lazy='joined')
4037 4037
4038 4038 @declared_attr
4039 4039 def source_repo(cls):
4040 4040 return relationship(
4041 4041 'Repository',
4042 4042 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4043 4043
4044 4044 @property
4045 4045 def source_ref_parts(self):
4046 4046 return self.unicode_to_reference(self.source_ref)
4047 4047
4048 4048 @declared_attr
4049 4049 def target_repo(cls):
4050 4050 return relationship(
4051 4051 'Repository',
4052 4052 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4053 4053
4054 4054 @property
4055 4055 def target_ref_parts(self):
4056 4056 return self.unicode_to_reference(self.target_ref)
4057 4057
4058 4058 @property
4059 4059 def shadow_merge_ref(self):
4060 4060 return self.unicode_to_reference(self._shadow_merge_ref)
4061 4061
4062 4062 @shadow_merge_ref.setter
4063 4063 def shadow_merge_ref(self, ref):
4064 4064 self._shadow_merge_ref = self.reference_to_unicode(ref)
4065 4065
4066 4066 @staticmethod
4067 4067 def unicode_to_reference(raw):
4068 4068 """
4069 4069 Convert a unicode (or string) to a reference object.
4070 4070 If the value evaluates to False it returns None.
4071 4071 """
4072 4072 if raw:
4073 4073 refs = raw.split(':')
4074 4074 return Reference(*refs)
4075 4075 else:
4076 4076 return None
4077 4077
4078 4078 @staticmethod
4079 4079 def reference_to_unicode(ref):
4080 4080 """
4081 4081 Convert a reference object to unicode.
4082 4082 If reference is None it returns None.
4083 4083 """
4084 4084 if ref:
4085 4085 return u':'.join(ref)
4086 4086 else:
4087 4087 return None
4088 4088
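# --- illustrative round-trip (editor's addition, not part of db.py) ---
# The 'type:name:commit_id' reference format handled by the two static
# methods above; assumes `Reference` is the (type, name, commit_id)
# namedtuple used elsewhere in this module, and the hash is hypothetical.
raw = u'branch:default:abcdef0123456789abcdef0123456789abcdef01'
ref = _PullRequestBase.unicode_to_reference(raw)
# ref.type == u'branch', ref.name == u'default', ref.commit_id == the 40-char hash
assert _PullRequestBase.reference_to_unicode(ref) == raw
assert _PullRequestBase.unicode_to_reference(None) is None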
4089 4089 def get_api_data(self, with_merge_state=True):
4090 4090 from rhodecode.model.pull_request import PullRequestModel
4091 4091
4092 4092 pull_request = self
4093 4093 if with_merge_state:
4094 4094 merge_status = PullRequestModel().merge_status(pull_request)
4095 4095 merge_state = {
4096 4096 'status': merge_status[0],
4097 4097 'message': safe_unicode(merge_status[1]),
4098 4098 }
4099 4099 else:
4100 4100 merge_state = {'status': 'not_available',
4101 4101 'message': 'not_available'}
4102 4102
4103 4103 merge_data = {
4104 4104 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4105 4105 'reference': (
4106 4106 pull_request.shadow_merge_ref._asdict()
4107 4107 if pull_request.shadow_merge_ref else None),
4108 4108 }
4109 4109
4110 4110 data = {
4111 4111 'pull_request_id': pull_request.pull_request_id,
4112 4112 'url': PullRequestModel().get_url(pull_request),
4113 4113 'title': pull_request.title,
4114 4114 'description': pull_request.description,
4115 4115 'status': pull_request.status,
4116 4116 'state': pull_request.pull_request_state,
4117 4117 'created_on': pull_request.created_on,
4118 4118 'updated_on': pull_request.updated_on,
4119 4119 'commit_ids': pull_request.revisions,
4120 4120 'review_status': pull_request.calculated_review_status(),
4121 4121 'mergeable': merge_state,
4122 4122 'source': {
4123 4123 'clone_url': pull_request.source_repo.clone_url(),
4124 4124 'repository': pull_request.source_repo.repo_name,
4125 4125 'reference': {
4126 4126 'name': pull_request.source_ref_parts.name,
4127 4127 'type': pull_request.source_ref_parts.type,
4128 4128 'commit_id': pull_request.source_ref_parts.commit_id,
4129 4129 },
4130 4130 },
4131 4131 'target': {
4132 4132 'clone_url': pull_request.target_repo.clone_url(),
4133 4133 'repository': pull_request.target_repo.repo_name,
4134 4134 'reference': {
4135 4135 'name': pull_request.target_ref_parts.name,
4136 4136 'type': pull_request.target_ref_parts.type,
4137 4137 'commit_id': pull_request.target_ref_parts.commit_id,
4138 4138 },
4139 4139 },
4140 4140 'merge': merge_data,
4141 4141 'author': pull_request.author.get_api_data(include_secrets=False,
4142 4142 details='basic'),
4143 4143 'reviewers': [
4144 4144 {
4145 4145 'user': reviewer.get_api_data(include_secrets=False,
4146 4146 details='basic'),
4147 4147 'reasons': reasons,
4148 4148 'review_status': st[0][1].status if st else 'not_reviewed',
4149 4149 }
4150 4150 for obj, reviewer, reasons, mandatory, st in
4151 4151 pull_request.reviewers_statuses()
4152 4152 ]
4153 4153 }
4154 4154
4155 4155 return data
4156 4156
4157 4157 def set_state(self, pull_request_state, final_state=None):
4158 4158 """
4159 4159 # goes from the initial state to updating, then back to the initial state.
4160 4160 # the state restored on exit can be changed by passing final_state=
4161 4161 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4162 4162 pull_request.merge()
4163 4163
4164 4164 :param pull_request_state:
4165 4165 :param final_state:
4166 4166
4167 4167 """
4168 4168
4169 4169 return _SetState(self, pull_request_state, back_state=final_state)
4170 4170
4171 4171
4172 4172 class PullRequest(Base, _PullRequestBase):
4173 4173 __tablename__ = 'pull_requests'
4174 4174 __table_args__ = (
4175 4175 base_table_args,
4176 4176 )
4177 4177
4178 4178 pull_request_id = Column(
4179 4179 'pull_request_id', Integer(), nullable=False, primary_key=True)
4180 4180
4181 4181 def __repr__(self):
4182 4182 if self.pull_request_id:
4183 4183 return '<DB:PullRequest #%s>' % self.pull_request_id
4184 4184 else:
4185 4185 return '<DB:PullRequest at %#x>' % id(self)
4186 4186
4187 4187 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
4188 4188 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4189 4189 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4190 4190 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4191 4191 lazy='dynamic')
4192 4192
4193 4193 @classmethod
4194 4194 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4195 4195 internal_methods=None):
4196 4196
4197 4197 class PullRequestDisplay(object):
4198 4198 """
4199 4199 Special object wrapper for showing PullRequest data via Versions
4200 4200 It mimics PR object as close as possible. This is read only object
4201 4201 just for display
4202 4202 """
4203 4203
4204 4204 def __init__(self, attrs, internal=None):
4205 4205 self.attrs = attrs
4206 4206 # internal attributes have priority over the ones given via attrs
4207 4207 self.internal = internal or ['versions']
4208 4208
4209 4209 def __getattr__(self, item):
4210 4210 if item in self.internal:
4211 4211 return getattr(self, item)
4212 4212 try:
4213 4213 return self.attrs[item]
4214 4214 except KeyError:
4215 4215 raise AttributeError(
4216 4216 '%s object has no attribute %s' % (self, item))
4217 4217
4218 4218 def __repr__(self):
4219 4219 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
4220 4220
4221 4221 def versions(self):
4222 4222 return pull_request_obj.versions.order_by(
4223 4223 PullRequestVersion.pull_request_version_id).all()
4224 4224
4225 4225 def is_closed(self):
4226 4226 return pull_request_obj.is_closed()
4227 4227
4228 4228 def is_state_changing(self):
4229 4229 return pull_request_obj.is_state_changing()
4230 4230
4231 4231 @property
4232 4232 def pull_request_version_id(self):
4233 4233 return getattr(pull_request_obj, 'pull_request_version_id', None)
4234 4234
4235 4235 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4236 4236
4237 4237 attrs.author = StrictAttributeDict(
4238 4238 pull_request_obj.author.get_api_data())
4239 4239 if pull_request_obj.target_repo:
4240 4240 attrs.target_repo = StrictAttributeDict(
4241 4241 pull_request_obj.target_repo.get_api_data())
4242 4242 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4243 4243
4244 4244 if pull_request_obj.source_repo:
4245 4245 attrs.source_repo = StrictAttributeDict(
4246 4246 pull_request_obj.source_repo.get_api_data())
4247 4247 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4248 4248
4249 4249 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4250 4250 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4251 4251 attrs.revisions = pull_request_obj.revisions
4252 4252
4253 4253 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4254 4254 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4255 4255 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4256 4256
4257 4257 return PullRequestDisplay(attrs, internal=internal_methods)
4258 4258
4259 4259 def is_closed(self):
4260 4260 return self.status == self.STATUS_CLOSED
4261 4261
4262 4262 def is_state_changing(self):
4263 4263 return self.pull_request_state != PullRequest.STATE_CREATED
4264 4264
4265 4265 def __json__(self):
4266 4266 return {
4267 4267 'revisions': self.revisions,
4268 4268 }
4269 4269
4270 4270 def calculated_review_status(self):
4271 4271 from rhodecode.model.changeset_status import ChangesetStatusModel
4272 4272 return ChangesetStatusModel().calculated_review_status(self)
4273 4273
4274 4274 def reviewers_statuses(self):
4275 4275 from rhodecode.model.changeset_status import ChangesetStatusModel
4276 4276 return ChangesetStatusModel().reviewers_statuses(self)
4277 4277
4278 4278 @property
4279 4279 def workspace_id(self):
4280 4280 from rhodecode.model.pull_request import PullRequestModel
4281 4281 return PullRequestModel()._workspace_id(self)
4282 4282
4283 4283 def get_shadow_repo(self):
4284 4284 workspace_id = self.workspace_id
4285 4285 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4286 4286 if os.path.isdir(shadow_repository_path):
4287 4287 vcs_obj = self.target_repo.scm_instance()
4288 4288 return vcs_obj.get_shadow_instance(shadow_repository_path)
4289 4289
4290 4290
4291 4291 class PullRequestVersion(Base, _PullRequestBase):
4292 4292 __tablename__ = 'pull_request_versions'
4293 4293 __table_args__ = (
4294 4294 base_table_args,
4295 4295 )
4296 4296
4297 4297 pull_request_version_id = Column(
4298 4298 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4299 4299 pull_request_id = Column(
4300 4300 'pull_request_id', Integer(),
4301 4301 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4302 4302 pull_request = relationship('PullRequest')
4303 4303
4304 4304 def __repr__(self):
4305 4305 if self.pull_request_version_id:
4306 4306 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4307 4307 else:
4308 4308 return '<DB:PullRequestVersion at %#x>' % id(self)
4309 4309
4310 4310 @property
4311 4311 def reviewers(self):
4312 4312 return self.pull_request.reviewers
4313 4313
4314 4314 @property
4315 4315 def versions(self):
4316 4316 return self.pull_request.versions
4317 4317
4318 4318 def is_closed(self):
4319 4319 # calculate from original
4320 4320 return self.pull_request.status == self.STATUS_CLOSED
4321 4321
4322 4322 def is_state_changing(self):
4323 4323 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4324 4324
4325 4325 def calculated_review_status(self):
4326 4326 return self.pull_request.calculated_review_status()
4327 4327
4328 4328 def reviewers_statuses(self):
4329 4329 return self.pull_request.reviewers_statuses()
4330 4330
4331 4331
4332 4332 class PullRequestReviewers(Base, BaseModel):
4333 4333 __tablename__ = 'pull_request_reviewers'
4334 4334 __table_args__ = (
4335 4335 base_table_args,
4336 4336 )
4337 4337
4338 4338 @hybrid_property
4339 4339 def reasons(self):
4340 4340 if not self._reasons:
4341 4341 return []
4342 4342 return self._reasons
4343 4343
4344 4344 @reasons.setter
4345 4345 def reasons(self, val):
4346 4346 val = val or []
4347 4347 if any(not isinstance(x, compat.string_types) for x in val):
4348 4348 raise Exception('invalid reasons type, must be list of strings')
4349 4349 self._reasons = val
4350 4350
4351 4351 pull_requests_reviewers_id = Column(
4352 4352 'pull_requests_reviewers_id', Integer(), nullable=False,
4353 4353 primary_key=True)
4354 4354 pull_request_id = Column(
4355 4355 "pull_request_id", Integer(),
4356 4356 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4357 4357 user_id = Column(
4358 4358 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4359 4359 _reasons = Column(
4360 4360 'reason', MutationList.as_mutable(
4361 4361 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4362 4362
4363 4363 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4364 4364 user = relationship('User')
4365 4365 pull_request = relationship('PullRequest')
4366 4366
4367 4367 rule_data = Column(
4368 4368 'rule_data_json',
4369 4369 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4370 4370
4371 4371 def rule_user_group_data(self):
4372 4372 """
4373 4373 Returns the voting user group rule data for this reviewer
4374 4374 """
4375 4375
4376 4376 if self.rule_data and 'vote_rule' in self.rule_data:
4377 4377 user_group_data = {}
4378 4378 if 'rule_user_group_entry_id' in self.rule_data:
4379 4379 # means a group with voting rules !
4380 4380 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4381 4381 user_group_data['name'] = self.rule_data['rule_name']
4382 4382 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4383 4383
4384 4384 return user_group_data
4385 4385
4386 4386 def __unicode__(self):
4387 4387 return u"<%s('id:%s')>" % (self.__class__.__name__,
4388 4388 self.pull_requests_reviewers_id)
4389 4389
4390 4390
4391 4391 class Notification(Base, BaseModel):
4392 4392 __tablename__ = 'notifications'
4393 4393 __table_args__ = (
4394 4394 Index('notification_type_idx', 'type'),
4395 4395 base_table_args,
4396 4396 )
4397 4397
4398 4398 TYPE_CHANGESET_COMMENT = u'cs_comment'
4399 4399 TYPE_MESSAGE = u'message'
4400 4400 TYPE_MENTION = u'mention'
4401 4401 TYPE_REGISTRATION = u'registration'
4402 4402 TYPE_PULL_REQUEST = u'pull_request'
4403 4403 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4404 TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
4404 4405
4405 4406 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4406 4407 subject = Column('subject', Unicode(512), nullable=True)
4407 4408 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4408 4409 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4409 4410 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4410 4411 type_ = Column('type', Unicode(255))
4411 4412
4412 4413 created_by_user = relationship('User')
4413 4414 notifications_to_users = relationship('UserNotification', lazy='joined',
4414 4415 cascade="all, delete-orphan")
4415 4416
4416 4417 @property
4417 4418 def recipients(self):
4418 4419 return [x.user for x in UserNotification.query()\
4419 4420 .filter(UserNotification.notification == self)\
4420 4421 .order_by(UserNotification.user_id.asc()).all()]
4421 4422
4422 4423 @classmethod
4423 4424 def create(cls, created_by, subject, body, recipients, type_=None):
4424 4425 if type_ is None:
4425 4426 type_ = Notification.TYPE_MESSAGE
4426 4427
4427 4428 notification = cls()
4428 4429 notification.created_by_user = created_by
4429 4430 notification.subject = subject
4430 4431 notification.body = body
4431 4432 notification.type_ = type_
4432 4433 notification.created_on = datetime.datetime.now()
4433 4434
4434 4435 # For each recipient, link the created notification to their account
4435 4436 for u in recipients:
4436 4437 assoc = UserNotification()
4437 4438 assoc.user_id = u.user_id
4438 4439 assoc.notification = notification
4439 4440
4440 4441 # if created_by is among the recipients, mark their notification
4441 4442 # as read
4442 4443 if u.user_id == created_by.user_id:
4443 4444 assoc.read = True
4444 4445 Session().add(assoc)
4445 4446
4446 4447 Session().add(notification)
4447 4448
4448 4449 return notification
4449 4450
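# --- illustrative usage sketch (editor's addition, not part of db.py) ---
# Creating a notification of the new pull-request-update type; the usernames
# and subject/body are hypothetical, and importing Session from
# rhodecode.model.meta is an assumption based on stock RhodeCode.
from rhodecode.model.db import Notification, User
from rhodecode.model.meta import Session

author = User.get_by_username('author')
reviewer = User.get_by_username('reviewer')

notification = Notification.create(
    created_by=author,
    subject='pull request !1 updated',
    body='commits were added to pull request !1',
    recipients=[author, reviewer],
    type_=Notification.TYPE_PULL_REQUEST_UPDATE)
Session().commit()
# the author's own UserNotification association is created pre-marked as read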
4450 4451
4451 4452 class UserNotification(Base, BaseModel):
4452 4453 __tablename__ = 'user_to_notification'
4453 4454 __table_args__ = (
4454 4455 UniqueConstraint('user_id', 'notification_id'),
4455 4456 base_table_args
4456 4457 )
4457 4458
4458 4459 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4459 4460 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4460 4461 read = Column('read', Boolean, default=False)
4461 4462 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4462 4463
4463 4464 user = relationship('User', lazy="joined")
4464 4465 notification = relationship('Notification', lazy="joined",
4465 4466 order_by=lambda: Notification.created_on.desc(),)
4466 4467
4467 4468 def mark_as_read(self):
4468 4469 self.read = True
4469 4470 Session().add(self)
4470 4471
4471 4472
4472 4473 class Gist(Base, BaseModel):
4473 4474 __tablename__ = 'gists'
4474 4475 __table_args__ = (
4475 4476 Index('g_gist_access_id_idx', 'gist_access_id'),
4476 4477 Index('g_created_on_idx', 'created_on'),
4477 4478 base_table_args
4478 4479 )
4479 4480
4480 4481 GIST_PUBLIC = u'public'
4481 4482 GIST_PRIVATE = u'private'
4482 4483 DEFAULT_FILENAME = u'gistfile1.txt'
4483 4484
4484 4485 ACL_LEVEL_PUBLIC = u'acl_public'
4485 4486 ACL_LEVEL_PRIVATE = u'acl_private'
4486 4487
4487 4488 gist_id = Column('gist_id', Integer(), primary_key=True)
4488 4489 gist_access_id = Column('gist_access_id', Unicode(250))
4489 4490 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4490 4491 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4491 4492 gist_expires = Column('gist_expires', Float(53), nullable=False)
4492 4493 gist_type = Column('gist_type', Unicode(128), nullable=False)
4493 4494 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4494 4495 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4495 4496 acl_level = Column('acl_level', Unicode(128), nullable=True)
4496 4497
4497 4498 owner = relationship('User')
4498 4499
4499 4500 def __repr__(self):
4500 4501 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4501 4502
4502 4503 @hybrid_property
4503 4504 def description_safe(self):
4504 4505 from rhodecode.lib import helpers as h
4505 4506 return h.escape(self.gist_description)
4506 4507
4507 4508 @classmethod
4508 4509 def get_or_404(cls, id_):
4509 4510 from pyramid.httpexceptions import HTTPNotFound
4510 4511
4511 4512 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4512 4513 if not res:
4513 4514 raise HTTPNotFound()
4514 4515 return res
4515 4516
4516 4517 @classmethod
4517 4518 def get_by_access_id(cls, gist_access_id):
4518 4519 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4519 4520
4520 4521 def gist_url(self):
4521 4522 from rhodecode.model.gist import GistModel
4522 4523 return GistModel().get_url(self)
4523 4524
4524 4525 @classmethod
4525 4526 def base_path(cls):
4526 4527 """
4527 4528 Returns the base path where all gists are stored
4528 4529
4529 4530 :param cls:
4530 4531 """
4531 4532 from rhodecode.model.gist import GIST_STORE_LOC
4532 4533 q = Session().query(RhodeCodeUi)\
4533 4534 .filter(RhodeCodeUi.ui_key == URL_SEP)
4534 4535 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4535 4536 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4536 4537
4537 4538 def get_api_data(self):
4538 4539 """
4539 4540 Common function for generating gist-related data for the API
4540 4541 """
4541 4542 gist = self
4542 4543 data = {
4543 4544 'gist_id': gist.gist_id,
4544 4545 'type': gist.gist_type,
4545 4546 'access_id': gist.gist_access_id,
4546 4547 'description': gist.gist_description,
4547 4548 'url': gist.gist_url(),
4548 4549 'expires': gist.gist_expires,
4549 4550 'created_on': gist.created_on,
4550 4551 'modified_at': gist.modified_at,
4551 4552 'content': None,
4552 4553 'acl_level': gist.acl_level,
4553 4554 }
4554 4555 return data
4555 4556
4556 4557 def __json__(self):
4557 4558 data = dict(
4558 4559 )
4559 4560 data.update(self.get_api_data())
4560 4561 return data
4561 4562 # SCM functions
4562 4563
4563 4564 def scm_instance(self, **kwargs):
4564 4565 """
4565 4566 Get an instance of VCS Repository
4566 4567
4567 4568 :param kwargs:
4568 4569 """
4569 4570 from rhodecode.model.gist import GistModel
4570 4571 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4571 4572 return get_vcs_instance(
4572 4573 repo_path=safe_str(full_repo_path), create=False,
4573 4574 _vcs_alias=GistModel.vcs_backend)
4574 4575
4575 4576
4576 4577 class ExternalIdentity(Base, BaseModel):
4577 4578 __tablename__ = 'external_identities'
4578 4579 __table_args__ = (
4579 4580 Index('local_user_id_idx', 'local_user_id'),
4580 4581 Index('external_id_idx', 'external_id'),
4581 4582 base_table_args
4582 4583 )
4583 4584
4584 4585 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4585 4586 external_username = Column('external_username', Unicode(1024), default=u'')
4586 4587 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4587 4588 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4588 4589 access_token = Column('access_token', String(1024), default=u'')
4589 4590 alt_token = Column('alt_token', String(1024), default=u'')
4590 4591 token_secret = Column('token_secret', String(1024), default=u'')
4591 4592
4592 4593 @classmethod
4593 4594 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4594 4595 """
4595 4596 Returns ExternalIdentity instance based on search params
4596 4597
4597 4598 :param external_id:
4598 4599 :param provider_name:
4599 4600 :return: ExternalIdentity
4600 4601 """
4601 4602 query = cls.query()
4602 4603 query = query.filter(cls.external_id == external_id)
4603 4604 query = query.filter(cls.provider_name == provider_name)
4604 4605 if local_user_id:
4605 4606 query = query.filter(cls.local_user_id == local_user_id)
4606 4607 return query.first()
4607 4608
4608 4609 @classmethod
4609 4610 def user_by_external_id_and_provider(cls, external_id, provider_name):
4610 4611 """
4611 4612 Returns User instance based on search params
4612 4613
4613 4614 :param external_id:
4614 4615 :param provider_name:
4615 4616 :return: User
4616 4617 """
4617 4618 query = User.query()
4618 4619 query = query.filter(cls.external_id == external_id)
4619 4620 query = query.filter(cls.provider_name == provider_name)
4620 4621 query = query.filter(User.user_id == cls.local_user_id)
4621 4622 return query.first()
4622 4623
4623 4624 @classmethod
4624 4625 def by_local_user_id(cls, local_user_id):
4625 4626 """
4626 4627 Returns all external identities (tokens) for the given user
4627 4628
4628 4629 :param local_user_id:
4629 4630 :return: ExternalIdentity
4630 4631 """
4631 4632 query = cls.query()
4632 4633 query = query.filter(cls.local_user_id == local_user_id)
4633 4634 return query
4634 4635
4635 4636 @classmethod
4636 4637 def load_provider_plugin(cls, plugin_id):
4637 4638 from rhodecode.authentication.base import loadplugin
4638 4639 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4639 4640 auth_plugin = loadplugin(_plugin_id)
4640 4641 return auth_plugin
4641 4642
4642 4643
4643 4644 class Integration(Base, BaseModel):
4644 4645 __tablename__ = 'integrations'
4645 4646 __table_args__ = (
4646 4647 base_table_args
4647 4648 )
4648 4649
4649 4650 integration_id = Column('integration_id', Integer(), primary_key=True)
4650 4651 integration_type = Column('integration_type', String(255))
4651 4652 enabled = Column('enabled', Boolean(), nullable=False)
4652 4653 name = Column('name', String(255), nullable=False)
4653 4654 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4654 4655 default=False)
4655 4656
4656 4657 settings = Column(
4657 4658 'settings_json', MutationObj.as_mutable(
4658 4659 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4659 4660 repo_id = Column(
4660 4661 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4661 4662 nullable=True, unique=None, default=None)
4662 4663 repo = relationship('Repository', lazy='joined')
4663 4664
4664 4665 repo_group_id = Column(
4665 4666 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4666 4667 nullable=True, unique=None, default=None)
4667 4668 repo_group = relationship('RepoGroup', lazy='joined')
4668 4669
4669 4670 @property
4670 4671 def scope(self):
4671 4672 if self.repo:
4672 4673 return repr(self.repo)
4673 4674 if self.repo_group:
4674 4675 if self.child_repos_only:
4675 4676 return repr(self.repo_group) + ' (child repos only)'
4676 4677 else:
4677 4678 return repr(self.repo_group) + ' (recursive)'
4678 4679 if self.child_repos_only:
4679 4680 return 'root_repos'
4680 4681 return 'global'
4681 4682
4682 4683 def __repr__(self):
4683 4684 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4684 4685
4685 4686
4686 4687 class RepoReviewRuleUser(Base, BaseModel):
4687 4688 __tablename__ = 'repo_review_rules_users'
4688 4689 __table_args__ = (
4689 4690 base_table_args
4690 4691 )
4691 4692
4692 4693 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4693 4694 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4694 4695 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4695 4696 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4696 4697 user = relationship('User')
4697 4698
4698 4699 def rule_data(self):
4699 4700 return {
4700 4701 'mandatory': self.mandatory
4701 4702 }
4702 4703
4703 4704
4704 4705 class RepoReviewRuleUserGroup(Base, BaseModel):
4705 4706 __tablename__ = 'repo_review_rules_users_groups'
4706 4707 __table_args__ = (
4707 4708 base_table_args
4708 4709 )
4709 4710
4710 4711 VOTE_RULE_ALL = -1
4711 4712
4712 4713 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4713 4714 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4714 4715 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4715 4716 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4716 4717 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4717 4718 users_group = relationship('UserGroup')
4718 4719
4719 4720 def rule_data(self):
4720 4721 return {
4721 4722 'mandatory': self.mandatory,
4722 4723 'vote_rule': self.vote_rule
4723 4724 }
4724 4725
4725 4726 @property
4726 4727 def vote_rule_label(self):
4727 4728 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4728 4729 return 'all must vote'
4729 4730 else:
4730 4731 return 'min. vote {}'.format(self.vote_rule)
4731 4732
4732 4733
4733 4734 class RepoReviewRule(Base, BaseModel):
4734 4735 __tablename__ = 'repo_review_rules'
4735 4736 __table_args__ = (
4736 4737 base_table_args
4737 4738 )
4738 4739
4739 4740 repo_review_rule_id = Column(
4740 4741 'repo_review_rule_id', Integer(), primary_key=True)
4741 4742 repo_id = Column(
4742 4743 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4743 4744 repo = relationship('Repository', backref='review_rules')
4744 4745
4745 4746 review_rule_name = Column('review_rule_name', String(255))
4746 4747 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4747 4748 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4748 4749 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4749 4750
4750 4751 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4751 4752 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4752 4753 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4753 4754 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4754 4755
4755 4756 rule_users = relationship('RepoReviewRuleUser')
4756 4757 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4757 4758
4758 4759 def _validate_pattern(self, value):
4759 4760 re.compile('^' + glob2re(value) + '$')
4760 4761
4761 4762 @hybrid_property
4762 4763 def source_branch_pattern(self):
4763 4764 return self._branch_pattern or '*'
4764 4765
4765 4766 @source_branch_pattern.setter
4766 4767 def source_branch_pattern(self, value):
4767 4768 self._validate_pattern(value)
4768 4769 self._branch_pattern = value or '*'
4769 4770
4770 4771 @hybrid_property
4771 4772 def target_branch_pattern(self):
4772 4773 return self._target_branch_pattern or '*'
4773 4774
4774 4775 @target_branch_pattern.setter
4775 4776 def target_branch_pattern(self, value):
4776 4777 self._validate_pattern(value)
4777 4778 self._target_branch_pattern = value or '*'
4778 4779
4779 4780 @hybrid_property
4780 4781 def file_pattern(self):
4781 4782 return self._file_pattern or '*'
4782 4783
4783 4784 @file_pattern.setter
4784 4785 def file_pattern(self, value):
4785 4786 self._validate_pattern(value)
4786 4787 self._file_pattern = value or '*'
4787 4788
4788 4789 def matches(self, source_branch, target_branch, files_changed):
4789 4790 """
4790 4791 Check if this review rule matches a branch/files in a pull request
4791 4792
4792 4793 :param source_branch: source branch name for the commit
4793 4794 :param target_branch: target branch name for the commit
4794 4795 :param files_changed: list of file paths changed in the pull request
4795 4796 """
4796 4797
4797 4798 source_branch = source_branch or ''
4798 4799 target_branch = target_branch or ''
4799 4800 files_changed = files_changed or []
4800 4801
4801 4802 branch_matches = True
4802 4803 if source_branch or target_branch:
4803 4804 if self.source_branch_pattern == '*':
4804 4805 source_branch_match = True
4805 4806 else:
4806 4807 if self.source_branch_pattern.startswith('re:'):
4807 4808 source_pattern = self.source_branch_pattern[3:]
4808 4809 else:
4809 4810 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4810 4811 source_branch_regex = re.compile(source_pattern)
4811 4812 source_branch_match = bool(source_branch_regex.search(source_branch))
4812 4813 if self.target_branch_pattern == '*':
4813 4814 target_branch_match = True
4814 4815 else:
4815 4816 if self.target_branch_pattern.startswith('re:'):
4816 4817 target_pattern = self.target_branch_pattern[3:]
4817 4818 else:
4818 4819 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4819 4820 target_branch_regex = re.compile(target_pattern)
4820 4821 target_branch_match = bool(target_branch_regex.search(target_branch))
4821 4822
4822 4823 branch_matches = source_branch_match and target_branch_match
4823 4824
4824 4825 files_matches = True
4825 4826 if self.file_pattern != '*':
4826 4827 files_matches = False
4827 4828 if self.file_pattern.startswith('re:'):
4828 4829 file_pattern = self.file_pattern[3:]
4829 4830 else:
4830 4831 file_pattern = glob2re(self.file_pattern)
4831 4832 file_regex = re.compile(file_pattern)
4832 4833 for filename in files_changed:
4833 4834 if file_regex.search(filename):
4834 4835 files_matches = True
4835 4836 break
4836 4837
4837 4838 return branch_matches and files_matches
4838 4839
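    # Editorial sketch of how matches() is typically used (hypothetical rule values):
    # patterns are globs by default and are translated with glob2re(); a 're:' prefix
    # switches the remainder to a raw regular expression.
    #
    #   rule = RepoReviewRule()
    #   rule.source_branch_pattern = 'feature/*'                # glob
    #   rule.target_branch_pattern = 're:^(master|develop)$'    # raw regex
    #   rule.file_pattern = '*.py'                               # any matching file triggers the rule
    #   rule.matches('feature/x', 'master', ['rhodecode/model/db.py'])  # -> True
    #   rule.matches('hotfix/x', 'master', ['docs/index.rst'])          # -> False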
4839 4840 @property
4840 4841 def review_users(self):
4841 4842 """ Returns the users which this rule applies to """
4842 4843
4843 4844 users = collections.OrderedDict()
4844 4845
4845 4846 for rule_user in self.rule_users:
4846 4847 if rule_user.user.active:
4847 4848 if rule_user.user not in users:
4848 4849 users[rule_user.user.username] = {
4849 4850 'user': rule_user.user,
4850 4851 'source': 'user',
4851 4852 'source_data': {},
4852 4853 'data': rule_user.rule_data()
4853 4854 }
4854 4855
4855 4856 for rule_user_group in self.rule_user_groups:
4856 4857 source_data = {
4857 4858 'user_group_id': rule_user_group.users_group.users_group_id,
4858 4859 'name': rule_user_group.users_group.users_group_name,
4859 4860 'members': len(rule_user_group.users_group.members)
4860 4861 }
4861 4862 for member in rule_user_group.users_group.members:
4862 4863 if member.user.active:
4863 4864 key = member.user.username
4864 4865 if key in users:
4865 4866 # skip this member as we already have them,
4866 4867 # this prevents overriding the "first" matched
4867 4868 # users with duplicates in multiple groups
4868 4869 continue
4869 4870
4870 4871 users[key] = {
4871 4872 'user': member.user,
4872 4873 'source': 'user_group',
4873 4874 'source_data': source_data,
4874 4875 'data': rule_user_group.rule_data()
4875 4876 }
4876 4877
4877 4878 return users
4878 4879
4879 4880 def user_group_vote_rule(self, user_id):
4880 4881
4881 4882 rules = []
4882 4883 if not self.rule_user_groups:
4883 4884 return rules
4884 4885
4885 4886 for user_group in self.rule_user_groups:
4886 4887 user_group_members = [x.user_id for x in user_group.users_group.members]
4887 4888 if user_id in user_group_members:
4888 4889 rules.append(user_group)
4889 4890 return rules
4890 4891
4891 4892 def __repr__(self):
4892 4893 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4893 4894 self.repo_review_rule_id, self.repo)
4894 4895
4895 4896
4896 4897 class ScheduleEntry(Base, BaseModel):
4897 4898 __tablename__ = 'schedule_entries'
4898 4899 __table_args__ = (
4899 4900 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4900 4901 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4901 4902 base_table_args,
4902 4903 )
4903 4904
4904 4905 schedule_types = ['crontab', 'timedelta', 'integer']
4905 4906 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4906 4907
4907 4908 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4908 4909 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4909 4910 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4910 4911
4911 4912 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4912 4913 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4913 4914
4914 4915 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4915 4916 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4916 4917
4917 4918 # task
4918 4919 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4919 4920 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4920 4921 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4921 4922 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4922 4923
4923 4924 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4924 4925 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4925 4926
4926 4927 @hybrid_property
4927 4928 def schedule_type(self):
4928 4929 return self._schedule_type
4929 4930
4930 4931 @schedule_type.setter
4931 4932 def schedule_type(self, val):
4932 4933 if val not in self.schedule_types:
4933 4934 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4934 4935 self.schedule_types, val))
4935 4936
4936 4937 self._schedule_type = val
4937 4938
4938 4939 @classmethod
4939 4940 def get_uid(cls, obj):
4940 4941 args = obj.task_args
4941 4942 kwargs = obj.task_kwargs
4942 4943 if isinstance(args, JsonRaw):
4943 4944 try:
4944 4945 args = json.loads(args)
4945 4946 except ValueError:
4946 4947 args = tuple()
4947 4948
4948 4949 if isinstance(kwargs, JsonRaw):
4949 4950 try:
4950 4951 kwargs = json.loads(kwargs)
4951 4952 except ValueError:
4952 4953 kwargs = dict()
4953 4954
4954 4955 dot_notation = obj.task_dot_notation
4955 4956 val = '.'.join(map(safe_str, [
4956 4957 sorted(dot_notation), args, sorted(kwargs.items())]))
4957 4958 return hashlib.sha1(val).hexdigest()
4958 4959
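    # Editorial sketch (hypothetical values): the uid is a sha1 digest derived from the
    # task dot-notation, its args and its sorted kwargs, so two entries describing the
    # same task call collide on the `task_uid` unique constraint.
    #
    #   entry = ScheduleEntry(task_dot_notation='rhodecode.lib.celerylib.tasks.send_email',
    #                         task_args=[], task_kwargs={})
    #   ScheduleEntry.get_uid(entry)   # -> stable 40-character hex digest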
4959 4960 @classmethod
4960 4961 def get_by_schedule_name(cls, schedule_name):
4961 4962 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4962 4963
4963 4964 @classmethod
4964 4965 def get_by_schedule_id(cls, schedule_id):
4965 4966 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4966 4967
4967 4968 @property
4968 4969 def task(self):
4969 4970 return self.task_dot_notation
4970 4971
4971 4972 @property
4972 4973 def schedule(self):
4973 4974 from rhodecode.lib.celerylib.utils import raw_2_schedule
4974 4975 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4975 4976 return schedule
4976 4977
4977 4978 @property
4978 4979 def args(self):
4979 4980 try:
4980 4981 return list(self.task_args or [])
4981 4982 except ValueError:
4982 4983 return list()
4983 4984
4984 4985 @property
4985 4986 def kwargs(self):
4986 4987 try:
4987 4988 return dict(self.task_kwargs or {})
4988 4989 except ValueError:
4989 4990 return dict()
4990 4991
4991 4992 def _as_raw(self, val):
4992 4993 if hasattr(val, 'de_coerce'):
4993 4994 val = val.de_coerce()
4994 4995 if val:
4995 4996 val = json.dumps(val)
4996 4997
4997 4998 return val
4998 4999
4999 5000 @property
5000 5001 def schedule_definition_raw(self):
5001 5002 return self._as_raw(self.schedule_definition)
5002 5003
5003 5004 @property
5004 5005 def args_raw(self):
5005 5006 return self._as_raw(self.task_args)
5006 5007
5007 5008 @property
5008 5009 def kwargs_raw(self):
5009 5010 return self._as_raw(self.task_kwargs)
5010 5011
5011 5012 def __repr__(self):
5012 5013 return '<DB:ScheduleEntry({}:{})>'.format(
5013 5014 self.schedule_entry_id, self.schedule_name)
5014 5015
5015 5016
5016 5017 @event.listens_for(ScheduleEntry, 'before_update')
5017 5018 def update_task_uid(mapper, connection, target):
5018 5019 target.task_uid = ScheduleEntry.get_uid(target)
5019 5020
5020 5021
5021 5022 @event.listens_for(ScheduleEntry, 'before_insert')
5022 5023 def set_task_uid(mapper, connection, target):
5023 5024 target.task_uid = ScheduleEntry.get_uid(target)
5024 5025
5025 5026
5026 5027 class _BaseBranchPerms(BaseModel):
5027 5028 @classmethod
5028 5029 def compute_hash(cls, value):
5029 5030 return sha1_safe(value)
5030 5031
5031 5032 @hybrid_property
5032 5033 def branch_pattern(self):
5033 5034 return self._branch_pattern or '*'
5034 5035
5035 5036 @hybrid_property
5036 5037 def branch_hash(self):
5037 5038 return self._branch_hash
5038 5039
5039 5040 def _validate_glob(self, value):
5040 5041 re.compile('^' + glob2re(value) + '$')
5041 5042
5042 5043 @branch_pattern.setter
5043 5044 def branch_pattern(self, value):
5044 5045 self._validate_glob(value)
5045 5046 self._branch_pattern = value or '*'
5046 5047 # set the Hash when setting the branch pattern
5047 5048 self._branch_hash = self.compute_hash(self._branch_pattern)
5048 5049
5049 5050 def matches(self, branch):
5050 5051 """
5051 5052 Check if the given branch matches this entry
5052 5053
5053 5054 :param branch: branch name for the commit
5054 5055 """
5055 5056
5056 5057 branch = branch or ''
5057 5058
5058 5059 branch_matches = True
5059 5060 if branch:
5060 5061 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5061 5062 branch_matches = bool(branch_regex.search(branch))
5062 5063
5063 5064 return branch_matches
5064 5065
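# Editorial sketch (hypothetical values): branch rules built on the mixin above are
# plain globs, so a pattern like 'release/*' limits the rule to matching branch names;
# an empty branch name always passes.
#
#   perm = UserToRepoBranchPermission()
#   perm.branch_pattern = 'release/*'   # the setter also stores the computed branch hash
#   perm.matches('release/1.2')         # -> True
#   perm.matches('master')              # -> False
#   perm.matches('')                    # -> True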
5065 5066
5066 5067 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5067 5068 __tablename__ = 'user_to_repo_branch_permissions'
5068 5069 __table_args__ = (
5069 5070 base_table_args
5070 5071 )
5071 5072
5072 5073 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5073 5074
5074 5075 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5075 5076 repo = relationship('Repository', backref='user_branch_perms')
5076 5077
5077 5078 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5078 5079 permission = relationship('Permission')
5079 5080
5080 5081 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5081 5082 user_repo_to_perm = relationship('UserRepoToPerm')
5082 5083
5083 5084 rule_order = Column('rule_order', Integer(), nullable=False)
5084 5085 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5085 5086 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5086 5087
5087 5088 def __unicode__(self):
5088 5089 return u'<UserBranchPermission(%s => %r)>' % (
5089 5090 self.user_repo_to_perm, self.branch_pattern)
5090 5091
5091 5092
5092 5093 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5093 5094 __tablename__ = 'user_group_to_repo_branch_permissions'
5094 5095 __table_args__ = (
5095 5096 base_table_args
5096 5097 )
5097 5098
5098 5099 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5099 5100
5100 5101 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5101 5102 repo = relationship('Repository', backref='user_group_branch_perms')
5102 5103
5103 5104 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5104 5105 permission = relationship('Permission')
5105 5106
5106 5107 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5107 5108 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5108 5109
5109 5110 rule_order = Column('rule_order', Integer(), nullable=False)
5110 5111 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5111 5112 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5112 5113
5113 5114 def __unicode__(self):
5114 5115 return u'<UserBranchPermission(%s => %r)>' % (
5115 5116 self.user_group_repo_to_perm, self.branch_pattern)
5116 5117
5117 5118
5118 5119 class UserBookmark(Base, BaseModel):
5119 5120 __tablename__ = 'user_bookmarks'
5120 5121 __table_args__ = (
5121 5122 UniqueConstraint('user_id', 'bookmark_repo_id'),
5122 5123 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5123 5124 UniqueConstraint('user_id', 'bookmark_position'),
5124 5125 base_table_args
5125 5126 )
5126 5127
5127 5128 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5128 5129 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5129 5130 position = Column("bookmark_position", Integer(), nullable=False)
5130 5131 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5131 5132 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5132 5133 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5133 5134
5134 5135 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5135 5136 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5136 5137
5137 5138 user = relationship("User")
5138 5139
5139 5140 repository = relationship("Repository")
5140 5141 repository_group = relationship("RepoGroup")
5141 5142
5142 5143 @classmethod
5143 5144 def get_by_position_for_user(cls, position, user_id):
5144 5145 return cls.query() \
5145 5146 .filter(UserBookmark.user_id == user_id) \
5146 5147 .filter(UserBookmark.position == position).scalar()
5147 5148
5148 5149 @classmethod
5149 5150 def get_bookmarks_for_user(cls, user_id):
5150 5151 return cls.query() \
5151 5152 .filter(UserBookmark.user_id == user_id) \
5152 5153 .options(joinedload(UserBookmark.repository)) \
5153 5154 .options(joinedload(UserBookmark.repository_group)) \
5154 5155 .order_by(UserBookmark.position.asc()) \
5155 5156 .all()
5156 5157
5157 5158 def __unicode__(self):
5158 5159 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5159 5160
5160 5161
5161 5162 class FileStore(Base, BaseModel):
5162 5163 __tablename__ = 'file_store'
5163 5164 __table_args__ = (
5164 5165 base_table_args
5165 5166 )
5166 5167
5167 5168 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5168 5169 file_uid = Column('file_uid', String(1024), nullable=False)
5169 5170 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5170 5171 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5171 5172 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5172 5173
5173 5174 # sha256 hash
5174 5175 file_hash = Column('file_hash', String(512), nullable=False)
5175 5176 file_size = Column('file_size', BigInteger(), nullable=False)
5176 5177
5177 5178 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5178 5179 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5179 5180 accessed_count = Column('accessed_count', Integer(), default=0)
5180 5181
5181 5182 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5182 5183
5183 5184 # if repo/repo_group reference is set, check for permissions
5184 5185 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5185 5186
5186 5187 # hidden marks an attachment that should not be shown in the artifact listing
5187 5188 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5188 5189
5189 5190 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5190 5191 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5191 5192
5192 5193 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5193 5194
5194 5195 # scope limited to a user the requester has access to
5195 5196 scope_user_id = Column(
5196 5197 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5197 5198 nullable=True, unique=None, default=None)
5198 5199 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5199 5200
5200 5201 # scope limited to a user group the requester has access to
5201 5202 scope_user_group_id = Column(
5202 5203 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5203 5204 nullable=True, unique=None, default=None)
5204 5205 user_group = relationship('UserGroup', lazy='joined')
5205 5206
5206 5207 # scope limited to a repo the requester has access to
5207 5208 scope_repo_id = Column(
5208 5209 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5209 5210 nullable=True, unique=None, default=None)
5210 5211 repo = relationship('Repository', lazy='joined')
5211 5212
5212 5213 # scope limited to a repo group the requester has access to
5213 5214 scope_repo_group_id = Column(
5214 5215 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5215 5216 nullable=True, unique=None, default=None)
5216 5217 repo_group = relationship('RepoGroup', lazy='joined')
5217 5218
5218 5219 @classmethod
5219 5220 def get_by_store_uid(cls, file_store_uid):
5220 5221 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5221 5222
5222 5223 @classmethod
5223 5224 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5224 5225 file_description='', enabled=True, hidden=False, check_acl=True,
5225 5226 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5226 5227
5227 5228 store_entry = FileStore()
5228 5229 store_entry.file_uid = file_uid
5229 5230 store_entry.file_display_name = file_display_name
5230 5231 store_entry.file_org_name = filename
5231 5232 store_entry.file_size = file_size
5232 5233 store_entry.file_hash = file_hash
5233 5234 store_entry.file_description = file_description
5234 5235
5235 5236 store_entry.check_acl = check_acl
5236 5237 store_entry.enabled = enabled
5237 5238 store_entry.hidden = hidden
5238 5239
5239 5240 store_entry.user_id = user_id
5240 5241 store_entry.scope_user_id = scope_user_id
5241 5242 store_entry.scope_repo_id = scope_repo_id
5242 5243 store_entry.scope_repo_group_id = scope_repo_group_id
5243 5244
5244 5245 return store_entry
5245 5246
5246 5247 @classmethod
5247 5248 def store_metadata(cls, file_store_id, args, commit=True):
5248 5249 file_store = FileStore.get(file_store_id)
5249 5250 if file_store is None:
5250 5251 return
5251 5252
5252 5253 for section, key, value, value_type in args:
5253 5254 has_key = FileStoreMetadata().query() \
5254 5255 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5255 5256 .filter(FileStoreMetadata.file_store_meta_section == section) \
5256 5257 .filter(FileStoreMetadata.file_store_meta_key == key) \
5257 5258 .scalar()
5258 5259 if has_key:
5259 5260 msg = 'key `{}` already defined under section `{}` for this file.'\
5260 5261 .format(key, section)
5261 5262 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5262 5263
5263 5264 # NOTE(marcink): raises ArtifactMetadataBadValueType
5264 5265 FileStoreMetadata.valid_value_type(value_type)
5265 5266
5266 5267 meta_entry = FileStoreMetadata()
5267 5268 meta_entry.file_store = file_store
5268 5269 meta_entry.file_store_meta_section = section
5269 5270 meta_entry.file_store_meta_key = key
5270 5271 meta_entry.file_store_meta_value_type = value_type
5271 5272 meta_entry.file_store_meta_value = value
5272 5273
5273 5274 Session().add(meta_entry)
5274 5275
5275 5276 try:
5276 5277 if commit:
5277 5278 Session().commit()
5278 5279 except IntegrityError:
5279 5280 Session().rollback()
5280 5281 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5281 5282
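    # Editorial sketch (hypothetical identifiers): `args` passed to store_metadata() is an
    # iterable of (section, key, value, value_type) tuples; value_type must resolve to one
    # of FileStoreMetadata.SETTINGS_TYPES, optionally with an '.encrypted' suffix.
    #
    #   FileStore.store_metadata(
    #       file_store_id=42,
    #       args=[('exif', 'camera', 'X100V', 'unicode'),
    #             ('scan', 'clean', 'True', 'bool')],
    #       commit=True)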
5282 5283 @classmethod
5283 5284 def bump_access_counter(cls, file_uid, commit=True):
5284 5285 FileStore().query()\
5285 5286 .filter(FileStore.file_uid == file_uid)\
5286 5287 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5287 5288 FileStore.accessed_on: datetime.datetime.now()})
5288 5289 if commit:
5289 5290 Session().commit()
5290 5291
5291 5292 def __json__(self):
5292 5293 data = {
5293 5294 'filename': self.file_display_name,
5294 5295 'filename_org': self.file_org_name,
5295 5296 'file_uid': self.file_uid,
5296 5297 'description': self.file_description,
5297 5298 'hidden': self.hidden,
5298 5299 'size': self.file_size,
5299 5300 'created_on': self.created_on,
5300 5301 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5301 5302 'downloaded_times': self.accessed_count,
5302 5303 'sha256': self.file_hash,
5303 5304 'metadata': self.file_metadata,
5304 5305 }
5305 5306
5306 5307 return data
5307 5308
5308 5309 def __repr__(self):
5309 5310 return '<FileStore({})>'.format(self.file_store_id)
5310 5311
5311 5312
5312 5313 class FileStoreMetadata(Base, BaseModel):
5313 5314 __tablename__ = 'file_store_metadata'
5314 5315 __table_args__ = (
5315 5316 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5316 5317 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5317 5318 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5318 5319 base_table_args
5319 5320 )
5320 5321 SETTINGS_TYPES = {
5321 5322 'str': safe_str,
5322 5323 'int': safe_int,
5323 5324 'unicode': safe_unicode,
5324 5325 'bool': str2bool,
5325 5326 'list': functools.partial(aslist, sep=',')
5326 5327 }
5327 5328
5328 5329 file_store_meta_id = Column(
5329 5330 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5330 5331 primary_key=True)
5331 5332 _file_store_meta_section = Column(
5332 5333 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5333 5334 nullable=True, unique=None, default=None)
5334 5335 _file_store_meta_section_hash = Column(
5335 5336 "file_store_meta_section_hash", String(255),
5336 5337 nullable=True, unique=None, default=None)
5337 5338 _file_store_meta_key = Column(
5338 5339 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5339 5340 nullable=True, unique=None, default=None)
5340 5341 _file_store_meta_key_hash = Column(
5341 5342 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5342 5343 _file_store_meta_value = Column(
5343 5344 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5344 5345 nullable=True, unique=None, default=None)
5345 5346 _file_store_meta_value_type = Column(
5346 5347 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5347 5348 default='unicode')
5348 5349
5349 5350 file_store_id = Column(
5350 5351 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5351 5352 nullable=True, unique=None, default=None)
5352 5353
5353 5354 file_store = relationship('FileStore', lazy='joined')
5354 5355
5355 5356 @classmethod
5356 5357 def valid_value_type(cls, value):
5357 5358 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5358 5359 raise ArtifactMetadataBadValueType(
5359 5360 'value_type must be one of %s, got %s' % (cls.SETTINGS_TYPES.keys(), value))
5360 5361
5361 5362 @hybrid_property
5362 5363 def file_store_meta_section(self):
5363 5364 return self._file_store_meta_section
5364 5365
5365 5366 @file_store_meta_section.setter
5366 5367 def file_store_meta_section(self, value):
5367 5368 self._file_store_meta_section = value
5368 5369 self._file_store_meta_section_hash = _hash_key(value)
5369 5370
5370 5371 @hybrid_property
5371 5372 def file_store_meta_key(self):
5372 5373 return self._file_store_meta_key
5373 5374
5374 5375 @file_store_meta_key.setter
5375 5376 def file_store_meta_key(self, value):
5376 5377 self._file_store_meta_key = value
5377 5378 self._file_store_meta_key_hash = _hash_key(value)
5378 5379
5379 5380 @hybrid_property
5380 5381 def file_store_meta_value(self):
5381 5382 val = self._file_store_meta_value
5382 5383
5383 5384 if self._file_store_meta_value_type:
5384 5385 # e.g. unicode.encrypted == unicode
5385 5386 _type = self._file_store_meta_value_type.split('.')[0]
5386 5387 # decode the encrypted value if it's encrypted field type
5387 5388 if '.encrypted' in self._file_store_meta_value_type:
5388 5389 cipher = EncryptedTextValue()
5389 5390 val = safe_unicode(cipher.process_result_value(val, None))
5390 5391 # do final type conversion
5391 5392 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5392 5393 val = converter(val)
5393 5394
5394 5395 return val
5395 5396
5396 5397 @file_store_meta_value.setter
5397 5398 def file_store_meta_value(self, val):
5398 5399 val = safe_unicode(val)
5399 5400 # encode the encrypted value
5400 5401 if '.encrypted' in self.file_store_meta_value_type:
5401 5402 cipher = EncryptedTextValue()
5402 5403 val = safe_unicode(cipher.process_bind_param(val, None))
5403 5404 self._file_store_meta_value = val
5404 5405
5405 5406 @hybrid_property
5406 5407 def file_store_meta_value_type(self):
5407 5408 return self._file_store_meta_value_type
5408 5409
5409 5410 @file_store_meta_value_type.setter
5410 5411 def file_store_meta_value_type(self, val):
5411 5412 # e.g. unicode.encrypted
5412 5413 self.valid_value_type(val)
5413 5414 self._file_store_meta_value_type = val
5414 5415
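    # Editorial sketch (hypothetical values): the value round-trips through these hybrid
    # properties; the type must be set first, since the value setter inspects it for the
    # '.encrypted' suffix before deciding whether to encrypt.
    #
    #   meta = FileStoreMetadata()
    #   meta.file_store_meta_value_type = 'int'
    #   meta.file_store_meta_value = '42'
    #   meta.file_store_meta_value                 # -> 42, converted via SETTINGS_TYPES['int']
    #
    #   meta.file_store_meta_value_type = 'unicode.encrypted'
    #   meta.file_store_meta_value = 'secret'      # stored encrypted via EncryptedTextValue
    #   meta.file_store_meta_value                 # -> u'secret', decrypted on read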
5415 5416 def __json__(self):
5416 5417 data = {
5417 5418 'artifact': self.file_store.file_uid,
5418 5419 'section': self.file_store_meta_section,
5419 5420 'key': self.file_store_meta_key,
5420 5421 'value': self.file_store_meta_value,
5421 5422 }
5422 5423
5423 5424 return data
5424 5425
5425 5426 def __repr__(self):
5426 5427 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.file_store_meta_section,
5427 5428 self.file_store_meta_key, self.file_store_meta_value)
5428 5429
5429 5430
5430 5431 class DbMigrateVersion(Base, BaseModel):
5431 5432 __tablename__ = 'db_migrate_version'
5432 5433 __table_args__ = (
5433 5434 base_table_args,
5434 5435 )
5435 5436
5436 5437 repository_id = Column('repository_id', String(250), primary_key=True)
5437 5438 repository_path = Column('repository_path', Text)
5438 5439 version = Column('version', Integer)
5439 5440
5440 5441 @classmethod
5441 5442 def set_version(cls, version):
5442 5443 """
5443 5444 Helper for forcing a different version, usually for debugging purposes via ishell.
5444 5445 """
5445 5446 ver = DbMigrateVersion.query().first()
5446 5447 ver.version = version
5447 5448 Session().commit()
5448 5449
5449 5450
5450 5451 class DbSession(Base, BaseModel):
5451 5452 __tablename__ = 'db_session'
5452 5453 __table_args__ = (
5453 5454 base_table_args,
5454 5455 )
5455 5456
5456 5457 def __repr__(self):
5457 5458 return '<DB:DbSession({})>'.format(self.id)
5458 5459
5459 5460 id = Column('id', Integer())
5460 5461 namespace = Column('namespace', String(255), primary_key=True)
5461 5462 accessed = Column('accessed', DateTime, nullable=False)
5462 5463 created = Column('created', DateTime, nullable=False)
5463 5464 data = Column('data', PickleType, nullable=False)
@@ -1,386 +1,390 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Model for notifications
24 24 """
25 25
26 26 import logging
27 27 import traceback
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from sqlalchemy.sql.expression import false, true
31 31
32 32 import rhodecode
33 33 from rhodecode.lib import helpers as h
34 34 from rhodecode.model import BaseModel
35 35 from rhodecode.model.db import Notification, User, UserNotification
36 36 from rhodecode.model.meta import Session
37 37 from rhodecode.translation import TranslationString
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 class NotificationModel(BaseModel):
43 43
44 44 cls = Notification
45 45
46 46 def __get_notification(self, notification):
47 47 if isinstance(notification, Notification):
48 48 return notification
49 49 elif isinstance(notification, (int, long)):
50 50 return Notification.get(notification)
51 51 else:
52 52 if notification:
53 53 raise Exception('notification must be int, long or an instance'
54 54 ' of Notification, got %s' % type(notification))
55 55
56 56 def create(
57 57 self, created_by, notification_subject, notification_body,
58 58 notification_type=Notification.TYPE_MESSAGE, recipients=None,
59 59 mention_recipients=None, with_email=True, email_kwargs=None):
60 60 """
61 61
62 62 Creates notification of given type
63 63
64 64 :param created_by: int, str or User instance. User who created this
65 65 notification
66 66 :param notification_subject: subject of notification itself
67 67 :param notification_body: body of notification text
68 68 :param notification_type: type of notification; based on it we
69 69 pick templates
70 70
71 71 :param recipients: list of int, str or User objects, when None
72 72 is given, send to all admins
73 73 :param mention_recipients: list of int, str or User objects,
74 74 that were mentioned
75 75 :param with_email: send email with this notification
76 76 :param email_kwargs: dict with arguments to generate email
77 77 """
78 78
79 79 from rhodecode.lib.celerylib import tasks, run_task
80 80
81 81 if recipients and not getattr(recipients, '__iter__', False):
82 82 raise Exception('recipients must be an iterable object')
83 83
84 84 created_by_obj = self._get_user(created_by)
85 85 # default MAIN body if not given
86 86 email_kwargs = email_kwargs or {'body': notification_body}
87 87 mention_recipients = mention_recipients or set()
88 88
89 89 if not created_by_obj:
90 90 raise Exception('unknown user %s' % created_by)
91 91
92 92 if recipients is None:
93 93 # recipients=None means send to all admins
94 94 recipients_objs = User.query().filter(User.admin == true()).all()
95 95 log.debug('sending notifications %s to admins: %s',
96 96 notification_type, recipients_objs)
97 97 else:
98 98 recipients_objs = set()
99 99 for u in recipients:
100 100 obj = self._get_user(u)
101 101 if obj:
102 102 recipients_objs.add(obj)
103 103 else: # we didn't find this user, log the error and carry on
104 104 log.error('cannot notify unknown user %r', u)
105 105
106 106 if not recipients_objs:
107 107 raise Exception('no valid recipients specified')
108 108
109 109 log.debug('sending notifications %s to %s',
110 110 notification_type, recipients_objs)
111 111
112 112 # add mentioned users into recipients
113 113 final_recipients = set(recipients_objs).union(mention_recipients)
114 114
115 115 notification = Notification.create(
116 116 created_by=created_by_obj, subject=notification_subject,
117 117 body=notification_body, recipients=final_recipients,
118 118 type_=notification_type
119 119 )
120 120
121 121 if not with_email: # skip sending email, and just create notification
122 122 return notification
123 123
124 124 # don't send email to person who created this comment
125 125 rec_objs = set(recipients_objs).difference({created_by_obj})
126 126
127 127 # now notify all recipients in question
128 128
129 129 for recipient in rec_objs.union(mention_recipients):
130 130 # inject current recipient
131 131 email_kwargs['recipient'] = recipient
132 132 email_kwargs['mention'] = recipient in mention_recipients
133 133 (subject, headers, email_body,
134 134 email_body_plaintext) = EmailNotificationModel().render_email(
135 135 notification_type, **email_kwargs)
136 136
137 137 log.debug(
138 138 'Creating notification email task for user:`%s`', recipient)
139 139 task = run_task(
140 140 tasks.send_email, recipient.email, subject,
141 141 email_body_plaintext, email_body)
142 142 log.debug('Created email task: %s', task)
143 143
144 144 return notification
145 145
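    # Editorial sketch (hypothetical values): a typical call site creates the in-app
    # notification and schedules one email task per recipient.
    #
    #   NotificationModel().create(
    #       created_by=admin_user,
    #       notification_subject='New pull request',
    #       notification_body='Please review !42',
    #       notification_type=Notification.TYPE_PULL_REQUEST,
    #       recipients=[reviewer_a, reviewer_b],
    #       email_kwargs={'pull_request_url': pr_url})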
146 146 def delete(self, user, notification):
147 147 # we don't want to remove the actual notification, just the assignment
148 148 try:
149 149 notification = self.__get_notification(notification)
150 150 user = self._get_user(user)
151 151 if notification and user:
152 152 obj = UserNotification.query()\
153 153 .filter(UserNotification.user == user)\
154 154 .filter(UserNotification.notification == notification)\
155 155 .one()
156 156 Session().delete(obj)
157 157 return True
158 158 except Exception:
159 159 log.error(traceback.format_exc())
160 160 raise
161 161
162 162 def get_for_user(self, user, filter_=None):
163 163 """
164 164 Get mentions for given user, filter them if filter dict is given
165 165 """
166 166 user = self._get_user(user)
167 167
168 168 q = UserNotification.query()\
169 169 .filter(UserNotification.user == user)\
170 170 .join((
171 171 Notification, UserNotification.notification_id ==
172 172 Notification.notification_id))
173 173 if filter_ == ['all']:
174 174 q = q # no filter
175 175 elif filter_ == ['unread']:
176 176 q = q.filter(UserNotification.read == false())
177 177 elif filter_:
178 178 q = q.filter(Notification.type_.in_(filter_))
179 179
180 180 return q
181 181
182 182 def mark_read(self, user, notification):
183 183 try:
184 184 notification = self.__get_notification(notification)
185 185 user = self._get_user(user)
186 186 if notification and user:
187 187 obj = UserNotification.query()\
188 188 .filter(UserNotification.user == user)\
189 189 .filter(UserNotification.notification == notification)\
190 190 .one()
191 191 obj.read = True
192 192 Session().add(obj)
193 193 return True
194 194 except Exception:
195 195 log.error(traceback.format_exc())
196 196 raise
197 197
198 198 def mark_all_read_for_user(self, user, filter_=None):
199 199 user = self._get_user(user)
200 200 q = UserNotification.query()\
201 201 .filter(UserNotification.user == user)\
202 202 .filter(UserNotification.read == false())\
203 203 .join((
204 204 Notification, UserNotification.notification_id ==
205 205 Notification.notification_id))
206 206 if filter_ == ['unread']:
207 207 q = q.filter(UserNotification.read == false())
208 208 elif filter_:
209 209 q = q.filter(Notification.type_.in_(filter_))
210 210
211 211 # this is a little inefficient but sqlalchemy doesn't support
212 212 # update on joined tables :(
213 213 for obj in q.all():
214 214 obj.read = True
215 215 Session().add(obj)
216 216
217 217 def get_unread_cnt_for_user(self, user):
218 218 user = self._get_user(user)
219 219 return UserNotification.query()\
220 220 .filter(UserNotification.read == false())\
221 221 .filter(UserNotification.user == user).count()
222 222
223 223 def get_unread_for_user(self, user):
224 224 user = self._get_user(user)
225 225 return [x.notification for x in UserNotification.query()
226 226 .filter(UserNotification.read == false())
227 227 .filter(UserNotification.user == user).all()]
228 228
229 229 def get_user_notification(self, user, notification):
230 230 user = self._get_user(user)
231 231 notification = self.__get_notification(notification)
232 232
233 233 return UserNotification.query()\
234 234 .filter(UserNotification.notification == notification)\
235 235 .filter(UserNotification.user == user).scalar()
236 236
237 237 def make_description(self, notification, translate, show_age=True):
238 238 """
239 239 Creates a human readable description based on properties
240 240 of notification object
241 241 """
242 242 _ = translate
243 243 _map = {
244 244 notification.TYPE_CHANGESET_COMMENT: [
245 245 _('%(user)s commented on commit %(date_or_age)s'),
246 246 _('%(user)s commented on commit at %(date_or_age)s'),
247 247 ],
248 248 notification.TYPE_MESSAGE: [
249 249 _('%(user)s sent message %(date_or_age)s'),
250 250 _('%(user)s sent message at %(date_or_age)s'),
251 251 ],
252 252 notification.TYPE_MENTION: [
253 253 _('%(user)s mentioned you %(date_or_age)s'),
254 254 _('%(user)s mentioned you at %(date_or_age)s'),
255 255 ],
256 256 notification.TYPE_REGISTRATION: [
257 257 _('%(user)s registered in RhodeCode %(date_or_age)s'),
258 258 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
259 259 ],
260 260 notification.TYPE_PULL_REQUEST: [
261 261 _('%(user)s opened new pull request %(date_or_age)s'),
262 262 _('%(user)s opened new pull request at %(date_or_age)s'),
263 263 ],
264 264 notification.TYPE_PULL_REQUEST_COMMENT: [
265 265 _('%(user)s commented on pull request %(date_or_age)s'),
266 266 _('%(user)s commented on pull request at %(date_or_age)s'),
267 267 ],
268 268 }
269 269
270 270 templates = _map[notification.type_]
271 271
272 272 if show_age:
273 273 template = templates[0]
274 274 date_or_age = h.age(notification.created_on)
275 275 if translate:
276 276 date_or_age = translate(date_or_age)
277 277
278 278 if isinstance(date_or_age, TranslationString):
279 279 date_or_age = date_or_age.interpolate()
280 280
281 281 else:
282 282 template = templates[1]
283 283 date_or_age = h.format_date(notification.created_on)
284 284
285 285 return template % {
286 286 'user': notification.created_by_user.username,
287 287 'date_or_age': date_or_age,
288 288 }
289 289
290 290
291 291 class EmailNotificationModel(BaseModel):
292 292 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
293 293 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
294 294 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
295 295 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
296 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
296 297 TYPE_MAIN = Notification.TYPE_MESSAGE
297 298
298 299 TYPE_PASSWORD_RESET = 'password_reset'
299 300 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
300 301 TYPE_EMAIL_TEST = 'email_test'
301 302 TYPE_TEST = 'test'
302 303
303 304 email_types = {
304 305 TYPE_MAIN:
305 306 'rhodecode:templates/email_templates/main.mako',
306 307 TYPE_TEST:
307 308 'rhodecode:templates/email_templates/test.mako',
308 309 TYPE_EMAIL_TEST:
309 310 'rhodecode:templates/email_templates/email_test.mako',
310 311 TYPE_REGISTRATION:
311 312 'rhodecode:templates/email_templates/user_registration.mako',
312 313 TYPE_PASSWORD_RESET:
313 314 'rhodecode:templates/email_templates/password_reset.mako',
314 315 TYPE_PASSWORD_RESET_CONFIRMATION:
315 316 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
316 317 TYPE_COMMIT_COMMENT:
317 318 'rhodecode:templates/email_templates/commit_comment.mako',
318 319 TYPE_PULL_REQUEST:
319 320 'rhodecode:templates/email_templates/pull_request_review.mako',
320 321 TYPE_PULL_REQUEST_COMMENT:
321 322 'rhodecode:templates/email_templates/pull_request_comment.mako',
323 TYPE_PULL_REQUEST_UPDATE:
324 'rhodecode:templates/email_templates/pull_request_update.mako',
322 325 }
323 326
324 327 def __init__(self):
325 328 """
326 329 Example usage::
327 330
328 331 (subject, headers, email_body,
329 332 email_body_plaintext) = EmailNotificationModel().render_email(
330 333 EmailNotificationModel.TYPE_TEST, **email_kwargs)
331 334
332 335 """
333 336 super(EmailNotificationModel, self).__init__()
334 337 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
335 338
336 339 def _update_kwargs_for_render(self, kwargs):
337 340 """
338 341 Inject params required for Mako rendering
339 342
340 343 :param kwargs:
341 344 """
342 345
343 346 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
347 kwargs['rhodecode_version'] = rhodecode.__version__
344 348 instance_url = h.route_url('home')
345 349 _kwargs = {
346 350 'instance_url': instance_url,
347 351 'whitespace_filter': self.whitespace_filter
348 352 }
349 353 _kwargs.update(kwargs)
350 354 return _kwargs
351 355
352 356 def whitespace_filter(self, text):
353 357 return text.replace('\n', '').replace('\t', '')
354 358
355 359 def get_renderer(self, type_, request):
356 360 template_name = self.email_types[type_]
357 361 return request.get_partial_renderer(template_name)
358 362
359 363 def render_email(self, type_, **kwargs):
360 364 """
361 365 renders template for email, and returns a tuple of
362 366 (subject, email_headers, email_html_body, email_plaintext_body)
363 367 """
364 368 # translator and helpers inject
365 369 _kwargs = self._update_kwargs_for_render(kwargs)
366 370 request = get_current_request()
367 371 email_template = self.get_renderer(type_, request=request)
368 372
369 373 subject = email_template.render('subject', **_kwargs)
370 374
371 375 try:
372 376 headers = email_template.render('headers', **_kwargs)
373 377 except AttributeError:
374 378 # it's not defined in template, ok we can skip it
375 379 headers = ''
376 380
377 381 try:
378 382 body_plaintext = email_template.render('body_plaintext', **_kwargs)
379 383 except AttributeError:
380 384 # it's not defined in template, ok we can skip it
381 385 body_plaintext = ''
382 386
383 387 # render WHOLE template
384 388 body = email_template.render(None, **_kwargs)
385 389
386 390 return subject, headers, body, body_plaintext
@@ -1,1770 +1,1863 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 'executed', 'reason', 'new', 'old', 'changes',
70 'source_changed', 'target_changed'])
68 class UpdateResponse(object):
69
70 def __init__(self, executed, reason, new, old, common_ancestor_id,
71 commit_changes, source_changed, target_changed):
72
73 self.executed = executed
74 self.reason = reason
75 self.new = new
76 self.old = old
77 self.common_ancestor_id = common_ancestor_id
78 self.changes = commit_changes
79 self.source_changed = source_changed
80 self.target_changed = target_changed
71 81
72 82
73 83 class PullRequestModel(BaseModel):
74 84
75 85 cls = PullRequest
76 86
77 87 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 88
79 89 UPDATE_STATUS_MESSAGES = {
80 90 UpdateFailureReason.NONE: lazy_ugettext(
81 91 'Pull request update successful.'),
82 92 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 93 'Pull request update failed because of an unknown error.'),
84 94 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 95 'No update needed because the source and target have not changed.'),
86 96 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 97 'Pull request cannot be updated because the reference type is '
88 98 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 99 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 100 'This pull request cannot be updated because the target '
91 101 'reference is missing.'),
92 102 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 103 'This pull request cannot be updated because the source '
94 104 'reference is missing.'),
95 105 }
96 106 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 107 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 108
99 109 def __get_pull_request(self, pull_request):
100 110 return self._get_instance((
101 111 PullRequest, PullRequestVersion), pull_request)
102 112
103 113 def _check_perms(self, perms, pull_request, user, api=False):
104 114 if not api:
105 115 return h.HasRepoPermissionAny(*perms)(
106 116 user=user, repo_name=pull_request.target_repo.repo_name)
107 117 else:
108 118 return h.HasRepoPermissionAnyApi(*perms)(
109 119 user=user, repo_name=pull_request.target_repo.repo_name)
110 120
111 121 def check_user_read(self, pull_request, user, api=False):
112 122 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 123 return self._check_perms(_perms, pull_request, user, api)
114 124
115 125 def check_user_merge(self, pull_request, user, api=False):
116 126 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 127 return self._check_perms(_perms, pull_request, user, api)
118 128
119 129 def check_user_update(self, pull_request, user, api=False):
120 130 owner = user.user_id == pull_request.user_id
121 131 return self.check_user_merge(pull_request, user, api) or owner
122 132
123 133 def check_user_delete(self, pull_request, user):
124 134 owner = user.user_id == pull_request.user_id
125 135 _perms = ('repository.admin',)
126 136 return self._check_perms(_perms, pull_request, user) or owner
127 137
128 138 def check_user_change_status(self, pull_request, user, api=False):
129 139 reviewer = user.user_id in [x.user_id for x in
130 140 pull_request.reviewers]
131 141 return self.check_user_update(pull_request, user, api) or reviewer
132 142
133 143 def check_user_comment(self, pull_request, user):
134 144 owner = user.user_id == pull_request.user_id
135 145 return self.check_user_read(pull_request, user) or owner
136 146
137 147 def get(self, pull_request):
138 148 return self.__get_pull_request(pull_request)
139 149
140 150 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
141 151 statuses=None, opened_by=None, order_by=None,
142 152 order_dir='desc', only_created=False):
143 153 repo = None
144 154 if repo_name:
145 155 repo = self._get_repo(repo_name)
146 156
147 157 q = PullRequest.query()
148 158
149 159 if search_q:
150 160 like_expression = u'%{}%'.format(safe_unicode(search_q))
151 161 q = q.filter(or_(
152 162 cast(PullRequest.pull_request_id, String).ilike(like_expression),
153 163 PullRequest.title.ilike(like_expression),
154 164 PullRequest.description.ilike(like_expression),
155 165 ))
156 166
157 167 # source or target
158 168 if repo and source:
159 169 q = q.filter(PullRequest.source_repo == repo)
160 170 elif repo:
161 171 q = q.filter(PullRequest.target_repo == repo)
162 172
163 173 # closed,opened
164 174 if statuses:
165 175 q = q.filter(PullRequest.status.in_(statuses))
166 176
167 177 # opened by filter
168 178 if opened_by:
169 179 q = q.filter(PullRequest.user_id.in_(opened_by))
170 180
171 181 # only get those that are in "created" state
172 182 if only_created:
173 183 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
174 184
175 185 if order_by:
176 186 order_map = {
177 187 'name_raw': PullRequest.pull_request_id,
178 188 'id': PullRequest.pull_request_id,
179 189 'title': PullRequest.title,
180 190 'updated_on_raw': PullRequest.updated_on,
181 191 'target_repo': PullRequest.target_repo_id
182 192 }
183 193 if order_dir == 'asc':
184 194 q = q.order_by(order_map[order_by].asc())
185 195 else:
186 196 q = q.order_by(order_map[order_by].desc())
187 197
188 198 return q
189 199
190 200 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
191 201 opened_by=None):
192 202 """
193 203 Count the number of pull requests for a specific repository.
194 204
195 205 :param repo_name: target or source repo
196 206 :param search_q: filter by text
197 207 :param source: boolean flag to specify if repo_name refers to source
198 208 :param statuses: list of pull request statuses
199 209 :param opened_by: author user of the pull request
200 210 :returns: int number of pull requests
201 211 """
202 212 q = self._prepare_get_all_query(
203 213 repo_name, search_q=search_q, source=source, statuses=statuses,
204 214 opened_by=opened_by)
205 215
206 216 return q.count()
207 217
208 218 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
209 219 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
210 220 """
211 221 Get all pull requests for a specific repository.
212 222
213 223 :param repo_name: target or source repo
214 224 :param search_q: filter by text
215 225 :param source: boolean flag to specify if repo_name refers to source
216 226 :param statuses: list of pull request statuses
217 227 :param opened_by: author user of the pull request
218 228 :param offset: pagination offset
219 229 :param length: length of returned list
220 230 :param order_by: order of the returned list
221 231 :param order_dir: 'asc' or 'desc' ordering direction
222 232 :returns: list of pull requests
223 233 """
224 234 q = self._prepare_get_all_query(
225 235 repo_name, search_q=search_q, source=source, statuses=statuses,
226 236 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
227 237
228 238 if length:
229 239 pull_requests = q.limit(length).offset(offset).all()
230 240 else:
231 241 pull_requests = q.all()
232 242
233 243 return pull_requests
234 244
235 245 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
236 246 opened_by=None):
237 247 """
238 248 Count the number of pull requests for a specific repository that are
239 249 awaiting review.
240 250
241 251 :param repo_name: target or source repo
242 252 :param search_q: filter by text
243 253 :param source: boolean flag to specify if repo_name refers to source
244 254 :param statuses: list of pull request statuses
245 255 :param opened_by: author user of the pull request
246 256 :returns: int number of pull requests
247 257 """
248 258 pull_requests = self.get_awaiting_review(
249 259 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
250 260
251 261 return len(pull_requests)
252 262
253 263 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
254 264 opened_by=None, offset=0, length=None,
255 265 order_by=None, order_dir='desc'):
256 266 """
257 267 Get all pull requests for a specific repository that are awaiting
258 268 review.
259 269
260 270 :param repo_name: target or source repo
261 271 :param search_q: filter by text
262 272 :param source: boolean flag to specify if repo_name refers to source
263 273 :param statuses: list of pull request statuses
264 274 :param opened_by: author user of the pull request
265 275 :param offset: pagination offset
266 276 :param length: length of returned list
267 277 :param order_by: order of the returned list
268 278 :param order_dir: 'asc' or 'desc' ordering direction
269 279 :returns: list of pull requests
270 280 """
271 281 pull_requests = self.get_all(
272 282 repo_name, search_q=search_q, source=source, statuses=statuses,
273 283 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
274 284
275 285 _filtered_pull_requests = []
276 286 for pr in pull_requests:
277 287 status = pr.calculated_review_status()
278 288 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
279 289 ChangesetStatus.STATUS_UNDER_REVIEW]:
280 290 _filtered_pull_requests.append(pr)
281 291 if length:
282 292 return _filtered_pull_requests[offset:offset+length]
283 293 else:
284 294 return _filtered_pull_requests
285 295
286 296 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
287 297 opened_by=None, user_id=None):
288 298 """
289 299 Count the number of pull requests for a specific repository that are
290 300 awaiting review from a specific user.
291 301
292 302 :param repo_name: target or source repo
293 303 :param search_q: filter by text
294 304 :param source: boolean flag to specify if repo_name refers to source
295 305 :param statuses: list of pull request statuses
296 306 :param opened_by: author user of the pull request
297 307 :param user_id: reviewer user of the pull request
298 308 :returns: int number of pull requests
299 309 """
300 310 pull_requests = self.get_awaiting_my_review(
301 311 repo_name, search_q=search_q, source=source, statuses=statuses,
302 312 opened_by=opened_by, user_id=user_id)
303 313
304 314 return len(pull_requests)
305 315
306 316 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
307 317 opened_by=None, user_id=None, offset=0,
308 318 length=None, order_by=None, order_dir='desc'):
309 319 """
310 320 Get all pull requests for a specific repository that are awaiting
311 321 review from a specific user.
312 322
313 323 :param repo_name: target or source repo
314 324 :param search_q: filter by text
315 325 :param source: boolean flag to specify if repo_name refers to source
316 326 :param statuses: list of pull request statuses
317 327 :param opened_by: author user of the pull request
318 328 :param user_id: reviewer user of the pull request
319 329 :param offset: pagination offset
320 330 :param length: length of returned list
321 331 :param order_by: order of the returned list
322 332 :param order_dir: 'asc' or 'desc' ordering direction
323 333 :returns: list of pull requests
324 334 """
325 335 pull_requests = self.get_all(
326 336 repo_name, search_q=search_q, source=source, statuses=statuses,
327 337 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
328 338
329 339 _my = PullRequestModel().get_not_reviewed(user_id)
330 340 my_participation = []
331 341 for pr in pull_requests:
332 342 if pr in _my:
333 343 my_participation.append(pr)
334 344 _filtered_pull_requests = my_participation
335 345 if length:
336 346 return _filtered_pull_requests[offset:offset+length]
337 347 else:
338 348 return _filtered_pull_requests
339 349
340 350 def get_not_reviewed(self, user_id):
341 351 return [
342 352 x.pull_request for x in PullRequestReviewers.query().filter(
343 353 PullRequestReviewers.user_id == user_id).all()
344 354 ]
345 355
346 356 def _prepare_participating_query(self, user_id=None, statuses=None,
347 357 order_by=None, order_dir='desc'):
348 358 q = PullRequest.query()
349 359 if user_id:
350 360 reviewers_subquery = Session().query(
351 361 PullRequestReviewers.pull_request_id).filter(
352 362 PullRequestReviewers.user_id == user_id).subquery()
353 363 user_filter = or_(
354 364 PullRequest.user_id == user_id,
355 365 PullRequest.pull_request_id.in_(reviewers_subquery)
356 366 )
357 367 q = PullRequest.query().filter(user_filter)
358 368
359 369 # closed,opened
360 370 if statuses:
361 371 q = q.filter(PullRequest.status.in_(statuses))
362 372
363 373 if order_by:
364 374 order_map = {
365 375 'name_raw': PullRequest.pull_request_id,
366 376 'title': PullRequest.title,
367 377 'updated_on_raw': PullRequest.updated_on,
368 378 'target_repo': PullRequest.target_repo_id
369 379 }
370 380 if order_dir == 'asc':
371 381 q = q.order_by(order_map[order_by].asc())
372 382 else:
373 383 q = q.order_by(order_map[order_by].desc())
374 384
375 385 return q
376 386
377 387 def count_im_participating_in(self, user_id=None, statuses=None):
378 388 q = self._prepare_participating_query(user_id, statuses=statuses)
379 389 return q.count()
380 390
381 391 def get_im_participating_in(
382 392 self, user_id=None, statuses=None, offset=0,
383 393 length=None, order_by=None, order_dir='desc'):
384 394 """
385 395 Get all pull requests that I'm participating in, or I have opened
386 396 """
387 397
388 398 q = self._prepare_participating_query(
389 399 user_id, statuses=statuses, order_by=order_by,
390 400 order_dir=order_dir)
391 401
392 402 if length:
393 403 pull_requests = q.limit(length).offset(offset).all()
394 404 else:
395 405 pull_requests = q.all()
396 406
397 407 return pull_requests
398 408
399 409 def get_versions(self, pull_request):
400 410 """
401 411 returns versions of the pull request sorted by ID ascending
402 412 """
403 413 return PullRequestVersion.query()\
404 414 .filter(PullRequestVersion.pull_request == pull_request)\
405 415 .order_by(PullRequestVersion.pull_request_version_id.asc())\
406 416 .all()
407 417
408 418 def get_pr_version(self, pull_request_id, version=None):
409 419 at_version = None
410 420
411 421 if version and version == 'latest':
412 422 pull_request_ver = PullRequest.get(pull_request_id)
413 423 pull_request_obj = pull_request_ver
414 424 _org_pull_request_obj = pull_request_obj
415 425 at_version = 'latest'
416 426 elif version:
417 427 pull_request_ver = PullRequestVersion.get_or_404(version)
418 428 pull_request_obj = pull_request_ver
419 429 _org_pull_request_obj = pull_request_ver.pull_request
420 430 at_version = pull_request_ver.pull_request_version_id
421 431 else:
422 432 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
423 433 pull_request_id)
424 434
425 435 pull_request_display_obj = PullRequest.get_pr_display_object(
426 436 pull_request_obj, _org_pull_request_obj)
427 437
428 438 return _org_pull_request_obj, pull_request_obj, \
429 439 pull_request_display_obj, at_version
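
# Illustrative sketch (hypothetical call, not part of this changeset):
#
#     org_pr, pr_at_version, display_obj, at_version = \
#         PullRequestModel().get_pr_version(pull_request_id, version=None)
#
# With version=None all returned objects point at the live pull request and
# at_version is None; with a version id, pr_at_version and display_obj describe
# that stored PullRequestVersion while org_pr stays the live pull request.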
430 440
431 441 def create(self, created_by, source_repo, source_ref, target_repo,
432 442 target_ref, revisions, reviewers, title, description=None,
433 443 description_renderer=None,
434 444 reviewer_data=None, translator=None, auth_user=None):
435 445 translator = translator or get_current_request().translate
436 446
437 447 created_by_user = self._get_user(created_by)
438 448 auth_user = auth_user or created_by_user.AuthUser()
439 449 source_repo = self._get_repo(source_repo)
440 450 target_repo = self._get_repo(target_repo)
441 451
442 452 pull_request = PullRequest()
443 453 pull_request.source_repo = source_repo
444 454 pull_request.source_ref = source_ref
445 455 pull_request.target_repo = target_repo
446 456 pull_request.target_ref = target_ref
447 457 pull_request.revisions = revisions
448 458 pull_request.title = title
449 459 pull_request.description = description
450 460 pull_request.description_renderer = description_renderer
451 461 pull_request.author = created_by_user
452 462 pull_request.reviewer_data = reviewer_data
453 463 pull_request.pull_request_state = pull_request.STATE_CREATING
454 464 Session().add(pull_request)
455 465 Session().flush()
456 466
457 467 reviewer_ids = set()
458 468 # members / reviewers
459 469 for reviewer_object in reviewers:
460 470 user_id, reasons, mandatory, rules = reviewer_object
461 471 user = self._get_user(user_id)
462 472
463 473 # skip duplicates
464 474 if user.user_id in reviewer_ids:
465 475 continue
466 476
467 477 reviewer_ids.add(user.user_id)
468 478
469 479 reviewer = PullRequestReviewers()
470 480 reviewer.user = user
471 481 reviewer.pull_request = pull_request
472 482 reviewer.reasons = reasons
473 483 reviewer.mandatory = mandatory
474 484
475 485 # NOTE(marcink): pick only first rule for now
476 486 rule_id = list(rules)[0] if rules else None
477 487 rule = RepoReviewRule.get(rule_id) if rule_id else None
478 488 if rule:
479 489 review_group = rule.user_group_vote_rule(user_id)
480 490 # we check if this particular reviewer is a member of a voting group
481 491 if review_group:
482 492 # NOTE(marcink):
483 493 # the user can be a member of more than one group, but we pick the first one,
484 494 # the same as the default reviewers algo
485 495 review_group = review_group[0]
486 496
487 497 rule_data = {
488 498 'rule_name':
489 499 rule.review_rule_name,
490 500 'rule_user_group_entry_id':
491 501 review_group.repo_review_rule_users_group_id,
492 502 'rule_user_group_name':
493 503 review_group.users_group.users_group_name,
494 504 'rule_user_group_members':
495 505 [x.user.username for x in review_group.users_group.members],
496 506 'rule_user_group_members_id':
497 507 [x.user.user_id for x in review_group.users_group.members],
498 508 }
499 509 # e.g {'vote_rule': -1, 'mandatory': True}
500 510 rule_data.update(review_group.rule_data())
501 511
502 512 reviewer.rule_data = rule_data
503 513
504 514 Session().add(reviewer)
505 515 Session().flush()
506 516
507 517 # Set approval status to "Under Review" for all commits which are
508 518 # part of this pull request.
509 519 ChangesetStatusModel().set_status(
510 520 repo=target_repo,
511 521 status=ChangesetStatus.STATUS_UNDER_REVIEW,
512 522 user=created_by_user,
513 523 pull_request=pull_request
514 524 )
515 525 # we commit early at this point. The queries above take some row locks,
516 526 # so we need to commit and finish the transaction before the validate
517 527 # call below, which for large repos could take long, otherwise resulting
518 528 # in long row locks
519 529 Session().commit()
520 530
521 531 # prepare workspace, and run initial merge simulation. Set state during that
522 532 # operation
523 533 pull_request = PullRequest.get(pull_request.pull_request_id)
524 534
525 535 # set as merging, for merge simulation, and if finished to created so we mark
526 536 # simulation is working fine
527 537 with pull_request.set_state(PullRequest.STATE_MERGING,
528 538 final_state=PullRequest.STATE_CREATED) as state_obj:
529 539 MergeCheck.validate(
530 540 pull_request, auth_user=auth_user, translator=translator)
531 541
532 542 self.notify_reviewers(pull_request, reviewer_ids)
533 543 self.trigger_pull_request_hook(
534 544 pull_request, created_by_user, 'create')
535 545
536 546 creation_data = pull_request.get_api_data(with_merge_state=False)
537 547 self._log_audit_action(
538 548 'repo.pull_request.create', {'data': creation_data},
539 549 auth_user, pull_request)
540 550
541 551 return pull_request
542 552
543 553 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
544 554 pull_request = self.__get_pull_request(pull_request)
545 555 target_scm = pull_request.target_repo.scm_instance()
546 556 if action == 'create':
547 557 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
548 558 elif action == 'merge':
549 559 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
550 560 elif action == 'close':
551 561 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
552 562 elif action == 'review_status_change':
553 563 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
554 564 elif action == 'update':
555 565 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
556 566 elif action == 'comment':
557 567 # dummy no-op hook for the comment action; we want this function to handle all cases
558 568 def trigger_hook(*args, **kwargs):
559 569 pass
560 570 comment = data['comment']
561 571 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
562 572 else:
563 573 return
564 574
565 575 trigger_hook(
566 576 username=user.username,
567 577 repo_name=pull_request.target_repo.repo_name,
568 578 repo_alias=target_scm.alias,
569 579 pull_request=pull_request,
570 580 data=data)
571 581
572 582 def _get_commit_ids(self, pull_request):
573 583 """
574 584 Return the commit ids of the merged pull request.
575 585
576 586 This method does not yet deal correctly with the lack of autoupdates
577 587 nor with implicit target updates.
578 588 For example: if a commit in the source repo is already in the target, it
579 589 will be reported anyway.
580 590 """
581 591 merge_rev = pull_request.merge_rev
582 592 if merge_rev is None:
583 593 raise ValueError('This pull request was not merged yet')
584 594
585 595 commit_ids = list(pull_request.revisions)
586 596 if merge_rev not in commit_ids:
587 597 commit_ids.append(merge_rev)
588 598
589 599 return commit_ids
590 600
591 601 def merge_repo(self, pull_request, user, extras):
592 602 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 603 extras['user_agent'] = 'internal-merge'
594 604 merge_state = self._merge_pull_request(pull_request, user, extras)
595 605 if merge_state.executed:
596 606 log.debug("Merge was successful, updating the pull request comments.")
597 607 self._comment_and_close_pr(pull_request, user, merge_state)
598 608
599 609 self._log_audit_action(
600 610 'repo.pull_request.merge',
601 611 {'merge_state': merge_state.__dict__},
602 612 user, pull_request)
603 613
604 614 else:
605 615 log.warn("Merge failed, not updating the pull request.")
606 616 return merge_state
607 617
608 618 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
609 619 target_vcs = pull_request.target_repo.scm_instance()
610 620 source_vcs = pull_request.source_repo.scm_instance()
611 621
612 622 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
613 623 pr_id=pull_request.pull_request_id,
614 624 pr_title=pull_request.title,
615 625 source_repo=source_vcs.name,
616 626 source_ref_name=pull_request.source_ref_parts.name,
617 627 target_repo=target_vcs.name,
618 628 target_ref_name=pull_request.target_ref_parts.name,
619 629 )
620 630
621 631 workspace_id = self._workspace_id(pull_request)
622 632 repo_id = pull_request.target_repo.repo_id
623 633 use_rebase = self._use_rebase_for_merging(pull_request)
624 634 close_branch = self._close_branch_before_merging(pull_request)
625 635
626 636 target_ref = self._refresh_reference(
627 637 pull_request.target_ref_parts, target_vcs)
628 638
629 639 callback_daemon, extras = prepare_callback_daemon(
630 640 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
631 641 host=vcs_settings.HOOKS_HOST,
632 642 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
633 643
634 644 with callback_daemon:
635 645 # TODO: johbo: Implement a clean way to run a config_override
636 646 # for a single call.
637 647 target_vcs.config.set(
638 648 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
639 649
640 650 user_name = user.short_contact
641 651 merge_state = target_vcs.merge(
642 652 repo_id, workspace_id, target_ref, source_vcs,
643 653 pull_request.source_ref_parts,
644 654 user_name=user_name, user_email=user.email,
645 655 message=message, use_rebase=use_rebase,
646 656 close_branch=close_branch)
647 657 return merge_state
648 658
649 659 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
650 660 pull_request.merge_rev = merge_state.merge_ref.commit_id
651 661 pull_request.updated_on = datetime.datetime.now()
652 662 close_msg = close_msg or 'Pull request merged and closed'
653 663
654 664 CommentsModel().create(
655 665 text=safe_unicode(close_msg),
656 666 repo=pull_request.target_repo.repo_id,
657 667 user=user.user_id,
658 668 pull_request=pull_request.pull_request_id,
659 669 f_path=None,
660 670 line_no=None,
661 671 closing_pr=True
662 672 )
663 673
664 674 Session().add(pull_request)
665 675 Session().flush()
666 676 # TODO: paris: replace invalidation with a less radical solution
667 677 ScmModel().mark_for_invalidation(
668 678 pull_request.target_repo.repo_name)
669 679 self.trigger_pull_request_hook(pull_request, user, 'merge')
670 680
671 681 def has_valid_update_type(self, pull_request):
672 682 source_ref_type = pull_request.source_ref_parts.type
673 683 return source_ref_type in self.REF_TYPES
674 684
675 def update_commits(self, pull_request):
685 def update_commits(self, pull_request, updating_user):
676 686 """
677 687 Get the updated list of commits for the pull request
678 688 and return the new pull request version and the list
679 689 of commits processed by this update action
690
691 updating_user is the user object who triggered the update
680 692 """
681 693 pull_request = self.__get_pull_request(pull_request)
682 694 source_ref_type = pull_request.source_ref_parts.type
683 695 source_ref_name = pull_request.source_ref_parts.name
684 696 source_ref_id = pull_request.source_ref_parts.commit_id
685 697
686 698 target_ref_type = pull_request.target_ref_parts.type
687 699 target_ref_name = pull_request.target_ref_parts.name
688 700 target_ref_id = pull_request.target_ref_parts.commit_id
689 701
690 702 if not self.has_valid_update_type(pull_request):
691 703 log.debug("Skipping update of pull request %s due to ref type: %s",
692 704 pull_request, source_ref_type)
693 705 return UpdateResponse(
694 706 executed=False,
695 707 reason=UpdateFailureReason.WRONG_REF_TYPE,
696 old=pull_request, new=None, changes=None,
708 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
697 709 source_changed=False, target_changed=False)
698 710
699 711 # source repo
700 712 source_repo = pull_request.source_repo.scm_instance()
701 713
702 714 try:
703 715 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 716 except CommitDoesNotExistError:
705 717 return UpdateResponse(
706 718 executed=False,
707 719 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 old=pull_request, new=None, changes=None,
720 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
709 721 source_changed=False, target_changed=False)
710 722
711 723 source_changed = source_ref_id != source_commit.raw_id
712 724
713 725 # target repo
714 726 target_repo = pull_request.target_repo.scm_instance()
715 727
716 728 try:
717 729 target_commit = target_repo.get_commit(commit_id=target_ref_name)
718 730 except CommitDoesNotExistError:
719 731 return UpdateResponse(
720 732 executed=False,
721 733 reason=UpdateFailureReason.MISSING_TARGET_REF,
722 old=pull_request, new=None, changes=None,
734 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
723 735 source_changed=False, target_changed=False)
724 736 target_changed = target_ref_id != target_commit.raw_id
725 737
726 738 if not (source_changed or target_changed):
727 739 log.debug("Nothing changed in pull request %s", pull_request)
728 740 return UpdateResponse(
729 741 executed=False,
730 742 reason=UpdateFailureReason.NO_CHANGE,
731 old=pull_request, new=None, changes=None,
743 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
732 744 source_changed=target_changed, target_changed=source_changed)
733 745
734 746 change_in_found = 'target repo' if target_changed else 'source repo'
735 747 log.debug('Updating pull request because of change in %s detected',
736 748 change_in_found)
737 749
738 750 # Finally, an update is needed; in case of a source change
739 751 # we create a new version, otherwise just an update
740 752 if source_changed:
741 753 pull_request_version = self._create_version_from_snapshot(pull_request)
742 754 self._link_comments_to_version(pull_request_version)
743 755 else:
744 756 try:
745 757 ver = pull_request.versions[-1]
746 758 except IndexError:
747 759 ver = None
748 760
749 761 pull_request.pull_request_version_id = \
750 762 ver.pull_request_version_id if ver else None
751 763 pull_request_version = pull_request
752 764
753 765 try:
754 766 if target_ref_type in self.REF_TYPES:
755 767 target_commit = target_repo.get_commit(target_ref_name)
756 768 else:
757 769 target_commit = target_repo.get_commit(target_ref_id)
758 770 except CommitDoesNotExistError:
759 771 return UpdateResponse(
760 772 executed=False,
761 773 reason=UpdateFailureReason.MISSING_TARGET_REF,
762 old=pull_request, new=None, changes=None,
774 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
763 775 source_changed=source_changed, target_changed=target_changed)
764 776
765 777 # re-compute commit ids
766 778 old_commit_ids = pull_request.revisions
767 779 pre_load = ["author", "date", "message", "branch"]
768 780 commit_ranges = target_repo.compare(
769 781 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
770 782 pre_load=pre_load)
771 783
772 ancestor = source_repo.get_common_ancestor(
784 ancestor_commit_id = source_repo.get_common_ancestor(
773 785 source_commit.raw_id, target_commit.raw_id, target_repo)
774 786
775 787 pull_request.source_ref = '%s:%s:%s' % (
776 788 source_ref_type, source_ref_name, source_commit.raw_id)
777 789 pull_request.target_ref = '%s:%s:%s' % (
778 target_ref_type, target_ref_name, ancestor)
790 target_ref_type, target_ref_name, ancestor_commit_id)
779 791
780 792 pull_request.revisions = [
781 793 commit.raw_id for commit in reversed(commit_ranges)]
782 794 pull_request.updated_on = datetime.datetime.now()
783 795 Session().add(pull_request)
784 796 new_commit_ids = pull_request.revisions
785 797
786 798 old_diff_data, new_diff_data = self._generate_update_diffs(
787 799 pull_request, pull_request_version)
788 800
789 801 # calculate commit and file changes
790 changes = self._calculate_commit_id_changes(
802 commit_changes = self._calculate_commit_id_changes(
791 803 old_commit_ids, new_commit_ids)
792 804 file_changes = self._calculate_file_changes(
793 805 old_diff_data, new_diff_data)
794 806
795 807 # set comments as outdated if DIFFS changed
796 808 CommentsModel().outdate_comments(
797 809 pull_request, old_diff_data=old_diff_data,
798 810 new_diff_data=new_diff_data)
799 811
800 commit_changes = (changes.added or changes.removed)
812 valid_commit_changes = (commit_changes.added or commit_changes.removed)
801 813 file_node_changes = (
802 814 file_changes.added or file_changes.modified or file_changes.removed)
803 pr_has_changes = commit_changes or file_node_changes
815 pr_has_changes = valid_commit_changes or file_node_changes
804 816
805 817 # Add an automatic comment to the pull request, in case
806 818 # anything has changed
807 819 if pr_has_changes:
808 820 update_comment = CommentsModel().create(
809 text=self._render_update_message(changes, file_changes),
821 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
810 822 repo=pull_request.target_repo,
811 823 user=pull_request.author,
812 824 pull_request=pull_request,
813 825 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
814 826
815 827 # Update status to "Under Review" for added commits
816 for commit_id in changes.added:
828 for commit_id in commit_changes.added:
817 829 ChangesetStatusModel().set_status(
818 830 repo=pull_request.source_repo,
819 831 status=ChangesetStatus.STATUS_UNDER_REVIEW,
820 832 comment=update_comment,
821 833 user=pull_request.author,
822 834 pull_request=pull_request,
823 835 revision=commit_id)
824 836
837 # send update email to users
838 try:
839 self.notify_users(pull_request=pull_request, updating_user=updating_user,
840 ancestor_commit_id=ancestor_commit_id,
841 commit_changes=commit_changes,
842 file_changes=file_changes)
843 except Exception:
844 log.exception('Failed to send email notification to users')
845
825 846 log.debug(
826 847 'Updated pull request %s, added_ids: %s, common_ids: %s, '
827 848 'removed_ids: %s', pull_request.pull_request_id,
828 changes.added, changes.common, changes.removed)
849 commit_changes.added, commit_changes.common, commit_changes.removed)
829 850 log.debug(
830 851 'Updated pull request with the following file changes: %s',
831 852 file_changes)
832 853
833 854 log.info(
834 855 "Updated pull request %s from commit %s to commit %s, "
835 856 "stored new version %s of this pull request.",
836 857 pull_request.pull_request_id, source_ref_id,
837 858 pull_request.source_ref_parts.commit_id,
838 859 pull_request_version.pull_request_version_id)
839 860 Session().commit()
840 861 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
841 862
842 863 return UpdateResponse(
843 864 executed=True, reason=UpdateFailureReason.NONE,
844 old=pull_request, new=pull_request_version, changes=changes,
865 old=pull_request, new=pull_request_version,
866 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
845 867 source_changed=source_changed, target_changed=target_changed)
846 868
847 869 def _create_version_from_snapshot(self, pull_request):
848 870 version = PullRequestVersion()
849 871 version.title = pull_request.title
850 872 version.description = pull_request.description
851 873 version.status = pull_request.status
852 874 version.pull_request_state = pull_request.pull_request_state
853 875 version.created_on = datetime.datetime.now()
854 876 version.updated_on = pull_request.updated_on
855 877 version.user_id = pull_request.user_id
856 878 version.source_repo = pull_request.source_repo
857 879 version.source_ref = pull_request.source_ref
858 880 version.target_repo = pull_request.target_repo
859 881 version.target_ref = pull_request.target_ref
860 882
861 883 version._last_merge_source_rev = pull_request._last_merge_source_rev
862 884 version._last_merge_target_rev = pull_request._last_merge_target_rev
863 885 version.last_merge_status = pull_request.last_merge_status
864 886 version.shadow_merge_ref = pull_request.shadow_merge_ref
865 887 version.merge_rev = pull_request.merge_rev
866 888 version.reviewer_data = pull_request.reviewer_data
867 889
868 890 version.revisions = pull_request.revisions
869 891 version.pull_request = pull_request
870 892 Session().add(version)
871 893 Session().flush()
872 894
873 895 return version
874 896
875 897 def _generate_update_diffs(self, pull_request, pull_request_version):
876 898
877 899 diff_context = (
878 900 self.DIFF_CONTEXT +
879 901 CommentsModel.needed_extra_diff_context())
880 902 hide_whitespace_changes = False
881 903 source_repo = pull_request_version.source_repo
882 904 source_ref_id = pull_request_version.source_ref_parts.commit_id
883 905 target_ref_id = pull_request_version.target_ref_parts.commit_id
884 906 old_diff = self._get_diff_from_pr_or_version(
885 907 source_repo, source_ref_id, target_ref_id,
886 908 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
887 909
888 910 source_repo = pull_request.source_repo
889 911 source_ref_id = pull_request.source_ref_parts.commit_id
890 912 target_ref_id = pull_request.target_ref_parts.commit_id
891 913
892 914 new_diff = self._get_diff_from_pr_or_version(
893 915 source_repo, source_ref_id, target_ref_id,
894 916 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
895 917
896 918 old_diff_data = diffs.DiffProcessor(old_diff)
897 919 old_diff_data.prepare()
898 920 new_diff_data = diffs.DiffProcessor(new_diff)
899 921 new_diff_data.prepare()
900 922
901 923 return old_diff_data, new_diff_data
902 924
903 925 def _link_comments_to_version(self, pull_request_version):
904 926 """
905 927 Link all unlinked comments of this pull request to the given version.
906 928
907 929 :param pull_request_version: The `PullRequestVersion` to which
908 930 the comments shall be linked.
909 931
910 932 """
911 933 pull_request = pull_request_version.pull_request
912 934 comments = ChangesetComment.query()\
913 935 .filter(
914 936 # TODO: johbo: Should we query for the repo at all here?
915 937 # Pending decision on how comments of PRs are to be related
916 938 # to either the source repo, the target repo or no repo at all.
917 939 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
918 940 ChangesetComment.pull_request == pull_request,
919 941 ChangesetComment.pull_request_version == None)\
920 942 .order_by(ChangesetComment.comment_id.asc())
921 943
922 944 # TODO: johbo: Find out why this breaks if it is done in a bulk
923 945 # operation.
924 946 for comment in comments:
925 947 comment.pull_request_version_id = (
926 948 pull_request_version.pull_request_version_id)
927 949 Session().add(comment)
928 950
929 951 def _calculate_commit_id_changes(self, old_ids, new_ids):
930 952 added = [x for x in new_ids if x not in old_ids]
931 953 common = [x for x in new_ids if x in old_ids]
932 954 removed = [x for x in old_ids if x not in new_ids]
933 955 total = new_ids
934 956 return ChangeTuple(added, common, removed, total)
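
# Illustrative sketch (assumed sample ids, not part of this changeset):
#
#     old_ids = ['a1', 'b2', 'c3']
#     new_ids = ['b2', 'c3', 'd4']
#     changes = self._calculate_commit_id_changes(old_ids, new_ids)
#     # changes.added == ['d4'], changes.common == ['b2', 'c3'],
#     # changes.removed == ['a1'], changes.total == new_ids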
935 957
936 958 def _calculate_file_changes(self, old_diff_data, new_diff_data):
937 959
938 960 old_files = OrderedDict()
939 961 for diff_data in old_diff_data.parsed_diff:
940 962 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
941 963
942 964 added_files = []
943 965 modified_files = []
944 966 removed_files = []
945 967 for diff_data in new_diff_data.parsed_diff:
946 968 new_filename = diff_data['filename']
947 969 new_hash = md5_safe(diff_data['raw_diff'])
948 970
949 971 old_hash = old_files.get(new_filename)
950 972 if not old_hash:
951 973 # file is not present in old diff, means it's added
952 974 added_files.append(new_filename)
953 975 else:
954 976 if new_hash != old_hash:
955 977 modified_files.append(new_filename)
956 978 # now remove a file from old, since we have seen it already
957 979 del old_files[new_filename]
958 980
959 981 # removed files are those present in old but not in NEW;
960 982 # since we remove old files that are present in the new diff, any
961 983 # left-overs should be the removed files
962 984 removed_files.extend(old_files.keys())
963 985
964 986 return FileChangeTuple(added_files, modified_files, removed_files)
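
# Illustrative sketch (not part of this changeset): the comparison above keys
# both diffs by filename and compares md5 hashes of the raw diff text:
#
#     file_changes = self._calculate_file_changes(old_diff_data, new_diff_data)
#     # filename only in new_diff_data            -> file_changes.added
#     # filename in both, md5 of raw diff differs -> file_changes.modified
#     # filename only in old_diff_data            -> file_changes.removed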
965 987
966 def _render_update_message(self, changes, file_changes):
988 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
967 989 """
968 990 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
969 991 so it always looks the same regardless of which default
970 992 renderer the system is using.
971 993
994 :param ancestor_commit_id: ancestor raw_id
972 995 :param changes: changes named tuple
973 996 :param file_changes: file changes named tuple
974 997
975 998 """
976 999 new_status = ChangesetStatus.get_status_lbl(
977 1000 ChangesetStatus.STATUS_UNDER_REVIEW)
978 1001
979 1002 changed_files = (
980 1003 file_changes.added + file_changes.modified + file_changes.removed)
981 1004
982 1005 params = {
983 1006 'under_review_label': new_status,
984 1007 'added_commits': changes.added,
985 1008 'removed_commits': changes.removed,
986 1009 'changed_files': changed_files,
987 1010 'added_files': file_changes.added,
988 1011 'modified_files': file_changes.modified,
989 1012 'removed_files': file_changes.removed,
1013 'ancestor_commit_id': ancestor_commit_id
990 1014 }
991 1015 renderer = RstTemplateRenderer()
992 1016 return renderer.render('pull_request_update.mako', **params)
993 1017
994 1018 def edit(self, pull_request, title, description, description_renderer, user):
995 1019 pull_request = self.__get_pull_request(pull_request)
996 1020 old_data = pull_request.get_api_data(with_merge_state=False)
997 1021 if pull_request.is_closed():
998 1022 raise ValueError('This pull request is closed')
999 1023 if title:
1000 1024 pull_request.title = title
1001 1025 pull_request.description = description
1002 1026 pull_request.updated_on = datetime.datetime.now()
1003 1027 pull_request.description_renderer = description_renderer
1004 1028 Session().add(pull_request)
1005 1029 self._log_audit_action(
1006 1030 'repo.pull_request.edit', {'old_data': old_data},
1007 1031 user, pull_request)
1008 1032
1009 1033 def update_reviewers(self, pull_request, reviewer_data, user):
1010 1034 """
1011 1035 Update the reviewers in the pull request
1012 1036
1013 1037 :param pull_request: the pr to update
1014 1038 :param reviewer_data: list of tuples
1015 1039 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1016 1040 """
1017 1041 pull_request = self.__get_pull_request(pull_request)
1018 1042 if pull_request.is_closed():
1019 1043 raise ValueError('This pull request is closed')
1020 1044
1021 1045 reviewers = {}
1022 1046 for user_id, reasons, mandatory, rules in reviewer_data:
1023 1047 if isinstance(user_id, (int, compat.string_types)):
1024 1048 user_id = self._get_user(user_id).user_id
1025 1049 reviewers[user_id] = {
1026 1050 'reasons': reasons, 'mandatory': mandatory}
1027 1051
1028 1052 reviewers_ids = set(reviewers.keys())
1029 1053 current_reviewers = PullRequestReviewers.query()\
1030 1054 .filter(PullRequestReviewers.pull_request ==
1031 1055 pull_request).all()
1032 1056 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1033 1057
1034 1058 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1035 1059 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1036 1060
1037 1061 log.debug("Adding %s reviewers", ids_to_add)
1038 1062 log.debug("Removing %s reviewers", ids_to_remove)
1039 1063 changed = False
1040 1064 added_audit_reviewers = []
1041 1065 removed_audit_reviewers = []
1042 1066
1043 1067 for uid in ids_to_add:
1044 1068 changed = True
1045 1069 _usr = self._get_user(uid)
1046 1070 reviewer = PullRequestReviewers()
1047 1071 reviewer.user = _usr
1048 1072 reviewer.pull_request = pull_request
1049 1073 reviewer.reasons = reviewers[uid]['reasons']
1050 1074 # NOTE(marcink): mandatory shouldn't be changed now
1051 1075 # reviewer.mandatory = reviewers[uid]['mandatory']
1052 1076 Session().add(reviewer)
1053 1077 added_audit_reviewers.append(reviewer.get_dict())
1054 1078
1055 1079 for uid in ids_to_remove:
1056 1080 changed = True
1057 1081 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1058 1082 # that prevents and fixes cases where the same reviewer was added twice.
1059 1083 # This CAN happen due to the lack of DB checks
1060 1084 reviewers = PullRequestReviewers.query()\
1061 1085 .filter(PullRequestReviewers.user_id == uid,
1062 1086 PullRequestReviewers.pull_request == pull_request)\
1063 1087 .all()
1064 1088
1065 1089 for obj in reviewers:
1066 1090 removed_audit_reviewers.append(obj.get_dict())
1067 1091 Session().delete(obj)
1068 1092
1069 1093 if changed:
1070 1094 Session().expire_all()
1071 1095 pull_request.updated_on = datetime.datetime.now()
1072 1096 Session().add(pull_request)
1073 1097
1074 1098 # finally store audit logs
1075 1099 for user_data in added_audit_reviewers:
1076 1100 self._log_audit_action(
1077 1101 'repo.pull_request.reviewer.add', {'data': user_data},
1078 1102 user, pull_request)
1079 1103 for user_data in removed_audit_reviewers:
1080 1104 self._log_audit_action(
1081 1105 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1082 1106 user, pull_request)
1083 1107
1084 1108 self.notify_reviewers(pull_request, ids_to_add)
1085 1109 return ids_to_add, ids_to_remove
1086 1110
1087 1111 def get_url(self, pull_request, request=None, permalink=False):
1088 1112 if not request:
1089 1113 request = get_current_request()
1090 1114
1091 1115 if permalink:
1092 1116 return request.route_url(
1093 1117 'pull_requests_global',
1094 1118 pull_request_id=pull_request.pull_request_id,)
1095 1119 else:
1096 1120 return request.route_url('pullrequest_show',
1097 1121 repo_name=safe_str(pull_request.target_repo.repo_name),
1098 1122 pull_request_id=pull_request.pull_request_id,)
1099 1123
1100 1124 def get_shadow_clone_url(self, pull_request, request=None):
1101 1125 """
1102 1126 Returns qualified url pointing to the shadow repository. If this pull
1103 1127 request is closed there is no shadow repository and ``None`` will be
1104 1128 returned.
1105 1129 """
1106 1130 if pull_request.is_closed():
1107 1131 return None
1108 1132 else:
1109 1133 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1110 1134 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
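
# Illustrative sketch (assumed sample url, not part of this changeset): for a
# pull request shown at https://code.example.com/repo/pull-request/7 the value
# returned above would be https://code.example.com/repo/pull-request/7/repository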
1111 1135
1112 1136 def notify_reviewers(self, pull_request, reviewers_ids):
1113 1137 # notification to reviewers
1114 1138 if not reviewers_ids:
1115 1139 return
1116 1140
1117 1141 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1118 1142
1119 1143 pull_request_obj = pull_request
1120 1144 # get the current participants of this pull request
1121 1145 recipients = reviewers_ids
1122 1146 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1123 1147
1124 1148 pr_source_repo = pull_request_obj.source_repo
1125 1149 pr_target_repo = pull_request_obj.target_repo
1126 1150
1127 1151 pr_url = h.route_url('pullrequest_show',
1128 1152 repo_name=pr_target_repo.repo_name,
1129 1153 pull_request_id=pull_request_obj.pull_request_id,)
1130 1154
1131 1155 # set some variables for email notification
1132 1156 pr_target_repo_url = h.route_url(
1133 1157 'repo_summary', repo_name=pr_target_repo.repo_name)
1134 1158
1135 1159 pr_source_repo_url = h.route_url(
1136 1160 'repo_summary', repo_name=pr_source_repo.repo_name)
1137 1161
1138 1162 # pull request specifics
1139 1163 pull_request_commits = [
1140 1164 (x.raw_id, x.message)
1141 1165 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1142 1166
1143 1167 kwargs = {
1144 1168 'user': pull_request.author,
1145 1169 'pull_request': pull_request_obj,
1146 1170 'pull_request_commits': pull_request_commits,
1147 1171
1148 1172 'pull_request_target_repo': pr_target_repo,
1149 1173 'pull_request_target_repo_url': pr_target_repo_url,
1150 1174
1151 1175 'pull_request_source_repo': pr_source_repo,
1152 1176 'pull_request_source_repo_url': pr_source_repo_url,
1153 1177
1154 1178 'pull_request_url': pr_url,
1155 1179 }
1156 1180
1157 1181 # pre-generate the subject for notification itself
1158 1182 (subject,
1159 1183 _h, _e, # we don't care about those
1160 1184 body_plaintext) = EmailNotificationModel().render_email(
1161 1185 notification_type, **kwargs)
1162 1186
1163 1187 # create notification objects, and emails
1164 1188 NotificationModel().create(
1165 1189 created_by=pull_request.author,
1166 1190 notification_subject=subject,
1167 1191 notification_body=body_plaintext,
1168 1192 notification_type=notification_type,
1169 1193 recipients=recipients,
1170 1194 email_kwargs=kwargs,
1171 1195 )
1172 1196
1197 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1198 commit_changes, file_changes):
1199
1200 updating_user_id = updating_user.user_id
1201 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1202 # NOTE(marcink): send notification to all other users except to
1203 # the person who updated the PR
1204 recipients = reviewers.difference(set([updating_user_id]))
1205
1206 log.debug('Notify following recipients about pull-request update %s', recipients)
1207
1208 pull_request_obj = pull_request
1209
1210 # send email about the update
1211 changed_files = (
1212 file_changes.added + file_changes.modified + file_changes.removed)
1213
1214 pr_source_repo = pull_request_obj.source_repo
1215 pr_target_repo = pull_request_obj.target_repo
1216
1217 pr_url = h.route_url('pullrequest_show',
1218 repo_name=pr_target_repo.repo_name,
1219 pull_request_id=pull_request_obj.pull_request_id,)
1220
1221 # set some variables for email notification
1222 pr_target_repo_url = h.route_url(
1223 'repo_summary', repo_name=pr_target_repo.repo_name)
1224
1225 pr_source_repo_url = h.route_url(
1226 'repo_summary', repo_name=pr_source_repo.repo_name)
1227
1228 email_kwargs = {
1229 'date': datetime.datetime.now(),
1230 'updating_user': updating_user,
1231
1232 'pull_request': pull_request_obj,
1233
1234 'pull_request_target_repo': pr_target_repo,
1235 'pull_request_target_repo_url': pr_target_repo_url,
1236
1237 'pull_request_source_repo': pr_source_repo,
1238 'pull_request_source_repo_url': pr_source_repo_url,
1239
1240 'pull_request_url': pr_url,
1241
1242 'ancestor_commit_id': ancestor_commit_id,
1243 'added_commits': commit_changes.added,
1244 'removed_commits': commit_changes.removed,
1245 'changed_files': changed_files,
1246 'added_files': file_changes.added,
1247 'modified_files': file_changes.modified,
1248 'removed_files': file_changes.removed,
1249 }
1250
1251 (subject,
1252 _h, _e, # we don't care about those
1253 body_plaintext) = EmailNotificationModel().render_email(
1254 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1255
1256 # create notification objects, and emails
1257 NotificationModel().create(
1258 created_by=updating_user,
1259 notification_subject=subject,
1260 notification_body=body_plaintext,
1261 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1262 recipients=recipients,
1263 email_kwargs=email_kwargs,
1264 )
1265
1173 1266 def delete(self, pull_request, user):
1174 1267 pull_request = self.__get_pull_request(pull_request)
1175 1268 old_data = pull_request.get_api_data(with_merge_state=False)
1176 1269 self._cleanup_merge_workspace(pull_request)
1177 1270 self._log_audit_action(
1178 1271 'repo.pull_request.delete', {'old_data': old_data},
1179 1272 user, pull_request)
1180 1273 Session().delete(pull_request)
1181 1274
1182 1275 def close_pull_request(self, pull_request, user):
1183 1276 pull_request = self.__get_pull_request(pull_request)
1184 1277 self._cleanup_merge_workspace(pull_request)
1185 1278 pull_request.status = PullRequest.STATUS_CLOSED
1186 1279 pull_request.updated_on = datetime.datetime.now()
1187 1280 Session().add(pull_request)
1188 1281 self.trigger_pull_request_hook(
1189 1282 pull_request, pull_request.author, 'close')
1190 1283
1191 1284 pr_data = pull_request.get_api_data(with_merge_state=False)
1192 1285 self._log_audit_action(
1193 1286 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1194 1287
1195 1288 def close_pull_request_with_comment(
1196 1289 self, pull_request, user, repo, message=None, auth_user=None):
1197 1290
1198 1291 pull_request_review_status = pull_request.calculated_review_status()
1199 1292
1200 1293 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1201 1294 # approved only if we have voting consent
1202 1295 status = ChangesetStatus.STATUS_APPROVED
1203 1296 else:
1204 1297 status = ChangesetStatus.STATUS_REJECTED
1205 1298 status_lbl = ChangesetStatus.get_status_lbl(status)
1206 1299
1207 1300 default_message = (
1208 1301 'Closing with status change {transition_icon} {status}.'
1209 1302 ).format(transition_icon='>', status=status_lbl)
1210 1303 text = message or default_message
1211 1304
1212 1305 # create a comment, and link it to new status
1213 1306 comment = CommentsModel().create(
1214 1307 text=text,
1215 1308 repo=repo.repo_id,
1216 1309 user=user.user_id,
1217 1310 pull_request=pull_request.pull_request_id,
1218 1311 status_change=status_lbl,
1219 1312 status_change_type=status,
1220 1313 closing_pr=True,
1221 1314 auth_user=auth_user,
1222 1315 )
1223 1316
1224 1317 # calculate old status before we change it
1225 1318 old_calculated_status = pull_request.calculated_review_status()
1226 1319 ChangesetStatusModel().set_status(
1227 1320 repo.repo_id,
1228 1321 status,
1229 1322 user.user_id,
1230 1323 comment=comment,
1231 1324 pull_request=pull_request.pull_request_id
1232 1325 )
1233 1326
1234 1327 Session().flush()
1235 1328 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1236 1329 # we now calculate the status of the pull request again, and based on that
1237 1330 # calculation trigger a status change. This might happen in cases
1238 1331 # where a non-reviewer admin closes a pr, which means their vote doesn't
1239 1332 # change the status, while if they're a reviewer this might change it.
1240 1333 calculated_status = pull_request.calculated_review_status()
1241 1334 if old_calculated_status != calculated_status:
1242 1335 self.trigger_pull_request_hook(
1243 1336 pull_request, user, 'review_status_change',
1244 1337 data={'status': calculated_status})
1245 1338
1246 1339 # finally close the PR
1247 1340 PullRequestModel().close_pull_request(
1248 1341 pull_request.pull_request_id, user)
1249 1342
1250 1343 return comment, status
1251 1344
1252 1345 def merge_status(self, pull_request, translator=None,
1253 1346 force_shadow_repo_refresh=False):
1254 1347 _ = translator or get_current_request().translate
1255 1348
1256 1349 if not self._is_merge_enabled(pull_request):
1257 1350 return False, _('Server-side pull request merging is disabled.')
1258 1351 if pull_request.is_closed():
1259 1352 return False, _('This pull request is closed.')
1260 1353 merge_possible, msg = self._check_repo_requirements(
1261 1354 target=pull_request.target_repo, source=pull_request.source_repo,
1262 1355 translator=_)
1263 1356 if not merge_possible:
1264 1357 return merge_possible, msg
1265 1358
1266 1359 try:
1267 1360 resp = self._try_merge(
1268 1361 pull_request,
1269 1362 force_shadow_repo_refresh=force_shadow_repo_refresh)
1270 1363 log.debug("Merge response: %s", resp)
1271 1364 status = resp.possible, resp.merge_status_message
1272 1365 except NotImplementedError:
1273 1366 status = False, _('Pull request merging is not supported.')
1274 1367
1275 1368 return status
1276 1369
1277 1370 def _check_repo_requirements(self, target, source, translator):
1278 1371 """
1279 1372 Check if `target` and `source` have compatible requirements.
1280 1373
1281 1374 Currently this is just checking for largefiles.
1282 1375 """
1283 1376 _ = translator
1284 1377 target_has_largefiles = self._has_largefiles(target)
1285 1378 source_has_largefiles = self._has_largefiles(source)
1286 1379 merge_possible = True
1287 1380 message = u''
1288 1381
1289 1382 if target_has_largefiles != source_has_largefiles:
1290 1383 merge_possible = False
1291 1384 if source_has_largefiles:
1292 1385 message = _(
1293 1386 'Target repository large files support is disabled.')
1294 1387 else:
1295 1388 message = _(
1296 1389 'Source repository large files support is disabled.')
1297 1390
1298 1391 return merge_possible, message
1299 1392
1300 1393 def _has_largefiles(self, repo):
1301 1394 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1302 1395 'extensions', 'largefiles')
1303 1396 return largefiles_ui and largefiles_ui[0].active
1304 1397
1305 1398 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1306 1399 """
1307 1400 Try to merge the pull request and return the merge status.
1308 1401 """
1309 1402 log.debug(
1310 1403 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1311 1404 pull_request.pull_request_id, force_shadow_repo_refresh)
1312 1405 target_vcs = pull_request.target_repo.scm_instance()
1313 1406 # Refresh the target reference.
1314 1407 try:
1315 1408 target_ref = self._refresh_reference(
1316 1409 pull_request.target_ref_parts, target_vcs)
1317 1410 except CommitDoesNotExistError:
1318 1411 merge_state = MergeResponse(
1319 1412 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1320 1413 metadata={'target_ref': pull_request.target_ref_parts})
1321 1414 return merge_state
1322 1415
1323 1416 target_locked = pull_request.target_repo.locked
1324 1417 if target_locked and target_locked[0]:
1325 1418 locked_by = 'user:{}'.format(target_locked[0])
1326 1419 log.debug("The target repository is locked by %s.", locked_by)
1327 1420 merge_state = MergeResponse(
1328 1421 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1329 1422 metadata={'locked_by': locked_by})
1330 1423 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1331 1424 pull_request, target_ref):
1332 1425 log.debug("Refreshing the merge status of the repository.")
1333 1426 merge_state = self._refresh_merge_state(
1334 1427 pull_request, target_vcs, target_ref)
1335 1428 else:
1336 1429 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1337 1430 metadata = {
1338 1431 'unresolved_files': '',
1339 1432 'target_ref': pull_request.target_ref_parts,
1340 1433 'source_ref': pull_request.source_ref_parts,
1341 1434 }
1342 1435 if not possible and target_ref.type == 'branch':
1343 1436 # NOTE(marcink): case for mercurial multiple heads on branch
1344 1437 heads = target_vcs._heads(target_ref.name)
1345 1438 if len(heads) != 1:
1346 1439 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1347 1440 metadata.update({
1348 1441 'heads': heads
1349 1442 })
1350 1443 merge_state = MergeResponse(
1351 1444 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1352 1445
1353 1446 return merge_state
1354 1447
1355 1448 def _refresh_reference(self, reference, vcs_repository):
1356 1449 if reference.type in self.UPDATABLE_REF_TYPES:
1357 1450 name_or_id = reference.name
1358 1451 else:
1359 1452 name_or_id = reference.commit_id
1360 1453
1361 1454 refreshed_commit = vcs_repository.get_commit(name_or_id)
1362 1455 refreshed_reference = Reference(
1363 1456 reference.type, reference.name, refreshed_commit.raw_id)
1364 1457 return refreshed_reference
1365 1458
1366 1459 def _needs_merge_state_refresh(self, pull_request, target_reference):
1367 1460 return not(
1368 1461 pull_request.revisions and
1369 1462 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1370 1463 target_reference.commit_id == pull_request._last_merge_target_rev)
1371 1464
1372 1465 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1373 1466 workspace_id = self._workspace_id(pull_request)
1374 1467 source_vcs = pull_request.source_repo.scm_instance()
1375 1468 repo_id = pull_request.target_repo.repo_id
1376 1469 use_rebase = self._use_rebase_for_merging(pull_request)
1377 1470 close_branch = self._close_branch_before_merging(pull_request)
1378 1471 merge_state = target_vcs.merge(
1379 1472 repo_id, workspace_id,
1380 1473 target_reference, source_vcs, pull_request.source_ref_parts,
1381 1474 dry_run=True, use_rebase=use_rebase,
1382 1475 close_branch=close_branch)
1383 1476
1384 1477 # Do not store the response if there was an unknown error.
1385 1478 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1386 1479 pull_request._last_merge_source_rev = \
1387 1480 pull_request.source_ref_parts.commit_id
1388 1481 pull_request._last_merge_target_rev = target_reference.commit_id
1389 1482 pull_request.last_merge_status = merge_state.failure_reason
1390 1483 pull_request.shadow_merge_ref = merge_state.merge_ref
1391 1484 Session().add(pull_request)
1392 1485 Session().commit()
1393 1486
1394 1487 return merge_state
1395 1488
1396 1489 def _workspace_id(self, pull_request):
1397 1490 workspace_id = 'pr-%s' % pull_request.pull_request_id
1398 1491 return workspace_id
1399 1492
1400 1493 def generate_repo_data(self, repo, commit_id=None, branch=None,
1401 1494 bookmark=None, translator=None):
1402 1495 from rhodecode.model.repo import RepoModel
1403 1496
1404 1497 all_refs, selected_ref = \
1405 1498 self._get_repo_pullrequest_sources(
1406 1499 repo.scm_instance(), commit_id=commit_id,
1407 1500 branch=branch, bookmark=bookmark, translator=translator)
1408 1501
1409 1502 refs_select2 = []
1410 1503 for element in all_refs:
1411 1504 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1412 1505 refs_select2.append({'text': element[1], 'children': children})
1413 1506
1414 1507 return {
1415 1508 'user': {
1416 1509 'user_id': repo.user.user_id,
1417 1510 'username': repo.user.username,
1418 1511 'firstname': repo.user.first_name,
1419 1512 'lastname': repo.user.last_name,
1420 1513 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1421 1514 },
1422 1515 'name': repo.repo_name,
1423 1516 'link': RepoModel().get_url(repo),
1424 1517 'description': h.chop_at_smart(repo.description_safe, '\n'),
1425 1518 'refs': {
1426 1519 'all_refs': all_refs,
1427 1520 'selected_ref': selected_ref,
1428 1521 'select2_refs': refs_select2
1429 1522 }
1430 1523 }
1431 1524
1432 1525 def generate_pullrequest_title(self, source, source_ref, target):
1433 1526 return u'{source}#{at_ref} to {target}'.format(
1434 1527 source=source,
1435 1528 at_ref=source_ref,
1436 1529 target=target,
1437 1530 )
1438 1531
1439 1532 def _cleanup_merge_workspace(self, pull_request):
1440 1533 # Merging related cleanup
1441 1534 repo_id = pull_request.target_repo.repo_id
1442 1535 target_scm = pull_request.target_repo.scm_instance()
1443 1536 workspace_id = self._workspace_id(pull_request)
1444 1537
1445 1538 try:
1446 1539 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1447 1540 except NotImplementedError:
1448 1541 pass
1449 1542
1450 1543 def _get_repo_pullrequest_sources(
1451 1544 self, repo, commit_id=None, branch=None, bookmark=None,
1452 1545 translator=None):
1453 1546 """
1454 1547 Return a structure with repo's interesting commits, suitable for
1455 1548 the selectors in pullrequest controller
1456 1549
1457 1550 :param commit_id: a commit that must be in the list
1458 1551 and selected by default
1459 1552 :param branch: a branch that must be in the list and selected
1460 1553 by default - even if closed
1461 1554 :param bookmark: a bookmark that must be in the list and selected by default
1462 1555 """
1463 1556 _ = translator or get_current_request().translate
1464 1557
1465 1558 commit_id = safe_str(commit_id) if commit_id else None
1466 1559 branch = safe_unicode(branch) if branch else None
1467 1560 bookmark = safe_unicode(bookmark) if bookmark else None
1468 1561
1469 1562 selected = None
1470 1563
1471 1564 # order matters: first source that has commit_id in it will be selected
1472 1565 sources = []
1473 1566 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1474 1567 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1475 1568
1476 1569 if commit_id:
1477 1570 ref_commit = (h.short_id(commit_id), commit_id)
1478 1571 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1479 1572
1480 1573 sources.append(
1481 1574 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1482 1575 )
1483 1576
1484 1577 groups = []
1485 1578
1486 1579 for group_key, ref_list, group_name, match in sources:
1487 1580 group_refs = []
1488 1581 for ref_name, ref_id in ref_list:
1489 1582 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1490 1583 group_refs.append((ref_key, ref_name))
1491 1584
1492 1585 if not selected:
1493 1586 if set([commit_id, match]) & set([ref_id, ref_name]):
1494 1587 selected = ref_key
1495 1588
1496 1589 if group_refs:
1497 1590 groups.append((group_refs, group_name))
1498 1591
1499 1592 if not selected:
1500 1593 ref = commit_id or branch or bookmark
1501 1594 if ref:
1502 1595 raise CommitDoesNotExistError(
1503 1596 u'No commit refs could be found matching: {}'.format(ref))
1504 1597 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1505 1598 selected = u'branch:{}:{}'.format(
1506 1599 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1507 1600 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1508 1601 )
1509 1602 elif repo.commit_ids:
1510 1603 # make the user select in this case
1511 1604 selected = None
1512 1605 else:
1513 1606 raise EmptyRepositoryError()
1514 1607 return groups, selected
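
A minimal sketch of the (groups, selected) structure returned above, as the pull-request source/target selectors consume it. This is not part of the changeset; the caller function, repository object, and import path are assumptions for illustration only.

# Illustrative only -- not part of this changeset.
from rhodecode.model.pull_request import PullRequestModel  # import path assumed

def load_pr_ref_choices(repo, translator, branch=None):
    # `repo` is a db Repository object; `branch` is an optional pre-selected branch name.
    groups, selected = PullRequestModel()._get_repo_pullrequest_sources(
        repo.scm_instance(), branch=branch, translator=translator)
    # groups: list of (group_refs, group_name); each ref is (ref_key, ref_name)
    # with ref_key shaped as 'type:name:commit_id', e.g.
    #   [([('branch:default:<commit_id>', 'default')], 'Branches'), ...]
    # selected: the matching ref_key, the default-branch key, or None when the
    # user has to pick a ref explicitly.
    return groups, selected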
1515 1608
1516 1609 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1517 1610 hide_whitespace_changes, diff_context):
1518 1611
1519 1612 return self._get_diff_from_pr_or_version(
1520 1613 source_repo, source_ref_id, target_ref_id,
1521 1614 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1522 1615
1523 1616 def _get_diff_from_pr_or_version(
1524 1617 self, source_repo, source_ref_id, target_ref_id,
1525 1618 hide_whitespace_changes, diff_context):
1526 1619
1527 1620 target_commit = source_repo.get_commit(
1528 1621 commit_id=safe_str(target_ref_id))
1529 1622 source_commit = source_repo.get_commit(
1530 1623 commit_id=safe_str(source_ref_id))
1531 1624 if isinstance(source_repo, Repository):
1532 1625 vcs_repo = source_repo.scm_instance()
1533 1626 else:
1534 1627 vcs_repo = source_repo
1535 1628
1536 1629 # TODO: johbo: In the context of an update, we cannot reach
1537 1630 # the old commit anymore with our normal mechanisms. It needs
1538 1631 # some sort of special support in the vcs layer to avoid this
1539 1632 # workaround.
1540 1633 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1541 1634 vcs_repo.alias == 'git'):
1542 1635 source_commit.raw_id = safe_str(source_ref_id)
1543 1636
1544 1637 log.debug('calculating diff between '
1545 1638 'source_ref:%s and target_ref:%s for repo `%s`',
1546 1639 target_ref_id, source_ref_id,
1547 1640 safe_unicode(vcs_repo.path))
1548 1641
1549 1642 vcs_diff = vcs_repo.get_diff(
1550 1643 commit1=target_commit, commit2=source_commit,
1551 1644 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1552 1645 return vcs_diff
1553 1646
1554 1647 def _is_merge_enabled(self, pull_request):
1555 1648 return self._get_general_setting(
1556 1649 pull_request, 'rhodecode_pr_merge_enabled')
1557 1650
1558 1651 def _use_rebase_for_merging(self, pull_request):
1559 1652 repo_type = pull_request.target_repo.repo_type
1560 1653 if repo_type == 'hg':
1561 1654 return self._get_general_setting(
1562 1655 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1563 1656 elif repo_type == 'git':
1564 1657 return self._get_general_setting(
1565 1658 pull_request, 'rhodecode_git_use_rebase_for_merging')
1566 1659
1567 1660 return False
1568 1661
1569 1662 def _close_branch_before_merging(self, pull_request):
1570 1663 repo_type = pull_request.target_repo.repo_type
1571 1664 if repo_type == 'hg':
1572 1665 return self._get_general_setting(
1573 1666 pull_request, 'rhodecode_hg_close_branch_before_merging')
1574 1667 elif repo_type == 'git':
1575 1668 return self._get_general_setting(
1576 1669 pull_request, 'rhodecode_git_close_branch_before_merging')
1577 1670
1578 1671 return False
1579 1672
1580 1673 def _get_general_setting(self, pull_request, settings_key, default=False):
1581 1674 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1582 1675 settings = settings_model.get_general_settings()
1583 1676 return settings.get(settings_key, default)
1584 1677
1585 1678 def _log_audit_action(self, action, action_data, user, pull_request):
1586 1679 audit_logger.store(
1587 1680 action=action,
1588 1681 action_data=action_data,
1589 1682 user=user,
1590 1683 repo=pull_request.target_repo)
1591 1684
1592 1685 def get_reviewer_functions(self):
1593 1686 """
1594 1687 Fetches the functions used for validating and fetching default reviewers.
1595 1688 If available we use the EE package, otherwise we fall back to the CE
1596 1689 package functions.
1597 1690 """
1598 1691 try:
1599 1692 from rc_reviewers.utils import get_default_reviewers_data
1600 1693 from rc_reviewers.utils import validate_default_reviewers
1601 1694 except ImportError:
1602 1695 from rhodecode.apps.repository.utils import get_default_reviewers_data
1603 1696 from rhodecode.apps.repository.utils import validate_default_reviewers
1604 1697
1605 1698 return get_default_reviewers_data, validate_default_reviewers
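
A small sketch of how a caller might consume the EE/CE pair returned here; the unpacking below is illustrative, and the signatures of the two returned functions are defined by the rc_reviewers (EE) or rhodecode.apps.repository (CE) utils modules, not shown in this diff.

# Illustrative only -- not part of this changeset.
get_default_reviewers_data, validate_default_reviewers = \
    PullRequestModel().get_reviewer_functions()
# Whichever package was importable (EE first, CE as fallback) provides both
# callables; invoke them with the arguments those modules define.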
1606 1699
1607 1700
1608 1701 class MergeCheck(object):
1609 1702 """
1610 1703 Performs merge checks and returns a check object which stores information
1611 1704 about merge errors and merge conditions.
1612 1705 """
1613 1706 TODO_CHECK = 'todo'
1614 1707 PERM_CHECK = 'perm'
1615 1708 REVIEW_CHECK = 'review'
1616 1709 MERGE_CHECK = 'merge'
1617 1710 WIP_CHECK = 'wip'
1618 1711
1619 1712 def __init__(self):
1620 1713 self.review_status = None
1621 1714 self.merge_possible = None
1622 1715 self.merge_msg = ''
1623 1716 self.failed = None
1624 1717 self.errors = []
1625 1718 self.error_details = OrderedDict()
1626 1719
1627 1720 def push_error(self, error_type, message, error_key, details):
1628 1721 self.failed = True
1629 1722 self.errors.append([error_type, message])
1630 1723 self.error_details[error_key] = dict(
1631 1724 details=details,
1632 1725 error_type=error_type,
1633 1726 message=message
1634 1727 )
1635 1728
1636 1729 @classmethod
1637 1730 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1638 1731 force_shadow_repo_refresh=False):
1639 1732 _ = translator
1640 1733 merge_check = cls()
1641 1734
1642 1735 # title has WIP:
1643 1736 if pull_request.work_in_progress:
1644 1737 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1645 1738
1646 1739 msg = _('WIP marker in title prevents an accidental merge.')
1647 1740 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1648 1741 if fail_early:
1649 1742 return merge_check
1650 1743
1651 1744 # permissions to merge
1652 1745 user_allowed_to_merge = PullRequestModel().check_user_merge(
1653 1746 pull_request, auth_user)
1654 1747 if not user_allowed_to_merge:
1655 1748 log.debug("MergeCheck: cannot merge, user has no merge permission.")
1656 1749
1657 1750 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1658 1751 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1659 1752 if fail_early:
1660 1753 return merge_check
1661 1754
1662 1755 # permission to merge into the target branch
1663 1756 target_commit_id = pull_request.target_ref_parts.commit_id
1664 1757 if pull_request.target_ref_parts.type == 'branch':
1665 1758 branch_name = pull_request.target_ref_parts.name
1666 1759 else:
1667 1760 # for mercurial we can always figure out the branch from the commit
1668 1761 # in case of bookmark
1669 1762 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1670 1763 branch_name = target_commit.branch
1671 1764
1672 1765 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1673 1766 pull_request.target_repo.repo_name, branch_name)
1674 1767 if branch_perm and branch_perm == 'branch.none':
1675 1768 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1676 1769 branch_name, rule)
1677 1770 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1678 1771 if fail_early:
1679 1772 return merge_check
1680 1773
1681 1774 # review status, must be always present
1682 1775 review_status = pull_request.calculated_review_status()
1683 1776 merge_check.review_status = review_status
1684 1777
1685 1778 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1686 1779 if not status_approved:
1687 1780 log.debug("MergeCheck: cannot merge, approval is pending.")
1688 1781
1689 1782 msg = _('Pull request reviewer approval is pending.')
1690 1783
1691 1784 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1692 1785
1693 1786 if fail_early:
1694 1787 return merge_check
1695 1788
1696 1789 # left over TODOs
1697 1790 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1698 1791 if todos:
1699 1792 log.debug("MergeCheck: cannot merge, {} "
1700 1793 "unresolved TODOs left.".format(len(todos)))
1701 1794
1702 1795 if len(todos) == 1:
1703 1796 msg = _('Cannot merge, {} TODO still not resolved.').format(
1704 1797 len(todos))
1705 1798 else:
1706 1799 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1707 1800 len(todos))
1708 1801
1709 1802 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1710 1803
1711 1804 if fail_early:
1712 1805 return merge_check
1713 1806
1714 1807 # merge possible, here is the filesystem simulation + shadow repo
1715 1808 merge_status, msg = PullRequestModel().merge_status(
1716 1809 pull_request, translator=translator,
1717 1810 force_shadow_repo_refresh=force_shadow_repo_refresh)
1718 1811 merge_check.merge_possible = merge_status
1719 1812 merge_check.merge_msg = msg
1720 1813 if not merge_status:
1721 1814 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1722 1815 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1723 1816
1724 1817 if fail_early:
1725 1818 return merge_check
1726 1819
1727 1820 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1728 1821 return merge_check
1729 1822
1730 1823 @classmethod
1731 1824 def get_merge_conditions(cls, pull_request, translator):
1732 1825 _ = translator
1733 1826 merge_details = {}
1734 1827
1735 1828 model = PullRequestModel()
1736 1829 use_rebase = model._use_rebase_for_merging(pull_request)
1737 1830
1738 1831 if use_rebase:
1739 1832 merge_details['merge_strategy'] = dict(
1740 1833 details={},
1741 1834 message=_('Merge strategy: rebase')
1742 1835 )
1743 1836 else:
1744 1837 merge_details['merge_strategy'] = dict(
1745 1838 details={},
1746 1839 message=_('Merge strategy: explicit merge commit')
1747 1840 )
1748 1841
1749 1842 close_branch = model._close_branch_before_merging(pull_request)
1750 1843 if close_branch:
1751 1844 repo_type = pull_request.target_repo.repo_type
1752 1845 close_msg = ''
1753 1846 if repo_type == 'hg':
1754 1847 close_msg = _('Source branch will be closed after merge.')
1755 1848 elif repo_type == 'git':
1756 1849 close_msg = _('Source branch will be deleted after merge.')
1757 1850
1758 1851 merge_details['close_branch'] = dict(
1759 1852 details={},
1760 1853 message=close_msg
1761 1854 )
1762 1855
1763 1856 return merge_details
1764 1857
1765 1858
1766 1859 ChangeTuple = collections.namedtuple(
1767 1860 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1768 1861
1769 1862 FileChangeTuple = collections.namedtuple(
1770 1863 'FileChangeTuple', ['added', 'modified', 'removed'])
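
A minimal sketch of driving MergeCheck from calling code, assuming a Pyramid request and an auth_user object are available; the helper name, its logging, and the import path are illustrative assumptions, not part of this changeset.

# Illustrative only -- not part of this changeset.
import logging

from rhodecode.model.pull_request import MergeCheck, PullRequestModel  # import path assumed

log = logging.getLogger(__name__)

def report_merge_readiness(pull_request, auth_user, request):
    translator = request.translate
    check = MergeCheck.validate(
        pull_request, auth_user=auth_user, translator=translator,
        force_shadow_repo_refresh=False)

    # push_error() collects [error_type, message] pairs in .errors and keyed
    # entries (WIP_CHECK, PERM_CHECK, REVIEW_CHECK, ...) in .error_details.
    for error_type, message in check.errors:
        log.debug('merge check %s: %s', error_type, message)

    # merge_possible/merge_msg mirror PullRequestModel().merge_status().
    return not check.failed and check.merge_possible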
@@ -1,201 +1,201 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="base.mako"/>
3 3 <%namespace name="base" file="base.mako"/>
4 4
5 5 ## EMAIL SUBJECT
6 6 <%def name="subject()" filter="n,trim,whitespace_filter">
7 7 <%
8 8 data = {
9 9 'user': '@'+h.person(user),
10 10 'repo_name': repo_name,
11 11 'status': status_change,
12 12 'comment_file': comment_file,
13 13 'comment_line': comment_line,
14 14 'comment_type': comment_type,
15 15 'comment_id': comment_id,
16 16
17 17 'pr_title': pull_request.title,
18 18 'pr_id': pull_request.pull_request_id,
19 19 }
20 20 %>
21 21
22 22
23 23 % if comment_file:
24 24 ${(_('[mention]') if mention else '')} ${_('{user} left a {comment_type} on file `{comment_file}` in pull request !{pr_id}: "{pr_title}"').format(**data) |n}
25 25 % else:
26 26 % if status_change:
27 27 ${(_('[mention]') if mention else '')} ${_('[status: {status}] {user} left a {comment_type} on pull request !{pr_id}: "{pr_title}"').format(**data) |n}
28 28 % else:
29 29 ${(_('[mention]') if mention else '')} ${_('{user} left a {comment_type} on pull request !{pr_id}: "{pr_title}"').format(**data) |n}
30 30 % endif
31 31 % endif
32 32
33 33 </%def>
34 34
35 35 ## PLAINTEXT VERSION OF BODY
36 36 <%def name="body_plaintext()" filter="n,trim">
37 37 <%
38 38 data = {
39 39 'user': h.person(user),
40 40 'repo_name': repo_name,
41 41 'status': status_change,
42 42 'comment_file': comment_file,
43 43 'comment_line': comment_line,
44 44 'comment_type': comment_type,
45 45 'comment_id': comment_id,
46 46
47 47 'pr_title': pull_request.title,
48 48 'pr_id': pull_request.pull_request_id,
49 49 'source_ref_type': pull_request.source_ref_parts.type,
50 50 'source_ref_name': pull_request.source_ref_parts.name,
51 51 'target_ref_type': pull_request.target_ref_parts.type,
52 52 'target_ref_name': pull_request.target_ref_parts.name,
53 53 'source_repo': pull_request_source_repo.repo_name,
54 54 'target_repo': pull_request_target_repo.repo_name,
55 55 'source_repo_url': pull_request_source_repo_url,
56 56 'target_repo_url': pull_request_target_repo_url,
57 57 }
58 58 %>
59 59
60 60 * ${_('Comment link')}: ${pr_comment_url}
61 61
62 62 * ${_('Pull Request')}: !${pull_request.pull_request_id}
63 63
64 64 * ${h.literal(_('Commit flow: {source_ref_type}:{source_ref_name} of {source_repo_url} into {target_ref_type}:{target_ref_name} of {target_repo_url}').format(**data))}
65 65
66 66 %if status_change and not closing_pr:
67 67 * ${_('{user} submitted pull request !{pr_id} status: *{status}*').format(**data)}
68 68
69 69 %elif status_change and closing_pr:
70 70 * ${_('{user} submitted pull request !{pr_id} status: *{status} and closed*').format(**data)}
71 71
72 72 %endif
73 73 %if comment_file:
74 74 * ${_('File: {comment_file} on line {comment_line}').format(**data)}
75 75
76 76 %endif
77 77 % if comment_type == 'todo':
78 78 ${('Inline' if comment_file else 'General')} ${_('`TODO` number')} ${comment_id}:
79 79 % else:
80 80 ${('Inline' if comment_file else 'General')} ${_('`Note` number')} ${comment_id}:
81 81 % endif
82 82
83 83 ${comment_body |n, trim}
84 84
85 85 ---
86 86 ${self.plaintext_footer()}
87 87 </%def>
88 88
89 89
90 90 <%
91 91 data = {
92 92 'user': h.person(user),
93 93 'comment_file': comment_file,
94 94 'comment_line': comment_line,
95 95 'comment_type': comment_type,
96 96 'comment_id': comment_id,
97 97 'renderer_type': renderer_type or 'plain',
98 98
99 99 'pr_title': pull_request.title,
100 100 'pr_id': pull_request.pull_request_id,
101 101 'status': status_change,
102 102 'source_ref_type': pull_request.source_ref_parts.type,
103 103 'source_ref_name': pull_request.source_ref_parts.name,
104 104 'target_ref_type': pull_request.target_ref_parts.type,
105 105 'target_ref_name': pull_request.target_ref_parts.name,
106 106 'source_repo': pull_request_source_repo.repo_name,
107 107 'target_repo': pull_request_target_repo.repo_name,
108 108 'source_repo_url': h.link_to(pull_request_source_repo.repo_name, pull_request_source_repo_url),
109 109 'target_repo_url': h.link_to(pull_request_target_repo.repo_name, pull_request_target_repo_url),
110 110 }
111 111 %>
112 112
113 113 <table style="text-align:left;vertical-align:middle;width: 100%">
114 114 <tr>
115 115 <td style="width:100%;border-bottom:1px solid #dbd9da;">
116 116
117 117 <h4 style="margin: 0">
118 <div style="margin-bottom: 4px; color:#7E7F7F">
119 @${h.person(user.username)}
118 <div style="margin-bottom: 4px">
119 <span style="color:#7E7F7F">@${h.person(user.username)}</span>
120 ${_('left a')}
121 <a href="${pr_comment_url}" style="${base.link_css()}">
122 % if comment_file:
123 ${_('{comment_type} on file `{comment_file}` in pull request.').format(**data)}
124 % else:
125 ${_('{comment_type} on pull request.').format(**data) |n}
126 % endif
127 </a>
120 128 </div>
121 ${_('left a')}
122 <a href="${pr_comment_url}" style="${base.link_css()}">
123 % if comment_file:
124 ${_('{comment_type} on file `{comment_file}` in pull request.').format(**data)}
125 % else:
126 ${_('{comment_type} on pull request.').format(**data) |n}
127 % endif
128 </a>
129 129 <div style="margin-top: 10px"></div>
130 130 ${_('Pull request')} <code>!${data['pr_id']}: ${data['pr_title']}</code>
131 131 </h4>
132 132
133 133 </td>
134 134 </tr>
135 135
136 136 </table>
137 137
138 138 <table style="text-align:left;vertical-align:middle;width: 100%">
139 139
140 140 ## spacing def
141 141 <tr>
142 142 <td style="width: 130px"></td>
143 143 <td></td>
144 144 </tr>
145 145
146 146 % if status_change:
147 147 <tr>
148 148 <td style="padding-right:20px;">${_('Review Status')}:</td>
149 149 <td>
150 150 % if closing_pr:
151 151 ${_('Closed pull request with status')}: ${base.status_text(status_change, tag_type=status_change_type)}
152 152 % else:
153 153 ${_('Submitted review status')}: ${base.status_text(status_change, tag_type=status_change_type)}
154 154 % endif
155 155 </td>
156 156 </tr>
157 157 % endif
158 158 <tr>
159 159 <td style="padding-right:20px;">${_('Pull request')}:</td>
160 160 <td>
161 161 <a href="${pull_request_url}" style="${base.link_css()}">
162 162 !${pull_request.pull_request_id}
163 163 </a>
164 164 </td>
165 165 </tr>
166 166
167 167 <tr>
168 168 <td style="padding-right:20px;line-height:20px;">${_('Commit Flow')}:</td>
169 169 <td style="line-height:20px;">
170 170 <code>${'{}:{}'.format(data['source_ref_type'], pull_request.source_ref_parts.name)}</code> ${_('of')} ${data['source_repo_url']}
171 171 &rarr;
172 172 <code>${'{}:{}'.format(data['target_ref_type'], pull_request.target_ref_parts.name)}</code> ${_('of')} ${data['target_repo_url']}
173 173 </td>
174 174 </tr>
175 175
176 176 % if comment_file:
177 177 <tr>
178 178 <td style="padding-right:20px;">${_('File')}:</td>
179 179 <td><a href="${pr_comment_url}" style="${base.link_css()}">${_('`{comment_file}` on line {comment_line}').format(**data)}</a></td>
180 180 </tr>
181 181 % endif
182 182
183 183 <tr style="border-bottom:1px solid #dbd9da;">
184 184 <td colspan="2" style="padding-right:20px;">
185 185 % if comment_type == 'todo':
186 186 ${('Inline' if comment_file else 'General')} ${_('`TODO` number')} ${comment_id}:
187 187 % else:
188 188 ${('Inline' if comment_file else 'General')} ${_('`Note` number')} ${comment_id}:
189 189 % endif
190 190 </td>
191 191 </tr>
192 192
193 193 <tr>
194 194 <td colspan="2" style="background: #F7F7F7">${h.render(comment_body, renderer=data['renderer_type'], mentions=True)}</td>
195 195 </tr>
196 196
197 197 <tr>
198 198 <td><a href="${pr_comment_reply_url}">${_('Reply')}</a></td>
199 199 <td></td>
200 200 </tr>
201 201 </table>
@@ -1,143 +1,143 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="base.mako"/>
3 3 <%namespace name="base" file="base.mako"/>
4 4
5 5 ## EMAIL SUBJECT
6 6 <%def name="subject()" filter="n,trim,whitespace_filter">
7 7 <%
8 8 data = {
9 9 'user': '@'+h.person(user),
10 10 'pr_id': pull_request.pull_request_id,
11 11 'pr_title': pull_request.title,
12 12 }
13 13 %>
14 14
15 15 ${_('{user} requested a pull request review. !{pr_id}: "{pr_title}"').format(**data) |n}
16 16 </%def>
17 17
18 18 ## PLAINTEXT VERSION OF BODY
19 19 <%def name="body_plaintext()" filter="n,trim">
20 20 <%
21 21 data = {
22 22 'user': h.person(user),
23 23 'pr_id': pull_request.pull_request_id,
24 24 'pr_title': pull_request.title,
25 25 'source_ref_type': pull_request.source_ref_parts.type,
26 26 'source_ref_name': pull_request.source_ref_parts.name,
27 27 'target_ref_type': pull_request.target_ref_parts.type,
28 28 'target_ref_name': pull_request.target_ref_parts.name,
29 29 'repo_url': pull_request_source_repo_url,
30 30 'source_repo': pull_request_source_repo.repo_name,
31 31 'target_repo': pull_request_target_repo.repo_name,
32 32 'source_repo_url': pull_request_source_repo_url,
33 33 'target_repo_url': pull_request_target_repo_url,
34 34 }
35 35 %>
36 36
37 37 * ${_('Pull Request link')}: ${pull_request_url}
38 38
39 39 * ${h.literal(_('Commit flow: {source_ref_type}:{source_ref_name} of {source_repo_url} into {target_ref_type}:{target_ref_name} of {target_repo_url}').format(**data))}
40 40
41 41 * ${_('Title')}: ${pull_request.title}
42 42
43 43 * ${_('Description')}:
44 44
45 45 ${pull_request.description | trim}
46 46
47 47
48 48 * ${_ungettext('Commit (%(num)s)', 'Commits (%(num)s)', len(pull_request_commits) ) % {'num': len(pull_request_commits)}}:
49 49
50 50 % for commit_id, message in pull_request_commits:
51 51 - ${h.short_id(commit_id)}
52 52 ${h.chop_at_smart(message, '\n', suffix_if_chopped='...')}
53 53
54 54 % endfor
55 55
56 56 ---
57 57 ${self.plaintext_footer()}
58 58 </%def>
59 59 <%
60 60 data = {
61 61 'user': h.person(user),
62 62 'pr_id': pull_request.pull_request_id,
63 63 'pr_title': pull_request.title,
64 64 'source_ref_type': pull_request.source_ref_parts.type,
65 65 'source_ref_name': pull_request.source_ref_parts.name,
66 66 'target_ref_type': pull_request.target_ref_parts.type,
67 67 'target_ref_name': pull_request.target_ref_parts.name,
68 68 'repo_url': pull_request_source_repo_url,
69 69 'source_repo': pull_request_source_repo.repo_name,
70 70 'target_repo': pull_request_target_repo.repo_name,
71 71 'source_repo_url': h.link_to(pull_request_source_repo.repo_name, pull_request_source_repo_url),
72 72 'target_repo_url': h.link_to(pull_request_target_repo.repo_name, pull_request_target_repo_url),
73 73 }
74 74 %>
75 75
76 76 <table style="text-align:left;vertical-align:middle;width: 100%">
77 77 <tr>
78 78 <td style="width:100%;border-bottom:1px solid #dbd9da;">
79 79
80 80 <h4 style="margin: 0">
81 <div style="margin-bottom: 4px; color:#7E7F7F">
82 @${h.person(user.username)}
81 <div style="margin-bottom: 4px">
82 <span style="color:#7E7F7F">@${h.person(user.username)}</span>
83 ${_('requested a')}
84 <a href="${pull_request_url}" style="${base.link_css()}">
85 ${_('pull request review.').format(**data) }
86 </a>
83 87 </div>
84 ${_('requested a')}
85 <a href="${pull_request_url}" style="${base.link_css()}">
86 ${_('pull request review.').format(**data) }
87 </a>
88 88 <div style="margin-top: 10px"></div>
89 89 ${_('Pull request')} <code>!${data['pr_id']}: ${data['pr_title']}</code>
90 90 </h4>
91 91
92 92 </td>
93 93 </tr>
94 94
95 95 </table>
96 96
97 97 <table style="text-align:left;vertical-align:middle;width: 100%">
98 98 ## spacing def
99 99 <tr>
100 100 <td style="width: 130px"></td>
101 101 <td></td>
102 102 </tr>
103 103
104 104 <tr>
105 105 <td style="padding-right:20px;">${_('Pull request')}:</td>
106 106 <td>
107 107 <a href="${pull_request_url}" style="${base.link_css()}">
108 108 !${pull_request.pull_request_id}
109 109 </a>
110 110 </td>
111 111 </tr>
112 112
113 113 <tr>
114 114 <td style="padding-right:20px;line-height:20px;">${_('Commit Flow')}:</td>
115 115 <td style="line-height:20px;">
116 116 <code>${'{}:{}'.format(data['source_ref_type'], pull_request.source_ref_parts.name)}</code> ${_('of')} ${data['source_repo_url']}
117 117 &rarr;
118 118 <code>${'{}:{}'.format(data['target_ref_type'], pull_request.target_ref_parts.name)}</code> ${_('of')} ${data['target_repo_url']}
119 119 </td>
120 120 </tr>
121 121
122 122 <tr>
123 123 <td style="padding-right:20px;">${_('Description')}:</td>
124 124 <td style="white-space:pre-wrap"><code>${pull_request.description | trim}</code></td>
125 125 </tr>
126 126 <tr>
127 127 <td style="padding-right:20px;">${_ungettext('Commit (%(num)s)', 'Commits (%(num)s)', len(pull_request_commits)) % {'num': len(pull_request_commits)}}:</td>
128 128 <td></td>
129 129 </tr>
130 130
131 131 <tr>
132 132 <td colspan="2">
133 133 <ol style="margin:0 0 0 1em;padding:0;text-align:left;">
134 134 % for commit_id, message in pull_request_commits:
135 135 <li style="margin:0 0 1em;">
136 136 <pre style="margin:0 0 .5em"><a href="${h.route_path('repo_commit', repo_name=pull_request_source_repo.repo_name, commit_id=commit_id)}" style="${base.link_css()}">${h.short_id(commit_id)}</a></pre>
137 137 ${h.chop_at_smart(message, '\n', suffix_if_chopped='...')}
138 138 </li>
139 139 % endfor
140 140 </ol>
141 141 </td>
142 142 </tr>
143 143 </table>
@@ -1,27 +1,27 b''
1 1 ## -*- coding: utf-8 -*-
2 2 Pull request updated. Auto status change to |under_review|
3 3
4 4 .. role:: added
5 5 .. role:: removed
6 6 .. parsed-literal::
7 7
8 8 Changed commits:
9 9 * :added:`${len(added_commits)} added`
10 10 * :removed:`${len(removed_commits)} removed`
11 11
12 12 %if not changed_files:
13 13 No file changes found
14 14 %else:
15 15 Changed files:
16 16 %for file_name in added_files:
17 * `A ${file_name} <#${'a_' + h.FID('', file_name)}>`_
17 * `A ${file_name} <#${'a_' + h.FID(ancestor_commit_id, file_name)}>`_
18 18 %endfor
19 19 %for file_name in modified_files:
20 * `M ${file_name} <#${'a_' + h.FID('', file_name)}>`_
20 * `M ${file_name} <#${'a_' + h.FID(ancestor_commit_id, file_name)}>`_
21 21 %endfor
22 22 %for file_name in removed_files:
23 * R ${file_name}
23 * `R ${file_name}`
24 24 %endfor
25 25 %endif
26 26
27 27 .. |under_review| replace:: *"${under_review_label}"* No newline at end of file
@@ -1,141 +1,194 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22 import collections
23 23
24 24 from rhodecode.lib.partial_renderer import PyramidPartialRenderer
25 25 from rhodecode.lib.utils2 import AttributeDict
26 26 from rhodecode.model.db import User
27 27 from rhodecode.model.notification import EmailNotificationModel
28 28
29 29
30 30 def test_get_template_obj(app, request_stub):
31 31 template = EmailNotificationModel().get_renderer(
32 32 EmailNotificationModel.TYPE_TEST, request_stub)
33 33 assert isinstance(template, PyramidPartialRenderer)
34 34
35 35
36 36 def test_render_email(app, http_host_only_stub):
37 37 kwargs = {}
38 38 subject, headers, body, body_plaintext = EmailNotificationModel().render_email(
39 39 EmailNotificationModel.TYPE_TEST, **kwargs)
40 40
41 41 # subject
42 42 assert subject == 'Test "Subject" hello "world"'
43 43
44 44 # headers
45 45 assert headers == 'X=Y'
46 46
47 47 # body plaintext
48 48 assert body_plaintext == 'Email Plaintext Body'
49 49
50 50 # body
51 51 notification_footer1 = 'This is a notification from RhodeCode.'
52 52 notification_footer2 = 'http://{}/'.format(http_host_only_stub)
53 53 assert notification_footer1 in body
54 54 assert notification_footer2 in body
55 55 assert 'Email Body' in body
56 56
57 57
58 58 def test_render_pr_email(app, user_admin):
59 59 ref = collections.namedtuple(
60 60 'Ref', 'name, type')('fxies123', 'book')
61 61
62 62 pr = collections.namedtuple('PullRequest',
63 63 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')(
64 64 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch')
65 65
66 66 source_repo = target_repo = collections.namedtuple(
67 67 'Repo', 'type, repo_name')('hg', 'pull_request_1')
68 68
69 69 kwargs = {
70 70 'user': User.get_first_super_admin(),
71 71 'pull_request': pr,
72 72 'pull_request_commits': [],
73 73
74 74 'pull_request_target_repo': target_repo,
75 75 'pull_request_target_repo_url': 'x',
76 76
77 77 'pull_request_source_repo': source_repo,
78 78 'pull_request_source_repo_url': 'x',
79 79
80 80 'pull_request_url': 'http://localhost/pr1',
81 81 }
82 82
83 83 subject, headers, body, body_plaintext = EmailNotificationModel().render_email(
84 84 EmailNotificationModel.TYPE_PULL_REQUEST, **kwargs)
85 85
86 86 # subject
87 87 assert subject == '@test_admin (RhodeCode Admin) requested a pull request review. !200: "Example Pull Request"'
88 88
89 89
90 def test_render_pr_update_email(app, user_admin):
91 ref = collections.namedtuple(
92 'Ref', 'name, type')('fxies123', 'book')
93
94 pr = collections.namedtuple('PullRequest',
95 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')(
96 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch')
97
98 source_repo = target_repo = collections.namedtuple(
99 'Repo', 'type, repo_name')('hg', 'pull_request_1')
100
101 commit_changes = AttributeDict({
102 'added': ['aaaaaaabbbbb', 'cccccccddddddd'],
103 'removed': ['eeeeeeeeeee'],
104 })
105 file_changes = AttributeDict({
106 'added': ['a/file1.md', 'file2.py'],
107 'modified': ['b/modified_file.rst'],
108 'removed': ['.idea'],
109 })
110
111 kwargs = {
112 'updating_user': User.get_first_super_admin(),
113
114 'pull_request': pr,
115 'pull_request_commits': [],
116
117 'pull_request_target_repo': target_repo,
118 'pull_request_target_repo_url': 'x',
119
120 'pull_request_source_repo': source_repo,
121 'pull_request_source_repo_url': 'x',
122
123 'pull_request_url': 'http://localhost/pr1',
124
125 'pr_comment_url': 'http://comment-url',
126 'pr_comment_reply_url': 'http://comment-url#reply',
127 'ancestor_commit_id': 'f39bd443',
128 'added_commits': commit_changes.added,
129 'removed_commits': commit_changes.removed,
130 'changed_files': (file_changes.added + file_changes.modified + file_changes.removed),
131 'added_files': file_changes.added,
132 'modified_files': file_changes.modified,
133 'removed_files': file_changes.removed,
134 }
135
136 subject, headers, body, body_plaintext = EmailNotificationModel().render_email(
137 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **kwargs)
138
139 # subject
140 assert subject == '@test_admin (RhodeCode Admin) updated pull request. !200: "Example Pull Request"'
141
142
90 143 @pytest.mark.parametrize('mention', [
91 144 True,
92 145 False
93 146 ])
94 147 @pytest.mark.parametrize('email_type', [
95 148 EmailNotificationModel.TYPE_COMMIT_COMMENT,
96 149 EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
97 150 ])
98 151 def test_render_comment_subject_no_newlines(app, mention, email_type):
99 152 ref = collections.namedtuple(
100 153 'Ref', 'name, type')('fxies123', 'book')
101 154
102 155 pr = collections.namedtuple('PullRequest',
103 156 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')(
104 157 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch')
105 158
106 159 source_repo = target_repo = collections.namedtuple(
107 160 'Repo', 'type, repo_name')('hg', 'pull_request_1')
108 161
109 162 kwargs = {
110 163 'user': User.get_first_super_admin(),
111 164 'commit': AttributeDict(raw_id='a'*40, message='Commit message'),
112 165 'status_change': 'approved',
113 166 'commit_target_repo_url': 'http://foo.example.com/#comment1',
114 167 'repo_name': 'test-repo',
115 168 'comment_file': 'test-file.py',
116 169 'comment_line': 'n100',
117 170 'comment_type': 'note',
118 171 'comment_id': 2048,
119 172 'commit_comment_url': 'http://comment-url',
120 173 'commit_comment_reply_url': 'http://comment-url/#Reply',
121 174 'instance_url': 'http://rc-instance',
122 175 'comment_body': 'hello world',
123 176 'mention': mention,
124 177
125 178 'pr_comment_url': 'http://comment-url',
126 179 'pr_comment_reply_url': 'http://comment-url/#Reply',
127 180 'pull_request': pr,
128 181 'pull_request_commits': [],
129 182
130 183 'pull_request_target_repo': target_repo,
131 184 'pull_request_target_repo_url': 'x',
132 185
133 186 'pull_request_source_repo': source_repo,
134 187 'pull_request_source_repo_url': 'x',
135 188
136 189 'pull_request_url': 'http://code.rc.com/_pr/123'
137 190 }
138 191 subject, headers, body, body_plaintext = EmailNotificationModel().render_email(
139 192 email_type, **kwargs)
140 193
141 194 assert '\n' not in subject
@@ -1,678 +1,680 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.markup_renderer import (
24 24 MarkupRenderer, RstTemplateRenderer, relative_path, relative_links)
25 25
26 26
27 27 @pytest.mark.parametrize(
28 28 "filename, expected_renderer",
29 29 [
30 30 ('readme.md', 'markdown'),
31 31 ('readme.Md', 'markdown'),
32 32 ('readme.MdoWn', 'markdown'),
33 33 ('readme.rst', 'rst'),
34 34 ('readme.Rst', 'rst'),
35 35 ('readme.rest', 'rst'),
36 36 ('readme.rest', 'rst'),
37 37
38 38 ('markdown.xml', 'plain'),
39 39 ('rest.xml', 'plain'),
40 40 ('readme.xml', 'plain'),
41 41
42 42 ('readme', 'plain'),
43 43 ('README', 'plain'),
44 44 ('readme.mdx', 'plain'),
45 45 ('readme.rstx', 'plain'),
46 46 ('readmex', 'plain'),
47 47 ])
48 48 def test_detect_renderer(filename, expected_renderer):
49 49 detected_renderer = MarkupRenderer()._detect_renderer(
50 50 '', filename=filename).__name__
51 51 assert expected_renderer == detected_renderer
52 52
53 53
54 54 def test_markdown_xss_link():
55 55 xss_md = "[link](javascript:alert('XSS: pwned!'))"
56 56 rendered_html = MarkupRenderer.markdown(xss_md)
57 57 assert 'href="javascript:alert(\'XSS: pwned!\')"' not in rendered_html
58 58
59 59
60 60 def test_markdown_xss_inline_html():
61 61 xss_md = '\n'.join([
62 62 '> <a name="n"',
63 63 '> href="javascript:alert(\'XSS: pwned!\')">link</a>'])
64 64 rendered_html = MarkupRenderer.markdown(xss_md)
65 65 assert 'href="javascript:alert(\'XSS: pwned!\')">' not in rendered_html
66 66
67 67
68 68 def test_markdown_inline_html():
69 69 xss_md = '\n'.join(['> <a name="n"',
70 70 '> onload="javascript:alert()" href="https://rhodecode.com">link</a>'])
71 71 rendered_html = MarkupRenderer.markdown(xss_md)
72 72 assert '<a href="https://rhodecode.com" name="n">link</a>' in rendered_html
73 73
74 74
75 75 def test_markdown_bleach_renders_correct():
76 76 test_md = """
77 77 This is intended as a quick reference and showcase. For more complete info, see [John Gruber's original spec](http://daringfireball.net/projects/markdown/) and the [Github-flavored Markdown info page](http://github.github.com/github-flavored-markdown/).
78 78
79 79 Note that there is also a [Cheatsheet specific to Markdown Here](./Markdown-Here-Cheatsheet) if that's what you're looking for. You can also check out [more Markdown tools](./Other-Markdown-Tools).
80 80
81 81 ##### Table of Contents
82 82 [Headers](#headers)
83 83 [Emphasis](#emphasis)
84 84 [Lists](#lists)
85 85 [Links](#links)
86 86 [Images](#images)
87 87 [Code and Syntax Highlighting](#code)
88 88 [Tables](#tables)
89 89 [Blockquotes](#blockquotes)
90 90 [Inline HTML](#html)
91 91 [Horizontal Rule](#hr)
92 92 [Line Breaks](#lines)
93 93 [Youtube videos](#videos)
94 94
95 95
96 96 ## Headers
97 97
98 98 ```no-highlight
99 99 # H1
100 100 ## H2
101 101 ### H3
102 102 #### H4
103 103 ##### H5
104 104 ###### H6
105 105
106 106 Alternatively, for H1 and H2, an underline-ish style:
107 107
108 108 Alt-H1
109 109 ======
110 110
111 111 Alt-H2
112 112 ------
113 113 ```
114 114
115 115 # H1
116 116 ## H2
117 117 ### H3
118 118 #### H4
119 119 ##### H5
120 120 ###### H6
121 121
122 122 Alternatively, for H1 and H2, an underline-ish style:
123 123
124 124 Alt-H1
125 125 ======
126 126
127 127 Alt-H2
128 128 ------
129 129
130 130 ## Emphasis
131 131
132 132 ```no-highlight
133 133 Emphasis, aka italics, with *asterisks* or _underscores_.
134 134
135 135 Strong emphasis, aka bold, with **asterisks** or __underscores__.
136 136
137 137 Combined emphasis with **asterisks and _underscores_**.
138 138
139 139 Strikethrough uses two tildes. ~~Scratch this.~~
140 140 ```
141 141
142 142 Emphasis, aka italics, with *asterisks* or _underscores_.
143 143
144 144 Strong emphasis, aka bold, with **asterisks** or __underscores__.
145 145
146 146 Combined emphasis with **asterisks and _underscores_**.
147 147
148 148 Strikethrough uses two tildes. ~~Scratch this.~~
149 149
150 150
151 151 ## Lists
152 152
153 153 (In this example, leading and trailing spaces are shown with dots: β‹…)
154 154
155 155 ```no-highlight
156 156 1. First ordered list item
157 157 2. Another item
158 158 β‹…β‹…* Unordered sub-list.
159 159 1. Actual numbers don't matter, just that it's a number
160 160 β‹…β‹…1. Ordered sub-list
161 161 4. And another item.
162 162
163 163 β‹…β‹…β‹…You can have properly indented paragraphs within list items. Notice the blank line above, and the leading spaces (at least one, but we'll use three here to also align the raw Markdown).
164 164
165 165 β‹…β‹…β‹…To have a line break without a paragraph, you will need to use two trailing spaces.β‹…β‹…
166 166 β‹…β‹…β‹…Note that this line is separate, but within the same paragraph.β‹…β‹…
167 167 β‹…β‹…β‹…(This is contrary to the typical GFM line break behaviour, where trailing spaces are not required.)
168 168
169 169 * Unordered list can use asterisks
170 170 - Or minuses
171 171 + Or pluses
172 172 ```
173 173
174 174 1. First ordered list item
175 175 2. Another item
176 176 * Unordered sub-list.
177 177 1. Actual numbers don't matter, just that it's a number
178 178 1. Ordered sub-list
179 179 4. And another item.
180 180
181 181 You can have properly indented paragraphs within list items. Notice the blank line above, and the leading spaces (at least one, but we'll use three here to also align the raw Markdown).
182 182
183 183 To have a line break without a paragraph, you will need to use two trailing spaces.
184 184 Note that this line is separate, but within the same paragraph.
185 185 (This is contrary to the typical GFM line break behaviour, where trailing spaces are not required.)
186 186
187 187 * Unordered list can use asterisks
188 188 - Or minuses
189 189 + Or pluses
190 190
191 191
192 192 ## Links
193 193
194 194 There are two ways to create links.
195 195
196 196 ```no-highlight
197 197 [I'm an inline-style link](https://www.google.com)
198 198
199 199 [I'm an inline-style link with title](https://www.google.com "Google's Homepage")
200 200
201 201 [I'm a reference-style link][Arbitrary case-insensitive reference text]
202 202
203 203 [I'm a relative reference to a repository file (LICENSE)](./LICENSE)
204 204
205 205 [I'm a relative reference to a repository file (IMAGE)](./img/logo.png)
206 206
207 207 [I'm a relative reference to a repository file (IMAGE2)](img/logo.png)
208 208
209 209 [You can use numbers for reference-style link definitions][1]
210 210
211 211 Or leave it empty and use the [link text itself].
212 212
213 213 URLs and URLs in angle brackets will automatically get turned into links.
214 214 http://www.example.com or <http://www.example.com> and sometimes
215 215 example.com (but not on Github, for example).
216 216
217 217 Some text to show that the reference links can follow later.
218 218
219 219 [arbitrary case-insensitive reference text]: https://www.mozilla.org
220 220 [1]: http://slashdot.org
221 221 [link text itself]: http://www.reddit.com
222 222 ```
223 223
224 224 [I'm an inline-style link](https://www.google.com)
225 225
226 226 [I'm an inline-style link with title](https://www.google.com "Google's Homepage")
227 227
228 228 [I'm a reference-style link][Arbitrary case-insensitive reference text]
229 229
230 230 [I'm a relative reference to a repository file (LICENSE)](./LICENSE)
231 231
232 232 [I'm a relative reference to a repository file (IMAGE)](./img/logo.png)
233 233
234 234 [I'm a relative reference to a repository file (IMAGE2)](img/logo.png)
235 235
236 236 [You can use numbers for reference-style link definitions][1]
237 237
238 238 Or leave it empty and use the [link text itself].
239 239
240 240 URLs and URLs in angle brackets will automatically get turned into links.
241 241 http://www.example.com or <http://www.example.com> and sometimes
242 242 example.com (but not on Github, for example).
243 243
244 244 Some text to show that the reference links can follow later.
245 245
246 246 [arbitrary case-insensitive reference text]: https://www.mozilla.org
247 247 [1]: http://slashdot.org
248 248 [link text itself]: http://www.reddit.com
249 249
250 250
251 251 ## Images
252 252
253 253 ```no-highlight
254 254 Here's our logo (hover to see the title text):
255 255
256 256 Inline-style:
257 257 ![alt text](https://github.com/adam-p/markdown-here/raw/master/src/common/images/icon48.png "Logo Title Text 1")
258 258
259 259 relative-src-style:
260 260 ![alt text](img/logo.png)
261 261
262 262 Reference-style:
263 263 ![alt text][logo]
264 264
265 265 [logo]: https://github.com/adam-p/markdown-here/raw/master/src/common/images/icon48.png "Logo Title Text 2"
266 266 ```
267 267
268 268 Here's our logo (hover to see the title text):
269 269
270 270 Inline-style:
271 271 ![alt text](https://github.com/adam-p/markdown-here/raw/master/src/common/images/icon48.png "Logo Title Text 1")
272 272
273 273 relative-src-style:
274 274 ![alt text](img/logo.png)
275 275
276 276 relative-src-style:
277 277 ![alt text](./img/logo.png)
278 278
279 279 Reference-style:
280 280 ![alt text][logo]
281 281
282 282 [logo]: https://github.com/adam-p/markdown-here/raw/master/src/common/images/icon48.png "Logo Title Text 2"
283 283
284 284
285 285 ## Code and Syntax Highlighting
286 286
287 287 Code blocks are part of the Markdown spec, but syntax highlighting isn't. However, many renderers -- like Github's and *Markdown Here* -- support syntax highlighting. Which languages are supported and how those language names should be written will vary from renderer to renderer. *Markdown Here* supports highlighting for dozens of languages (and not-really-languages, like diffs and HTTP headers); to see the complete list, and how to write the language names, see the [highlight.js demo page](http://softwaremaniacs.org/media/soft/highlight/test.html).
288 288
289 289 ```no-highlight
290 290 Inline `code` has `back-ticks around` it.
291 291 ```
292 292
293 293 Inline `code` has `back-ticks around` it.
294 294
295 295 Blocks of code are either fenced by lines with three back-ticks <code>```</code>, or are indented with four spaces. I recommend only using the fenced code blocks -- they're easier and only they support syntax highlighting.
296 296
297 297 ```javascript
298 298 var s = "JavaScript syntax highlighting";
299 299 console.log(s);
300 300 ```
301 301
302 302 ```python
303 303 s = "Python syntax highlighting"
304 304 print s
305 305 ```
306 306
307 307 ```
308 308 No language indicated, so no syntax highlighting.
309 309 But let's throw in a &lt;b&gt;tag&lt;/b&gt;.
310 310 ```
311 311
312 312
313 313 ```javascript
314 314 var s = "JavaScript syntax highlighting";
315 315 alert(s);
316 316 ```
317 317
318 318 ```python
319 319 s = "Python syntax highlighting"
320 320 print s
321 321 ```
322 322
323 323 ```
324 324 No language indicated, so no syntax highlighting in Markdown Here (varies on Github).
325 325 But let's throw in a <b>tag</b>.
326 326 ```
327 327
328 328
329 329 ## Tables
330 330
331 331 Tables aren't part of the core Markdown spec, but they are part of GFM and *Markdown Here* supports them. They are an easy way of adding tables to your email -- a task that would otherwise require copy-pasting from another application.
332 332
333 333 ```no-highlight
334 334 Colons can be used to align columns.
335 335
336 336 | Tables | Are | Cool |
337 337 | ------------- |:-------------:| -----:|
338 338 | col 3 is | right-aligned | $1600 |
339 339 | col 2 is | centered | $12 |
340 340 | zebra stripes | are neat | $1 |
341 341
342 342 There must be at least 3 dashes separating each header cell.
343 343 The outer pipes (|) are optional, and you don't need to make the
344 344 raw Markdown line up prettily. You can also use inline Markdown.
345 345
346 346 Markdown | Less | Pretty
347 347 --- | --- | ---
348 348 *Still* | `renders` | **nicely**
349 349 1 | 2 | 3
350 350 ```
351 351
352 352 Colons can be used to align columns.
353 353
354 354 | Tables | Are | Cool |
355 355 | ------------- |:-------------:| -----:|
356 356 | col 3 is | right-aligned | $1600 |
357 357 | col 2 is | centered | $12 |
358 358 | zebra stripes | are neat | $1 |
359 359
360 360 There must be at least 3 dashes separating each header cell. The outer pipes (|) are optional, and you don't need to make the raw Markdown line up prettily. You can also use inline Markdown.
361 361
362 362 Markdown | Less | Pretty
363 363 --- | --- | ---
364 364 *Still* | `renders` | **nicely**
365 365 1 | 2 | 3
366 366
367 367
368 368 ## Blockquotes
369 369
370 370 ```no-highlight
371 371 > Blockquotes are very handy in email to emulate reply text.
372 372 > This line is part of the same quote.
373 373
374 374 Quote break.
375 375
376 376 > This is a very long line that will still be quoted properly when it wraps. Oh boy let's keep writing to make sure this is long enough to actually wrap for everyone. Oh, you can *put* **Markdown** into a blockquote.
377 377 ```
378 378
379 379 > Blockquotes are very handy in email to emulate reply text.
380 380 > This line is part of the same quote.
381 381
382 382 Quote break.
383 383
384 384 > This is a very long line that will still be quoted properly when it wraps. Oh boy let's keep writing to make sure this is long enough to actually wrap for everyone. Oh, you can *put* **Markdown** into a blockquote.
385 385
386 386
387 387 ## Inline HTML
388 388
389 389 You can also use raw HTML in your Markdown, and it'll mostly work pretty well.
390 390
391 391 ```no-highlight
392 392 <dl>
393 393 <dt>Definition list</dt>
394 394 <dd>Is something people use sometimes.</dd>
395 395
396 396 <dt>Markdown in HTML</dt>
397 397 <dd>Does *not* work **very** well. Use HTML <em>tags</em>.</dd>
398 398 </dl>
399 399 ```
400 400
401 401 <dl>
402 402 <dt>Definition list</dt>
403 403 <dd>Is something people use sometimes.</dd>
404 404
405 405 <dt>Markdown in HTML</dt>
406 406 <dd>Does *not* work **very** well. Use HTML <em>tags</em>.</dd>
407 407 </dl>
408 408
409 409
410 410 ## Horizontal Rule
411 411
412 412 ```
413 413 Three or more...
414 414
415 415 ---
416 416
417 417 Hyphens
418 418
419 419 ***
420 420
421 421 Asterisks
422 422
423 423 ___
424 424
425 425 Underscores
426 426 ```
427 427
428 428 Three or more...
429 429
430 430 ---
431 431
432 432 Hyphens
433 433
434 434 ***
435 435
436 436 Asterisks
437 437
438 438 ___
439 439
440 440 Underscores
441 441
442 442
443 443 ## Line Breaks
444 444
445 445 My basic recommendation for learning how line breaks work is to experiment and discover -- hit &lt;Enter&gt; once (i.e., insert one newline), then hit it twice (i.e., insert two newlines), see what happens. You'll soon learn to get what you want. "Markdown Toggle" is your friend.
446 446
447 447 Here are some things to try out:
448 448
449 449 ```
450 450 Here's a line for us to start with.
451 451
452 452 This line is separated from the one above by two newlines, so it will be a *separate paragraph*.
453 453
454 454 This line is also a separate paragraph, but...
455 455 This line is only separated by a single newline, so it's a separate line in the *same paragraph*.
456 456 ```
457 457
458 458 Here's a line for us to start with.
459 459
460 460 This line is separated from the one above by two newlines, so it will be a *separate paragraph*.
461 461
462 462 This line is also a separate paragraph, but...
463 463 This line is only separated by a single newline, so it's a separate line in the *same paragraph*.
464 464
465 465 (Technical note: *Markdown Here* uses GFM line breaks, so there's no need to use MD's two-space line breaks.)
466 466
467 467
468 468 ## Youtube videos
469 469
470 470 They can't be added directly but you can add an image with a link to the video like this:
471 471
472 472 ```no-highlight
473 473 <a href="http://www.youtube.com/watch?feature=player_embedded&v=YOUTUBE_VIDEO_ID_HERE
474 474 " target="_blank"><img src="http://img.youtube.com/vi/YOUTUBE_VIDEO_ID_HERE/0.jpg"
475 475 alt="IMAGE ALT TEXT HERE" width="240" height="180" border="10" /></a>
476 476 ```
477 477
478 478 Or, in pure Markdown, but losing the image sizing and border:
479 479
480 480 ```no-highlight
481 481 [![IMAGE ALT TEXT HERE](http://img.youtube.com/vi/YOUTUBE_VIDEO_ID_HERE/0.jpg)](http://www.youtube.com/watch?v=YOUTUBE_VIDEO_ID_HERE)
482 482 ```
483 483
484 484 Referencing a bug by #bugID in your git commit links it to the slip. For example #1.
485 485
486 486 ---
487 487
488 488 License: [CC-BY](https://creativecommons.org/licenses/by/3.0/)
489 489 """
490 490 raw_rendered_html = MarkupRenderer.markdown(test_md, clean_html=False)
491 491 bleached_rendered_html = MarkupRenderer.markdown(test_md, clean_html=True)
492 492 assert raw_rendered_html == bleached_rendered_html
493 493
494 494
495 495 def test_rst_xss_link():
496 496 xss_rst = "`Link<javascript:alert('XSS: pwned!')>`_"
497 497 rendered_html = MarkupRenderer.rst(xss_rst)
498 498 assert "href=javascript:alert('XSS: pwned!')" not in rendered_html
499 499
500 500
501 501 @pytest.mark.xfail(reason='Bug in docutils. Waiting answer from the author')
502 502 def test_rst_xss_inline_html():
503 503 xss_rst = '<a href="javascript:alert(\'XSS: pwned!\')">link</a>'
504 504 rendered_html = MarkupRenderer.rst(xss_rst)
505 505 assert 'href="javascript:alert(' not in rendered_html
506 506
507 507
508 508 def test_rst_xss_raw_directive():
509 509 xss_rst = '\n'.join([
510 510 '.. raw:: html',
511 511 '',
512 512 ' <a href="javascript:alert(\'XSS: pwned!\')">link</a>'])
513 513 rendered_html = MarkupRenderer.rst(xss_rst)
514 514 assert 'href="javascript:alert(' not in rendered_html
515 515
516 516
517 517 def test_render_rst_template_without_files():
518 518 expected = u'''\
519 519 Pull request updated. Auto status change to |under_review|
520 520
521 521 .. role:: added
522 522 .. role:: removed
523 523 .. parsed-literal::
524 524
525 525 Changed commits:
526 526 * :added:`2 added`
527 527 * :removed:`3 removed`
528 528
529 529 No file changes found
530 530
531 531 .. |under_review| replace:: *"NEW STATUS"*'''
532 532
533 533 params = {
534 534 'under_review_label': 'NEW STATUS',
535 535 'added_commits': ['a', 'b'],
536 536 'removed_commits': ['a', 'b', 'c'],
537 537 'changed_files': [],
538 538 'added_files': [],
539 539 'modified_files': [],
540 540 'removed_files': [],
541 'ancestor_commit_id': 'aaabbbcccdddeee',
541 542 }
542 543 renderer = RstTemplateRenderer()
543 544 rendered = renderer.render('pull_request_update.mako', **params)
544 545 assert expected == rendered
545 546
546 547
547 548 def test_render_rst_template_with_files():
548 549 expected = u'''\
549 550 Pull request updated. Auto status change to |under_review|
550 551
551 552 .. role:: added
552 553 .. role:: removed
553 554 .. parsed-literal::
554 555
555 556 Changed commits:
556 557 * :added:`1 added`
557 558 * :removed:`3 removed`
558 559
559 560 Changed files:
560 * `A /path/a.py <#a_c--68ed34923b68>`_
561 * `A /path/b.js <#a_c--64f90608b607>`_
562 * `M /path/d.js <#a_c--85842bf30c6e>`_
563 * `M /path/Δ™.py <#a_c--d713adf009cd>`_
564 * R /path/ΕΊ.py
561 * `A /path/a.py <#a_c-aaabbbcccddd-68ed34923b68>`_
562 * `A /path/b.js <#a_c-aaabbbcccddd-64f90608b607>`_
563 * `M /path/d.js <#a_c-aaabbbcccddd-85842bf30c6e>`_
564 * `M /path/Δ™.py <#a_c-aaabbbcccddd-d713adf009cd>`_
565 * `R /path/ΕΊ.py`
565 566
566 567 .. |under_review| replace:: *"NEW STATUS"*'''
567 568
568 569 added = ['/path/a.py', '/path/b.js']
569 570 modified = ['/path/d.js', u'/path/Δ™.py']
570 571 removed = [u'/path/ΕΊ.py']
571 572
572 573 params = {
573 574 'under_review_label': 'NEW STATUS',
574 575 'added_commits': ['a'],
575 576 'removed_commits': ['a', 'b', 'c'],
576 577 'changed_files': added + modified + removed,
577 578 'added_files': added,
578 579 'modified_files': modified,
579 580 'removed_files': removed,
581 'ancestor_commit_id': 'aaabbbcccdddeee',
580 582 }
581 583 renderer = RstTemplateRenderer()
582 584 rendered = renderer.render('pull_request_update.mako', **params)
583 585
584 586 assert expected == rendered
585 587
586 588
587 589 def test_render_rst_auto_status_template():
588 590 expected = u'''\
589 591 Auto status change to |new_status|
590 592
591 593 .. |new_status| replace:: *"NEW STATUS"*'''
592 594
593 595 params = {
594 596 'new_status_label': 'NEW STATUS',
595 597 'pull_request': None,
596 598 'commit_id': None,
597 599 }
598 600 renderer = RstTemplateRenderer()
599 601 rendered = renderer.render('auto_status_change.mako', **params)
600 602 assert expected == rendered
601 603
602 604
603 605 @pytest.mark.parametrize(
604 606 "src_path, server_path, is_path, expected",
605 607 [
606 608 ('source.png', '/repo/files/path', lambda p: False,
607 609 '/repo/files/path/source.png'),
608 610
609 611 ('source.png', 'mk/git/blob/master/README.md', lambda p: True,
610 612 '/mk/git/blob/master/source.png'),
611 613
612 614 ('./source.png', 'mk/git/blob/master/README.md', lambda p: True,
613 615 '/mk/git/blob/master/source.png'),
614 616
615 617 ('/source.png', 'mk/git/blob/master/README.md', lambda p: True,
616 618 '/mk/git/blob/master/source.png'),
617 619
618 620 ('./source.png', 'repo/files/path/source.md', lambda p: True,
619 621 '/repo/files/path/source.png'),
620 622
621 623 ('./source.png', '/repo/files/path/file.md', lambda p: True,
622 624 '/repo/files/path/source.png'),
623 625
624 626 ('../source.png', '/repo/files/path/file.md', lambda p: True,
625 627 '/repo/files/source.png'),
626 628
627 629 ('./../source.png', '/repo/files/path/file.md', lambda p: True,
628 630 '/repo/files/source.png'),
629 631
630 632 ('./source.png', '/repo/files/path/file.md', lambda p: True,
631 633 '/repo/files/path/source.png'),
632 634
633 635 ('../../../source.png', 'path/file.md', lambda p: True,
634 636 '/source.png'),
635 637
636 638 ('../../../../../source.png', '/path/file.md', None,
637 639 '/source.png'),
638 640
639 641 ('../../../../../source.png', 'files/path/file.md', None,
640 642 '/source.png'),
641 643
642 644 ('../../../../../https://google.com/image.png', 'files/path/file.md', None,
643 645 '/https://google.com/image.png'),
644 646
645 647 ('https://google.com/image.png', 'files/path/file.md', None,
646 648 'https://google.com/image.png'),
647 649
648 650 ('://foo', '/files/path/file.md', None,
649 651 '://foo'),
650 652
651 653 (u'ν•œκΈ€.png', '/files/path/file.md', None,
652 654 u'/files/path/ν•œκΈ€.png'),
653 655
654 656 ('my custom image.png', '/files/path/file.md', None,
655 657 '/files/path/my custom image.png'),
656 658 ])
657 659 def test_relative_path(src_path, server_path, is_path, expected):
658 660 path = relative_path(src_path, server_path, is_path)
659 661 assert path == expected
660 662
661 663
662 664 @pytest.mark.parametrize(
663 665 "src_html, expected_html",
664 666 [
665 667 ('<div></div>', '<div></div>'),
666 668 ('<img src="/file.png"></img>', '<img src="/path/raw/file.png">'),
667 669 ('<img src="data:abcd"/>', '<img src="data:abcd">'),
668 670 ('<a href="/file.png?raw=1"></a>', '<a href="/path/raw/file.png?raw=1"></a>'),
669 671 ('<a href="/file.png"></a>', '<a href="/path/file.png"></a>'),
670 672 ('<a href="#anchor"></a>', '<a href="#anchor"></a>'),
671 673 ('<a href="./README.md?raw=1"></a>', '<a href="/path/raw/README.md?raw=1"></a>'),
672 674 ('<a href="./README.md"></a>', '<a href="/path/README.md"></a>'),
673 675 ('<a href="../README.md"></a>', '<a href="/README.md"></a>'),
674 676
675 677 ])
676 678 def test_relative_links(src_html, expected_html):
677 679 server_paths = {'raw': '/path/raw/file.md', 'standard': '/path/file.md'}
678 680 assert relative_links(src_html, server_paths=server_paths) == expected_html
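For readers scanning the parametrized tables above, here is one worked case per helper, taken directly from those tables and written as standalone assertions. This is a sketch only; it assumes relative_path and relative_links are importable from the module these tests already import, which sits outside this excerpt:

    # path resolution: '..' climbs one directory above the serving path
    assert relative_path('../source.png', '/repo/files/path/file.md',
                         lambda p: True) == '/repo/files/source.png'

    # link rewriting: img sources are routed through the 'raw' server path
    server_paths = {'raw': '/path/raw/file.md', 'standard': '/path/file.md'}
    assert relative_links('<img src="/file.png"></img>',
                          server_paths=server_paths) == '<img src="/path/raw/file.png">'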
@@ -1,179 +1,179 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model import db
24 24 from rhodecode.model.changeset_status import ChangesetStatusModel
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26
27 27
28 28 pytestmark = [
29 29 pytest.mark.backends("git", "hg"),
30 30 ]
31 31
32 32
33 33 def test_new_pull_request_is_under_review(pr_util, config_stub):
34 34 pull_request = pr_util.create_pull_request()
35 35
36 36 # Expect that the review status is "Under Review"
37 37 expected_review_status = db.ChangesetStatus.STATUS_UNDER_REVIEW
38 38 assert pull_request.calculated_review_status() == expected_review_status
39 39
40 40
41 41 @pytest.mark.parametrize("voted_status", [
42 42 db.ChangesetStatus.STATUS_APPROVED,
43 43 db.ChangesetStatus.STATUS_REJECTED,
44 44 db.ChangesetStatus.STATUS_UNDER_REVIEW,
45 45 ])
46 46 def test_pull_request_under_review_if_one_reviewer_voted(
47 47 pr_util, voted_status, config_stub):
48 48 pull_request = pr_util.create_pull_request()
49 49 pr_util.create_status_votes(
50 50 voted_status, pull_request.reviewers[0])
51 51
52 52 # Expect that the review status is "Under Review"
53 53 expected_review_status = db.ChangesetStatus.STATUS_UNDER_REVIEW
54 54 assert pull_request.calculated_review_status() == expected_review_status
55 55
56 56
57 57 @pytest.mark.parametrize("voted_status", [
58 58 db.ChangesetStatus.STATUS_APPROVED,
59 59 db.ChangesetStatus.STATUS_REJECTED,
60 60 db.ChangesetStatus.STATUS_UNDER_REVIEW,
61 61 ])
62 62 def test_pull_request_has_voted_status_if_all_voted(pr_util, voted_status, config_stub):
63 63 pull_request = pr_util.create_pull_request()
64 64 pr_util.create_status_votes(
65 65 voted_status, *pull_request.reviewers)
66 66
67 67 # Expect that review status is the voted_status
68 68 expected_review_status = voted_status
69 69 assert pull_request.calculated_review_status() == expected_review_status
70 70
71 71
72 72 @pytest.mark.parametrize("voted_status", [
73 73 db.ChangesetStatus.STATUS_APPROVED,
74 74 db.ChangesetStatus.STATUS_REJECTED,
75 75 db.ChangesetStatus.STATUS_UNDER_REVIEW,
76 76 ])
77 77 def test_pull_request_stays_if_update_without_change(
78 78 pr_util, voted_status, config_stub):
79 79 pull_request = pr_util.create_pull_request()
80 80 pr_util.create_status_votes(
81 81 voted_status, *pull_request.reviewers)
82 82
83 83 # Update, without change
84 PullRequestModel().update_commits(pull_request)
84 PullRequestModel().update_commits(pull_request, pull_request.author)
85 85
86 86 # Expect that review status is the voted_status
87 87 expected_review_status = voted_status
88 88 assert pull_request.calculated_review_status() == expected_review_status
89 89
90 90
91 91 @pytest.mark.parametrize("voted_status", [
92 92 db.ChangesetStatus.STATUS_APPROVED,
93 93 db.ChangesetStatus.STATUS_REJECTED,
94 94 db.ChangesetStatus.STATUS_UNDER_REVIEW,
95 95 ])
96 96 def test_pull_request_under_review_if_update(pr_util, voted_status, config_stub):
97 97 pull_request = pr_util.create_pull_request()
98 98 pr_util.create_status_votes(
99 99 voted_status, *pull_request.reviewers)
100 100
101 101 # Update, with change
102 102 pr_util.update_source_repository()
103 PullRequestModel().update_commits(pull_request)
103 PullRequestModel().update_commits(pull_request, pull_request.author)
104 104
105 105 # Expect that review status is the voted_status
106 106 expected_review_status = db.ChangesetStatus.STATUS_UNDER_REVIEW
107 107 assert pull_request.calculated_review_status() == expected_review_status
108 108
109 109
110 110 def test_commit_under_review_if_part_of_new_pull_request(pr_util, config_stub):
111 111 pull_request = pr_util.create_pull_request()
112 112 for commit_id in pull_request.revisions:
113 113 status = ChangesetStatusModel().get_status(
114 114 repo=pr_util.source_repository, revision=commit_id)
115 115 assert status == db.ChangesetStatus.STATUS_UNDER_REVIEW
116 116
117 117
118 118 @pytest.mark.parametrize("voted_status", [
119 119 db.ChangesetStatus.STATUS_APPROVED,
120 120 db.ChangesetStatus.STATUS_REJECTED,
121 121 db.ChangesetStatus.STATUS_UNDER_REVIEW,
122 122 ])
123 123 def test_commit_has_voted_status_after_vote_on_pull_request(
124 124 pr_util, voted_status, config_stub):
125 125 pull_request = pr_util.create_pull_request()
126 126 pr_util.create_status_votes(
127 127 voted_status, pull_request.reviewers[0])
128 128 for commit_id in pull_request.revisions:
129 129 status = ChangesetStatusModel().get_status(
130 130 repo=pr_util.source_repository, revision=commit_id)
131 131 assert status == voted_status
132 132
133 133
134 134 def test_commit_under_review_if_added_to_pull_request(pr_util, config_stub):
135 135 pull_request = pr_util.create_pull_request()
136 136 pr_util.create_status_votes(
137 137 db.ChangesetStatus.STATUS_APPROVED, pull_request.reviewers[0])
138 138 added_commit_id = pr_util.add_one_commit()
139 139
140 140 status = ChangesetStatusModel().get_status(
141 141 repo=pr_util.source_repository, revision=added_commit_id)
142 142 assert status == db.ChangesetStatus.STATUS_UNDER_REVIEW
143 143
144 144
145 145 @pytest.mark.parametrize("voted_status", [
146 146 db.ChangesetStatus.STATUS_APPROVED,
147 147 db.ChangesetStatus.STATUS_REJECTED,
148 148 db.ChangesetStatus.STATUS_UNDER_REVIEW,
149 149 ])
150 150 def test_commit_keeps_status_if_removed_from_pull_request(
151 151 pr_util, voted_status, config_stub):
152 152 pull_request = pr_util.create_pull_request()
153 153 pr_util.add_one_commit()
154 154 pr_util.create_status_votes(voted_status, pull_request.reviewers[0])
155 155
156 156 removed_commit_id = pr_util.remove_one_commit()
157 157
158 158 status = ChangesetStatusModel().get_status(
159 159 repo=pr_util.source_repository, revision=removed_commit_id)
160 160 assert status == voted_status
161 161
162 162
163 163 @pytest.mark.parametrize("voted_status", [
164 164 db.ChangesetStatus.STATUS_APPROVED,
165 165 db.ChangesetStatus.STATUS_REJECTED,
166 166 db.ChangesetStatus.STATUS_UNDER_REVIEW,
167 167 ])
168 168 def test_commit_keeps_status_if_unchanged_after_update_of_pull_request(
169 169 pr_util, voted_status, config_stub):
170 170 pull_request = pr_util.create_pull_request()
171 171 commit_id = pull_request.revisions[-1]
172 172 pr_util.create_status_votes(voted_status, pull_request.reviewers[0])
173 173 pr_util.update_source_repository()
174 PullRequestModel().update_commits(pull_request)
174 PullRequestModel().update_commits(pull_request, pull_request.author)
175 175 assert pull_request.revisions[-1] == commit_id
176 176
177 177 status = ChangesetStatusModel().get_status(
178 178 repo=pr_util.source_repository, revision=commit_id)
179 179 assert status == voted_status
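The hunks above and below track one API change: PullRequestModel.update_commits() now takes the user performing the update as a second argument. A migration sketch based only on the call sites in this changeset (the tests pass pull_request.author; the variable names here are illustrative):

    model = PullRequestModel()
    # before: model.update_commits(pull_request)
    update_response = model.update_commits(pull_request, pull_request.author)
    # later hunks also read update_response.common_ancestor_id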
@@ -1,980 +1,981 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture()
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiple patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 merge_resp = MergeResponse(
54 54 False, False, None, MergeFailureReason.UNKNOWN,
55 55 metadata={'exception': 'MockError'})
56 56 self.merge_patcher = mock.patch.object(
57 57 BackendClass, 'merge', return_value=merge_resp)
58 58 self.workspace_remove_patcher = mock.patch.object(
59 59 BackendClass, 'cleanup_merge_workspace')
60 60
61 61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 62 self.merge_mock = self.merge_patcher.start()
63 63 self.comment_patcher = mock.patch(
64 64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 65 self.comment_patcher.start()
66 66 self.notification_patcher = mock.patch(
67 67 'rhodecode.model.notification.NotificationModel.create')
68 68 self.notification_patcher.start()
69 69 self.helper_patcher = mock.patch(
70 70 'rhodecode.lib.helpers.route_path')
71 71 self.helper_patcher.start()
72 72
73 73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 74 'trigger_pull_request_hook')
75 75 self.hook_mock = self.hook_patcher.start()
76 76
77 77 self.invalidation_patcher = mock.patch(
78 78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 79 self.invalidation_mock = self.invalidation_patcher.start()
80 80
81 81 self.pull_request = pr_util.create_pull_request(
82 82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 86 self.repo_id = self.pull_request.target_repo.repo_id
87 87
88 88 @request.addfinalizer
89 89 def cleanup_pull_request():
90 90 calls = [mock.call(
91 91 self.pull_request, self.pull_request.author, 'create')]
92 92 self.hook_mock.assert_has_calls(calls)
93 93
94 94 self.workspace_remove_patcher.stop()
95 95 self.merge_patcher.stop()
96 96 self.comment_patcher.stop()
97 97 self.notification_patcher.stop()
98 98 self.helper_patcher.stop()
99 99 self.hook_patcher.stop()
100 100 self.invalidation_patcher.stop()
101 101
102 102 return self.pull_request
103 103
104 104 def test_get_all(self, pull_request):
105 105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 106 assert isinstance(prs, list)
107 107 assert len(prs) == 1
108 108
109 109 def test_count_all(self, pull_request):
110 110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 111 assert pr_count == 1
112 112
113 113 def test_get_awaiting_review(self, pull_request):
114 114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 115 assert isinstance(prs, list)
116 116 assert len(prs) == 1
117 117
118 118 def test_count_awaiting_review(self, pull_request):
119 119 pr_count = PullRequestModel().count_awaiting_review(
120 120 pull_request.target_repo)
121 121 assert pr_count == 1
122 122
123 123 def test_get_awaiting_my_review(self, pull_request):
124 124 PullRequestModel().update_reviewers(
125 125 pull_request, [(pull_request.author, ['author'], False, [])],
126 126 pull_request.author)
127 127 Session().commit()
128 128
129 129 prs = PullRequestModel().get_awaiting_my_review(
130 130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 131 assert isinstance(prs, list)
132 132 assert len(prs) == 1
133 133
134 134 def test_count_awaiting_my_review(self, pull_request):
135 135 PullRequestModel().update_reviewers(
136 136 pull_request, [(pull_request.author, ['author'], False, [])],
137 137 pull_request.author)
138 138 Session().commit()
139 139
140 140 pr_count = PullRequestModel().count_awaiting_my_review(
141 141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 142 assert pr_count == 1
143 143
144 144 def test_delete_calls_cleanup_merge(self, pull_request):
145 145 repo_id = pull_request.target_repo.repo_id
146 146 PullRequestModel().delete(pull_request, pull_request.author)
147 147 Session().commit()
148 148
149 149 self.workspace_remove_mock.assert_called_once_with(
150 150 repo_id, self.workspace_id)
151 151
152 152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 153 PullRequestModel().close_pull_request(
154 154 pull_request, pull_request.author)
155 155 Session().commit()
156 156
157 157 repo_id = pull_request.target_repo.repo_id
158 158
159 159 self.workspace_remove_mock.assert_called_once_with(
160 160 repo_id, self.workspace_id)
161 161 self.hook_mock.assert_called_with(
162 162 self.pull_request, self.pull_request.author, 'close')
163 163
164 164 def test_merge_status(self, pull_request):
165 165 self.merge_mock.return_value = MergeResponse(
166 166 True, False, None, MergeFailureReason.NONE)
167 167
168 168 assert pull_request._last_merge_source_rev is None
169 169 assert pull_request._last_merge_target_rev is None
170 170 assert pull_request.last_merge_status is None
171 171
172 172 status, msg = PullRequestModel().merge_status(pull_request)
173 173 assert status is True
174 174 assert msg == 'This pull request can be automatically merged.'
175 175 self.merge_mock.assert_called_with(
176 176 self.repo_id, self.workspace_id,
177 177 pull_request.target_ref_parts,
178 178 pull_request.source_repo.scm_instance(),
179 179 pull_request.source_ref_parts, dry_run=True,
180 180 use_rebase=False, close_branch=False)
181 181
182 182 assert pull_request._last_merge_source_rev == self.source_commit
183 183 assert pull_request._last_merge_target_rev == self.target_commit
184 184 assert pull_request.last_merge_status is MergeFailureReason.NONE
185 185
186 186 self.merge_mock.reset_mock()
187 187 status, msg = PullRequestModel().merge_status(pull_request)
188 188 assert status is True
189 189 assert msg == 'This pull request can be automatically merged.'
190 190 assert self.merge_mock.called is False
191 191
192 192 def test_merge_status_known_failure(self, pull_request):
193 193 self.merge_mock.return_value = MergeResponse(
194 194 False, False, None, MergeFailureReason.MERGE_FAILED,
195 195 metadata={'unresolved_files': 'file1'})
196 196
197 197 assert pull_request._last_merge_source_rev is None
198 198 assert pull_request._last_merge_target_rev is None
199 199 assert pull_request.last_merge_status is None
200 200
201 201 status, msg = PullRequestModel().merge_status(pull_request)
202 202 assert status is False
203 203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
204 204 self.merge_mock.assert_called_with(
205 205 self.repo_id, self.workspace_id,
206 206 pull_request.target_ref_parts,
207 207 pull_request.source_repo.scm_instance(),
208 208 pull_request.source_ref_parts, dry_run=True,
209 209 use_rebase=False, close_branch=False)
210 210
211 211 assert pull_request._last_merge_source_rev == self.source_commit
212 212 assert pull_request._last_merge_target_rev == self.target_commit
213 213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
214 214
215 215 self.merge_mock.reset_mock()
216 216 status, msg = PullRequestModel().merge_status(pull_request)
217 217 assert status is False
218 218 assert msg == 'This pull request cannot be merged because of merge conflicts. '
219 219 assert self.merge_mock.called is False
220 220
221 221 def test_merge_status_unknown_failure(self, pull_request):
222 222 self.merge_mock.return_value = MergeResponse(
223 223 False, False, None, MergeFailureReason.UNKNOWN,
224 224 metadata={'exception': 'MockError'})
225 225
226 226 assert pull_request._last_merge_source_rev is None
227 227 assert pull_request._last_merge_target_rev is None
228 228 assert pull_request.last_merge_status is None
229 229
230 230 status, msg = PullRequestModel().merge_status(pull_request)
231 231 assert status is False
232 232 assert msg == (
233 233 'This pull request cannot be merged because of an unhandled exception. '
234 234 'MockError')
235 235 self.merge_mock.assert_called_with(
236 236 self.repo_id, self.workspace_id,
237 237 pull_request.target_ref_parts,
238 238 pull_request.source_repo.scm_instance(),
239 239 pull_request.source_ref_parts, dry_run=True,
240 240 use_rebase=False, close_branch=False)
241 241
242 242 assert pull_request._last_merge_source_rev is None
243 243 assert pull_request._last_merge_target_rev is None
244 244 assert pull_request.last_merge_status is None
245 245
246 246 self.merge_mock.reset_mock()
247 247 status, msg = PullRequestModel().merge_status(pull_request)
248 248 assert status is False
249 249 assert msg == (
250 250 'This pull request cannot be merged because of an unhandled exception. '
251 251 'MockError')
252 252 assert self.merge_mock.called is True
253 253
254 254 def test_merge_status_when_target_is_locked(self, pull_request):
255 255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
256 256 status, msg = PullRequestModel().merge_status(pull_request)
257 257 assert status is False
258 258 assert msg == (
259 259 'This pull request cannot be merged because the target repository '
260 260 'is locked by user:1.')
261 261
262 262 def test_merge_status_requirements_check_target(self, pull_request):
263 263
264 264 def has_largefiles(self, repo):
265 265 return repo == pull_request.source_repo
266 266
267 267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
268 268 with patcher:
269 269 status, msg = PullRequestModel().merge_status(pull_request)
270 270
271 271 assert status is False
272 272 assert msg == 'Target repository large files support is disabled.'
273 273
274 274 def test_merge_status_requirements_check_source(self, pull_request):
275 275
276 276 def has_largefiles(self, repo):
277 277 return repo == pull_request.target_repo
278 278
279 279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
280 280 with patcher:
281 281 status, msg = PullRequestModel().merge_status(pull_request)
282 282
283 283 assert status is False
284 284 assert msg == 'Source repository large files support is disabled.'
285 285
286 286 def test_merge(self, pull_request, merge_extras):
287 287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
288 288 merge_ref = Reference(
289 289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
290 290 self.merge_mock.return_value = MergeResponse(
291 291 True, True, merge_ref, MergeFailureReason.NONE)
292 292
293 293 merge_extras['repository'] = pull_request.target_repo.repo_name
294 294 PullRequestModel().merge_repo(
295 295 pull_request, pull_request.author, extras=merge_extras)
296 296 Session().commit()
297 297
298 298 message = (
299 299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
300 300 u'\n\n {pr_title}'.format(
301 301 pr_id=pull_request.pull_request_id,
302 302 source_repo=safe_unicode(
303 303 pull_request.source_repo.scm_instance().name),
304 304 source_ref_name=pull_request.source_ref_parts.name,
305 305 pr_title=safe_unicode(pull_request.title)
306 306 )
307 307 )
308 308 self.merge_mock.assert_called_with(
309 309 self.repo_id, self.workspace_id,
310 310 pull_request.target_ref_parts,
311 311 pull_request.source_repo.scm_instance(),
312 312 pull_request.source_ref_parts,
313 313 user_name=user.short_contact, user_email=user.email, message=message,
314 314 use_rebase=False, close_branch=False
315 315 )
316 316 self.invalidation_mock.assert_called_once_with(
317 317 pull_request.target_repo.repo_name)
318 318
319 319 self.hook_mock.assert_called_with(
320 320 self.pull_request, self.pull_request.author, 'merge')
321 321
322 322 pull_request = PullRequest.get(pull_request.pull_request_id)
323 323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
324 324
325 325 def test_merge_with_status_lock(self, pull_request, merge_extras):
326 326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
327 327 merge_ref = Reference(
328 328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
329 329 self.merge_mock.return_value = MergeResponse(
330 330 True, True, merge_ref, MergeFailureReason.NONE)
331 331
332 332 merge_extras['repository'] = pull_request.target_repo.repo_name
333 333
334 334 with pull_request.set_state(PullRequest.STATE_UPDATING):
335 335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
336 336 PullRequestModel().merge_repo(
337 337 pull_request, pull_request.author, extras=merge_extras)
338 338 Session().commit()
339 339
340 340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
341 341
342 342 message = (
343 343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
344 344 u'\n\n {pr_title}'.format(
345 345 pr_id=pull_request.pull_request_id,
346 346 source_repo=safe_unicode(
347 347 pull_request.source_repo.scm_instance().name),
348 348 source_ref_name=pull_request.source_ref_parts.name,
349 349 pr_title=safe_unicode(pull_request.title)
350 350 )
351 351 )
352 352 self.merge_mock.assert_called_with(
353 353 self.repo_id, self.workspace_id,
354 354 pull_request.target_ref_parts,
355 355 pull_request.source_repo.scm_instance(),
356 356 pull_request.source_ref_parts,
357 357 user_name=user.short_contact, user_email=user.email, message=message,
358 358 use_rebase=False, close_branch=False
359 359 )
360 360 self.invalidation_mock.assert_called_once_with(
361 361 pull_request.target_repo.repo_name)
362 362
363 363 self.hook_mock.assert_called_with(
364 364 self.pull_request, self.pull_request.author, 'merge')
365 365
366 366 pull_request = PullRequest.get(pull_request.pull_request_id)
367 367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
368 368
369 369 def test_merge_failed(self, pull_request, merge_extras):
370 370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
371 371 merge_ref = Reference(
372 372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
373 373 self.merge_mock.return_value = MergeResponse(
374 374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
375 375
376 376 merge_extras['repository'] = pull_request.target_repo.repo_name
377 377 PullRequestModel().merge_repo(
378 378 pull_request, pull_request.author, extras=merge_extras)
379 379 Session().commit()
380 380
381 381 message = (
382 382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
383 383 u'\n\n {pr_title}'.format(
384 384 pr_id=pull_request.pull_request_id,
385 385 source_repo=safe_unicode(
386 386 pull_request.source_repo.scm_instance().name),
387 387 source_ref_name=pull_request.source_ref_parts.name,
388 388 pr_title=safe_unicode(pull_request.title)
389 389 )
390 390 )
391 391 self.merge_mock.assert_called_with(
392 392 self.repo_id, self.workspace_id,
393 393 pull_request.target_ref_parts,
394 394 pull_request.source_repo.scm_instance(),
395 395 pull_request.source_ref_parts,
396 396 user_name=user.short_contact, user_email=user.email, message=message,
397 397 use_rebase=False, close_branch=False
398 398 )
399 399
400 400 pull_request = PullRequest.get(pull_request.pull_request_id)
401 401 assert self.invalidation_mock.called is False
402 402 assert pull_request.merge_rev is None
403 403
404 404 def test_get_commit_ids(self, pull_request):
405 405 # The PR has not been merged yet, so expect an exception
406 406 with pytest.raises(ValueError):
407 407 PullRequestModel()._get_commit_ids(pull_request)
408 408
409 409 # Merge revision is in the revisions list
410 410 pull_request.merge_rev = pull_request.revisions[0]
411 411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
412 412 assert commit_ids == pull_request.revisions
413 413
414 414 # Merge revision is not in the revisions list
415 415 pull_request.merge_rev = 'f000' * 10
416 416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
417 417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
418 418
419 419 def test_get_diff_from_pr_version(self, pull_request):
420 420 source_repo = pull_request.source_repo
421 421 source_ref_id = pull_request.source_ref_parts.commit_id
422 422 target_ref_id = pull_request.target_ref_parts.commit_id
423 423 diff = PullRequestModel()._get_diff_from_pr_or_version(
424 424 source_repo, source_ref_id, target_ref_id,
425 425 hide_whitespace_changes=False, diff_context=6)
426 426 assert 'file_1' in diff.raw
427 427
428 428 def test_generate_title_returns_unicode(self):
429 429 title = PullRequestModel().generate_pullrequest_title(
430 430 source='source-dummy',
431 431 source_ref='source-ref-dummy',
432 432 target='target-dummy',
433 433 )
434 434 assert type(title) == unicode
435 435
436 436 @pytest.mark.parametrize('title, has_wip', [
437 437 ('hello', False),
438 438 ('hello wip', False),
439 439 ('hello wip: xxx', False),
440 440 ('[wip] hello', True),
441 441 ('[wip] hello', True),
442 442 ('wip: hello', True),
443 443 ('wip hello', True),
444 444
445 445 ])
446 446 def test_wip_title_marker(self, pull_request, title, has_wip):
447 447 pull_request.title = title
448 448 assert pull_request.work_in_progress == has_wip
449 449
450 450
451 451 @pytest.mark.usefixtures('config_stub')
452 452 class TestIntegrationMerge(object):
453 453 @pytest.mark.parametrize('extra_config', (
454 454 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
455 455 ))
456 456 def test_merge_triggers_push_hooks(
457 457 self, pr_util, user_admin, capture_rcextensions, merge_extras,
458 458 extra_config):
459 459
460 460 pull_request = pr_util.create_pull_request(
461 461 approved=True, mergeable=True)
462 462 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
463 463 merge_extras['repository'] = pull_request.target_repo.repo_name
464 464 Session().commit()
465 465
466 466 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
467 467 merge_state = PullRequestModel().merge_repo(
468 468 pull_request, user_admin, extras=merge_extras)
469 469 Session().commit()
470 470
471 471 assert merge_state.executed
472 472 assert '_pre_push_hook' in capture_rcextensions
473 473 assert '_push_hook' in capture_rcextensions
474 474
475 475 def test_merge_can_be_rejected_by_pre_push_hook(
476 476 self, pr_util, user_admin, capture_rcextensions, merge_extras):
477 477 pull_request = pr_util.create_pull_request(
478 478 approved=True, mergeable=True)
479 479 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
480 480 merge_extras['repository'] = pull_request.target_repo.repo_name
481 481 Session().commit()
482 482
483 483 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
484 484 pre_pull.side_effect = RepositoryError("Disallow push!")
485 485 merge_status = PullRequestModel().merge_repo(
486 486 pull_request, user_admin, extras=merge_extras)
487 487 Session().commit()
488 488
489 489 assert not merge_status.executed
490 490 assert 'pre_push' not in capture_rcextensions
491 491 assert 'post_push' not in capture_rcextensions
492 492
493 493 def test_merge_fails_if_target_is_locked(
494 494 self, pr_util, user_regular, merge_extras):
495 495 pull_request = pr_util.create_pull_request(
496 496 approved=True, mergeable=True)
497 497 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
498 498 pull_request.target_repo.locked = locked_by
499 499 # TODO: johbo: Check if this can work based on the database; currently
500 500 # all data is pre-computed, which is why just updating the DB is not
501 501 # enough.
502 502 merge_extras['locked_by'] = locked_by
503 503 merge_extras['repository'] = pull_request.target_repo.repo_name
504 504 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
505 505 Session().commit()
506 506 merge_status = PullRequestModel().merge_repo(
507 507 pull_request, user_regular, extras=merge_extras)
508 508 Session().commit()
509 509
510 510 assert not merge_status.executed
511 511
512 512
513 513 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
514 514 (False, 1, 0),
515 515 (True, 0, 1),
516 516 ])
517 517 def test_outdated_comments(
518 518 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
519 519 pull_request = pr_util.create_pull_request()
520 520 pr_util.create_inline_comment(file_path='not_in_updated_diff')
521 521
522 522 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
523 523 pr_util.add_one_commit()
524 524 assert_inline_comments(
525 525 pull_request, visible=inlines_count, outdated=outdated_count)
526 526 outdated_comment_mock.assert_called_with(pull_request)
527 527
528 528
529 529 @pytest.mark.parametrize('mr_type, expected_msg', [
530 530 (MergeFailureReason.NONE,
531 531 'This pull request can be automatically merged.'),
532 532 (MergeFailureReason.UNKNOWN,
533 533 'This pull request cannot be merged because of an unhandled exception. CRASH'),
534 534 (MergeFailureReason.MERGE_FAILED,
535 535 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
536 536 (MergeFailureReason.PUSH_FAILED,
537 537 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
538 538 (MergeFailureReason.TARGET_IS_NOT_HEAD,
539 539 'This pull request cannot be merged because the target `ref_name` is not a head.'),
540 540 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
541 541 'This pull request cannot be merged because the source contains more branches than the target.'),
542 542 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
543 543 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
544 544 (MergeFailureReason.TARGET_IS_LOCKED,
545 545 'This pull request cannot be merged because the target repository is locked by user:123.'),
546 546 (MergeFailureReason.MISSING_TARGET_REF,
547 547 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
548 548 (MergeFailureReason.MISSING_SOURCE_REF,
549 549 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
550 550 (MergeFailureReason.SUBREPO_MERGE_FAILED,
551 551 'This pull request cannot be merged because of conflicts related to sub repositories.'),
552 552
553 553 ])
554 554 def test_merge_response_message(mr_type, expected_msg):
555 555 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
556 556 metadata = {
557 557 'unresolved_files': 'CONFLICT_FILE',
558 558 'exception': "CRASH",
559 559 'target': 'some-repo',
560 560 'merge_commit': 'merge_commit',
561 561 'target_ref': merge_ref,
562 562 'source_ref': merge_ref,
563 563 'heads': ','.join(['a', 'b', 'c']),
564 564 'locked_by': 'user:123'
565 565 }
566 566
567 567 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
568 568 assert merge_response.merge_status_message == expected_msg
569 569
570 570
571 571 @pytest.fixture()
572 572 def merge_extras(user_regular):
573 573 """
574 574 Context for the vcs operation when running a merge.
575 575 """
576 576 extras = {
577 577 'ip': '127.0.0.1',
578 578 'username': user_regular.username,
579 579 'user_id': user_regular.user_id,
580 580 'action': 'push',
581 581 'repository': 'fake_target_repo_name',
582 582 'scm': 'git',
583 583 'config': 'fake_config_ini_path',
584 584 'repo_store': '',
585 585 'make_lock': None,
586 586 'locked_by': [None, None, None],
587 587 'server_url': 'http://test.example.com:5000',
588 588 'hooks': ['push', 'pull'],
589 589 'is_shadow_repo': False,
590 590 }
591 591 return extras
592 592
593 593
594 594 @pytest.mark.usefixtures('config_stub')
595 595 class TestUpdateCommentHandling(object):
596 596
597 597 @pytest.fixture(autouse=True, scope='class')
598 598 def enable_outdated_comments(self, request, baseapp):
599 599 config_patch = mock.patch.dict(
600 600 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
601 601 config_patch.start()
602 602
603 603 @request.addfinalizer
604 604 def cleanup():
605 605 config_patch.stop()
606 606
607 607 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
608 608 commits = [
609 609 {'message': 'a'},
610 610 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
611 611 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
612 612 ]
613 613 pull_request = pr_util.create_pull_request(
614 614 commits=commits, target_head='a', source_head='b', revisions=['b'])
615 615 pr_util.create_inline_comment(file_path='file_b')
616 616 pr_util.add_one_commit(head='c')
617 617
618 618 assert_inline_comments(pull_request, visible=1, outdated=0)
619 619
620 620 def test_comment_stays_unflagged_on_change_above(self, pr_util):
621 621 original_content = ''.join(
622 622 ['line {}\n'.format(x) for x in range(1, 11)])
623 623 updated_content = 'new_line_at_top\n' + original_content
624 624 commits = [
625 625 {'message': 'a'},
626 626 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
627 627 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
628 628 ]
629 629 pull_request = pr_util.create_pull_request(
630 630 commits=commits, target_head='a', source_head='b', revisions=['b'])
631 631
632 632 with outdated_comments_patcher():
633 633 comment = pr_util.create_inline_comment(
634 634 line_no=u'n8', file_path='file_b')
635 635 pr_util.add_one_commit(head='c')
636 636
637 637 assert_inline_comments(pull_request, visible=1, outdated=0)
638 638 assert comment.line_no == u'n9'
639 639
640 640 def test_comment_stays_unflagged_on_change_below(self, pr_util):
641 641 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
642 642 updated_content = original_content + 'new_line_at_end\n'
643 643 commits = [
644 644 {'message': 'a'},
645 645 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
646 646 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
647 647 ]
648 648 pull_request = pr_util.create_pull_request(
649 649 commits=commits, target_head='a', source_head='b', revisions=['b'])
650 650 pr_util.create_inline_comment(file_path='file_b')
651 651 pr_util.add_one_commit(head='c')
652 652
653 653 assert_inline_comments(pull_request, visible=1, outdated=0)
654 654
655 655 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
656 656 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
657 657 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
658 658 change_lines = list(base_lines)
659 659 change_lines.insert(6, 'line 6a added\n')
660 660
661 661 # Changes at the edges of the visible context (first and last lines)
662 662 update_lines = list(change_lines)
663 663 update_lines[0] = 'line 1 changed\n'
664 664 update_lines[-1] = 'line 12 changed\n'
665 665
666 666 def file_b(lines):
667 667 return FileNode('file_b', ''.join(lines))
668 668
669 669 commits = [
670 670 {'message': 'a', 'added': [file_b(base_lines)]},
671 671 {'message': 'b', 'changed': [file_b(change_lines)]},
672 672 {'message': 'c', 'changed': [file_b(update_lines)]},
673 673 ]
674 674
675 675 pull_request = pr_util.create_pull_request(
676 676 commits=commits, target_head='a', source_head='b', revisions=['b'])
677 677 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
678 678
679 679 with outdated_comments_patcher():
680 680 pr_util.add_one_commit(head='c')
681 681 assert_inline_comments(pull_request, visible=0, outdated=1)
682 682
683 683 @pytest.mark.parametrize("change, content", [
684 684 ('changed', 'changed\n'),
685 685 ('removed', ''),
686 686 ], ids=['changed', 'removed'])
687 687 def test_comment_flagged_on_change(self, pr_util, change, content):
688 688 commits = [
689 689 {'message': 'a'},
690 690 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
691 691 {'message': 'c', change: [FileNode('file_b', content)]},
692 692 ]
693 693 pull_request = pr_util.create_pull_request(
694 694 commits=commits, target_head='a', source_head='b', revisions=['b'])
695 695 pr_util.create_inline_comment(file_path='file_b')
696 696
697 697 with outdated_comments_patcher():
698 698 pr_util.add_one_commit(head='c')
699 699 assert_inline_comments(pull_request, visible=0, outdated=1)
700 700
701 701
702 702 @pytest.mark.usefixtures('config_stub')
703 703 class TestUpdateChangedFiles(object):
704 704
705 705 def test_no_changes_on_unchanged_diff(self, pr_util):
706 706 commits = [
707 707 {'message': 'a'},
708 708 {'message': 'b',
709 709 'added': [FileNode('file_b', 'test_content b\n')]},
710 710 {'message': 'c',
711 711 'added': [FileNode('file_c', 'test_content c\n')]},
712 712 ]
713 713 # open a PR from a to b, adding file_b
714 714 pull_request = pr_util.create_pull_request(
715 715 commits=commits, target_head='a', source_head='b', revisions=['b'],
716 716 name_suffix='per-file-review')
717 717
718 718 # modify PR adding new file file_c
719 719 pr_util.add_one_commit(head='c')
720 720
721 721 assert_pr_file_changes(
722 722 pull_request,
723 723 added=['file_c'],
724 724 modified=[],
725 725 removed=[])
726 726
727 727 def test_modify_and_undo_modification_diff(self, pr_util):
728 728 commits = [
729 729 {'message': 'a'},
730 730 {'message': 'b',
731 731 'added': [FileNode('file_b', 'test_content b\n')]},
732 732 {'message': 'c',
733 733 'changed': [FileNode('file_b', 'test_content b modified\n')]},
734 734 {'message': 'd',
735 735 'changed': [FileNode('file_b', 'test_content b\n')]},
736 736 ]
737 737 # open a PR from a to b, adding file_b
738 738 pull_request = pr_util.create_pull_request(
739 739 commits=commits, target_head='a', source_head='b', revisions=['b'],
740 740 name_suffix='per-file-review')
741 741
742 742 # modify PR modifying file file_b
743 743 pr_util.add_one_commit(head='c')
744 744
745 745 assert_pr_file_changes(
746 746 pull_request,
747 747 added=[],
748 748 modified=['file_b'],
749 749 removed=[])
750 750
751 751 # move the head again to d, which rolls back the change,
752 752 # meaning we should indicate no changes
753 753 pr_util.add_one_commit(head='d')
754 754
755 755 assert_pr_file_changes(
756 756 pull_request,
757 757 added=[],
758 758 modified=[],
759 759 removed=[])
760 760
761 761 def test_updated_all_files_in_pr(self, pr_util):
762 762 commits = [
763 763 {'message': 'a'},
764 764 {'message': 'b', 'added': [
765 765 FileNode('file_a', 'test_content a\n'),
766 766 FileNode('file_b', 'test_content b\n'),
767 767 FileNode('file_c', 'test_content c\n')]},
768 768 {'message': 'c', 'changed': [
769 769 FileNode('file_a', 'test_content a changed\n'),
770 770 FileNode('file_b', 'test_content b changed\n'),
771 771 FileNode('file_c', 'test_content c changed\n')]},
772 772 ]
773 773 # open a PR from a to b, changing 3 files
774 774 pull_request = pr_util.create_pull_request(
775 775 commits=commits, target_head='a', source_head='b', revisions=['b'],
776 776 name_suffix='per-file-review')
777 777
778 778 pr_util.add_one_commit(head='c')
779 779
780 780 assert_pr_file_changes(
781 781 pull_request,
782 782 added=[],
783 783 modified=['file_a', 'file_b', 'file_c'],
784 784 removed=[])
785 785
786 786 def test_updated_and_removed_all_files_in_pr(self, pr_util):
787 787 commits = [
788 788 {'message': 'a'},
789 789 {'message': 'b', 'added': [
790 790 FileNode('file_a', 'test_content a\n'),
791 791 FileNode('file_b', 'test_content b\n'),
792 792 FileNode('file_c', 'test_content c\n')]},
793 793 {'message': 'c', 'removed': [
794 794 FileNode('file_a', 'test_content a changed\n'),
795 795 FileNode('file_b', 'test_content b changed\n'),
796 796 FileNode('file_c', 'test_content c changed\n')]},
797 797 ]
798 798 # open a PR from a to b, removing 3 files
799 799 pull_request = pr_util.create_pull_request(
800 800 commits=commits, target_head='a', source_head='b', revisions=['b'],
801 801 name_suffix='per-file-review')
802 802
803 803 pr_util.add_one_commit(head='c')
804 804
805 805 assert_pr_file_changes(
806 806 pull_request,
807 807 added=[],
808 808 modified=[],
809 809 removed=['file_a', 'file_b', 'file_c'])
810 810
811 811
812 812 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
813 813 model = PullRequestModel()
814 814 pull_request = pr_util.create_pull_request()
815 815 pr_util.update_source_repository()
816 816
817 model.update_commits(pull_request)
817 model.update_commits(pull_request, pull_request.author)
818 818
819 819 # Expect that it has a version entry now
820 820 assert len(model.get_versions(pull_request)) == 1
821 821
822 822
823 823 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
824 824 pull_request = pr_util.create_pull_request()
825 825 model = PullRequestModel()
826 model.update_commits(pull_request)
826 model.update_commits(pull_request, pull_request.author)
827 827
828 828 # Expect that it still has no versions
829 829 assert len(model.get_versions(pull_request)) == 0
830 830
831 831
832 832 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
833 833 model = PullRequestModel()
834 834 pull_request = pr_util.create_pull_request()
835 835 comment = pr_util.create_comment()
836 836 pr_util.update_source_repository()
837 837
838 model.update_commits(pull_request)
838 model.update_commits(pull_request, pull_request.author)
839 839
840 840 # Expect that the comment is linked to the pr version now
841 841 assert comment.pull_request_version == model.get_versions(pull_request)[0]
842 842
843 843
844 844 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
845 845 model = PullRequestModel()
846 846 pull_request = pr_util.create_pull_request()
847 847 pr_util.update_source_repository()
848 848 pr_util.update_source_repository()
849 849
850 model.update_commits(pull_request)
850 update_response = model.update_commits(pull_request, pull_request.author)
851 851
852 commit_id = update_response.common_ancestor_id
852 853 # Expect to find a new comment about the change
853 854 expected_message = textwrap.dedent(
854 855 """\
855 856 Pull request updated. Auto status change to |under_review|
856 857
857 858 .. role:: added
858 859 .. role:: removed
859 860 .. parsed-literal::
860 861
861 862 Changed commits:
862 863 * :added:`1 added`
863 864 * :removed:`0 removed`
864 865
865 866 Changed files:
866 * `A file_2 <#a_c--92ed3b5f07b4>`_
867 * `A file_2 <#a_c-{}-92ed3b5f07b4>`_
867 868
868 869 .. |under_review| replace:: *"Under Review"*"""
869 )
870 ).format(commit_id[:12])
870 871 pull_request_comments = sorted(
871 872 pull_request.comments, key=lambda c: c.modified_at)
872 873 update_comment = pull_request_comments[-1]
873 874 assert update_comment.text == expected_message
874 875
875 876
876 877 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
877 878 pull_request = pr_util.create_pull_request()
878 879
879 880 # Avoiding default values
880 881 pull_request.status = PullRequest.STATUS_CLOSED
881 882 pull_request._last_merge_source_rev = "0" * 40
882 883 pull_request._last_merge_target_rev = "1" * 40
883 884 pull_request.last_merge_status = 1
884 885 pull_request.merge_rev = "2" * 40
885 886
886 887 # Remember automatic values
887 888 created_on = pull_request.created_on
888 889 updated_on = pull_request.updated_on
889 890
890 891 # Create a new version of the pull request
891 892 version = PullRequestModel()._create_version_from_snapshot(pull_request)
892 893
893 894 # Check attributes
894 895 assert version.title == pr_util.create_parameters['title']
895 896 assert version.description == pr_util.create_parameters['description']
896 897 assert version.status == PullRequest.STATUS_CLOSED
897 898
898 899 # versions get an updated created_on
899 900 assert version.created_on != created_on
900 901
901 902 assert version.updated_on == updated_on
902 903 assert version.user_id == pull_request.user_id
903 904 assert version.revisions == pr_util.create_parameters['revisions']
904 905 assert version.source_repo == pr_util.source_repository
905 906 assert version.source_ref == pr_util.create_parameters['source_ref']
906 907 assert version.target_repo == pr_util.target_repository
907 908 assert version.target_ref == pr_util.create_parameters['target_ref']
908 909 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
909 910 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
910 911 assert version.last_merge_status == pull_request.last_merge_status
911 912 assert version.merge_rev == pull_request.merge_rev
912 913 assert version.pull_request == pull_request
913 914
914 915
915 916 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
916 917 version1 = pr_util.create_version_of_pull_request()
917 918 comment_linked = pr_util.create_comment(linked_to=version1)
918 919 comment_unlinked = pr_util.create_comment()
919 920 version2 = pr_util.create_version_of_pull_request()
920 921
921 922 PullRequestModel()._link_comments_to_version(version2)
922 923 Session().commit()
923 924
924 925 # Expect that only the new comment is linked to version2
925 926 assert (
926 927 comment_unlinked.pull_request_version_id ==
927 928 version2.pull_request_version_id)
928 929 assert (
929 930 comment_linked.pull_request_version_id ==
930 931 version1.pull_request_version_id)
931 932 assert (
932 933 comment_unlinked.pull_request_version_id !=
933 934 comment_linked.pull_request_version_id)
934 935
935 936
936 937 def test_calculate_commits():
937 938 old_ids = [1, 2, 3]
938 939 new_ids = [1, 3, 4, 5]
939 940 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
940 941 assert change.added == [4, 5]
941 942 assert change.common == [1, 3]
942 943 assert change.removed == [2]
943 944 assert change.total == [1, 3, 4, 5]
944 945
945 946
946 947 def assert_inline_comments(pull_request, visible=None, outdated=None):
947 948 if visible is not None:
948 949 inline_comments = CommentsModel().get_inline_comments(
949 950 pull_request.target_repo.repo_id, pull_request=pull_request)
950 951 inline_cnt = CommentsModel().get_inline_comments_count(
951 952 inline_comments)
952 953 assert inline_cnt == visible
953 954 if outdated is not None:
954 955 outdated_comments = CommentsModel().get_outdated_comments(
955 956 pull_request.target_repo.repo_id, pull_request)
956 957 assert len(outdated_comments) == outdated
957 958
958 959
959 960 def assert_pr_file_changes(
960 961 pull_request, added=None, modified=None, removed=None):
961 962 pr_versions = PullRequestModel().get_versions(pull_request)
962 963 # always use the first version, i.e. the original PR, to calculate changes
963 964 pull_request_version = pr_versions[0]
964 965 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
965 966 pull_request, pull_request_version)
966 967 file_changes = PullRequestModel()._calculate_file_changes(
967 968 old_diff_data, new_diff_data)
968 969
969 970 assert added == file_changes.added, \
970 971 'expected added:%s vs value:%s' % (added, file_changes.added)
971 972 assert modified == file_changes.modified, \
972 973 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
973 974 assert removed == file_changes.removed, \
974 975 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
975 976
976 977
977 978 def outdated_comments_patcher(use_outdated=True):
978 979 return mock.patch.object(
979 980 CommentsModel, 'use_outdated_comments',
980 981 return_value=use_outdated)
@@ -1,1831 +1,1831 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39 import pyramid.paster
40 40
41 41 import rhodecode
42 42 from rhodecode.lib.utils2 import AttributeDict
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 47 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.repo_group import RepoGroupModel
52 52 from rhodecode.model.user import UserModel
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.user_group import UserGroupModel
55 55 from rhodecode.model.integration import IntegrationModel
56 56 from rhodecode.integrations import integration_type_registry
57 57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 58 from rhodecode.lib.utils import repo2db_mapper
59 59 from rhodecode.lib.vcs.backends import get_backend
60 60 from rhodecode.lib.vcs.nodes import FileNode
61 61 from rhodecode.tests import (
62 62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 64 TEST_USER_REGULAR_PASS)
65 65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 66 from rhodecode.tests.fixture import Fixture
67 67 from rhodecode.config import utils as config_utils
68 68
69 69
70 70 def _split_comma(value):
71 71 return value.split(',')
72 72
73 73
74 74 def pytest_addoption(parser):
75 75 parser.addoption(
76 76 '--keep-tmp-path', action='store_true',
77 77 help="Keep the test temporary directories")
78 78 parser.addoption(
79 79 '--backends', action='store', type=_split_comma,
80 80 default=['git', 'hg', 'svn'],
81 81 help="Select which backends to test for backend specific tests.")
82 82 parser.addoption(
83 83 '--dbs', action='store', type=_split_comma,
84 84 default=['sqlite'],
85 85 help="Select which database to test for database specific tests. "
86 86 "Possible options are sqlite,postgres,mysql")
87 87 parser.addoption(
88 88 '--appenlight', '--ae', action='store_true',
89 89 help="Track statistics in appenlight.")
90 90 parser.addoption(
91 91 '--appenlight-api-key', '--ae-key',
92 92 help="API key for Appenlight.")
93 93 parser.addoption(
94 94 '--appenlight-url', '--ae-url',
95 95 default="https://ae.rhodecode.com",
96 96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 97 parser.addoption(
98 98 '--sqlite-connection-string', action='store',
99 99 default='', help="Connection string for the dbs tests with SQLite")
100 100 parser.addoption(
101 101 '--postgres-connection-string', action='store',
102 102 default='', help="Connection string for the dbs tests with Postgres")
103 103 parser.addoption(
104 104 '--mysql-connection-string', action='store',
105 105 default='', help="Connection string for the dbs tests with MySQL")
106 106 parser.addoption(
107 107 '--repeat', type=int, default=100,
108 108 help="Number of repetitions in performance tests.")
109 109
110 110
111 111 def pytest_configure(config):
112 112 from rhodecode.config import patches
113 113
114 114
115 115 def pytest_collection_modifyitems(session, config, items):
116 116 # skip items marked with __test__ = False (nose's "nottest"); used during the transition from nose to pytest
117 117 remaining = [
118 118 i for i in items if getattr(i.obj, '__test__', True)]
119 119 items[:] = remaining
120 120
121 121 # NOTE(marcink): custom test ordering; db tests and vcs tests are the slowest and should
122 122 # be executed at the end for faster test feedback
123 123 def sorter(item):
124 124 pos = 0
125 125 key = item._nodeid
126 126 if key.startswith('rhodecode/tests/database'):
127 127 pos = 1
128 128 elif key.startswith('rhodecode/tests/vcs_operations'):
129 129 pos = 2
130 130
131 131 return pos
132 132
133 133 items.sort(key=sorter)
134 134
135 135
136 136 def pytest_generate_tests(metafunc):
137 137
138 138 # Support test generation based on --backend parameter
139 139 if 'backend_alias' in metafunc.fixturenames:
140 140 backends = get_backends_from_metafunc(metafunc)
141 141 scope = None
142 142 if not backends:
143 143 pytest.skip("Not enabled for any of selected backends")
144 144
145 145 metafunc.parametrize('backend_alias', backends, scope=scope)
146 146
147 147 backend_mark = metafunc.definition.get_closest_marker('backends')
148 148 if backend_mark:
149 149 backends = get_backends_from_metafunc(metafunc)
150 150 if not backends:
151 151 pytest.skip("Not enabled for any of selected backends")
152 152
153 153
154 154 def get_backends_from_metafunc(metafunc):
155 155 requested_backends = set(metafunc.config.getoption('--backends'))
156 156 backend_mark = metafunc.definition.get_closest_marker('backends')
157 157 if backend_mark:
158 158 # Supported backends by this test function, created from
159 159 # pytest.mark.backends
160 160 backends = backend_mark.args
161 161 elif hasattr(metafunc.cls, 'backend_alias'):
162 162 # Support the class attribute "backend_alias"; this is kept mainly
163 163 # for legacy reasons, for tests not yet using pytest.mark.backends
164 164 backends = [metafunc.cls.backend_alias]
165 165 else:
166 166 backends = metafunc.config.getoption('--backends')
167 167 return requested_backends.intersection(backends)
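
# Example (illustrative only): a test can be restricted to specific backends
# with the `backends` marker consumed above; its arguments are intersected
# with the --backends option. The test name below is hypothetical.
#
#     @pytest.mark.backends("git", "hg")
#     def test_only_for_git_and_hg(backend):
#         assert backend.alias in ("git", "hg")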
168 168
169 169
170 170 @pytest.fixture(scope='session', autouse=True)
171 171 def activate_example_rcextensions(request):
172 172 """
173 173 Patch in an example rcextensions module which verifies the passed-in kwargs.
174 174 """
175 175 from rhodecode.config import rcextensions
176 176
177 177 old_extensions = rhodecode.EXTENSIONS
178 178 rhodecode.EXTENSIONS = rcextensions
179 179 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
180 180
181 181 @request.addfinalizer
182 182 def cleanup():
183 183 rhodecode.EXTENSIONS = old_extensions
184 184
185 185
186 186 @pytest.fixture()
187 187 def capture_rcextensions():
188 188 """
189 189 Returns the recorded calls to entry points in rcextensions.
190 190 """
191 191 calls = rhodecode.EXTENSIONS.calls
192 192 calls.clear()
193 193 # Note: at this point the dict is still empty, but it will be filled
194 194 # during the test run; since we return a reference, that is enough
195 195 # to make it work.
196 196 return calls
197 197
198 198
199 199 @pytest.fixture(scope='session')
200 200 def http_environ_session():
201 201 """
202 202 Allows using "http_environ" in session scope.
203 203 """
204 204 return plain_http_environ()
205 205
206 206
207 207 def plain_http_host_stub():
208 208 """
209 209 Value of HTTP_HOST in the test run.
210 210 """
211 211 return 'example.com:80'
212 212
213 213
214 214 @pytest.fixture()
215 215 def http_host_stub():
216 216 """
217 217 Value of HTTP_HOST in the test run.
218 218 """
219 219 return plain_http_host_stub()
220 220
221 221
222 222 def plain_http_host_only_stub():
223 223 """
224 224 Value of HTTP_HOST in the test run.
225 225 """
226 226 return plain_http_host_stub().split(':')[0]
227 227
228 228
229 229 @pytest.fixture()
230 230 def http_host_only_stub():
231 231 """
232 232 Value of HTTP_HOST in the test run.
233 233 """
234 234 return plain_http_host_only_stub()
235 235
236 236
237 237 def plain_http_environ():
238 238 """
239 239 HTTP extra environ keys.
240 240
241 241 Used by the test application as well as for setting up the pylons
242 242 environment. In the case of the fixture "app" it should be possible
243 243 to override this for a specific test case.
244 244 """
245 245 return {
246 246 'SERVER_NAME': plain_http_host_only_stub(),
247 247 'SERVER_PORT': plain_http_host_stub().split(':')[1],
248 248 'HTTP_HOST': plain_http_host_stub(),
249 249 'HTTP_USER_AGENT': 'rc-test-agent',
250 250 'REQUEST_METHOD': 'GET'
251 251 }
252 252
253 253
254 254 @pytest.fixture()
255 255 def http_environ():
256 256 """
257 257 HTTP extra environ keys.
258 258
259 259 Used by the test application as well as for setting up the pylons
260 260 environment. In the case of the fixture "app" it should be possible
261 261 to override this for a specific test case.
262 262 """
263 263 return plain_http_environ()
264 264
265 265
266 266 @pytest.fixture(scope='session')
267 267 def baseapp(ini_config, vcsserver, http_environ_session):
268 268 from rhodecode.lib.pyramid_utils import get_app_config
269 269 from rhodecode.config.middleware import make_pyramid_app
270 270
271 271 print("Using the RhodeCode configuration: {}".format(ini_config))
272 272 pyramid.paster.setup_logging(ini_config)
273 273
274 274 settings = get_app_config(ini_config)
275 275 app = make_pyramid_app({'__file__': ini_config}, **settings)
276 276
277 277 return app
278 278
279 279
280 280 @pytest.fixture(scope='function')
281 281 def app(request, config_stub, baseapp, http_environ):
282 282 app = CustomTestApp(
283 283 baseapp,
284 284 extra_environ=http_environ)
285 285 if request.cls:
286 286 request.cls.app = app
287 287 return app
288 288
289 289
290 290 @pytest.fixture(scope='session')
291 291 def app_settings(baseapp, ini_config):
292 292 """
293 293 Settings dictionary used to create the app.
294 294
295 295 Parses the ini file and passes the result through the sanitize and apply
296 296 defaults mechanism in `rhodecode.config.middleware`.
297 297 """
298 298 return baseapp.config.get_settings()
299 299
300 300
301 301 @pytest.fixture(scope='session')
302 302 def db_connection(ini_settings):
303 303 # Initialize the database connection.
304 304 config_utils.initialize_database(ini_settings)
305 305
306 306
307 307 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
308 308
309 309
310 310 def _autologin_user(app, *args):
311 311 session = login_user_session(app, *args)
312 312 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
313 313 return LoginData(csrf_token, session['rhodecode_user'])
314 314
315 315
316 316 @pytest.fixture()
317 317 def autologin_user(app):
318 318 """
319 319 Utility fixture which makes sure that the admin user is logged in
320 320 """
321 321 return _autologin_user(app)
322 322
323 323
324 324 @pytest.fixture()
325 325 def autologin_regular_user(app):
326 326 """
327 327 Utility fixture which makes sure that the regular user is logged in
328 328 """
329 329 return _autologin_user(
330 330 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
331 331
332 332
333 333 @pytest.fixture(scope='function')
334 334 def csrf_token(request, autologin_user):
335 335 return autologin_user.csrf_token
336 336
337 337
338 338 @pytest.fixture(scope='function')
339 339 def xhr_header(request):
340 340 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
341 341
342 342
343 343 @pytest.fixture()
344 344 def real_crypto_backend(monkeypatch):
345 345 """
346 346 Switch the production crypto backend on for this test.
347 347
348 348 During the test run the crypto backend is replaced with a faster
349 349 implementation based on the MD5 algorithm.
350 350 """
351 351 monkeypatch.setattr(rhodecode, 'is_test', False)
352 352
353 353
354 354 @pytest.fixture(scope='class')
355 355 def index_location(request, baseapp):
356 356 index_location = baseapp.config.get_settings()['search.location']
357 357 if request.cls:
358 358 request.cls.index_location = index_location
359 359 return index_location
360 360
361 361
362 362 @pytest.fixture(scope='session', autouse=True)
363 363 def tests_tmp_path(request):
364 364 """
365 365 Create temporary directory to be used during the test session.
366 366 """
367 367 if not os.path.exists(TESTS_TMP_PATH):
368 368 os.makedirs(TESTS_TMP_PATH)
369 369
370 370 if not request.config.getoption('--keep-tmp-path'):
371 371 @request.addfinalizer
372 372 def remove_tmp_path():
373 373 shutil.rmtree(TESTS_TMP_PATH)
374 374
375 375 return TESTS_TMP_PATH
376 376
377 377
378 378 @pytest.fixture()
379 379 def test_repo_group(request):
380 380 """
381 381 Create a temporary repository group and destroy it automatically
382 382 after usage.
383 383 """
384 384 fixture = Fixture()
385 385 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
386 386 repo_group = fixture.create_repo_group(repogroupid)
387 387
388 388 def _cleanup():
389 389 fixture.destroy_repo_group(repogroupid)
390 390
391 391 request.addfinalizer(_cleanup)
392 392 return repo_group
393 393
394 394
395 395 @pytest.fixture()
396 396 def test_user_group(request):
397 397 """
398 398 Create a temporary user group and destroy it automatically
399 399 after usage.
400 400 """
401 401 fixture = Fixture()
402 402 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
403 403 user_group = fixture.create_user_group(usergroupid)
404 404
405 405 def _cleanup():
406 406 fixture.destroy_user_group(user_group)
407 407
408 408 request.addfinalizer(_cleanup)
409 409 return user_group
410 410
411 411
412 412 @pytest.fixture(scope='session')
413 413 def test_repo(request):
414 414 container = TestRepoContainer()
415 415 request.addfinalizer(container._cleanup)
416 416 return container
417 417
418 418
419 419 class TestRepoContainer(object):
420 420 """
421 421 Container for test repositories which are used read only.
422 422
423 423 Repositories will be created on demand and re-used during the lifetime
424 424 of this object.
425 425
426 426 Usage to get the svn test repository "minimal"::
427 427
428 428 test_repo = TestRepoContainer()
429 429 repo = test_repo('minimal', 'svn')
430 430
431 431 """
432 432
433 433 dump_extractors = {
434 434 'git': utils.extract_git_repo_from_dump,
435 435 'hg': utils.extract_hg_repo_from_dump,
436 436 'svn': utils.extract_svn_repo_from_dump,
437 437 }
438 438
439 439 def __init__(self):
440 440 self._cleanup_repos = []
441 441 self._fixture = Fixture()
442 442 self._repos = {}
443 443
444 444 def __call__(self, dump_name, backend_alias, config=None):
445 445 key = (dump_name, backend_alias)
446 446 if key not in self._repos:
447 447 repo = self._create_repo(dump_name, backend_alias, config)
448 448 self._repos[key] = repo.repo_id
449 449 return Repository.get(self._repos[key])
450 450
451 451 def _create_repo(self, dump_name, backend_alias, config):
452 452 repo_name = '%s-%s' % (backend_alias, dump_name)
453 453 backend = get_backend(backend_alias)
454 454 dump_extractor = self.dump_extractors[backend_alias]
455 455 repo_path = dump_extractor(dump_name, repo_name)
456 456
457 457 vcs_repo = backend(repo_path, config=config)
458 458 repo2db_mapper({repo_name: vcs_repo})
459 459
460 460 repo = RepoModel().get_by_repo_name(repo_name)
461 461 self._cleanup_repos.append(repo_name)
462 462 return repo
463 463
464 464 def _cleanup(self):
465 465 for repo_name in reversed(self._cleanup_repos):
466 466 self._fixture.destroy_repo(repo_name)
467 467
468 468
469 469 def backend_base(request, backend_alias, baseapp, test_repo):
470 470 if backend_alias not in request.config.getoption('--backends'):
471 471 pytest.skip("Backend %s not selected." % (backend_alias, ))
472 472
473 473 utils.check_xfail_backends(request.node, backend_alias)
474 474 utils.check_skip_backends(request.node, backend_alias)
475 475
476 476 repo_name = 'vcs_test_%s' % (backend_alias, )
477 477 backend = Backend(
478 478 alias=backend_alias,
479 479 repo_name=repo_name,
480 480 test_name=request.node.name,
481 481 test_repo_container=test_repo)
482 482 request.addfinalizer(backend.cleanup)
483 483 return backend
484 484
485 485
486 486 @pytest.fixture()
487 487 def backend(request, backend_alias, baseapp, test_repo):
488 488 """
489 489 Parametrized fixture which represents a single backend implementation.
490 490
491 491 It respects the option `--backends` to focus the test run on specific
492 492 backend implementations.
493 493
494 494 It also supports `pytest.mark.xfail_backends` to mark tests as failing
495 495 for specific backends. This is intended as a utility for incremental
496 496 development of a new backend implementation.
497 497 """
498 498 return backend_base(request, backend_alias, baseapp, test_repo)
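
# Example (illustrative only): a sketch of how the parametrized `backend`
# fixture is typically consumed in a test; the test name and assertions are
# hypothetical.
#
#     def test_new_repo_has_two_commits(backend):
#         repo = backend.create_repo(number_of_commits=2)
#         assert repo.repo_name == backend.repo_name
#         assert len(backend.commit_ids) == 2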
499 499
500 500
501 501 @pytest.fixture()
502 502 def backend_git(request, baseapp, test_repo):
503 503 return backend_base(request, 'git', baseapp, test_repo)
504 504
505 505
506 506 @pytest.fixture()
507 507 def backend_hg(request, baseapp, test_repo):
508 508 return backend_base(request, 'hg', baseapp, test_repo)
509 509
510 510
511 511 @pytest.fixture()
512 512 def backend_svn(request, baseapp, test_repo):
513 513 return backend_base(request, 'svn', baseapp, test_repo)
514 514
515 515
516 516 @pytest.fixture()
517 517 def backend_random(backend_git):
518 518 """
519 519 Use this to express that your tests need "a backend".
520 520
521 521 A few of our tests need a backend, so that we can run the code. This
522 522 fixture is intended to be used for such cases. It will pick one of the
523 523 backends and run the tests.
524 524
525 525 The fixture `backend` would run the test multiple times for each
526 526 available backend which is a pure waste of time if the test is
527 527 independent of the backend type.
528 528 """
529 529 # TODO: johbo: Change this to pick a random backend
530 530 return backend_git
531 531
532 532
533 533 @pytest.fixture()
534 534 def backend_stub(backend_git):
535 535 """
536 536 Use this to express that your tests need a backend stub
537 537
538 538 TODO: mikhail: Implement real stub logic instead of returning
539 539 a git backend
540 540 """
541 541 return backend_git
542 542
543 543
544 544 @pytest.fixture()
545 545 def repo_stub(backend_stub):
546 546 """
547 547 Use this to express that your tests need a repository stub
548 548 """
549 549 return backend_stub.create_repo()
550 550
551 551
552 552 class Backend(object):
553 553 """
554 554 Represents the test configuration for one supported backend
555 555
556 556 Provides easy access to different test repositories based on
557 557 `__getitem__`. Such repositories will only be created once per test
558 558 session.
559 559 """
560 560
561 561 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
562 562 _master_repo = None
563 563 _commit_ids = {}
564 564
565 565 def __init__(self, alias, repo_name, test_name, test_repo_container):
566 566 self.alias = alias
567 567 self.repo_name = repo_name
568 568 self._cleanup_repos = []
569 569 self._test_name = test_name
570 570 self._test_repo_container = test_repo_container
571 571 # TODO: johbo: Used as an interim delegate. Not yet sure if Backend or
572 572 # Fixture will survive in the end.
573 573 self._fixture = Fixture()
574 574
575 575 def __getitem__(self, key):
576 576 return self._test_repo_container(key, self.alias)
577 577
578 578 def create_test_repo(self, key, config=None):
579 579 return self._test_repo_container(key, self.alias, config)
580 580
581 581 @property
582 582 def repo(self):
583 583 """
584 584 Returns the "current" repository. This is the vcs_test repo or the
585 585 last repo which has been created with `create_repo`.
586 586 """
587 587 from rhodecode.model.db import Repository
588 588 return Repository.get_by_repo_name(self.repo_name)
589 589
590 590 @property
591 591 def default_branch_name(self):
592 592 VcsRepository = get_backend(self.alias)
593 593 return VcsRepository.DEFAULT_BRANCH_NAME
594 594
595 595 @property
596 596 def default_head_id(self):
597 597 """
598 598 Returns the default head id of the underlying backend.
599 599
600 600 This will be the default branch name in case the backend does have a
601 601 default branch. In the other cases it will point to a valid head
602 602 which can serve as the base to create a new commit on top of it.
603 603 """
604 604 vcsrepo = self.repo.scm_instance()
605 605 head_id = (
606 606 vcsrepo.DEFAULT_BRANCH_NAME or
607 607 vcsrepo.commit_ids[-1])
608 608 return head_id
609 609
610 610 @property
611 611 def commit_ids(self):
612 612 """
613 613 Returns the list of commits for the last created repository
614 614 """
615 615 return self._commit_ids
616 616
617 617 def create_master_repo(self, commits):
618 618 """
619 619 Create a repository and remember it as a template.
620 620
621 621 This makes it easy to create derived repositories to construct
622 622 more complex scenarios for diff, compare and pull requests.
623 623
624 624 Returns a commit map which maps from commit message to raw_id.
625 625 """
626 626 self._master_repo = self.create_repo(commits=commits)
627 627 return self._commit_ids
628 628
629 629 def create_repo(
630 630 self, commits=None, number_of_commits=0, heads=None,
631 631 name_suffix=u'', bare=False, **kwargs):
632 632 """
633 633 Create a repository and record it for later cleanup.
634 634
635 635 :param commits: Optional. A sequence of dict instances.
636 636 Will add a commit per entry to the new repository.
637 637 :param number_of_commits: Optional. If set to a number, this number of
638 638 commits will be added to the new repository.
639 639 :param heads: Optional. Can be set to a sequence of commit
640 640 names which shall be pulled in from the master repository.
641 641 :param name_suffix: adds special suffix to generated repo name
642 642 :param bare: set a repo as bare (no checkout)
643 643 """
644 644 self.repo_name = self._next_repo_name() + name_suffix
645 645 repo = self._fixture.create_repo(
646 646 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
647 647 self._cleanup_repos.append(repo.repo_name)
648 648
649 649 commits = commits or [
650 650 {'message': 'Commit %s of %s' % (x, self.repo_name)}
651 651 for x in range(number_of_commits)]
652 652 vcs_repo = repo.scm_instance()
653 653 vcs_repo.count()
654 654 self._add_commits_to_repo(vcs_repo, commits)
655 655 if heads:
656 656 self.pull_heads(repo, heads)
657 657
658 658 return repo
659 659
660 660 def pull_heads(self, repo, heads):
661 661 """
662 662 Make sure that repo contains all commits mentioned in `heads`
663 663 """
664 664 vcsmaster = self._master_repo.scm_instance()
665 665 vcsrepo = repo.scm_instance()
666 666 vcsrepo.config.clear_section('hooks')
667 667 commit_ids = [self._commit_ids[h] for h in heads]
668 668 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
669 669
670 670 def create_fork(self):
671 671 repo_to_fork = self.repo_name
672 672 self.repo_name = self._next_repo_name()
673 673 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
674 674 self._cleanup_repos.append(self.repo_name)
675 675 return repo
676 676
677 677 def new_repo_name(self, suffix=u''):
678 678 self.repo_name = self._next_repo_name() + suffix
679 679 self._cleanup_repos.append(self.repo_name)
680 680 return self.repo_name
681 681
682 682 def _next_repo_name(self):
683 683 return u"%s_%s" % (
684 684 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
685 685
686 686 def ensure_file(self, filename, content='Test content\n'):
687 687 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
688 688 commits = [
689 689 {'added': [
690 690 FileNode(filename, content=content),
691 691 ]},
692 692 ]
693 693 self._add_commits_to_repo(self.repo.scm_instance(), commits)
694 694
695 695 def enable_downloads(self):
696 696 repo = self.repo
697 697 repo.enable_downloads = True
698 698 Session().add(repo)
699 699 Session().commit()
700 700
701 701 def cleanup(self):
702 702 for repo_name in reversed(self._cleanup_repos):
703 703 self._fixture.destroy_repo(repo_name)
704 704
705 705 def _add_commits_to_repo(self, repo, commits):
706 706 commit_ids = _add_commits_to_repo(repo, commits)
707 707 if not commit_ids:
708 708 return
709 709 self._commit_ids = commit_ids
710 710
711 711 # Creating refs for Git to allow fetching them from remote repository
712 712 if self.alias == 'git':
713 713 refs = {}
714 714 for message in self._commit_ids:
715 715 # TODO: mikhail: do more special chars replacements
716 716 ref_name = 'refs/test-refs/{}'.format(
717 717 message.replace(' ', ''))
718 718 refs[ref_name] = self._commit_ids[message]
719 719 self._create_refs(repo, refs)
720 720
721 721 def _create_refs(self, repo, refs):
722 722 for ref_name in refs:
723 723 repo.set_refs(ref_name, refs[ref_name])
724 724
725 725
726 726 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
727 727 if backend_alias not in request.config.getoption('--backends'):
728 728 pytest.skip("Backend %s not selected." % (backend_alias, ))
729 729
730 730 utils.check_xfail_backends(request.node, backend_alias)
731 731 utils.check_skip_backends(request.node, backend_alias)
732 732
733 733 repo_name = 'vcs_test_%s' % (backend_alias, )
734 734 repo_path = os.path.join(tests_tmp_path, repo_name)
735 735 backend = VcsBackend(
736 736 alias=backend_alias,
737 737 repo_path=repo_path,
738 738 test_name=request.node.name,
739 739 test_repo_container=test_repo)
740 740 request.addfinalizer(backend.cleanup)
741 741 return backend
742 742
743 743
744 744 @pytest.fixture()
745 745 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
746 746 """
747 747 Parametrized fixture which represents a single vcs backend implementation.
748 748
749 749 See the fixture `backend` for more details. This one implements the same
750 750 concept, but on vcs level. So it does not provide model instances etc.
751 751
752 752 Parameters are generated dynamically, see :func:`pytest_generate_tests`
753 753 for how this works.
754 754 """
755 755 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
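
# Example (illustrative only): a sketch of a test working on the plain vcs
# level via `vcsbackend`; no model instances are involved. Names are
# hypothetical.
#
#     def test_vcs_repo_is_created(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=1)
#         assert repo.path == vcsbackend.repo.path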
756 756
757 757
758 758 @pytest.fixture()
759 759 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
760 760 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
761 761
762 762
763 763 @pytest.fixture()
764 764 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
765 765 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
766 766
767 767
768 768 @pytest.fixture()
769 769 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
770 770 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
771 771
772 772
773 773 @pytest.fixture()
774 774 def vcsbackend_stub(vcsbackend_git):
775 775 """
776 776 Use this to express that your test just needs a stub of a vcsbackend.
777 777
778 778 Plan is to eventually implement an in-memory stub to speed tests up.
779 779 """
780 780 return vcsbackend_git
781 781
782 782
783 783 class VcsBackend(object):
784 784 """
785 785 Represents the test configuration for one supported vcs backend.
786 786 """
787 787
788 788 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
789 789
790 790 def __init__(self, alias, repo_path, test_name, test_repo_container):
791 791 self.alias = alias
792 792 self._repo_path = repo_path
793 793 self._cleanup_repos = []
794 794 self._test_name = test_name
795 795 self._test_repo_container = test_repo_container
796 796
797 797 def __getitem__(self, key):
798 798 return self._test_repo_container(key, self.alias).scm_instance()
799 799
800 800 @property
801 801 def repo(self):
802 802 """
803 803 Returns the "current" repository. This is the vcs_test repo or the last
804 804 repo which has been created.
805 805 """
806 806 Repository = get_backend(self.alias)
807 807 return Repository(self._repo_path)
808 808
809 809 @property
810 810 def backend(self):
811 811 """
812 812 Returns the backend implementation class.
813 813 """
814 814 return get_backend(self.alias)
815 815
816 816 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
817 817 bare=False):
818 818 repo_name = self._next_repo_name()
819 819 self._repo_path = get_new_dir(repo_name)
820 820 repo_class = get_backend(self.alias)
821 821 src_url = None
822 822 if _clone_repo:
823 823 src_url = _clone_repo.path
824 824 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
825 825 self._cleanup_repos.append(repo)
826 826
827 827 commits = commits or [
828 828 {'message': 'Commit %s of %s' % (x, repo_name)}
829 829 for x in xrange(number_of_commits)]
830 830 _add_commits_to_repo(repo, commits)
831 831 return repo
832 832
833 833 def clone_repo(self, repo):
834 834 return self.create_repo(_clone_repo=repo)
835 835
836 836 def cleanup(self):
837 837 for repo in self._cleanup_repos:
838 838 shutil.rmtree(repo.path)
839 839
840 840 def new_repo_path(self):
841 841 repo_name = self._next_repo_name()
842 842 self._repo_path = get_new_dir(repo_name)
843 843 return self._repo_path
844 844
845 845 def _next_repo_name(self):
846 846 return "%s_%s" % (
847 847 self.invalid_repo_name.sub('_', self._test_name),
848 848 len(self._cleanup_repos))
849 849
850 850 def add_file(self, repo, filename, content='Test content\n'):
851 851 imc = repo.in_memory_commit
852 852 imc.add(FileNode(filename, content=content))
853 853 imc.commit(
854 854 message=u'Automatic commit from vcsbackend fixture',
855 855 author=u'Automatic <automatic@rhodecode.com>')
856 856
857 857 def ensure_file(self, filename, content='Test content\n'):
858 858 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
859 859 self.add_file(self.repo, filename, content)
860 860
861 861
862 862 def _add_commits_to_repo(vcs_repo, commits):
863 863 commit_ids = {}
864 864 if not commits:
865 865 return commit_ids
866 866
867 867 imc = vcs_repo.in_memory_commit
868 868 commit = None
869 869
870 870 for idx, commit in enumerate(commits):
871 871 message = unicode(commit.get('message', 'Commit %s' % idx))
872 872
873 873 for node in commit.get('added', []):
874 874 imc.add(FileNode(node.path, content=node.content))
875 875 for node in commit.get('changed', []):
876 876 imc.change(FileNode(node.path, content=node.content))
877 877 for node in commit.get('removed', []):
878 878 imc.remove(FileNode(node.path))
879 879
880 880 parents = [
881 881 vcs_repo.get_commit(commit_id=commit_ids[p])
882 882 for p in commit.get('parents', [])]
883 883
884 884 operations = ('added', 'changed', 'removed')
885 885 if not any((commit.get(o) for o in operations)):
886 886 imc.add(FileNode('file_%s' % idx, content=message))
887 887
888 888 commit = imc.commit(
889 889 message=message,
890 890 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
891 891 date=commit.get('date'),
892 892 branch=commit.get('branch'),
893 893 parents=parents)
894 894
895 895 commit_ids[commit.message] = commit.raw_id
896 896
897 897 return commit_ids
898 898
899 899
900 900 @pytest.fixture()
901 901 def reposerver(request):
902 902 """
903 903 Allows serving a backend repository.
904 904 """
905 905
906 906 repo_server = RepoServer()
907 907 request.addfinalizer(repo_server.cleanup)
908 908 return repo_server
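
# Example (illustrative only): a sketch of serving a Subversion repository
# for the duration of a test; the test name is hypothetical.
#
#     def test_serve_svn_repo(reposerver, vcsbackend_svn):
#         repo = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(repo)
#         assert reposerver.url == 'svn://localhost'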
909 909
910 910
911 911 class RepoServer(object):
912 912 """
913 913 Utility to serve a local repository for the duration of a test case.
914 914
915 915 Supports only Subversion so far.
916 916 """
917 917
918 918 url = None
919 919
920 920 def __init__(self):
921 921 self._cleanup_servers = []
922 922
923 923 def serve(self, vcsrepo):
924 924 if vcsrepo.alias != 'svn':
925 925 raise TypeError("Backend %s not supported" % vcsrepo.alias)
926 926
927 927 proc = subprocess32.Popen(
928 928 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
929 929 '--root', vcsrepo.path])
930 930 self._cleanup_servers.append(proc)
931 931 self.url = 'svn://localhost'
932 932
933 933 def cleanup(self):
934 934 for proc in self._cleanup_servers:
935 935 proc.terminate()
936 936
937 937
938 938 @pytest.fixture()
939 939 def pr_util(backend, request, config_stub):
940 940 """
941 941 Utility for tests of models and for functional tests around pull requests.
942 942
943 943 It gives an instance of :class:`PRTestUtility` which provides various
944 944 utility methods around one pull request.
945 945
946 946 This fixture uses `backend` and inherits its parameterization.
947 947 """
948 948
949 949 util = PRTestUtility(backend)
950 950 request.addfinalizer(util.cleanup)
951 951
952 952 return util
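
# Example (illustrative only): a sketch of a typical `pr_util` flow; the
# method names come from PRTestUtility below, the test name is hypothetical.
#
#     def test_pull_request_gets_new_commit(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True)
#         new_commit_id = pr_util.add_one_commit()
#         assert new_commit_id in pull_request.revisions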
953 953
954 954
955 955 class PRTestUtility(object):
956 956
957 957 pull_request = None
958 958 pull_request_id = None
959 959 mergeable_patcher = None
960 960 mergeable_mock = None
961 961 notification_patcher = None
962 962
963 963 def __init__(self, backend):
964 964 self.backend = backend
965 965
966 966 def create_pull_request(
967 967 self, commits=None, target_head=None, source_head=None,
968 968 revisions=None, approved=False, author=None, mergeable=False,
969 969 enable_notifications=True, name_suffix=u'', reviewers=None,
970 970 title=u"Test", description=u"Description"):
971 971 self.set_mergeable(mergeable)
972 972 if not enable_notifications:
973 973 # mock notification side effect
974 974 self.notification_patcher = mock.patch(
975 975 'rhodecode.model.notification.NotificationModel.create')
976 976 self.notification_patcher.start()
977 977
978 978 if not self.pull_request:
979 979 if not commits:
980 980 commits = [
981 981 {'message': 'c1'},
982 982 {'message': 'c2'},
983 983 {'message': 'c3'},
984 984 ]
985 985 target_head = 'c1'
986 986 source_head = 'c2'
987 987 revisions = ['c2']
988 988
989 989 self.commit_ids = self.backend.create_master_repo(commits)
990 990 self.target_repository = self.backend.create_repo(
991 991 heads=[target_head], name_suffix=name_suffix)
992 992 self.source_repository = self.backend.create_repo(
993 993 heads=[source_head], name_suffix=name_suffix)
994 994 self.author = author or UserModel().get_by_username(
995 995 TEST_USER_ADMIN_LOGIN)
996 996
997 997 model = PullRequestModel()
998 998 self.create_parameters = {
999 999 'created_by': self.author,
1000 1000 'source_repo': self.source_repository.repo_name,
1001 1001 'source_ref': self._default_branch_reference(source_head),
1002 1002 'target_repo': self.target_repository.repo_name,
1003 1003 'target_ref': self._default_branch_reference(target_head),
1004 1004 'revisions': [self.commit_ids[r] for r in revisions],
1005 1005 'reviewers': reviewers or self._get_reviewers(),
1006 1006 'title': title,
1007 1007 'description': description,
1008 1008 }
1009 1009 self.pull_request = model.create(**self.create_parameters)
1010 1010 assert model.get_versions(self.pull_request) == []
1011 1011
1012 1012 self.pull_request_id = self.pull_request.pull_request_id
1013 1013
1014 1014 if approved:
1015 1015 self.approve()
1016 1016
1017 1017 Session().add(self.pull_request)
1018 1018 Session().commit()
1019 1019
1020 1020 return self.pull_request
1021 1021
1022 1022 def approve(self):
1023 1023 self.create_status_votes(
1024 1024 ChangesetStatus.STATUS_APPROVED,
1025 1025 *self.pull_request.reviewers)
1026 1026
1027 1027 def close(self):
1028 1028 PullRequestModel().close_pull_request(self.pull_request, self.author)
1029 1029
1030 1030 def _default_branch_reference(self, commit_message):
1031 1031 reference = '%s:%s:%s' % (
1032 1032 'branch',
1033 1033 self.backend.default_branch_name,
1034 1034 self.commit_ids[commit_message])
1035 1035 return reference
1036 1036
1037 1037 def _get_reviewers(self):
1038 1038 return [
1039 1039 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1040 1040 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1041 1041 ]
1042 1042
1043 1043 def update_source_repository(self, head=None):
1044 1044 heads = [head or 'c3']
1045 1045 self.backend.pull_heads(self.source_repository, heads=heads)
1046 1046
1047 1047 def add_one_commit(self, head=None):
1048 1048 self.update_source_repository(head=head)
1049 1049 old_commit_ids = set(self.pull_request.revisions)
1050 PullRequestModel().update_commits(self.pull_request)
1050 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1051 1051 commit_ids = set(self.pull_request.revisions)
1052 1052 new_commit_ids = commit_ids - old_commit_ids
1053 1053 assert len(new_commit_ids) == 1
1054 1054 return new_commit_ids.pop()
1055 1055
1056 1056 def remove_one_commit(self):
1057 1057 assert len(self.pull_request.revisions) == 2
1058 1058 source_vcs = self.source_repository.scm_instance()
1059 1059 removed_commit_id = source_vcs.commit_ids[-1]
1060 1060
1061 1061 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1062 1062 # remove the if once that's sorted out.
1063 1063 if self.backend.alias == "git":
1064 1064 kwargs = {'branch_name': self.backend.default_branch_name}
1065 1065 else:
1066 1066 kwargs = {}
1067 1067 source_vcs.strip(removed_commit_id, **kwargs)
1068 1068
1069 PullRequestModel().update_commits(self.pull_request)
1069 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1070 1070 assert len(self.pull_request.revisions) == 1
1071 1071 return removed_commit_id
1072 1072
1073 1073 def create_comment(self, linked_to=None):
1074 1074 comment = CommentsModel().create(
1075 1075 text=u"Test comment",
1076 1076 repo=self.target_repository.repo_name,
1077 1077 user=self.author,
1078 1078 pull_request=self.pull_request)
1079 1079 assert comment.pull_request_version_id is None
1080 1080
1081 1081 if linked_to:
1082 1082 PullRequestModel()._link_comments_to_version(linked_to)
1083 1083
1084 1084 return comment
1085 1085
1086 1086 def create_inline_comment(
1087 1087 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1088 1088 comment = CommentsModel().create(
1089 1089 text=u"Test comment",
1090 1090 repo=self.target_repository.repo_name,
1091 1091 user=self.author,
1092 1092 line_no=line_no,
1093 1093 f_path=file_path,
1094 1094 pull_request=self.pull_request)
1095 1095 assert comment.pull_request_version_id is None
1096 1096
1097 1097 if linked_to:
1098 1098 PullRequestModel()._link_comments_to_version(linked_to)
1099 1099
1100 1100 return comment
1101 1101
1102 1102 def create_version_of_pull_request(self):
1103 1103 pull_request = self.create_pull_request()
1104 1104 version = PullRequestModel()._create_version_from_snapshot(
1105 1105 pull_request)
1106 1106 return version
1107 1107
1108 1108 def create_status_votes(self, status, *reviewers):
1109 1109 for reviewer in reviewers:
1110 1110 ChangesetStatusModel().set_status(
1111 1111 repo=self.pull_request.target_repo,
1112 1112 status=status,
1113 1113 user=reviewer.user_id,
1114 1114 pull_request=self.pull_request)
1115 1115
1116 1116 def set_mergeable(self, value):
1117 1117 if not self.mergeable_patcher:
1118 1118 self.mergeable_patcher = mock.patch.object(
1119 1119 VcsSettingsModel, 'get_general_settings')
1120 1120 self.mergeable_mock = self.mergeable_patcher.start()
1121 1121 self.mergeable_mock.return_value = {
1122 1122 'rhodecode_pr_merge_enabled': value}
1123 1123
1124 1124 def cleanup(self):
1125 1125 # In case the source repository is already cleaned up, the pull
1126 1126 # request will already be deleted.
1127 1127 pull_request = PullRequest().get(self.pull_request_id)
1128 1128 if pull_request:
1129 1129 PullRequestModel().delete(pull_request, pull_request.author)
1130 1130 Session().commit()
1131 1131
1132 1132 if self.notification_patcher:
1133 1133 self.notification_patcher.stop()
1134 1134
1135 1135 if self.mergeable_patcher:
1136 1136 self.mergeable_patcher.stop()
1137 1137
1138 1138
1139 1139 @pytest.fixture()
1140 1140 def user_admin(baseapp):
1141 1141 """
1142 1142 Provides the default admin test user as an instance of `db.User`.
1143 1143 """
1144 1144 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1145 1145 return user
1146 1146
1147 1147
1148 1148 @pytest.fixture()
1149 1149 def user_regular(baseapp):
1150 1150 """
1151 1151 Provides the default regular test user as an instance of `db.User`.
1152 1152 """
1153 1153 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1154 1154 return user
1155 1155
1156 1156
1157 1157 @pytest.fixture()
1158 1158 def user_util(request, db_connection):
1159 1159 """
1160 1160 Provides a wired instance of `UserUtility` with integrated cleanup.
1161 1161 """
1162 1162 utility = UserUtility(test_name=request.node.name)
1163 1163 request.addfinalizer(utility.cleanup)
1164 1164 return utility
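
# Example (illustrative only): a sketch of creating throw-away objects with
# `user_util`; everything is cleaned up by the finalizer above. The test name
# and permission are just an illustration.
#
#     def test_user_can_read_repo(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo()
#         user_util.grant_user_permission_to_repo(repo, user, 'repository.read')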
1165 1165
1166 1166
1167 1167 # TODO: johbo: Split this up into utilities per domain or something similar
1168 1168 class UserUtility(object):
1169 1169
1170 1170 def __init__(self, test_name="test"):
1171 1171 self._test_name = self._sanitize_name(test_name)
1172 1172 self.fixture = Fixture()
1173 1173 self.repo_group_ids = []
1174 1174 self.repos_ids = []
1175 1175 self.user_ids = []
1176 1176 self.user_group_ids = []
1177 1177 self.user_repo_permission_ids = []
1178 1178 self.user_group_repo_permission_ids = []
1179 1179 self.user_repo_group_permission_ids = []
1180 1180 self.user_group_repo_group_permission_ids = []
1181 1181 self.user_user_group_permission_ids = []
1182 1182 self.user_group_user_group_permission_ids = []
1183 1183 self.user_permissions = []
1184 1184
1185 1185 def _sanitize_name(self, name):
1186 1186 for char in ['[', ']']:
1187 1187 name = name.replace(char, '_')
1188 1188 return name
1189 1189
1190 1190 def create_repo_group(
1191 1191 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1192 1192 group_name = "{prefix}_repogroup_{count}".format(
1193 1193 prefix=self._test_name,
1194 1194 count=len(self.repo_group_ids))
1195 1195 repo_group = self.fixture.create_repo_group(
1196 1196 group_name, cur_user=owner)
1197 1197 if auto_cleanup:
1198 1198 self.repo_group_ids.append(repo_group.group_id)
1199 1199 return repo_group
1200 1200
1201 1201 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1202 1202 auto_cleanup=True, repo_type='hg', bare=False):
1203 1203 repo_name = "{prefix}_repository_{count}".format(
1204 1204 prefix=self._test_name,
1205 1205 count=len(self.repos_ids))
1206 1206
1207 1207 repository = self.fixture.create_repo(
1208 1208 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1209 1209 if auto_cleanup:
1210 1210 self.repos_ids.append(repository.repo_id)
1211 1211 return repository
1212 1212
1213 1213 def create_user(self, auto_cleanup=True, **kwargs):
1214 1214 user_name = "{prefix}_user_{count}".format(
1215 1215 prefix=self._test_name,
1216 1216 count=len(self.user_ids))
1217 1217 user = self.fixture.create_user(user_name, **kwargs)
1218 1218 if auto_cleanup:
1219 1219 self.user_ids.append(user.user_id)
1220 1220 return user
1221 1221
1222 1222 def create_additional_user_email(self, user, email):
1223 1223 uem = self.fixture.create_additional_user_email(user=user, email=email)
1224 1224 return uem
1225 1225
1226 1226 def create_user_with_group(self):
1227 1227 user = self.create_user()
1228 1228 user_group = self.create_user_group(members=[user])
1229 1229 return user, user_group
1230 1230
1231 1231 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1232 1232 auto_cleanup=True, **kwargs):
1233 1233 group_name = "{prefix}_usergroup_{count}".format(
1234 1234 prefix=self._test_name,
1235 1235 count=len(self.user_group_ids))
1236 1236 user_group = self.fixture.create_user_group(
1237 1237 group_name, cur_user=owner, **kwargs)
1238 1238
1239 1239 if auto_cleanup:
1240 1240 self.user_group_ids.append(user_group.users_group_id)
1241 1241 if members:
1242 1242 for user in members:
1243 1243 UserGroupModel().add_user_to_group(user_group, user)
1244 1244 return user_group
1245 1245
1246 1246 def grant_user_permission(self, user_name, permission_name):
1247 1247 self.inherit_default_user_permissions(user_name, False)
1248 1248 self.user_permissions.append((user_name, permission_name))
1249 1249
1250 1250 def grant_user_permission_to_repo_group(
1251 1251 self, repo_group, user, permission_name):
1252 1252 permission = RepoGroupModel().grant_user_permission(
1253 1253 repo_group, user, permission_name)
1254 1254 self.user_repo_group_permission_ids.append(
1255 1255 (repo_group.group_id, user.user_id))
1256 1256 return permission
1257 1257
1258 1258 def grant_user_group_permission_to_repo_group(
1259 1259 self, repo_group, user_group, permission_name):
1260 1260 permission = RepoGroupModel().grant_user_group_permission(
1261 1261 repo_group, user_group, permission_name)
1262 1262 self.user_group_repo_group_permission_ids.append(
1263 1263 (repo_group.group_id, user_group.users_group_id))
1264 1264 return permission
1265 1265
1266 1266 def grant_user_permission_to_repo(
1267 1267 self, repo, user, permission_name):
1268 1268 permission = RepoModel().grant_user_permission(
1269 1269 repo, user, permission_name)
1270 1270 self.user_repo_permission_ids.append(
1271 1271 (repo.repo_id, user.user_id))
1272 1272 return permission
1273 1273
1274 1274 def grant_user_group_permission_to_repo(
1275 1275 self, repo, user_group, permission_name):
1276 1276 permission = RepoModel().grant_user_group_permission(
1277 1277 repo, user_group, permission_name)
1278 1278 self.user_group_repo_permission_ids.append(
1279 1279 (repo.repo_id, user_group.users_group_id))
1280 1280 return permission
1281 1281
1282 1282 def grant_user_permission_to_user_group(
1283 1283 self, target_user_group, user, permission_name):
1284 1284 permission = UserGroupModel().grant_user_permission(
1285 1285 target_user_group, user, permission_name)
1286 1286 self.user_user_group_permission_ids.append(
1287 1287 (target_user_group.users_group_id, user.user_id))
1288 1288 return permission
1289 1289
1290 1290 def grant_user_group_permission_to_user_group(
1291 1291 self, target_user_group, user_group, permission_name):
1292 1292 permission = UserGroupModel().grant_user_group_permission(
1293 1293 target_user_group, user_group, permission_name)
1294 1294 self.user_group_user_group_permission_ids.append(
1295 1295 (target_user_group.users_group_id, user_group.users_group_id))
1296 1296 return permission
1297 1297
1298 1298 def revoke_user_permission(self, user_name, permission_name):
1299 1299 self.inherit_default_user_permissions(user_name, True)
1300 1300 UserModel().revoke_perm(user_name, permission_name)
1301 1301
1302 1302 def inherit_default_user_permissions(self, user_name, value):
1303 1303 user = UserModel().get_by_username(user_name)
1304 1304 user.inherit_default_permissions = value
1305 1305 Session().add(user)
1306 1306 Session().commit()
1307 1307
1308 1308 def cleanup(self):
1309 1309 self._cleanup_permissions()
1310 1310 self._cleanup_repos()
1311 1311 self._cleanup_repo_groups()
1312 1312 self._cleanup_user_groups()
1313 1313 self._cleanup_users()
1314 1314
1315 1315 def _cleanup_permissions(self):
1316 1316 if self.user_permissions:
1317 1317 for user_name, permission_name in self.user_permissions:
1318 1318 self.revoke_user_permission(user_name, permission_name)
1319 1319
1320 1320 for permission in self.user_repo_permission_ids:
1321 1321 RepoModel().revoke_user_permission(*permission)
1322 1322
1323 1323 for permission in self.user_group_repo_permission_ids:
1324 1324 RepoModel().revoke_user_group_permission(*permission)
1325 1325
1326 1326 for permission in self.user_repo_group_permission_ids:
1327 1327 RepoGroupModel().revoke_user_permission(*permission)
1328 1328
1329 1329 for permission in self.user_group_repo_group_permission_ids:
1330 1330 RepoGroupModel().revoke_user_group_permission(*permission)
1331 1331
1332 1332 for permission in self.user_user_group_permission_ids:
1333 1333 UserGroupModel().revoke_user_permission(*permission)
1334 1334
1335 1335 for permission in self.user_group_user_group_permission_ids:
1336 1336 UserGroupModel().revoke_user_group_permission(*permission)
1337 1337
1338 1338 def _cleanup_repo_groups(self):
1339 1339 def _repo_group_compare(first_group_id, second_group_id):
1340 1340 """
1341 1341 Gives higher priority to the groups with the most complex paths
1342 1342 """
1343 1343 first_group = RepoGroup.get(first_group_id)
1344 1344 second_group = RepoGroup.get(second_group_id)
1345 1345 first_group_parts = (
1346 1346 len(first_group.group_name.split('/')) if first_group else 0)
1347 1347 second_group_parts = (
1348 1348 len(second_group.group_name.split('/')) if second_group else 0)
1349 1349 return cmp(second_group_parts, first_group_parts)
1350 1350
1351 1351 sorted_repo_group_ids = sorted(
1352 1352 self.repo_group_ids, cmp=_repo_group_compare)
1353 1353 for repo_group_id in sorted_repo_group_ids:
1354 1354 self.fixture.destroy_repo_group(repo_group_id)
1355 1355
1356 1356 def _cleanup_repos(self):
1357 1357 sorted_repos_ids = sorted(self.repos_ids)
1358 1358 for repo_id in sorted_repos_ids:
1359 1359 self.fixture.destroy_repo(repo_id)
1360 1360
1361 1361 def _cleanup_user_groups(self):
1362 1362 def _user_group_compare(first_group_id, second_group_id):
1363 1363 """
1364 1364 Gives higher priority to the groups with the most complex paths
1365 1365 """
1366 1366 first_group = UserGroup.get(first_group_id)
1367 1367 second_group = UserGroup.get(second_group_id)
1368 1368 first_group_parts = (
1369 1369 len(first_group.users_group_name.split('/'))
1370 1370 if first_group else 0)
1371 1371 second_group_parts = (
1372 1372 len(second_group.users_group_name.split('/'))
1373 1373 if second_group else 0)
1374 1374 return cmp(second_group_parts, first_group_parts)
1375 1375
1376 1376 sorted_user_group_ids = sorted(
1377 1377 self.user_group_ids, cmp=_user_group_compare)
1378 1378 for user_group_id in sorted_user_group_ids:
1379 1379 self.fixture.destroy_user_group(user_group_id)
1380 1380
1381 1381 def _cleanup_users(self):
1382 1382 for user_id in self.user_ids:
1383 1383 self.fixture.destroy_user(user_id)
1384 1384
1385 1385
1386 1386 # TODO: Think about moving this into a pytest-pyro package and making it a
1387 1387 # pytest plugin
1388 1388 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1389 1389 def pytest_runtest_makereport(item, call):
1390 1390 """
1391 1391 Adding the remote traceback if the exception has this information.
1392 1392
1393 1393 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1394 1394 to the exception instance.
1395 1395 """
1396 1396 outcome = yield
1397 1397 report = outcome.get_result()
1398 1398 if call.excinfo:
1399 1399 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1400 1400
1401 1401
1402 1402 def _add_vcsserver_remote_traceback(report, exc):
1403 1403 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1404 1404
1405 1405 if vcsserver_traceback:
1406 1406 section = 'VCSServer remote traceback ' + report.when
1407 1407 report.sections.append((section, vcsserver_traceback))
1408 1408
1409 1409
1410 1410 @pytest.fixture(scope='session')
1411 1411 def testrun():
1412 1412 return {
1413 1413 'uuid': uuid.uuid4(),
1414 1414 'start': datetime.datetime.utcnow().isoformat(),
1415 1415 'timestamp': int(time.time()),
1416 1416 }
1417 1417
1418 1418
1419 1419 class AppenlightClient(object):
1420 1420
1421 1421 url_template = '{url}?protocol_version=0.5'
1422 1422
1423 1423 def __init__(
1424 1424 self, url, api_key, add_server=True, add_timestamp=True,
1425 1425 namespace=None, request=None, testrun=None):
1426 1426 self.url = self.url_template.format(url=url)
1427 1427 self.api_key = api_key
1428 1428 self.add_server = add_server
1429 1429 self.add_timestamp = add_timestamp
1430 1430 self.namespace = namespace
1431 1431 self.request = request
1432 1432 self.server = socket.getfqdn(socket.gethostname())
1433 1433 self.tags_before = {}
1434 1434 self.tags_after = {}
1435 1435 self.stats = []
1436 1436 self.testrun = testrun or {}
1437 1437
1438 1438 def tag_before(self, tag, value):
1439 1439 self.tags_before[tag] = value
1440 1440
1441 1441 def tag_after(self, tag, value):
1442 1442 self.tags_after[tag] = value
1443 1443
1444 1444 def collect(self, data):
1445 1445 if self.add_server:
1446 1446 data.setdefault('server', self.server)
1447 1447 if self.add_timestamp:
1448 1448 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1449 1449 if self.namespace:
1450 1450 data.setdefault('namespace', self.namespace)
1451 1451 if self.request:
1452 1452 data.setdefault('request', self.request)
1453 1453 self.stats.append(data)
1454 1454
1455 1455 def send_stats(self):
1456 1456 tags = [
1457 1457 ('testrun', self.request),
1458 1458 ('testrun.start', self.testrun['start']),
1459 1459 ('testrun.timestamp', self.testrun['timestamp']),
1460 1460 ('test', self.namespace),
1461 1461 ]
1462 1462 for key, value in self.tags_before.items():
1463 1463 tags.append((key + '.before', value))
1464 1464 try:
1465 1465 delta = self.tags_after[key] - value
1466 1466 tags.append((key + '.delta', delta))
1467 1467 except Exception:
1468 1468 pass
1469 1469 for key, value in self.tags_after.items():
1470 1470 tags.append((key + '.after', value))
1471 1471 self.collect({
1472 1472 'message': "Collected tags",
1473 1473 'tags': tags,
1474 1474 })
1475 1475
1476 1476 response = requests.post(
1477 1477 self.url,
1478 1478 headers={
1479 1479 'X-appenlight-api-key': self.api_key},
1480 1480 json=self.stats,
1481 1481 )
1482 1482
1483 1483 if response.status_code != 200:
1484 1484 pprint.pprint(self.stats)
1485 1485 print(response.headers)
1486 1486 print(response.text)
1487 1487 raise Exception('Sending to appenlight failed')
1488 1488
1489 1489
1490 1490 @pytest.fixture()
1491 1491 def gist_util(request, db_connection):
1492 1492 """
1493 1493 Provides a wired instance of `GistUtility` with integrated cleanup.
1494 1494 """
1495 1495 utility = GistUtility()
1496 1496 request.addfinalizer(utility.cleanup)
1497 1497 return utility
1498 1498
1499 1499
1500 1500 class GistUtility(object):
1501 1501 def __init__(self):
1502 1502 self.fixture = Fixture()
1503 1503 self.gist_ids = []
1504 1504
1505 1505 def create_gist(self, **kwargs):
1506 1506 gist = self.fixture.create_gist(**kwargs)
1507 1507 self.gist_ids.append(gist.gist_id)
1508 1508 return gist
1509 1509
1510 1510 def cleanup(self):
1511 1511 for id_ in self.gist_ids:
1512 1512 self.fixture.destroy_gists(str(id_))
1513 1513
1514 1514
1515 1515 @pytest.fixture()
1516 1516 def enabled_backends(request):
1517 1517 backends = request.config.option.backends
1518 1518 return backends[:]
1519 1519
1520 1520
1521 1521 @pytest.fixture()
1522 1522 def settings_util(request, db_connection):
1523 1523 """
1524 1524 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1525 1525 """
1526 1526 utility = SettingsUtility()
1527 1527 request.addfinalizer(utility.cleanup)
1528 1528 return utility
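
# Example (illustrative only): a sketch of creating a temporary ui setting
# with `settings_util`; the section, value and key are hypothetical.
#
#     def test_global_ui_setting(settings_util):
#         setting = settings_util.create_rhodecode_ui(
#             'hooks', 'some-value', key='test_example_key')
#         assert setting.ui_key == 'test_example_key'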
1529 1529
1530 1530
1531 1531 class SettingsUtility(object):
1532 1532 def __init__(self):
1533 1533 self.rhodecode_ui_ids = []
1534 1534 self.rhodecode_setting_ids = []
1535 1535 self.repo_rhodecode_ui_ids = []
1536 1536 self.repo_rhodecode_setting_ids = []
1537 1537
1538 1538 def create_repo_rhodecode_ui(
1539 1539 self, repo, section, value, key=None, active=True, cleanup=True):
1540 1540 key = key or hashlib.sha1(
1541 1541 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1542 1542
1543 1543 setting = RepoRhodeCodeUi()
1544 1544 setting.repository_id = repo.repo_id
1545 1545 setting.ui_section = section
1546 1546 setting.ui_value = value
1547 1547 setting.ui_key = key
1548 1548 setting.ui_active = active
1549 1549 Session().add(setting)
1550 1550 Session().commit()
1551 1551
1552 1552 if cleanup:
1553 1553 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1554 1554 return setting
1555 1555
1556 1556 def create_rhodecode_ui(
1557 1557 self, section, value, key=None, active=True, cleanup=True):
1558 1558 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1559 1559
1560 1560 setting = RhodeCodeUi()
1561 1561 setting.ui_section = section
1562 1562 setting.ui_value = value
1563 1563 setting.ui_key = key
1564 1564 setting.ui_active = active
1565 1565 Session().add(setting)
1566 1566 Session().commit()
1567 1567
1568 1568 if cleanup:
1569 1569 self.rhodecode_ui_ids.append(setting.ui_id)
1570 1570 return setting
1571 1571
1572 1572 def create_repo_rhodecode_setting(
1573 1573 self, repo, name, value, type_, cleanup=True):
1574 1574 setting = RepoRhodeCodeSetting(
1575 1575 repo.repo_id, key=name, val=value, type=type_)
1576 1576 Session().add(setting)
1577 1577 Session().commit()
1578 1578
1579 1579 if cleanup:
1580 1580 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1581 1581 return setting
1582 1582
1583 1583 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1584 1584 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1585 1585 Session().add(setting)
1586 1586 Session().commit()
1587 1587
1588 1588 if cleanup:
1589 1589 self.rhodecode_setting_ids.append(setting.app_settings_id)
1590 1590
1591 1591 return setting
1592 1592
1593 1593 def cleanup(self):
1594 1594 for id_ in self.rhodecode_ui_ids:
1595 1595 setting = RhodeCodeUi.get(id_)
1596 1596 Session().delete(setting)
1597 1597
1598 1598 for id_ in self.rhodecode_setting_ids:
1599 1599 setting = RhodeCodeSetting.get(id_)
1600 1600 Session().delete(setting)
1601 1601
1602 1602 for id_ in self.repo_rhodecode_ui_ids:
1603 1603 setting = RepoRhodeCodeUi.get(id_)
1604 1604 Session().delete(setting)
1605 1605
1606 1606 for id_ in self.repo_rhodecode_setting_ids:
1607 1607 setting = RepoRhodeCodeSetting.get(id_)
1608 1608 Session().delete(setting)
1609 1609
1610 1610 Session().commit()
1611 1611
1612 1612
1613 1613 @pytest.fixture()
1614 1614 def no_notifications(request):
1615 1615 notification_patcher = mock.patch(
1616 1616 'rhodecode.model.notification.NotificationModel.create')
1617 1617 notification_patcher.start()
1618 1618 request.addfinalizer(notification_patcher.stop)
1619 1619
1620 1620
1621 1621 @pytest.fixture(scope='session')
1622 1622 def repeat(request):
1623 1623 """
1624 1624 The number of repetitions is based on this fixture.
1625 1625
1626 1626 Slower calls may divide it by 10 or 100. It is chosen so that the
1627 1627 tests are not too slow in our default test suite.
1628 1628 """
1629 1629 return request.config.getoption('--repeat')
1630 1630
1631 1631
1632 1632 @pytest.fixture()
1633 1633 def rhodecode_fixtures():
1634 1634 return Fixture()
1635 1635
1636 1636
1637 1637 @pytest.fixture()
1638 1638 def context_stub():
1639 1639 """
1640 1640 Stub context object.
1641 1641 """
1642 1642 context = pyramid.testing.DummyResource()
1643 1643 return context
1644 1644
1645 1645
1646 1646 @pytest.fixture()
1647 1647 def request_stub():
1648 1648 """
1649 1649 Stub request object.
1650 1650 """
1651 1651 from rhodecode.lib.base import bootstrap_request
1652 1652 request = bootstrap_request(scheme='https')
1653 1653 return request
1654 1654
1655 1655
1656 1656 @pytest.fixture()
1657 1657 def config_stub(request, request_stub):
1658 1658 """
1659 1659 Set up pyramid.testing and return the Configurator.
1660 1660 """
1661 1661 from rhodecode.lib.base import bootstrap_config
1662 1662 config = bootstrap_config(request=request_stub)
1663 1663
1664 1664 @request.addfinalizer
1665 1665 def cleanup():
1666 1666 pyramid.testing.tearDown()
1667 1667
1668 1668 return config
1669 1669
1670 1670
1671 1671 @pytest.fixture()
1672 1672 def StubIntegrationType():
1673 1673 class _StubIntegrationType(IntegrationTypeBase):
1674 1674 """ Test integration type class """
1675 1675
1676 1676 key = 'test'
1677 1677 display_name = 'Test integration type'
1678 1678 description = 'A test integration type for testing'
1679 1679
1680 1680 @classmethod
1681 1681 def icon(cls):
1682 1682 return 'test_icon_html_image'
1683 1683
1684 1684 def __init__(self, settings):
1685 1685 super(_StubIntegrationType, self).__init__(settings)
1686 1686 self.sent_events = [] # for testing
1687 1687
1688 1688 def send_event(self, event):
1689 1689 self.sent_events.append(event)
1690 1690
1691 1691 def settings_schema(self):
1692 1692 class SettingsSchema(colander.Schema):
1693 1693 test_string_field = colander.SchemaNode(
1694 1694 colander.String(),
1695 1695 missing=colander.required,
1696 1696 title='test string field',
1697 1697 )
1698 1698 test_int_field = colander.SchemaNode(
1699 1699 colander.Int(),
1700 1700 title='some integer setting',
1701 1701 )
1702 1702 return SettingsSchema()
1703 1703
1704 1704
1705 1705 integration_type_registry.register_integration_type(_StubIntegrationType)
1706 1706 return _StubIntegrationType
1707 1707
1708 1708 @pytest.fixture()
1709 1709 def stub_integration_settings():
1710 1710 return {
1711 1711 'test_string_field': 'some data',
1712 1712 'test_int_field': 100,
1713 1713 }
1714 1714
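As an illustrative sketch (not part of this changeset), the stub integration type records events instead of dispatching them, which keeps event assertions simple; `dummy_event` stands in for a real rhodecode.events instance:

def test_stub_integration_records_events(StubIntegrationType,
                                         stub_integration_settings):
    integration_type = StubIntegrationType(stub_integration_settings)
    dummy_event = object()  # stand-in for a real event object
    integration_type.send_event(dummy_event)
    # The stub only appends to sent_events, so dispatch is easy to assert on.
    assert integration_type.sent_events == [dummy_event]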
1715 1715
1716 1716 @pytest.fixture()
1717 1717 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1718 1718 stub_integration_settings):
1719 1719 integration = IntegrationModel().create(
1720 1720 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1721 1721 name='test repo integration',
1722 1722 repo=repo_stub, repo_group=None, child_repos_only=None)
1723 1723
1724 1724 @request.addfinalizer
1725 1725 def cleanup():
1726 1726 IntegrationModel().delete(integration)
1727 1727
1728 1728 return integration
1729 1729
1730 1730
1731 1731 @pytest.fixture()
1732 1732 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1733 1733 stub_integration_settings):
1734 1734 integration = IntegrationModel().create(
1735 1735 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1736 1736 name='test repogroup integration',
1737 1737 repo=None, repo_group=test_repo_group, child_repos_only=True)
1738 1738
1739 1739 @request.addfinalizer
1740 1740 def cleanup():
1741 1741 IntegrationModel().delete(integration)
1742 1742
1743 1743 return integration
1744 1744
1745 1745
1746 1746 @pytest.fixture()
1747 1747 def repogroup_recursive_integration_stub(request, test_repo_group,
1748 1748 StubIntegrationType, stub_integration_settings):
1749 1749 integration = IntegrationModel().create(
1750 1750 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1751 1751 name='test recursive repogroup integration',
1752 1752 repo=None, repo_group=test_repo_group, child_repos_only=False)
1753 1753
1754 1754 @request.addfinalizer
1755 1755 def cleanup():
1756 1756 IntegrationModel().delete(integration)
1757 1757
1758 1758 return integration
1759 1759
1760 1760
1761 1761 @pytest.fixture()
1762 1762 def global_integration_stub(request, StubIntegrationType,
1763 1763 stub_integration_settings):
1764 1764 integration = IntegrationModel().create(
1765 1765 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1766 1766 name='test global integration',
1767 1767 repo=None, repo_group=None, child_repos_only=None)
1768 1768
1769 1769 @request.addfinalizer
1770 1770 def cleanup():
1771 1771 IntegrationModel().delete(integration)
1772 1772
1773 1773 return integration
1774 1774
1775 1775
1776 1776 @pytest.fixture()
1777 1777 def root_repos_integration_stub(request, StubIntegrationType,
1778 1778 stub_integration_settings):
1779 1779 integration = IntegrationModel().create(
1780 1780 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1781 1781 name='test root repos integration',
1782 1782 repo=None, repo_group=None, child_repos_only=True)
1783 1783
1784 1784 @request.addfinalizer
1785 1785 def cleanup():
1786 1786 IntegrationModel().delete(integration)
1787 1787
1788 1788 return integration
1789 1789
1790 1790
1791 1791 @pytest.fixture()
1792 1792 def local_dt_to_utc():
1793 1793 def _factory(dt):
1794 1794 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1795 1795 dateutil.tz.tzutc()).replace(tzinfo=None)
1796 1796 return _factory
1797 1797
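A short sketch (not part of this changeset) of the conversion the returned factory performs; the result is a naive UTC datetime:

def test_local_dt_to_utc_returns_naive_utc(local_dt_to_utc):
    import datetime
    local_dt = datetime.datetime(2020, 1, 1, 12, 0, 0)
    utc_dt = local_dt_to_utc(local_dt)
    # The applied offset depends on the local timezone of the test machine,
    # but the result always has its tzinfo stripped.
    assert utc_dt.tzinfo is None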
1798 1798
1799 1799 @pytest.fixture()
1800 1800 def disable_anonymous_user(request, baseapp):
1801 1801 set_anonymous_access(False)
1802 1802
1803 1803 @request.addfinalizer
1804 1804 def cleanup():
1805 1805 set_anonymous_access(True)
1806 1806
1807 1807
1808 1808 @pytest.fixture(scope='module')
1809 1809 def rc_fixture(request):
1810 1810 return Fixture()
1811 1811
1812 1812
1813 1813 @pytest.fixture()
1814 1814 def repo_groups(request):
1815 1815 fixture = Fixture()
1816 1816
1817 1817 session = Session()
1818 1818 zombie_group = fixture.create_repo_group('zombie')
1819 1819 parent_group = fixture.create_repo_group('parent')
1820 1820 child_group = fixture.create_repo_group('parent/child')
1821 1821 groups_in_db = session.query(RepoGroup).all()
1822 1822 assert len(groups_in_db) == 3
1823 1823 assert child_group.group_parent_id == parent_group.group_id
1824 1824
1825 1825 @request.addfinalizer
1826 1826 def cleanup():
1827 1827 fixture.destroy_repo_group(zombie_group)
1828 1828 fixture.destroy_repo_group(child_group)
1829 1829 fixture.destroy_repo_group(parent_group)
1830 1830
1831 1831 return zombie_group, parent_group, child_group
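Finally, an illustrative sketch (not part of this changeset) of a test consuming the three groups created above:

def test_repo_group_hierarchy(repo_groups):
    zombie_group, parent_group, child_group = repo_groups
    # Mirrors the invariant asserted inside the fixture: 'parent/child' is
    # nested under 'parent'.
    assert child_group.group_parent_id == parent_group.group_id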