##// END OF EJS Templates
shadow-repos: use numeric repo id for creation of shadow repos....
marcink -
r2810:a15bd3a8 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,905 +1,905 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
@jsonrpc_method()
def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Fetch a single pull request by its ID.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID from where
        the pull request was opened.
    :type repoid: str or int
    :param pullrequestid: ID of the requested pull request.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
            {
                "pull_request_id": "<pull_request_id>",
                "url": "<url>",
                "title": "<title>",
                "description": "<description>",
                "status" : "<status>",
                "created_on": "<date_time_created>",
                "updated_on": "<date_time_updated>",
                "commit_ids": ["<commit_id>", ...],
                "review_status": "<review_status>",
                "mergeable": {
                    "status": "<bool>",
                    "message": "<message>",
                },
                "source": {
                    "clone_url": "<clone_url>",
                    "repository": "<repository_name>",
                    "reference": {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "target": {
                    "clone_url": "<clone_url>",
                    "repository": "<repository_name>",
                    "reference": {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "merge": {
                    "clone_url": "<clone_url>",
                    "reference": {
                        "name": "<name>",
                        "type": "<type>",
                        "commit_id": "<commit_id>",
                    }
                },
                "author": <user_obj>,
                "reviewers": [
                    {
                        "user": "<user_obj>",
                        "review_status": "<review_status>",
                    },
                    ...
                ]
            },
        "error": null
    """

    pull_request = get_pull_request_or_error(pullrequestid)

    # An explicit repoid is validated (raises if unknown); otherwise the
    # pull request's own target repository is used for the permission check.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    has_read_access = PullRequestModel().check_user_read(
        pull_request, apiuser, api=True)
    if not has_read_access:
        # Hide the existence of the PR from users without read permission.
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))
    return pull_request.get_api_data()
137 137
138 138
@jsonrpc_method()
def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
    """
    List all pull requests of the repository specified in `repoid` that
    have the given status.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param status: Only return pull requests with the specified status.
        Valid options are.
        * ``new`` (default)
        * ``open``
        * ``closed``
    :type status: str

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result":
            [
                {
                    "pull_request_id": "<pull_request_id>",
                    "url": "<url>",
                    "title" : "<title>",
                    "description": "<description>",
                    "status": "<status>",
                    "created_on": "<date_time_created>",
                    "updated_on": "<date_time_updated>",
                    "commit_ids": ["<commit_id>", ...],
                    "review_status": "<review_status>",
                    "mergeable": {
                        "status": "<bool>",
                        "message": "<message>",
                    },
                    "source": {
                        "clone_url": "<clone_url>",
                        "reference": {
                            "name": "<name>",
                            "type": "<type>",
                            "commit_id": "<commit_id>",
                        }
                    },
                    "target": {
                        "clone_url": "<clone_url>",
                        "reference": {
                            "name": "<name>",
                            "type": "<type>",
                            "commit_id": "<commit_id>",
                        }
                    },
                    "merge": {
                        "clone_url": "<clone_url>",
                        "reference": {
                            "name": "<name>",
                            "type": "<type>",
                            "commit_id": "<commit_id>",
                        }
                    },
                    "author": <user_obj>,
                    "reviewers": [
                        {
                            "user": "<user_obj>",
                            "review_status": "<review_status>",
                        },
                        ...
                    ]
                },
                ...
            ],
        "error": null
    """
    repo = get_repo_or_error(repoid)

    # Super-admins bypass per-repository permission validation.
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    wanted_status = Optional.extract(status)
    pull_requests = PullRequestModel().get_all(repo, statuses=[wanted_status])
    return [pull_request.get_api_data() for pull_request in pull_requests]
234 234
235 235
@jsonrpc_method()
def merge_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Merge the pull request specified by `pullrequestid` into its target
    repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional, repository name or repository ID of the
        target repository to which the |pr| is to be merged.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request which shall be merged.
    :type pullrequestid: int
    :param userid: Merge the pull request as this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "executed": "<bool>",
            "failure_reason": "<int>",
            "merge_commit_id": "<merge_commit_id>",
            "possible": "<bool>",
            "merge_ref": {
                "commit_id": "<commit_id>",
                "type": "<type>",
                "name": "<name>"
            }
        },
        "error": null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Merging on behalf of another user requires super-admin or
    # repository-admin rights.
    if not isinstance(userid, Optional):
        has_admin_rights = (
            has_superadmin_permission(apiuser) or
            HasRepoPermissionAnyApi('repository.admin')(
                user=apiuser, repo_name=repo.repo_name))
        if not has_admin_rights:
            raise JSONRPCError('userid is not the same as your user')
        apiuser = get_user_or_error(userid)

    check = MergeCheck.validate(
        pull_request, user=apiuser, translator=request.translate)
    if check.failed:
        # Collect every translated failure reason into one message.
        translated = [
            request.translate(error_msg) for _err_type, error_msg in check.errors]
        raise JSONRPCError(
            'merge not possible for following reasons: {}'.format(
                ','.join(translated)))

    target_repo = pull_request.target_repo
    extras = vcs_operation_context(
        request.environ, repo_name=target_repo.repo_name,
        username=apiuser.username, action='push',
        scm=target_repo.repo_type)
    merge_response = PullRequestModel().merge_repo(
        pull_request, apiuser, extras=extras)
    if merge_response.executed:
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, apiuser)

    Session().commit()

    # In previous versions the merge response directly contained the merge
    # commit id. It is now contained in the merge reference object. To be
    # backwards compatible we have to extract it again.
    merge_response = merge_response._asdict()
    merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id

    return merge_response
320 320
321 321
@jsonrpc_method()
def get_pull_request_comments(
        request, apiuser, pullrequestid, repoid=Optional(None)):
    """
    Get all comments of pull request specified with the `pullrequestid`

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : [
            {
                "comment_author": {
                    "active": true,
                    "full_name_or_username": "Tom Gore",
                    "username": "admin"
                },
                "comment_created_on": "2017-01-02T18:43:45.533",
                "comment_f_path": null,
                "comment_id": 25,
                "comment_lineno": null,
                "comment_status": {
                    "status": "under_review",
                    "status_lbl": "Under Review"
                },
                "comment_text": "Example text",
                "comment_type": null,
                "pull_request_version": null
            }
        ],
        error : null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    # `repo` is resolved only for validation: an explicit repoid raises if
    # unknown; otherwise fall back to the PR's target repository.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        # Do not reveal whether the repo/PR exists to unauthorized users.
        raise JSONRPCError('repository `%s` or pull request `%s` '
                           'does not exist' % (repoid, pullrequestid))

    # Only the display object is needed here; the other tuple members are
    # unused but returned together by get_pr_version.
    (pull_request_latest,
     pull_request_at_ver,
     pull_request_display_obj,
     at_version) = PullRequestModel().get_pr_version(
        pull_request.pull_request_id, version=None)

    # Map each stored version id to a human-friendly 1-based counter, used
    # below to render "v<N>" labels for versioned comments.
    versions = pull_request_display_obj.versions()
    ver_map = {
        ver.pull_request_version_id: cnt
        for cnt, ver in enumerate(versions, 1)
    }

    # GENERAL COMMENTS with versions #
    q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
    q = q.order_by(ChangesetComment.comment_id.asc())
    general_comments = q.all()

    # INLINE COMMENTS with versions #
    q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
    q = q.order_by(ChangesetComment.comment_id.asc())
    inline_comments = q.all()

    data = []
    for comment in inline_comments + general_comments:
        full_data = comment.get_api_data()
        # Comments attached to an older PR version get a "v<N>" marker;
        # comments on the current version keep None.
        pr_version_id = None
        if comment.pull_request_version_id:
            pr_version_id = 'v{}'.format(
                ver_map[comment.pull_request_version_id])

        # sanitize some entries

        full_data['pull_request_version'] = pr_version_id
        # Flatten the author object to a minimal, JSON-safe dict.
        full_data['comment_author'] = {
            'username': full_data['comment_author'].username,
            'full_name_or_username': full_data['comment_author'].full_name_or_username,
            'active': full_data['comment_author'].active,
        }

        # comment_status appears to be a (possibly empty) sequence; only the
        # first status entry is exposed — TODO confirm against get_api_data.
        if full_data['comment_status']:
            full_data['comment_status'] = {
                'status': full_data['comment_status'][0].status,
                'status_lbl': full_data['comment_status'][0].status_lbl,
            }
        else:
            full_data['comment_status'] = {}

        data.append(full_data)
    return data
423 423
424 424
@jsonrpc_method()
def comment_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        message=Optional(None), commit_id=Optional(None), status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None),
        userid=Optional(OAttr('apiuser'))):
    """
    Comment on the pull request specified with the `pullrequestid`,
    in the |repo| specified by the `repoid`, and optionally change the
    review status.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int
    :param commit_id: Specify the commit_id for which to set a comment. If
        given commit_id is different than latest in the PR status
        change won't be performed.
    :type commit_id: str
    :param message: The text content of the comment.
    :type message: str
    :param status: (**Optional**) Set the approval status of the pull
        request. One of: 'not_reviewed', 'approved', 'rejected',
        'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "pull_request_id": "<Integer>",
            "comment_id": "<Integer>",
            "status": {"given": <given_status>,
                       "was_changed": <bool status_was_actually_changed> },
        },
        error :  null
    """
    pull_request = get_pull_request_or_error(pullrequestid)
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    # Commenting on behalf of another user requires super-admin or
    # repository-admin rights.
    if not isinstance(userid, Optional):
        if (has_superadmin_permission(apiuser) or
                HasRepoPermissionAnyApi('repository.admin')(
                    user=apiuser, repo_name=repo.repo_name)):
            apiuser = get_user_or_error(userid)
        else:
            raise JSONRPCError('userid is not the same as your user')

    if not PullRequestModel().check_user_read(
            pull_request, apiuser, api=True):
        raise JSONRPCError('repository `%s` does not exist' % (repoid,))
    # Unwrap all Optional parameters to their concrete values (or None).
    message = Optional.extract(message)
    status = Optional.extract(status)
    commit_id = Optional.extract(commit_id)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)

    if not message and not status:
        raise JSONRPCError(
            'Both message and status parameters are missing. '
            'At least one is required.')

    if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
            status is not None):
        raise JSONRPCError('Unknown comment status: `%s`' % status)

    if commit_id and commit_id not in pull_request.revisions:
        raise JSONRPCError(
            'Invalid commit_id `%s` for this pull request.' % commit_id)

    allowed_to_change_status = PullRequestModel().check_user_change_status(
        pull_request, apiuser)

    # if commit_id is passed re-validated if user is allowed to change status
    # based on latest commit_id from the PR
    if commit_id:
        # revisions[0] is the newest commit; a status change is only honored
        # when the comment targets the latest commit of the PR.
        commit_idx = pull_request.revisions.index(commit_id)
        if commit_idx != 0:
            allowed_to_change_status = False

    if resolves_comment_id:
        # A comment may only resolve an existing TODO-type comment.
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this pull request.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    text = message
    status_label = ChangesetStatus.get_status_lbl(status)
    if status and allowed_to_change_status:
        # When only a status is supplied, auto-generate the comment text
        # describing the status transition.
        st_message = ('Status change %(transition_icon)s %(status)s'
                      % {'transition_icon': '>', 'status': status_label})
        text = message or st_message

    rc_config = SettingsModel().get_all_settings()
    renderer = rc_config.get('rhodecode_markup_renderer', 'rst')

    # Truthy only when a status was given AND the user may change it.
    status_change = status and allowed_to_change_status
    comment = CommentsModel().create(
        text=text,
        repo=pull_request.target_repo.repo_id,
        user=apiuser.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        status_change=(status_label if status_change else None),
        status_change_type=(status if status_change else None),
        closing_pr=False,
        renderer=renderer,
        comment_type=comment_type,
        resolves_comment_id=resolves_comment_id,
        auth_user=apiuser
    )

    if allowed_to_change_status and status:
        # Record the review status change linked to the new comment; flush so
        # it is persisted together with the comment before the commit below.
        ChangesetStatusModel().set_status(
            pull_request.target_repo.repo_id,
            status,
            apiuser.user_id,
            comment,
            pull_request=pull_request.pull_request_id
        )
        Session().flush()

    Session().commit()
    data = {
        'pull_request_id': pull_request.pull_request_id,
        'comment_id': comment.comment_id if comment else None,
        'status': {'given': status, 'was_changed': status_change},
    }
    return data
572 572
573 573
@jsonrpc_method()
def create_pull_request(
        request, apiuser, source_repo, target_repo, source_ref, target_ref,
        title, description=Optional(''), reviewers=Optional(None)):
    """
    Creates a new pull request.

    Accepts refs in the following formats:

        * branch:<branch_name>:<sha>
        * branch:<branch_name>
        * bookmark:<bookmark_name>:<sha> (Mercurial only)
        * bookmark:<bookmark_name> (Mercurial only)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param source_repo: Set the source repository name.
    :type source_repo: str
    :param target_repo: Set the target repository name.
    :type target_repo: str
    :param source_ref: Set the source ref name.
    :type source_ref: str
    :param target_ref: Set the target ref name.
    :type target_ref: str
    :param title: Set the pull request title.
    :type title: str
    :param description: Set the pull request description.
    :type description: Optional(str)
    :param reviewers: Set the new pull request reviewers list.
        Reviewer defined by review rules will be added automatically to the
        defined list.
    :type reviewers: Optional(list)
        Accepts username strings or objects of the format:

            [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
    """

    source_db_repo = get_repo_or_error(source_repo)
    target_db_repo = get_repo_or_error(target_repo)
    # Note: only the SOURCE repository permissions are validated here.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)

    # Resolve the user-supplied refs to fully-qualified refs and verify that
    # both referenced commits actually exist.
    full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
    full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
    source_commit = get_commit_or_error(full_source_ref, source_db_repo)
    target_commit = get_commit_or_error(full_target_ref, target_db_repo)
    source_scm = source_db_repo.scm_instance()
    target_scm = target_db_repo.scm_instance()

    # Commits that would be brought in by this pull request.
    commit_ranges = target_scm.compare(
        target_commit.raw_id, source_commit.raw_id, source_scm,
        merge=True, pre_load=[])

    ancestor = target_scm.get_common_ancestor(
        target_commit.raw_id, source_commit.raw_id, source_scm)

    if not commit_ranges:
        raise JSONRPCError('no commits found')

    if not ancestor:
        raise JSONRPCError('no common ancestor found')

    reviewer_objects = Optional.extract(reviewers) or []

    if reviewer_objects:
        # Normalize/validate the caller-supplied reviewer structure.
        schema = ReviewerListSchema()
        try:
            reviewer_objects = schema.deserialize(reviewer_objects)
        except Invalid as err:
            raise JSONRPCValidationError(colander_exc=err)

        # validate users
        for reviewer_object in reviewer_objects:
            user = get_user_or_error(reviewer_object['username'])
            reviewer_object['user_id'] = user.user_id

    get_default_reviewers_data, get_validated_reviewers = \
        PullRequestModel().get_reviewer_functions()

    reviewer_rules = get_default_reviewers_data(
        apiuser.get_instance(), source_db_repo,
        source_commit, target_db_repo, target_commit)

    # specified rules are later re-validated, thus we can assume users will
    # eventually provide those that meet the reviewer criteria.
    if not reviewer_objects:
        reviewer_objects = reviewer_rules['reviewers']

    try:
        reviewers = get_validated_reviewers(
            reviewer_objects, reviewer_rules)
    except ValueError as e:
        raise JSONRPCError('Reviewers Validation: {}'.format(e))

    pull_request_model = PullRequestModel()
    pull_request = pull_request_model.create(
        created_by=apiuser.user_id,
        source_repo=source_repo,
        source_ref=full_source_ref,
        target_repo=target_repo,
        target_ref=full_target_ref,
        # NOTE(review): reversed() of an already-reversed list comprehension
        # yields the commits back in compare() order, wrapped in an iterator
        # — presumably what PullRequestModel.create expects; verify.
        revisions=reversed(
            [commit.raw_id for commit in reversed(commit_ranges)]),
        reviewers=reviewers,
        title=title,
        description=Optional.extract(description),
        auth_user=apiuser
    )

    Session().commit()
    data = {
        'msg': 'Created new pull request `{}`'.format(title),
        'pull_request_id': pull_request.pull_request_id,
    }
    return data
690 690
691 691
@jsonrpc_method()
def update_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        title=Optional(''), description=Optional(''), reviewers=Optional(None),
        update_commits=Optional(None)):
    """
    Updates a pull request.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Optional repository name or repository ID.
    :type repoid: str or int
    :param pullrequestid: The pull request ID.
    :type pullrequestid: int
    :param title: Set the pull request title.
    :type title: str
    :param description: Update pull request description.
    :type description: Optional(str)
    :param reviewers: Update pull request reviewers list with new value.
    :type reviewers: Optional(list)
        Accepts username strings or objects of the format:

            [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]

    :param update_commits: Trigger update of commits for this pull request
    :type: update_commits: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "Updated pull request `63`",
            "pull_request": <pull_request_object>,
            "updated_reviewers": {
              "added": [
                  "username"
              ],
              "removed": []
            },
            "updated_commits": {
              "added": [
                  "<sha1_hash>"
              ],
              "common": [
                  "<sha1_hash>",
                  "<sha1_hash>",
              ],
              "removed": []
            }
        }
        error :  null
    """

    pull_request = get_pull_request_or_error(pullrequestid)
    # `repo` is resolved for validation only; an explicit repoid raises if
    # it does not exist.
    if Optional.extract(repoid):
        repo = get_repo_or_error(repoid)
    else:
        repo = pull_request.target_repo

    if not PullRequestModel().check_user_update(
            pull_request, apiuser, api=True):
        raise JSONRPCError(
            'pull request `%s` update failed, no permission to update.' % (
                pullrequestid,))
    # Closed pull requests are immutable.
    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` update failed, pull request is closed' % (
                pullrequestid,))

    reviewer_objects = Optional.extract(reviewers) or []

    if reviewer_objects:
        schema = ReviewerListSchema()
        try:
            reviewer_objects = schema.deserialize(reviewer_objects)
        except Invalid as err:
            raise JSONRPCValidationError(colander_exc=err)

        # validate users
        for reviewer_object in reviewer_objects:
            user = get_user_or_error(reviewer_object['username'])
            reviewer_object['user_id'] = user.user_id

        get_default_reviewers_data, get_validated_reviewers = \
            PullRequestModel().get_reviewer_functions()

        # re-use stored rules
        reviewer_rules = pull_request.reviewer_data
        try:
            reviewers = get_validated_reviewers(
                reviewer_objects, reviewer_rules)
        except ValueError as e:
            raise JSONRPCError('Reviewers Validation: {}'.format(e))
    else:
        # No reviewer payload given: leave the reviewer set untouched below.
        reviewers = []

    title = Optional.extract(title)
    description = Optional.extract(description)
    if title or description:
        # Unchanged fields fall back to their current values.
        PullRequestModel().edit(
            pull_request, title or pull_request.title,
            description or pull_request.description, apiuser)
        Session().commit()

    commit_changes = {"added": [], "common": [], "removed": []}
    if str2bool(Optional.extract(update_commits)):
        # Refresh the PR's commit list from the source repository, when the
        # PR's update type supports it.
        if PullRequestModel().has_valid_update_type(pull_request):
            update_response = PullRequestModel().update_commits(
                pull_request)
            commit_changes = update_response.changes or commit_changes
            Session().commit()

    reviewers_changes = {"added": [], "removed": []}
    if reviewers:
        added_reviewers, removed_reviewers = \
            PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)

        reviewers_changes['added'] = sorted(
            [get_user_or_error(n).username for n in added_reviewers])
        reviewers_changes['removed'] = sorted(
            [get_user_or_error(n).username for n in removed_reviewers])
        Session().commit()

    data = {
        'msg': 'Updated pull request `{}`'.format(
            pull_request.pull_request_id),
        'pull_request': pull_request.get_api_data(),
        'updated_commits': commit_changes,
        'updated_reviewers': reviewers_changes
    }

    return data
826 826
827 827
@jsonrpc_method()
def close_pull_request(
        request, apiuser, pullrequestid, repoid=Optional(None),
        userid=Optional(OAttr('apiuser')), message=Optional('')):
    """
    Close the pull request specified by `pullrequestid`.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Repository name or repository ID to which the pull
        request belongs.
    :type repoid: str or int
    :param pullrequestid: ID of the pull request to be closed.
    :type pullrequestid: int
    :param userid: Close the pull request as this user.
    :type userid: Optional(str or int)
    :param message: Optional message to close the Pull Request with. If not
        specified it will be generated automatically.
    :type message: Optional(str)

    Example output:

    .. code-block:: bash

        "id": <id_given_in_input>,
        "result": {
            "pull_request_id": "<int>",
            "close_status": "<str:status_lbl>,
            "closed": "<bool>"
        },
        "error": null

    """
    _ = request.translate

    pull_request = get_pull_request_or_error(pullrequestid)
    repo = (get_repo_or_error(repoid) if Optional.extract(repoid)
            else pull_request.target_repo)

    # Closing on behalf of another user requires super-admin or
    # repository-admin rights.
    if not isinstance(userid, Optional):
        acting_as_other_allowed = (
            has_superadmin_permission(apiuser) or
            HasRepoPermissionAnyApi('repository.admin')(
                user=apiuser, repo_name=repo.repo_name))
        if not acting_as_other_allowed:
            raise JSONRPCError('userid is not the same as your user')
        apiuser = get_user_or_error(userid)

    if pull_request.is_closed():
        raise JSONRPCError(
            'pull request `%s` is already closed' % (pullrequestid,))

    # only owner or admin or person with write permissions
    allowed_to_close = PullRequestModel().check_user_update(
        pull_request, apiuser, api=True)
    if not allowed_to_close:
        raise JSONRPCError(
            'pull request `%s` close failed, no permission to close.' % (
                pullrequestid,))

    # message we're using to close the PR, else it's automatically generated
    close_message = Optional.extract(message)

    # finally close the PR, with proper message comment
    comment, status = PullRequestModel().close_pull_request_with_comment(
        pull_request, apiuser, repo, message=close_message)
    status_lbl = ChangesetStatus.get_status_lbl(status)

    Session().commit()

    return {
        'pull_request_id': pull_request.pull_request_id,
        'close_status': status_lbl,
        'closed': True,
    }
@@ -1,1203 +1,1203 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new',
85 85 repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 def test_show(self, pr_util, pr_merge_enabled):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 assert (
109 109 'Server-side pull request merging is disabled.'
110 110 in response) != pr_merge_enabled
111 111
112 112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 113 # Logout
114 114 response = self.app.post(
115 115 h.route_path('logout'),
116 116 params={'csrf_token': csrf_token})
117 117 # Login as regular user
118 118 response = self.app.post(h.route_path('login'),
119 119 {'username': TEST_USER_REGULAR_LOGIN,
120 120 'password': 'test12'})
121 121
122 122 pull_request = pr_util.create_pull_request(
123 123 author=TEST_USER_REGULAR_LOGIN)
124 124
125 125 response = self.app.get(route_path(
126 126 'pullrequest_show',
127 127 repo_name=pull_request.target_repo.scm_instance().name,
128 128 pull_request_id=pull_request.pull_request_id))
129 129
130 130 response.mustcontain('Server-side pull request merging is disabled.')
131 131
132 132 assert_response = response.assert_response()
133 133 # for regular user without a merge permissions, we don't see it
134 134 assert_response.no_element_exists('#close-pull-request-action')
135 135
136 136 user_util.grant_user_permission_to_repo(
137 137 pull_request.target_repo,
138 138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 139 'repository.write')
140 140 response = self.app.get(route_path(
141 141 'pullrequest_show',
142 142 repo_name=pull_request.target_repo.scm_instance().name,
143 143 pull_request_id=pull_request.pull_request_id))
144 144
145 145 response.mustcontain('Server-side pull request merging is disabled.')
146 146
147 147 assert_response = response.assert_response()
148 148 # now regular user has a merge permissions, we have CLOSE button
149 149 assert_response.one_element_exists('#close-pull-request-action')
150 150
151 151 def test_show_invalid_commit_id(self, pr_util):
152 152 # Simulating invalid revisions which will cause a lookup error
153 153 pull_request = pr_util.create_pull_request()
154 154 pull_request.revisions = ['invalid']
155 155 Session().add(pull_request)
156 156 Session().commit()
157 157
158 158 response = self.app.get(route_path(
159 159 'pullrequest_show',
160 160 repo_name=pull_request.target_repo.scm_instance().name,
161 161 pull_request_id=pull_request.pull_request_id))
162 162
163 163 for commit_id in pull_request.revisions:
164 164 response.mustcontain(commit_id)
165 165
166 166 def test_show_invalid_source_reference(self, pr_util):
167 167 pull_request = pr_util.create_pull_request()
168 168 pull_request.source_ref = 'branch:b:invalid'
169 169 Session().add(pull_request)
170 170 Session().commit()
171 171
172 172 self.app.get(route_path(
173 173 'pullrequest_show',
174 174 repo_name=pull_request.target_repo.scm_instance().name,
175 175 pull_request_id=pull_request.pull_request_id))
176 176
177 177 def test_edit_title_description(self, pr_util, csrf_token):
178 178 pull_request = pr_util.create_pull_request()
179 179 pull_request_id = pull_request.pull_request_id
180 180
181 181 response = self.app.post(
182 182 route_path('pullrequest_update',
183 183 repo_name=pull_request.target_repo.repo_name,
184 184 pull_request_id=pull_request_id),
185 185 params={
186 186 'edit_pull_request': 'true',
187 187 'title': 'New title',
188 188 'description': 'New description',
189 189 'csrf_token': csrf_token})
190 190
191 191 assert_session_flash(
192 192 response, u'Pull request title & description updated.',
193 193 category='success')
194 194
195 195 pull_request = PullRequest.get(pull_request_id)
196 196 assert pull_request.title == 'New title'
197 197 assert pull_request.description == 'New description'
198 198
199 199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 200 pull_request = pr_util.create_pull_request()
201 201 pull_request_id = pull_request.pull_request_id
202 202 repo_name = pull_request.target_repo.repo_name
203 203 pr_util.close()
204 204
205 205 response = self.app.post(
206 206 route_path('pullrequest_update',
207 207 repo_name=repo_name, pull_request_id=pull_request_id),
208 208 params={
209 209 'edit_pull_request': 'true',
210 210 'title': 'New title',
211 211 'description': 'New description',
212 212 'csrf_token': csrf_token}, status=200)
213 213 assert_session_flash(
214 214 response, u'Cannot update closed pull requests.',
215 215 category='error')
216 216
217 217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219 219
220 220 pull_request = pr_util.create_pull_request()
221 221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 222 Session().add(pull_request)
223 223 Session().commit()
224 224
225 225 pull_request_id = pull_request.pull_request_id
226 226
227 227 response = self.app.post(
228 228 route_path('pullrequest_update',
229 229 repo_name=pull_request.target_repo.repo_name,
230 230 pull_request_id=pull_request_id),
231 231 params={'update_commits': 'true',
232 232 'csrf_token': csrf_token})
233 233
234 234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 235 UpdateFailureReason.MISSING_SOURCE_REF])
236 236 assert_session_flash(response, expected_msg, category='error')
237 237
238 238 def test_missing_target_reference(self, pr_util, csrf_token):
239 239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 240 pull_request = pr_util.create_pull_request(
241 241 approved=True, mergeable=True)
242 242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 243 Session().add(pull_request)
244 244 Session().commit()
245 245
246 246 pull_request_id = pull_request.pull_request_id
247 247 pull_request_url = route_path(
248 248 'pullrequest_show',
249 249 repo_name=pull_request.target_repo.repo_name,
250 250 pull_request_id=pull_request_id)
251 251
252 252 response = self.app.get(pull_request_url)
253 253
254 254 assertr = AssertResponse(response)
255 255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 256 MergeFailureReason.MISSING_TARGET_REF]
257 257 assertr.element_contains(
258 258 'span[data-role="merge-message"]', str(expected_msg))
259 259
260 260 def test_comment_and_close_pull_request_custom_message_approved(
261 261 self, pr_util, csrf_token, xhr_header):
262 262
263 263 pull_request = pr_util.create_pull_request(approved=True)
264 264 pull_request_id = pull_request.pull_request_id
265 265 author = pull_request.user_id
266 266 repo = pull_request.target_repo.repo_id
267 267
268 268 self.app.post(
269 269 route_path('pullrequest_comment_create',
270 270 repo_name=pull_request.target_repo.scm_instance().name,
271 271 pull_request_id=pull_request_id),
272 272 params={
273 273 'close_pull_request': '1',
274 274 'text': 'Closing a PR',
275 275 'csrf_token': csrf_token},
276 276 extra_environ=xhr_header,)
277 277
278 278 journal = UserLog.query()\
279 279 .filter(UserLog.user_id == author)\
280 280 .filter(UserLog.repository_id == repo) \
281 281 .order_by('user_log_id') \
282 282 .all()
283 283 assert journal[-1].action == 'repo.pull_request.close'
284 284
285 285 pull_request = PullRequest.get(pull_request_id)
286 286 assert pull_request.is_closed()
287 287
288 288 status = ChangesetStatusModel().get_status(
289 289 pull_request.source_repo, pull_request=pull_request)
290 290 assert status == ChangesetStatus.STATUS_APPROVED
291 291 comments = ChangesetComment().query() \
292 292 .filter(ChangesetComment.pull_request == pull_request) \
293 293 .order_by(ChangesetComment.comment_id.asc())\
294 294 .all()
295 295 assert comments[-1].text == 'Closing a PR'
296 296
297 297 def test_comment_force_close_pull_request_rejected(
298 298 self, pr_util, csrf_token, xhr_header):
299 299 pull_request = pr_util.create_pull_request()
300 300 pull_request_id = pull_request.pull_request_id
301 301 PullRequestModel().update_reviewers(
302 302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 303 pull_request.author)
304 304 author = pull_request.user_id
305 305 repo = pull_request.target_repo.repo_id
306 306
307 307 self.app.post(
308 308 route_path('pullrequest_comment_create',
309 309 repo_name=pull_request.target_repo.scm_instance().name,
310 310 pull_request_id=pull_request_id),
311 311 params={
312 312 'close_pull_request': '1',
313 313 'csrf_token': csrf_token},
314 314 extra_environ=xhr_header)
315 315
316 316 pull_request = PullRequest.get(pull_request_id)
317 317
318 318 journal = UserLog.query()\
319 319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 320 .order_by('user_log_id') \
321 321 .all()
322 322 assert journal[-1].action == 'repo.pull_request.close'
323 323
324 324 # check only the latest status, not the review status
325 325 status = ChangesetStatusModel().get_status(
326 326 pull_request.source_repo, pull_request=pull_request)
327 327 assert status == ChangesetStatus.STATUS_REJECTED
328 328
329 329 def test_comment_and_close_pull_request(
330 330 self, pr_util, csrf_token, xhr_header):
331 331 pull_request = pr_util.create_pull_request()
332 332 pull_request_id = pull_request.pull_request_id
333 333
334 334 response = self.app.post(
335 335 route_path('pullrequest_comment_create',
336 336 repo_name=pull_request.target_repo.scm_instance().name,
337 337 pull_request_id=pull_request.pull_request_id),
338 338 params={
339 339 'close_pull_request': 'true',
340 340 'csrf_token': csrf_token},
341 341 extra_environ=xhr_header)
342 342
343 343 assert response.json
344 344
345 345 pull_request = PullRequest.get(pull_request_id)
346 346 assert pull_request.is_closed()
347 347
348 348 # check only the latest status, not the review status
349 349 status = ChangesetStatusModel().get_status(
350 350 pull_request.source_repo, pull_request=pull_request)
351 351 assert status == ChangesetStatus.STATUS_REJECTED
352 352
353 353 def test_create_pull_request(self, backend, csrf_token):
354 354 commits = [
355 355 {'message': 'ancestor'},
356 356 {'message': 'change'},
357 357 {'message': 'change2'},
358 358 ]
359 359 commit_ids = backend.create_master_repo(commits)
360 360 target = backend.create_repo(heads=['ancestor'])
361 361 source = backend.create_repo(heads=['change2'])
362 362
363 363 response = self.app.post(
364 364 route_path('pullrequest_create', repo_name=source.repo_name),
365 365 [
366 366 ('source_repo', source.repo_name),
367 367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 368 ('target_repo', target.repo_name),
369 369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 370 ('common_ancestor', commit_ids['ancestor']),
371 371 ('pullrequest_desc', 'Description'),
372 372 ('pullrequest_title', 'Title'),
373 373 ('__start__', 'review_members:sequence'),
374 374 ('__start__', 'reviewer:mapping'),
375 375 ('user_id', '1'),
376 376 ('__start__', 'reasons:sequence'),
377 377 ('reason', 'Some reason'),
378 378 ('__end__', 'reasons:sequence'),
379 379 ('__start__', 'rules:sequence'),
380 380 ('__end__', 'rules:sequence'),
381 381 ('mandatory', 'False'),
382 382 ('__end__', 'reviewer:mapping'),
383 383 ('__end__', 'review_members:sequence'),
384 384 ('__start__', 'revisions:sequence'),
385 385 ('revisions', commit_ids['change']),
386 386 ('revisions', commit_ids['change2']),
387 387 ('__end__', 'revisions:sequence'),
388 388 ('user', ''),
389 389 ('csrf_token', csrf_token),
390 390 ],
391 391 status=302)
392 392
393 393 location = response.headers['Location']
394 394 pull_request_id = location.rsplit('/', 1)[1]
395 395 assert pull_request_id != 'new'
396 396 pull_request = PullRequest.get(int(pull_request_id))
397 397
398 398 # check that we have now both revisions
399 399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
400 400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
401 401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
402 402 assert pull_request.target_ref == expected_target_ref
403 403
404 404 def test_reviewer_notifications(self, backend, csrf_token):
405 405 # We have to use the app.post for this test so it will create the
406 406 # notifications properly with the new PR
407 407 commits = [
408 408 {'message': 'ancestor',
409 409 'added': [FileNode('file_A', content='content_of_ancestor')]},
410 410 {'message': 'change',
411 411 'added': [FileNode('file_a', content='content_of_change')]},
412 412 {'message': 'change-child'},
413 413 {'message': 'ancestor-child', 'parents': ['ancestor'],
414 414 'added': [
415 415 FileNode('file_B', content='content_of_ancestor_child')]},
416 416 {'message': 'ancestor-child-2'},
417 417 ]
418 418 commit_ids = backend.create_master_repo(commits)
419 419 target = backend.create_repo(heads=['ancestor-child'])
420 420 source = backend.create_repo(heads=['change'])
421 421
422 422 response = self.app.post(
423 423 route_path('pullrequest_create', repo_name=source.repo_name),
424 424 [
425 425 ('source_repo', source.repo_name),
426 426 ('source_ref', 'branch:default:' + commit_ids['change']),
427 427 ('target_repo', target.repo_name),
428 428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
429 429 ('common_ancestor', commit_ids['ancestor']),
430 430 ('pullrequest_desc', 'Description'),
431 431 ('pullrequest_title', 'Title'),
432 432 ('__start__', 'review_members:sequence'),
433 433 ('__start__', 'reviewer:mapping'),
434 434 ('user_id', '2'),
435 435 ('__start__', 'reasons:sequence'),
436 436 ('reason', 'Some reason'),
437 437 ('__end__', 'reasons:sequence'),
438 438 ('__start__', 'rules:sequence'),
439 439 ('__end__', 'rules:sequence'),
440 440 ('mandatory', 'False'),
441 441 ('__end__', 'reviewer:mapping'),
442 442 ('__end__', 'review_members:sequence'),
443 443 ('__start__', 'revisions:sequence'),
444 444 ('revisions', commit_ids['change']),
445 445 ('__end__', 'revisions:sequence'),
446 446 ('user', ''),
447 447 ('csrf_token', csrf_token),
448 448 ],
449 449 status=302)
450 450
451 451 location = response.headers['Location']
452 452
453 453 pull_request_id = location.rsplit('/', 1)[1]
454 454 assert pull_request_id != 'new'
455 455 pull_request = PullRequest.get(int(pull_request_id))
456 456
457 457 # Check that a notification was made
458 458 notifications = Notification.query()\
459 459 .filter(Notification.created_by == pull_request.author.user_id,
460 460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
461 461 Notification.subject.contains(
462 462 "wants you to review pull request #%s" % pull_request_id))
463 463 assert len(notifications.all()) == 1
464 464
465 465 # Change reviewers and check that a notification was made
466 466 PullRequestModel().update_reviewers(
467 467 pull_request.pull_request_id, [(1, [], False, [])],
468 468 pull_request.author)
469 469 assert len(notifications.all()) == 2
470 470
471 471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
472 472 csrf_token):
473 473 commits = [
474 474 {'message': 'ancestor',
475 475 'added': [FileNode('file_A', content='content_of_ancestor')]},
476 476 {'message': 'change',
477 477 'added': [FileNode('file_a', content='content_of_change')]},
478 478 {'message': 'change-child'},
479 479 {'message': 'ancestor-child', 'parents': ['ancestor'],
480 480 'added': [
481 481 FileNode('file_B', content='content_of_ancestor_child')]},
482 482 {'message': 'ancestor-child-2'},
483 483 ]
484 484 commit_ids = backend.create_master_repo(commits)
485 485 target = backend.create_repo(heads=['ancestor-child'])
486 486 source = backend.create_repo(heads=['change'])
487 487
488 488 response = self.app.post(
489 489 route_path('pullrequest_create', repo_name=source.repo_name),
490 490 [
491 491 ('source_repo', source.repo_name),
492 492 ('source_ref', 'branch:default:' + commit_ids['change']),
493 493 ('target_repo', target.repo_name),
494 494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
495 495 ('common_ancestor', commit_ids['ancestor']),
496 496 ('pullrequest_desc', 'Description'),
497 497 ('pullrequest_title', 'Title'),
498 498 ('__start__', 'review_members:sequence'),
499 499 ('__start__', 'reviewer:mapping'),
500 500 ('user_id', '1'),
501 501 ('__start__', 'reasons:sequence'),
502 502 ('reason', 'Some reason'),
503 503 ('__end__', 'reasons:sequence'),
504 504 ('__start__', 'rules:sequence'),
505 505 ('__end__', 'rules:sequence'),
506 506 ('mandatory', 'False'),
507 507 ('__end__', 'reviewer:mapping'),
508 508 ('__end__', 'review_members:sequence'),
509 509 ('__start__', 'revisions:sequence'),
510 510 ('revisions', commit_ids['change']),
511 511 ('__end__', 'revisions:sequence'),
512 512 ('user', ''),
513 513 ('csrf_token', csrf_token),
514 514 ],
515 515 status=302)
516 516
517 517 location = response.headers['Location']
518 518
519 519 pull_request_id = location.rsplit('/', 1)[1]
520 520 assert pull_request_id != 'new'
521 521 pull_request = PullRequest.get(int(pull_request_id))
522 522
523 523 # target_ref has to point to the ancestor's commit_id in order to
524 524 # show the correct diff
525 525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
526 526 assert pull_request.target_ref == expected_target_ref
527 527
528 528 # Check generated diff contents
529 529 response = response.follow()
530 530 assert 'content_of_ancestor' not in response.body
531 531 assert 'content_of_ancestor-child' not in response.body
532 532 assert 'content_of_change' in response.body
533 533
534 534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
535 535 # Clear any previous calls to rcextensions
536 536 rhodecode.EXTENSIONS.calls.clear()
537 537
538 538 pull_request = pr_util.create_pull_request(
539 539 approved=True, mergeable=True)
540 540 pull_request_id = pull_request.pull_request_id
541 541 repo_name = pull_request.target_repo.scm_instance().name,
542 542
543 543 response = self.app.post(
544 544 route_path('pullrequest_merge',
545 545 repo_name=str(repo_name[0]),
546 546 pull_request_id=pull_request_id),
547 547 params={'csrf_token': csrf_token}).follow()
548 548
549 549 pull_request = PullRequest.get(pull_request_id)
550 550
551 551 assert response.status_int == 200
552 552 assert pull_request.is_closed()
553 553 assert_pull_request_status(
554 554 pull_request, ChangesetStatus.STATUS_APPROVED)
555 555
556 556 # Check the relevant log entries were added
557 557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
558 558 actions = [log.action for log in user_logs]
559 559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
560 560 expected_actions = [
561 561 u'repo.pull_request.close',
562 562 u'repo.pull_request.merge',
563 563 u'repo.pull_request.comment.create'
564 564 ]
565 565 assert actions == expected_actions
566 566
567 567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
568 568 actions = [log for log in user_logs]
569 569 assert actions[-1].action == 'user.push'
570 570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
571 571
572 572 # Check post_push rcextension was really executed
573 573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
574 574 assert len(push_calls) == 1
575 575 unused_last_call_args, last_call_kwargs = push_calls[0]
576 576 assert last_call_kwargs['action'] == 'push'
577 577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
578 578
579 579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
580 580 pull_request = pr_util.create_pull_request(mergeable=False)
581 581 pull_request_id = pull_request.pull_request_id
582 582 pull_request = PullRequest.get(pull_request_id)
583 583
584 584 response = self.app.post(
585 585 route_path('pullrequest_merge',
586 586 repo_name=pull_request.target_repo.scm_instance().name,
587 587 pull_request_id=pull_request.pull_request_id),
588 588 params={'csrf_token': csrf_token}).follow()
589 589
590 590 assert response.status_int == 200
591 591 response.mustcontain(
592 592 'Merge is not currently possible because of below failed checks.')
593 593 response.mustcontain('Server-side pull request merging is disabled.')
594 594
595 595 @pytest.mark.skip_backends('svn')
596 596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
597 597 pull_request = pr_util.create_pull_request(mergeable=True)
598 598 pull_request_id = pull_request.pull_request_id
599 599 repo_name = pull_request.target_repo.scm_instance().name
600 600
601 601 response = self.app.post(
602 602 route_path('pullrequest_merge',
603 603 repo_name=repo_name,
604 604 pull_request_id=pull_request_id),
605 605 params={'csrf_token': csrf_token}).follow()
606 606
607 607 assert response.status_int == 200
608 608
609 609 response.mustcontain(
610 610 'Merge is not currently possible because of below failed checks.')
611 611 response.mustcontain('Pull request reviewer approval is pending.')
612 612
613 613 def test_merge_pull_request_renders_failure_reason(
614 614 self, user_regular, csrf_token, pr_util):
615 615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
616 616 pull_request_id = pull_request.pull_request_id
617 617 repo_name = pull_request.target_repo.scm_instance().name
618 618
619 619 model_patcher = mock.patch.multiple(
620 620 PullRequestModel,
621 merge=mock.Mock(return_value=MergeResponse(
621 merge_repo=mock.Mock(return_value=MergeResponse(
622 622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
623 623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
624 624
625 625 with model_patcher:
626 626 response = self.app.post(
627 627 route_path('pullrequest_merge',
628 628 repo_name=repo_name,
629 629 pull_request_id=pull_request_id),
630 630 params={'csrf_token': csrf_token}, status=302)
631 631
632 632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
633 633 MergeFailureReason.PUSH_FAILED])
634 634
635 635 def test_update_source_revision(self, backend, csrf_token):
636 636 commits = [
637 637 {'message': 'ancestor'},
638 638 {'message': 'change'},
639 639 {'message': 'change-2'},
640 640 ]
641 641 commit_ids = backend.create_master_repo(commits)
642 642 target = backend.create_repo(heads=['ancestor'])
643 643 source = backend.create_repo(heads=['change'])
644 644
645 645 # create pr from a in source to A in target
646 646 pull_request = PullRequest()
647 647 pull_request.source_repo = source
648 648 # TODO: johbo: Make sure that we write the source ref this way!
649 649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
650 650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
651 651 pull_request.target_repo = target
652 652
653 653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
654 654 branch=backend.default_branch_name,
655 655 commit_id=commit_ids['ancestor'])
656 656 pull_request.revisions = [commit_ids['change']]
657 657 pull_request.title = u"Test"
658 658 pull_request.description = u"Description"
659 659 pull_request.author = UserModel().get_by_username(
660 660 TEST_USER_ADMIN_LOGIN)
661 661 Session().add(pull_request)
662 662 Session().commit()
663 663 pull_request_id = pull_request.pull_request_id
664 664
665 665 # source has ancestor - change - change-2
666 666 backend.pull_heads(source, heads=['change-2'])
667 667
668 668 # update PR
669 669 self.app.post(
670 670 route_path('pullrequest_update',
671 671 repo_name=target.repo_name,
672 672 pull_request_id=pull_request_id),
673 673 params={'update_commits': 'true',
674 674 'csrf_token': csrf_token})
675 675
676 676 # check that we have now both revisions
677 677 pull_request = PullRequest.get(pull_request_id)
678 678 assert pull_request.revisions == [
679 679 commit_ids['change-2'], commit_ids['change']]
680 680
681 681 # TODO: johbo: this should be a test on its own
682 682 response = self.app.get(route_path(
683 683 'pullrequest_new',
684 684 repo_name=target.repo_name))
685 685 assert response.status_int == 200
686 686 assert 'Pull request updated to' in response.body
687 687 assert 'with 1 added, 0 removed commits.' in response.body
688 688
689 689 def test_update_target_revision(self, backend, csrf_token):
690 690 commits = [
691 691 {'message': 'ancestor'},
692 692 {'message': 'change'},
693 693 {'message': 'ancestor-new', 'parents': ['ancestor']},
694 694 {'message': 'change-rebased'},
695 695 ]
696 696 commit_ids = backend.create_master_repo(commits)
697 697 target = backend.create_repo(heads=['ancestor'])
698 698 source = backend.create_repo(heads=['change'])
699 699
700 700 # create pr from a in source to A in target
701 701 pull_request = PullRequest()
702 702 pull_request.source_repo = source
703 703 # TODO: johbo: Make sure that we write the source ref this way!
704 704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
705 705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
706 706 pull_request.target_repo = target
707 707 # TODO: johbo: Target ref should be branch based, since tip can jump
708 708 # from branch to branch
709 709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
710 710 branch=backend.default_branch_name,
711 711 commit_id=commit_ids['ancestor'])
712 712 pull_request.revisions = [commit_ids['change']]
713 713 pull_request.title = u"Test"
714 714 pull_request.description = u"Description"
715 715 pull_request.author = UserModel().get_by_username(
716 716 TEST_USER_ADMIN_LOGIN)
717 717 Session().add(pull_request)
718 718 Session().commit()
719 719 pull_request_id = pull_request.pull_request_id
720 720
721 721 # target has ancestor - ancestor-new
722 722 # source has ancestor - ancestor-new - change-rebased
723 723 backend.pull_heads(target, heads=['ancestor-new'])
724 724 backend.pull_heads(source, heads=['change-rebased'])
725 725
726 726 # update PR
727 727 self.app.post(
728 728 route_path('pullrequest_update',
729 729 repo_name=target.repo_name,
730 730 pull_request_id=pull_request_id),
731 731 params={'update_commits': 'true',
732 732 'csrf_token': csrf_token},
733 733 status=200)
734 734
735 735 # check that we have now both revisions
736 736 pull_request = PullRequest.get(pull_request_id)
737 737 assert pull_request.revisions == [commit_ids['change-rebased']]
738 738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
739 739 branch=backend.default_branch_name,
740 740 commit_id=commit_ids['ancestor-new'])
741 741
742 742 # TODO: johbo: This should be a test on its own
743 743 response = self.app.get(route_path(
744 744 'pullrequest_new',
745 745 repo_name=target.repo_name))
746 746 assert response.status_int == 200
747 747 assert 'Pull request updated to' in response.body
748 748 assert 'with 1 added, 1 removed commits.' in response.body
749 749
750 750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
751 751 backend = backend_git
752 752 commits = [
753 753 {'message': 'master-commit-1'},
754 754 {'message': 'master-commit-2-change-1'},
755 755 {'message': 'master-commit-3-change-2'},
756 756
757 757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
758 758 {'message': 'feat-commit-2'},
759 759 ]
760 760 commit_ids = backend.create_master_repo(commits)
761 761 target = backend.create_repo(heads=['master-commit-3-change-2'])
762 762 source = backend.create_repo(heads=['feat-commit-2'])
763 763
764 764 # create pr from a in source to A in target
765 765 pull_request = PullRequest()
766 766 pull_request.source_repo = source
767 767 # TODO: johbo: Make sure that we write the source ref this way!
768 768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
769 769 branch=backend.default_branch_name,
770 770 commit_id=commit_ids['master-commit-3-change-2'])
771 771
772 772 pull_request.target_repo = target
773 773 # TODO: johbo: Target ref should be branch based, since tip can jump
774 774 # from branch to branch
775 775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
776 776 branch=backend.default_branch_name,
777 777 commit_id=commit_ids['feat-commit-2'])
778 778
779 779 pull_request.revisions = [
780 780 commit_ids['feat-commit-1'],
781 781 commit_ids['feat-commit-2']
782 782 ]
783 783 pull_request.title = u"Test"
784 784 pull_request.description = u"Description"
785 785 pull_request.author = UserModel().get_by_username(
786 786 TEST_USER_ADMIN_LOGIN)
787 787 Session().add(pull_request)
788 788 Session().commit()
789 789 pull_request_id = pull_request.pull_request_id
790 790
791 791 # PR is created, now we simulate a force-push into target,
792 792 # that drops a 2 last commits
793 793 vcsrepo = target.scm_instance()
794 794 vcsrepo.config.clear_section('hooks')
795 795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
796 796
797 797 # update PR
798 798 self.app.post(
799 799 route_path('pullrequest_update',
800 800 repo_name=target.repo_name,
801 801 pull_request_id=pull_request_id),
802 802 params={'update_commits': 'true',
803 803 'csrf_token': csrf_token},
804 804 status=200)
805 805
806 806 response = self.app.get(route_path(
807 807 'pullrequest_new',
808 808 repo_name=target.repo_name))
809 809 assert response.status_int == 200
810 810 response.mustcontain('Pull request updated to')
811 811 response.mustcontain('with 0 added, 0 removed commits.')
812 812
813 813 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 814 commits = [
815 815 {'message': 'ancestor'},
816 816 {'message': 'change'},
817 817 {'message': 'change-2'},
818 818 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 819 {'message': 'change-rebased'},
820 820 ]
821 821 commit_ids = backend.create_master_repo(commits)
822 822 target = backend.create_repo(heads=['ancestor'])
823 823 source = backend.create_repo(heads=['change'])
824 824
825 825 # create pr from a in source to A in target
826 826 pull_request = PullRequest()
827 827 pull_request.source_repo = source
828 828 # TODO: johbo: Make sure that we write the source ref this way!
829 829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 830 branch=backend.default_branch_name,
831 831 commit_id=commit_ids['change'])
832 832 pull_request.target_repo = target
833 833 # TODO: johbo: Target ref should be branch based, since tip can jump
834 834 # from branch to branch
835 835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 836 branch=backend.default_branch_name,
837 837 commit_id=commit_ids['ancestor'])
838 838 pull_request.revisions = [commit_ids['change']]
839 839 pull_request.title = u"Test"
840 840 pull_request.description = u"Description"
841 841 pull_request.author = UserModel().get_by_username(
842 842 TEST_USER_ADMIN_LOGIN)
843 843 Session().add(pull_request)
844 844 Session().commit()
845 845 pull_request_id = pull_request.pull_request_id
846 846
847 847 # target has ancestor - ancestor-new
848 848 # source has ancestor - ancestor-new - change-rebased
849 849 backend.pull_heads(target, heads=['ancestor-new'])
850 850 backend.pull_heads(source, heads=['change-rebased'])
851 851
852 852 # update PR
853 853 self.app.post(
854 854 route_path('pullrequest_update',
855 855 repo_name=target.repo_name,
856 856 pull_request_id=pull_request_id),
857 857 params={'update_commits': 'true',
858 858 'csrf_token': csrf_token},
859 859 status=200)
860 860
861 861 # Expect the target reference to be updated correctly
862 862 pull_request = PullRequest.get(pull_request_id)
863 863 assert pull_request.revisions == [commit_ids['change-rebased']]
864 864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
865 865 branch=backend.default_branch_name,
866 866 commit_id=commit_ids['ancestor-new'])
867 867 assert pull_request.target_ref == expected_target_ref
868 868
869 869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
870 870 branch_name = 'development'
871 871 commits = [
872 872 {'message': 'initial-commit'},
873 873 {'message': 'old-feature'},
874 874 {'message': 'new-feature', 'branch': branch_name},
875 875 ]
876 876 repo = backend_git.create_repo(commits)
877 877 commit_ids = backend_git.commit_ids
878 878
879 879 pull_request = PullRequest()
880 880 pull_request.source_repo = repo
881 881 pull_request.target_repo = repo
882 882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
883 883 branch=branch_name, commit_id=commit_ids['new-feature'])
884 884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
885 885 branch=backend_git.default_branch_name,
886 886 commit_id=commit_ids['old-feature'])
887 887 pull_request.revisions = [commit_ids['new-feature']]
888 888 pull_request.title = u"Test"
889 889 pull_request.description = u"Description"
890 890 pull_request.author = UserModel().get_by_username(
891 891 TEST_USER_ADMIN_LOGIN)
892 892 Session().add(pull_request)
893 893 Session().commit()
894 894
895 895 vcs = repo.scm_instance()
896 896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
897 897
898 898 response = self.app.get(route_path(
899 899 'pullrequest_show',
900 900 repo_name=repo.repo_name,
901 901 pull_request_id=pull_request.pull_request_id))
902 902
903 903 assert response.status_int == 200
904 904 assert_response = AssertResponse(response)
905 905 assert_response.element_contains(
906 906 '#changeset_compare_view_content .alert strong',
907 907 'Missing commits')
908 908 assert_response.element_contains(
909 909 '#changeset_compare_view_content .alert',
910 910 'This pull request cannot be displayed, because one or more'
911 911 ' commits no longer exist in the source repository.')
912 912
913 913 def test_strip_commits_from_pull_request(
914 914 self, backend, pr_util, csrf_token):
915 915 commits = [
916 916 {'message': 'initial-commit'},
917 917 {'message': 'old-feature'},
918 918 {'message': 'new-feature', 'parents': ['initial-commit']},
919 919 ]
920 920 pull_request = pr_util.create_pull_request(
921 921 commits, target_head='initial-commit', source_head='new-feature',
922 922 revisions=['new-feature'])
923 923
924 924 vcs = pr_util.source_repository.scm_instance()
925 925 if backend.alias == 'git':
926 926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
927 927 else:
928 928 vcs.strip(pr_util.commit_ids['new-feature'])
929 929
930 930 response = self.app.get(route_path(
931 931 'pullrequest_show',
932 932 repo_name=pr_util.target_repository.repo_name,
933 933 pull_request_id=pull_request.pull_request_id))
934 934
935 935 assert response.status_int == 200
936 936 assert_response = AssertResponse(response)
937 937 assert_response.element_contains(
938 938 '#changeset_compare_view_content .alert strong',
939 939 'Missing commits')
940 940 assert_response.element_contains(
941 941 '#changeset_compare_view_content .alert',
942 942 'This pull request cannot be displayed, because one or more'
943 943 ' commits no longer exist in the source repository.')
944 944 assert_response.element_contains(
945 945 '#update_commits',
946 946 'Update commits')
947 947
948 948 def test_strip_commits_and_update(
949 949 self, backend, pr_util, csrf_token):
950 950 commits = [
951 951 {'message': 'initial-commit'},
952 952 {'message': 'old-feature'},
953 953 {'message': 'new-feature', 'parents': ['old-feature']},
954 954 ]
955 955 pull_request = pr_util.create_pull_request(
956 956 commits, target_head='old-feature', source_head='new-feature',
957 957 revisions=['new-feature'], mergeable=True)
958 958
959 959 vcs = pr_util.source_repository.scm_instance()
960 960 if backend.alias == 'git':
961 961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
962 962 else:
963 963 vcs.strip(pr_util.commit_ids['new-feature'])
964 964
965 965 response = self.app.post(
966 966 route_path('pullrequest_update',
967 967 repo_name=pull_request.target_repo.repo_name,
968 968 pull_request_id=pull_request.pull_request_id),
969 969 params={'update_commits': 'true',
970 970 'csrf_token': csrf_token})
971 971
972 972 assert response.status_int == 200
973 973 assert response.body == 'true'
974 974
975 975 # Make sure that after update, it won't raise 500 errors
976 976 response = self.app.get(route_path(
977 977 'pullrequest_show',
978 978 repo_name=pr_util.target_repository.repo_name,
979 979 pull_request_id=pull_request.pull_request_id))
980 980
981 981 assert response.status_int == 200
982 982 assert_response = AssertResponse(response)
983 983 assert_response.element_contains(
984 984 '#changeset_compare_view_content .alert strong',
985 985 'Missing commits')
986 986
987 987 def test_branch_is_a_link(self, pr_util):
988 988 pull_request = pr_util.create_pull_request()
989 989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
990 990 pull_request.target_ref = 'branch:target:abcdef1234567890'
991 991 Session().add(pull_request)
992 992 Session().commit()
993 993
994 994 response = self.app.get(route_path(
995 995 'pullrequest_show',
996 996 repo_name=pull_request.target_repo.scm_instance().name,
997 997 pull_request_id=pull_request.pull_request_id))
998 998 assert response.status_int == 200
999 999 assert_response = AssertResponse(response)
1000 1000
1001 1001 origin = assert_response.get_element('.pr-origininfo .tag')
1002 1002 origin_children = origin.getchildren()
1003 1003 assert len(origin_children) == 1
1004 1004 target = assert_response.get_element('.pr-targetinfo .tag')
1005 1005 target_children = target.getchildren()
1006 1006 assert len(target_children) == 1
1007 1007
1008 1008 expected_origin_link = route_path(
1009 1009 'repo_changelog',
1010 1010 repo_name=pull_request.source_repo.scm_instance().name,
1011 1011 params=dict(branch='origin'))
1012 1012 expected_target_link = route_path(
1013 1013 'repo_changelog',
1014 1014 repo_name=pull_request.target_repo.scm_instance().name,
1015 1015 params=dict(branch='target'))
1016 1016 assert origin_children[0].attrib['href'] == expected_origin_link
1017 1017 assert origin_children[0].text == 'branch: origin'
1018 1018 assert target_children[0].attrib['href'] == expected_target_link
1019 1019 assert target_children[0].text == 'branch: target'
1020 1020
1021 1021 def test_bookmark_is_not_a_link(self, pr_util):
1022 1022 pull_request = pr_util.create_pull_request()
1023 1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1024 1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1025 1025 Session().add(pull_request)
1026 1026 Session().commit()
1027 1027
1028 1028 response = self.app.get(route_path(
1029 1029 'pullrequest_show',
1030 1030 repo_name=pull_request.target_repo.scm_instance().name,
1031 1031 pull_request_id=pull_request.pull_request_id))
1032 1032 assert response.status_int == 200
1033 1033 assert_response = AssertResponse(response)
1034 1034
1035 1035 origin = assert_response.get_element('.pr-origininfo .tag')
1036 1036 assert origin.text.strip() == 'bookmark: origin'
1037 1037 assert origin.getchildren() == []
1038 1038
1039 1039 target = assert_response.get_element('.pr-targetinfo .tag')
1040 1040 assert target.text.strip() == 'bookmark: target'
1041 1041 assert target.getchildren() == []
1042 1042
1043 1043 def test_tag_is_not_a_link(self, pr_util):
1044 1044 pull_request = pr_util.create_pull_request()
1045 1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1046 1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1047 1047 Session().add(pull_request)
1048 1048 Session().commit()
1049 1049
1050 1050 response = self.app.get(route_path(
1051 1051 'pullrequest_show',
1052 1052 repo_name=pull_request.target_repo.scm_instance().name,
1053 1053 pull_request_id=pull_request.pull_request_id))
1054 1054 assert response.status_int == 200
1055 1055 assert_response = AssertResponse(response)
1056 1056
1057 1057 origin = assert_response.get_element('.pr-origininfo .tag')
1058 1058 assert origin.text.strip() == 'tag: origin'
1059 1059 assert origin.getchildren() == []
1060 1060
1061 1061 target = assert_response.get_element('.pr-targetinfo .tag')
1062 1062 assert target.text.strip() == 'tag: target'
1063 1063 assert target.getchildren() == []
1064 1064
1065 1065 @pytest.mark.parametrize('mergeable', [True, False])
1066 1066 def test_shadow_repository_link(
1067 1067 self, mergeable, pr_util, http_host_only_stub):
1068 1068 """
1069 1069 Check that the pull request summary page displays a link to the shadow
1070 1070 repository if the pull request is mergeable. If it is not mergeable
1071 1071 the link should not be displayed.
1072 1072 """
1073 1073 pull_request = pr_util.create_pull_request(
1074 1074 mergeable=mergeable, enable_notifications=False)
1075 1075 target_repo = pull_request.target_repo.scm_instance()
1076 1076 pr_id = pull_request.pull_request_id
1077 1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1078 1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1079 1079
1080 1080 response = self.app.get(route_path(
1081 1081 'pullrequest_show',
1082 1082 repo_name=target_repo.name,
1083 1083 pull_request_id=pr_id))
1084 1084
1085 1085 assertr = AssertResponse(response)
1086 1086 if mergeable:
1087 1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1088 1088 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1089 1089 else:
1090 1090 assertr.no_element_exists('.pr-mergeinfo')
1091 1091
1092 1092
1093 1093 @pytest.mark.usefixtures('app')
1094 1094 @pytest.mark.backends("git", "hg")
1095 1095 class TestPullrequestsControllerDelete(object):
1096 1096 def test_pull_request_delete_button_permissions_admin(
1097 1097 self, autologin_user, user_admin, pr_util):
1098 1098 pull_request = pr_util.create_pull_request(
1099 1099 author=user_admin.username, enable_notifications=False)
1100 1100
1101 1101 response = self.app.get(route_path(
1102 1102 'pullrequest_show',
1103 1103 repo_name=pull_request.target_repo.scm_instance().name,
1104 1104 pull_request_id=pull_request.pull_request_id))
1105 1105
1106 1106 response.mustcontain('id="delete_pullrequest"')
1107 1107 response.mustcontain('Confirm to delete this pull request')
1108 1108
1109 1109 def test_pull_request_delete_button_permissions_owner(
1110 1110 self, autologin_regular_user, user_regular, pr_util):
1111 1111 pull_request = pr_util.create_pull_request(
1112 1112 author=user_regular.username, enable_notifications=False)
1113 1113
1114 1114 response = self.app.get(route_path(
1115 1115 'pullrequest_show',
1116 1116 repo_name=pull_request.target_repo.scm_instance().name,
1117 1117 pull_request_id=pull_request.pull_request_id))
1118 1118
1119 1119 response.mustcontain('id="delete_pullrequest"')
1120 1120 response.mustcontain('Confirm to delete this pull request')
1121 1121
1122 1122 def test_pull_request_delete_button_permissions_forbidden(
1123 1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1124 1124 pull_request = pr_util.create_pull_request(
1125 1125 author=user_admin.username, enable_notifications=False)
1126 1126
1127 1127 response = self.app.get(route_path(
1128 1128 'pullrequest_show',
1129 1129 repo_name=pull_request.target_repo.scm_instance().name,
1130 1130 pull_request_id=pull_request.pull_request_id))
1131 1131 response.mustcontain(no=['id="delete_pullrequest"'])
1132 1132 response.mustcontain(no=['Confirm to delete this pull request'])
1133 1133
1134 1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1135 1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1136 1136 user_util):
1137 1137
1138 1138 pull_request = pr_util.create_pull_request(
1139 1139 author=user_admin.username, enable_notifications=False)
1140 1140
1141 1141 user_util.grant_user_permission_to_repo(
1142 1142 pull_request.target_repo, user_regular,
1143 1143 'repository.write')
1144 1144
1145 1145 response = self.app.get(route_path(
1146 1146 'pullrequest_show',
1147 1147 repo_name=pull_request.target_repo.scm_instance().name,
1148 1148 pull_request_id=pull_request.pull_request_id))
1149 1149
1150 1150 response.mustcontain('id="open_edit_pullrequest"')
1151 1151 response.mustcontain('id="delete_pullrequest"')
1152 1152 response.mustcontain(no=['Confirm to delete this pull request'])
1153 1153
1154 1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1155 1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1156 1156
1157 1157 pull_request = pr_util.create_pull_request(
1158 1158 author=user_admin.username, enable_notifications=False)
1159 1159
1160 1160 self.app.post(
1161 1161 route_path(
1162 1162 'pullrequest_comment_delete',
1163 1163 repo_name=pull_request.target_repo.scm_instance().name,
1164 1164 pull_request_id=pull_request.pull_request_id,
1165 1165 comment_id=1024404),
1166 1166 extra_environ=xhr_header,
1167 1167 params={'csrf_token': csrf_token},
1168 1168 status=404
1169 1169 )
1170 1170
1171 1171 def test_delete_comment(
1172 1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1173 1173
1174 1174 pull_request = pr_util.create_pull_request(
1175 1175 author=user_admin.username, enable_notifications=False)
1176 1176 comment = pr_util.create_comment()
1177 1177 comment_id = comment.comment_id
1178 1178
1179 1179 response = self.app.post(
1180 1180 route_path(
1181 1181 'pullrequest_comment_delete',
1182 1182 repo_name=pull_request.target_repo.scm_instance().name,
1183 1183 pull_request_id=pull_request.pull_request_id,
1184 1184 comment_id=comment_id),
1185 1185 extra_environ=xhr_header,
1186 1186 params={'csrf_token': csrf_token},
1187 1187 status=200
1188 1188 )
1189 1189 assert response.body == 'true'
1190 1190
1191 1191
1192 1192 def assert_pull_request_status(pull_request, expected_status):
1193 1193 status = ChangesetStatusModel().calculated_review_status(
1194 1194 pull_request=pull_request)
1195 1195 assert status == expected_status
1196 1196
1197 1197
1198 1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1199 1199 @pytest.mark.usefixtures("autologin_user")
1200 1200 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1201 1201 response = app.get(
1202 1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1203 1203
@@ -1,1306 +1,1307 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode import events
33 33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34 34
35 35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 36 from rhodecode.lib.base import vcs_operation_context
37 37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 45 RepositoryRequirementError, EmptyRepositoryError)
46 46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 47 from rhodecode.model.comment import CommentsModel
48 48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 49 ChangesetComment, ChangesetStatus, Repository)
50 50 from rhodecode.model.forms import PullRequestForm
51 51 from rhodecode.model.meta import Session
52 52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 53 from rhodecode.model.scm import ScmModel
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59 59
60 60 def load_default_context(self):
61 61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64 64
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.target_repo.repo_name),
112 112 'name_raw': pr.pull_request_id,
113 113 'status': _render('pullrequest_status',
114 114 pr.calculated_review_status()),
115 115 'title': _render(
116 116 'pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'author': _render('pullrequest_author',
125 125 pr.author.full_contact, ),
126 126 'author_raw': pr.author.full_name,
127 127 'comments': _render('pullrequest_comments', len(comments)),
128 128 'comments_raw': len(comments),
129 129 'closed': pr.is_closed(),
130 130 })
131 131
132 132 data = ({
133 133 'draw': draw,
134 134 'data': data,
135 135 'recordsTotal': pull_requests_total_count,
136 136 'recordsFiltered': pull_requests_total_count,
137 137 })
138 138 return data
139 139
140 140 @LoginRequired()
141 141 @HasRepoPermissionAnyDecorator(
142 142 'repository.read', 'repository.write', 'repository.admin')
143 143 @view_config(
144 144 route_name='pullrequest_show_all', request_method='GET',
145 145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 146 def pull_request_list(self):
147 147 c = self.load_default_context()
148 148
149 149 req_get = self.request.GET
150 150 c.source = str2bool(req_get.get('source'))
151 151 c.closed = str2bool(req_get.get('closed'))
152 152 c.my = str2bool(req_get.get('my'))
153 153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 155
156 156 c.active = 'open'
157 157 if c.my:
158 158 c.active = 'my'
159 159 if c.closed:
160 160 c.active = 'closed'
161 161 if c.awaiting_review and not c.source:
162 162 c.active = 'awaiting'
163 163 if c.source and not c.awaiting_review:
164 164 c.active = 'source'
165 165 if c.awaiting_my_review:
166 166 c.active = 'awaiting_my'
167 167
168 168 return self._get_template_context(c)
169 169
170 170 @LoginRequired()
171 171 @HasRepoPermissionAnyDecorator(
172 172 'repository.read', 'repository.write', 'repository.admin')
173 173 @view_config(
174 174 route_name='pullrequest_show_all_data', request_method='GET',
175 175 renderer='json_ext', xhr=True)
176 176 def pull_request_list_data(self):
177 177 self.load_default_context()
178 178
179 179 # additional filters
180 180 req_get = self.request.GET
181 181 source = str2bool(req_get.get('source'))
182 182 closed = str2bool(req_get.get('closed'))
183 183 my = str2bool(req_get.get('my'))
184 184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 186
187 187 filter_type = 'awaiting_review' if awaiting_review \
188 188 else 'awaiting_my_review' if awaiting_my_review \
189 189 else None
190 190
191 191 opened_by = None
192 192 if my:
193 193 opened_by = [self._rhodecode_user.user_id]
194 194
195 195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 196 if closed:
197 197 statuses = [PullRequest.STATUS_CLOSED]
198 198
199 199 data = self._get_pull_requests_list(
200 200 repo_name=self.db_repo_name, source=source,
201 201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 202
203 203 return data
204 204
205 205 def _is_diff_cache_enabled(self, target_repo):
206 206 caching_enabled = self._get_general_setting(
207 207 target_repo, 'rhodecode_diff_cache')
208 208 log.debug('Diff caching enabled: %s', caching_enabled)
209 209 return caching_enabled
210 210
211 211 def _get_diffset(self, source_repo_name, source_repo,
212 212 source_ref_id, target_ref_id,
213 213 target_commit, source_commit, diff_limit, file_limit,
214 214 fulldiff):
215 215
216 216 vcs_diff = PullRequestModel().get_diff(
217 217 source_repo, source_ref_id, target_ref_id)
218 218
219 219 diff_processor = diffs.DiffProcessor(
220 220 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 221 file_limit=file_limit, show_full_diff=fulldiff)
222 222
223 223 _parsed = diff_processor.prepare()
224 224
225 225 diffset = codeblocks.DiffSet(
226 226 repo_name=self.db_repo_name,
227 227 source_repo_name=source_repo_name,
228 228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 230 )
231 231 diffset = self.path_filter.render_patchset_filtered(
232 232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233 233
234 234 return diffset
235 235
236 236 @LoginRequired()
237 237 @HasRepoPermissionAnyDecorator(
238 238 'repository.read', 'repository.write', 'repository.admin')
239 239 @view_config(
240 240 route_name='pullrequest_show', request_method='GET',
241 241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
242 242 def pull_request_show(self):
243 243 pull_request_id = self.request.matchdict['pull_request_id']
244 244
245 245 c = self.load_default_context()
246 246
247 247 version = self.request.GET.get('version')
248 248 from_version = self.request.GET.get('from_version') or version
249 249 merge_checks = self.request.GET.get('merge_checks')
250 250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
251 251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
252 252
253 253 (pull_request_latest,
254 254 pull_request_at_ver,
255 255 pull_request_display_obj,
256 256 at_version) = PullRequestModel().get_pr_version(
257 257 pull_request_id, version=version)
258 258 pr_closed = pull_request_latest.is_closed()
259 259
260 260 if pr_closed and (version or from_version):
261 261 # not allow to browse versions
262 262 raise HTTPFound(h.route_path(
263 263 'pullrequest_show', repo_name=self.db_repo_name,
264 264 pull_request_id=pull_request_id))
265 265
266 266 versions = pull_request_display_obj.versions()
267 267
268 268 c.at_version = at_version
269 269 c.at_version_num = (at_version
270 270 if at_version and at_version != 'latest'
271 271 else None)
272 272 c.at_version_pos = ChangesetComment.get_index_from_version(
273 273 c.at_version_num, versions)
274 274
275 275 (prev_pull_request_latest,
276 276 prev_pull_request_at_ver,
277 277 prev_pull_request_display_obj,
278 278 prev_at_version) = PullRequestModel().get_pr_version(
279 279 pull_request_id, version=from_version)
280 280
281 281 c.from_version = prev_at_version
282 282 c.from_version_num = (prev_at_version
283 283 if prev_at_version and prev_at_version != 'latest'
284 284 else None)
285 285 c.from_version_pos = ChangesetComment.get_index_from_version(
286 286 c.from_version_num, versions)
287 287
288 288 # define if we're in COMPARE mode or VIEW at version mode
289 289 compare = at_version != prev_at_version
290 290
291 291 # pull_requests repo_name we opened it against
292 292 # ie. target_repo must match
293 293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
294 294 raise HTTPNotFound()
295 295
296 296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
297 297 pull_request_at_ver)
298 298
299 299 c.pull_request = pull_request_display_obj
300 300 c.pull_request_latest = pull_request_latest
301 301
302 302 if compare or (at_version and not at_version == 'latest'):
303 303 c.allowed_to_change_status = False
304 304 c.allowed_to_update = False
305 305 c.allowed_to_merge = False
306 306 c.allowed_to_delete = False
307 307 c.allowed_to_comment = False
308 308 c.allowed_to_close = False
309 309 else:
310 310 can_change_status = PullRequestModel().check_user_change_status(
311 311 pull_request_at_ver, self._rhodecode_user)
312 312 c.allowed_to_change_status = can_change_status and not pr_closed
313 313
314 314 c.allowed_to_update = PullRequestModel().check_user_update(
315 315 pull_request_latest, self._rhodecode_user) and not pr_closed
316 316 c.allowed_to_merge = PullRequestModel().check_user_merge(
317 317 pull_request_latest, self._rhodecode_user) and not pr_closed
318 318 c.allowed_to_delete = PullRequestModel().check_user_delete(
319 319 pull_request_latest, self._rhodecode_user) and not pr_closed
320 320 c.allowed_to_comment = not pr_closed
321 321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
322 322
323 323 c.forbid_adding_reviewers = False
324 324 c.forbid_author_to_review = False
325 325 c.forbid_commit_author_to_review = False
326 326
327 327 if pull_request_latest.reviewer_data and \
328 328 'rules' in pull_request_latest.reviewer_data:
329 329 rules = pull_request_latest.reviewer_data['rules'] or {}
330 330 try:
331 331 c.forbid_adding_reviewers = rules.get(
332 332 'forbid_adding_reviewers')
333 333 c.forbid_author_to_review = rules.get(
334 334 'forbid_author_to_review')
335 335 c.forbid_commit_author_to_review = rules.get(
336 336 'forbid_commit_author_to_review')
337 337 except Exception:
338 338 pass
339 339
340 340 # check merge capabilities
341 341 _merge_check = MergeCheck.validate(
342 342 pull_request_latest, user=self._rhodecode_user,
343 translator=self.request.translate, force_shadow_repo_refresh=force_refresh)
343 translator=self.request.translate,
344 force_shadow_repo_refresh=force_refresh)
344 345 c.pr_merge_errors = _merge_check.error_details
345 346 c.pr_merge_possible = not _merge_check.failed
346 347 c.pr_merge_message = _merge_check.merge_msg
347 348
348 349 c.pr_merge_info = MergeCheck.get_merge_conditions(
349 350 pull_request_latest, translator=self.request.translate)
350 351
351 352 c.pull_request_review_status = _merge_check.review_status
352 353 if merge_checks:
353 354 self.request.override_renderer = \
354 355 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
355 356 return self._get_template_context(c)
356 357
357 358 comments_model = CommentsModel()
358 359
359 360 # reviewers and statuses
360 361 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
361 362 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
362 363
363 364 # GENERAL COMMENTS with versions #
364 365 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
365 366 q = q.order_by(ChangesetComment.comment_id.asc())
366 367 general_comments = q
367 368
368 369 # pick comments we want to render at current version
369 370 c.comment_versions = comments_model.aggregate_comments(
370 371 general_comments, versions, c.at_version_num)
371 372 c.comments = c.comment_versions[c.at_version_num]['until']
372 373
373 374 # INLINE COMMENTS with versions #
374 375 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
375 376 q = q.order_by(ChangesetComment.comment_id.asc())
376 377 inline_comments = q
377 378
378 379 c.inline_versions = comments_model.aggregate_comments(
379 380 inline_comments, versions, c.at_version_num, inline=True)
380 381
381 382 # inject latest version
382 383 latest_ver = PullRequest.get_pr_display_object(
383 384 pull_request_latest, pull_request_latest)
384 385
385 386 c.versions = versions + [latest_ver]
386 387
387 388 # if we use version, then do not show later comments
388 389 # than current version
389 390 display_inline_comments = collections.defaultdict(
390 391 lambda: collections.defaultdict(list))
391 392 for co in inline_comments:
392 393 if c.at_version_num:
393 394 # pick comments that are at least UPTO given version, so we
394 395 # don't render comments for higher version
395 396 should_render = co.pull_request_version_id and \
396 397 co.pull_request_version_id <= c.at_version_num
397 398 else:
398 399 # showing all, for 'latest'
399 400 should_render = True
400 401
401 402 if should_render:
402 403 display_inline_comments[co.f_path][co.line_no].append(co)
403 404
404 405 # load diff data into template context, if we use compare mode then
405 406 # diff is calculated based on changes between versions of PR
406 407
407 408 source_repo = pull_request_at_ver.source_repo
408 409 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
409 410
410 411 target_repo = pull_request_at_ver.target_repo
411 412 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
412 413
413 414 if compare:
414 415 # in compare switch the diff base to latest commit from prev version
415 416 target_ref_id = prev_pull_request_display_obj.revisions[0]
416 417
417 418 # despite opening commits for bookmarks/branches/tags, we always
418 419 # convert this to rev to prevent changes after bookmark or branch change
419 420 c.source_ref_type = 'rev'
420 421 c.source_ref = source_ref_id
421 422
422 423 c.target_ref_type = 'rev'
423 424 c.target_ref = target_ref_id
424 425
425 426 c.source_repo = source_repo
426 427 c.target_repo = target_repo
427 428
428 429 c.commit_ranges = []
429 430 source_commit = EmptyCommit()
430 431 target_commit = EmptyCommit()
431 432 c.missing_requirements = False
432 433
433 434 source_scm = source_repo.scm_instance()
434 435 target_scm = target_repo.scm_instance()
435 436
436 437 shadow_scm = None
437 438 try:
438 439 shadow_scm = pull_request_latest.get_shadow_repo()
439 440 except Exception:
440 441 log.debug('Failed to get shadow repo', exc_info=True)
441 442 # try first the existing source_repo, and then shadow
442 443 # repo if we can obtain one
443 444 commits_source_repo = source_scm or shadow_scm
444 445
445 446 c.commits_source_repo = commits_source_repo
446 447 c.ancestor = None # set it to None, to hide it from PR view
447 448
448 449 # empty version means latest, so we keep this to prevent
449 450 # double caching
450 451 version_normalized = version or 'latest'
451 452 from_version_normalized = from_version or 'latest'
452 453
453 454 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
454 455 target_repo)
455 456 cache_file_path = diff_cache_exist(
456 457 cache_path, 'pull_request', pull_request_id, version_normalized,
457 458 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
458 459
459 460 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
460 461 force_recache = str2bool(self.request.GET.get('force_recache'))
461 462
462 463 cached_diff = None
463 464 if caching_enabled:
464 465 cached_diff = load_cached_diff(cache_file_path)
465 466
466 467 has_proper_commit_cache = (
467 468 cached_diff and cached_diff.get('commits')
468 469 and len(cached_diff.get('commits', [])) == 5
469 470 and cached_diff.get('commits')[0]
470 471 and cached_diff.get('commits')[3])
471 472 if not force_recache and has_proper_commit_cache:
472 473 diff_commit_cache = \
473 474 (ancestor_commit, commit_cache, missing_requirements,
474 475 source_commit, target_commit) = cached_diff['commits']
475 476 else:
476 477 diff_commit_cache = \
477 478 (ancestor_commit, commit_cache, missing_requirements,
478 479 source_commit, target_commit) = self.get_commits(
479 480 commits_source_repo,
480 481 pull_request_at_ver,
481 482 source_commit,
482 483 source_ref_id,
483 484 source_scm,
484 485 target_commit,
485 486 target_ref_id,
486 487 target_scm)
487 488
488 489 # register our commit range
489 490 for comm in commit_cache.values():
490 491 c.commit_ranges.append(comm)
491 492
492 493 c.missing_requirements = missing_requirements
493 494 c.ancestor_commit = ancestor_commit
494 495 c.statuses = source_repo.statuses(
495 496 [x.raw_id for x in c.commit_ranges])
496 497
497 498 # auto collapse if we have more than limit
498 499 collapse_limit = diffs.DiffProcessor._collapse_commits_over
499 500 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
500 501 c.compare_mode = compare
501 502
502 503 # diff_limit is the old behavior, will cut off the whole diff
503 504 # if the limit is applied otherwise will just hide the
504 505 # big files from the front-end
505 506 diff_limit = c.visual.cut_off_limit_diff
506 507 file_limit = c.visual.cut_off_limit_file
507 508
508 509 c.missing_commits = False
509 510 if (c.missing_requirements
510 511 or isinstance(source_commit, EmptyCommit)
511 512 or source_commit == target_commit):
512 513
513 514 c.missing_commits = True
514 515 else:
515 516 c.inline_comments = display_inline_comments
516 517
517 518 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
518 519 if not force_recache and has_proper_diff_cache:
519 520 c.diffset = cached_diff['diff']
520 521 (ancestor_commit, commit_cache, missing_requirements,
521 522 source_commit, target_commit) = cached_diff['commits']
522 523 else:
523 524 c.diffset = self._get_diffset(
524 525 c.source_repo.repo_name, commits_source_repo,
525 526 source_ref_id, target_ref_id,
526 527 target_commit, source_commit,
527 528 diff_limit, file_limit, c.fulldiff)
528 529
529 530 # save cached diff
530 531 if caching_enabled:
531 532 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
532 533
533 534 c.limited_diff = c.diffset.limited_diff
534 535
535 536 # calculate removed files that are bound to comments
536 537 comment_deleted_files = [
537 538 fname for fname in display_inline_comments
538 539 if fname not in c.diffset.file_stats]
539 540
540 541 c.deleted_files_comments = collections.defaultdict(dict)
541 542 for fname, per_line_comments in display_inline_comments.items():
542 543 if fname in comment_deleted_files:
543 544 c.deleted_files_comments[fname]['stats'] = 0
544 545 c.deleted_files_comments[fname]['comments'] = list()
545 546 for lno, comments in per_line_comments.items():
546 547 c.deleted_files_comments[fname]['comments'].extend(
547 548 comments)
548 549
549 550 # this is a hack to properly display links, when creating PR, the
550 551 # compare view and others uses different notation, and
551 552 # compare_commits.mako renders links based on the target_repo.
552 553 # We need to swap that here to generate it properly on the html side
553 554 c.target_repo = c.source_repo
554 555
555 556 c.commit_statuses = ChangesetStatus.STATUSES
556 557
557 558 c.show_version_changes = not pr_closed
558 559 if c.show_version_changes:
559 560 cur_obj = pull_request_at_ver
560 561 prev_obj = prev_pull_request_at_ver
561 562
562 563 old_commit_ids = prev_obj.revisions
563 564 new_commit_ids = cur_obj.revisions
564 565 commit_changes = PullRequestModel()._calculate_commit_id_changes(
565 566 old_commit_ids, new_commit_ids)
566 567 c.commit_changes_summary = commit_changes
567 568
568 569 # calculate the diff for commits between versions
569 570 c.commit_changes = []
570 571 mark = lambda cs, fw: list(
571 572 h.itertools.izip_longest([], cs, fillvalue=fw))
572 573 for c_type, raw_id in mark(commit_changes.added, 'a') \
573 574 + mark(commit_changes.removed, 'r') \
574 575 + mark(commit_changes.common, 'c'):
575 576
576 577 if raw_id in commit_cache:
577 578 commit = commit_cache[raw_id]
578 579 else:
579 580 try:
580 581 commit = commits_source_repo.get_commit(raw_id)
581 582 except CommitDoesNotExistError:
582 583 # in case we fail extracting still use "dummy" commit
583 584 # for display in commit diff
584 585 commit = h.AttributeDict(
585 586 {'raw_id': raw_id,
586 587 'message': 'EMPTY or MISSING COMMIT'})
587 588 c.commit_changes.append([c_type, commit])
588 589
589 590 # current user review statuses for each version
590 591 c.review_versions = {}
591 592 if self._rhodecode_user.user_id in allowed_reviewers:
592 593 for co in general_comments:
593 594 if co.author.user_id == self._rhodecode_user.user_id:
594 595 status = co.status_change
595 596 if status:
596 597 _ver_pr = status[0].comment.pull_request_version_id
597 598 c.review_versions[_ver_pr] = status[0]
598 599
599 600 return self._get_template_context(c)
600 601
    def get_commits(
            self, commits_source_repo, pull_request_at_ver, source_commit,
            source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
        """
        Load the commits of a pull request version plus its source/target
        boundary commits, and resolve their common ancestor.

        Returns a 5-tuple
        ``(ancestor_commit, commit_cache, missing_requirements,
        source_commit, target_commit)`` where ``commit_cache`` is an ordered
        mapping of raw commit id -> commit object in revision order. On
        lookup failures the passed-in ``source_commit``/``target_commit``
        (typically EmptyCommit placeholders) are returned unchanged, and any
        commits loaded before the failure are kept in the cache.
        """
        commit_cache = collections.OrderedDict()
        missing_requirements = False
        try:
            # pre-load the attributes the PR view renders, in one round-trip
            pre_load = ["author", "branch", "date", "message"]
            show_revs = pull_request_at_ver.revisions
            for rev in show_revs:
                comm = commits_source_repo.get_commit(
                    commit_id=rev, pre_load=pre_load)
                commit_cache[comm.raw_id] = comm

            # Order here matters, we first need to get target, and then
            # the source
            target_commit = commits_source_repo.get_commit(
                commit_id=safe_str(target_ref_id))

            source_commit = commits_source_repo.get_commit(
                commit_id=safe_str(source_ref_id))
        except CommitDoesNotExistError:
            # best-effort: keep whatever commits were already cached
            log.warning(
                'Failed to get commit from `{}` repo'.format(
                    commits_source_repo), exc_info=True)
        except RepositoryRequirementError:
            # repo needs a capability we don't have; flag it for the UI
            log.warning(
                'Failed to get all required data from repo', exc_info=True)
            missing_requirements = True
        ancestor_commit = None
        try:
            ancestor_id = source_scm.get_common_ancestor(
                source_commit.raw_id, target_commit.raw_id, target_scm)
            ancestor_commit = source_scm.get_commit(ancestor_id)
        except Exception:
            # ancestor is informational only; hide it on any failure
            ancestor_commit = None
        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
637 638
638 639 def assure_not_empty_repo(self):
639 640 _ = self.request.translate
640 641
641 642 try:
642 643 self.db_repo.scm_instance().get_commit()
643 644 except EmptyRepositoryError:
644 645 h.flash(h.literal(_('There are no commits yet')),
645 646 category='warning')
646 647 raise HTTPFound(
647 648 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
648 649
649 650 @LoginRequired()
650 651 @NotAnonymous()
651 652 @HasRepoPermissionAnyDecorator(
652 653 'repository.read', 'repository.write', 'repository.admin')
653 654 @view_config(
654 655 route_name='pullrequest_new', request_method='GET',
655 656 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
656 657 def pull_request_new(self):
657 658 _ = self.request.translate
658 659 c = self.load_default_context()
659 660
660 661 self.assure_not_empty_repo()
661 662 source_repo = self.db_repo
662 663
663 664 commit_id = self.request.GET.get('commit')
664 665 branch_ref = self.request.GET.get('branch')
665 666 bookmark_ref = self.request.GET.get('bookmark')
666 667
667 668 try:
668 669 source_repo_data = PullRequestModel().generate_repo_data(
669 670 source_repo, commit_id=commit_id,
670 671 branch=branch_ref, bookmark=bookmark_ref,
671 672 translator=self.request.translate)
672 673 except CommitDoesNotExistError as e:
673 674 log.exception(e)
674 675 h.flash(_('Commit does not exist'), 'error')
675 676 raise HTTPFound(
676 677 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
677 678
678 679 default_target_repo = source_repo
679 680
680 681 if source_repo.parent:
681 682 parent_vcs_obj = source_repo.parent.scm_instance()
682 683 if parent_vcs_obj and not parent_vcs_obj.is_empty():
683 684 # change default if we have a parent repo
684 685 default_target_repo = source_repo.parent
685 686
686 687 target_repo_data = PullRequestModel().generate_repo_data(
687 688 default_target_repo, translator=self.request.translate)
688 689
689 690 selected_source_ref = source_repo_data['refs']['selected_ref']
690 691 title_source_ref = ''
691 692 if selected_source_ref:
692 693 title_source_ref = selected_source_ref.split(':', 2)[1]
693 694 c.default_title = PullRequestModel().generate_pullrequest_title(
694 695 source=source_repo.repo_name,
695 696 source_ref=title_source_ref,
696 697 target=default_target_repo.repo_name
697 698 )
698 699
699 700 c.default_repo_data = {
700 701 'source_repo_name': source_repo.repo_name,
701 702 'source_refs_json': json.dumps(source_repo_data),
702 703 'target_repo_name': default_target_repo.repo_name,
703 704 'target_refs_json': json.dumps(target_repo_data),
704 705 }
705 706 c.default_source_ref = selected_source_ref
706 707
707 708 return self._get_template_context(c)
708 709
709 710 @LoginRequired()
710 711 @NotAnonymous()
711 712 @HasRepoPermissionAnyDecorator(
712 713 'repository.read', 'repository.write', 'repository.admin')
713 714 @view_config(
714 715 route_name='pullrequest_repo_refs', request_method='GET',
715 716 renderer='json_ext', xhr=True)
716 717 def pull_request_repo_refs(self):
717 718 self.load_default_context()
718 719 target_repo_name = self.request.matchdict['target_repo_name']
719 720 repo = Repository.get_by_repo_name(target_repo_name)
720 721 if not repo:
721 722 raise HTTPNotFound()
722 723
723 724 target_perm = HasRepoPermissionAny(
724 725 'repository.read', 'repository.write', 'repository.admin')(
725 726 target_repo_name)
726 727 if not target_perm:
727 728 raise HTTPNotFound()
728 729
729 730 return PullRequestModel().generate_repo_data(
730 731 repo, translator=self.request.translate)
731 732
732 733 @LoginRequired()
733 734 @NotAnonymous()
734 735 @HasRepoPermissionAnyDecorator(
735 736 'repository.read', 'repository.write', 'repository.admin')
736 737 @view_config(
737 738 route_name='pullrequest_repo_destinations', request_method='GET',
738 739 renderer='json_ext', xhr=True)
739 740 def pull_request_repo_destinations(self):
740 741 _ = self.request.translate
741 742 filter_query = self.request.GET.get('query')
742 743
743 744 query = Repository.query() \
744 745 .order_by(func.length(Repository.repo_name)) \
745 746 .filter(
746 747 or_(Repository.repo_name == self.db_repo.repo_name,
747 748 Repository.fork_id == self.db_repo.repo_id))
748 749
749 750 if filter_query:
750 751 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
751 752 query = query.filter(
752 753 Repository.repo_name.ilike(ilike_expression))
753 754
754 755 add_parent = False
755 756 if self.db_repo.parent:
756 757 if filter_query in self.db_repo.parent.repo_name:
757 758 parent_vcs_obj = self.db_repo.parent.scm_instance()
758 759 if parent_vcs_obj and not parent_vcs_obj.is_empty():
759 760 add_parent = True
760 761
761 762 limit = 20 - 1 if add_parent else 20
762 763 all_repos = query.limit(limit).all()
763 764 if add_parent:
764 765 all_repos += [self.db_repo.parent]
765 766
766 767 repos = []
767 768 for obj in ScmModel().get_repos(all_repos):
768 769 repos.append({
769 770 'id': obj['name'],
770 771 'text': obj['name'],
771 772 'type': 'repo',
772 773 'repo_id': obj['dbrepo']['repo_id'],
773 774 'repo_type': obj['dbrepo']['repo_type'],
774 775 'private': obj['dbrepo']['private'],
775 776
776 777 })
777 778
778 779 data = {
779 780 'more': False,
780 781 'results': [{
781 782 'text': _('Repositories'),
782 783 'children': repos
783 784 }] if repos else []
784 785 }
785 786 return data
786 787
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_create', request_method='POST',
        renderer=None)
    def pull_request_create(self):
        """
        Handle submission of the new pull request form.

        Validates the posted form, re-checks read permissions on both
        source and target repositories, pins the target ref to the common
        ancestor, and creates the pull request. Every failure path flashes
        a message and redirects back to the form via HTTPFound; on success
        it redirects to the new pull request page.
        """
        _ = self.request.translate
        self.assure_not_empty_repo()
        self.load_default_context()

        controls = peppercorn.parse(self.request.POST.items())

        try:
            form = PullRequestForm(
                self.request.translate, self.db_repo.repo_id)()
            _form = form.to_python(controls)
        except formencode.Invalid as errors:
            # pick the most specific validation error for the flash message
            if errors.error_dict.get('revisions'):
                msg = 'Revisions: %s' % errors.error_dict['revisions']
            elif errors.error_dict.get('pullrequest_title'):
                msg = errors.error_dict.get('pullrequest_title')
            else:
                msg = _('Error creating pull request: {}').format(errors)
            log.exception(msg)
            h.flash(msg, 'error')

            # would rather just go back to form ...
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name))

        source_repo = _form['source_repo']
        source_ref = _form['source_ref']
        target_repo = _form['target_repo']
        target_ref = _form['target_ref']
        # form sends revisions newest-first; store them oldest-first
        commit_ids = _form['revisions'][::-1]

        # find the ancestor for this pr
        source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
        target_db_repo = Repository.get_by_repo_name(_form['target_repo'])

        # re-check permissions again here
        # source_repo we must have read permissions

        source_perm = HasRepoPermissionAny(
            'repository.read',
            'repository.write', 'repository.admin')(source_db_repo.repo_name)
        if not source_perm:
            msg = _('Not Enough permissions to source repo `{}`.'.format(
                source_db_repo.repo_name))
            h.flash(msg, category='error')
            # copy the args back to redirect
            org_query = self.request.GET.mixed()
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                             _query=org_query))

        # target repo we must have read permissions, and also later on
        # we want to check branch permissions here
        target_perm = HasRepoPermissionAny(
            'repository.read',
            'repository.write', 'repository.admin')(target_db_repo.repo_name)
        if not target_perm:
            msg = _('Not Enough permissions to target repo `{}`.'.format(
                target_db_repo.repo_name))
            h.flash(msg, category='error')
            # copy the args back to redirect
            org_query = self.request.GET.mixed()
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                             _query=org_query))

        source_scm = source_db_repo.scm_instance()
        target_scm = target_db_repo.scm_instance()

        # ref format is "type:name:commit_id"; resolve the commit part
        source_commit = source_scm.get_commit(source_ref.split(':')[-1])
        target_commit = target_scm.get_commit(target_ref.split(':')[-1])

        ancestor = source_scm.get_common_ancestor(
            source_commit.raw_id, target_commit.raw_id, target_scm)

        # pin the target ref to the common ancestor commit
        target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
        target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))

        pullrequest_title = _form['pullrequest_title']
        title_source_ref = source_ref.split(':', 2)[1]
        if not pullrequest_title:
            # no title given; generate the default "source#ref -> target" one
            pullrequest_title = PullRequestModel().generate_pullrequest_title(
                source=source_repo,
                source_ref=title_source_ref,
                target=target_repo
            )

        description = _form['pullrequest_desc']

        get_default_reviewers_data, validate_default_reviewers = \
            PullRequestModel().get_reviewer_functions()

        # recalculate reviewers logic, to make sure we can validate this
        reviewer_rules = get_default_reviewers_data(
            self._rhodecode_db_user, source_db_repo,
            source_commit, target_db_repo, target_commit)

        given_reviewers = _form['review_members']
        reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)

        try:
            pull_request = PullRequestModel().create(
                self._rhodecode_user.user_id, source_repo, source_ref,
                target_repo, target_ref, commit_ids, reviewers,
                pullrequest_title, description, reviewer_rules,
                auth_user=self._rhodecode_user
            )
            Session().commit()

            h.flash(_('Successfully opened new pull request'),
                    category='success')
        except Exception:
            msg = _('Error occurred during creation of this pull request.')
            log.exception(msg)
            h.flash(msg, category='error')

            # copy the args back to redirect
            org_query = self.request.GET.mixed()
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=self.db_repo_name,
                             _query=org_query))

        raise HTTPFound(
            h.route_path('pullrequest_show', repo_name=target_repo,
                         pull_request_id=pull_request.pull_request_id))
920 921
921 922 @LoginRequired()
922 923 @NotAnonymous()
923 924 @HasRepoPermissionAnyDecorator(
924 925 'repository.read', 'repository.write', 'repository.admin')
925 926 @CSRFRequired()
926 927 @view_config(
927 928 route_name='pullrequest_update', request_method='POST',
928 929 renderer='json_ext')
929 930 def pull_request_update(self):
930 931 pull_request = PullRequest.get_or_404(
931 932 self.request.matchdict['pull_request_id'])
932 933 _ = self.request.translate
933 934
934 935 self.load_default_context()
935 936
936 937 if pull_request.is_closed():
937 938 log.debug('update: forbidden because pull request is closed')
938 939 msg = _(u'Cannot update closed pull requests.')
939 940 h.flash(msg, category='error')
940 941 return True
941 942
942 943 # only owner or admin can update it
943 944 allowed_to_update = PullRequestModel().check_user_update(
944 945 pull_request, self._rhodecode_user)
945 946 if allowed_to_update:
946 947 controls = peppercorn.parse(self.request.POST.items())
947 948
948 949 if 'review_members' in controls:
949 950 self._update_reviewers(
950 951 pull_request, controls['review_members'],
951 952 pull_request.reviewer_data)
952 953 elif str2bool(self.request.POST.get('update_commits', 'false')):
953 954 self._update_commits(pull_request)
954 955 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
955 956 self._edit_pull_request(pull_request)
956 957 else:
957 958 raise HTTPBadRequest()
958 959 return True
959 960 raise HTTPForbidden()
960 961
961 962 def _edit_pull_request(self, pull_request):
962 963 _ = self.request.translate
963 964 try:
964 965 PullRequestModel().edit(
965 966 pull_request, self.request.POST.get('title'),
966 967 self.request.POST.get('description'), self._rhodecode_user)
967 968 except ValueError:
968 969 msg = _(u'Cannot update closed pull requests.')
969 970 h.flash(msg, category='error')
970 971 return
971 972 else:
972 973 Session().commit()
973 974
974 975 msg = _(u'Pull request title & description updated.')
975 976 h.flash(msg, category='success')
976 977 return
977 978
978 979 def _update_commits(self, pull_request):
979 980 _ = self.request.translate
980 981 resp = PullRequestModel().update_commits(pull_request)
981 982
982 983 if resp.executed:
983 984
984 985 if resp.target_changed and resp.source_changed:
985 986 changed = 'target and source repositories'
986 987 elif resp.target_changed and not resp.source_changed:
987 988 changed = 'target repository'
988 989 elif not resp.target_changed and resp.source_changed:
989 990 changed = 'source repository'
990 991 else:
991 992 changed = 'nothing'
992 993
993 994 msg = _(
994 995 u'Pull request updated to "{source_commit_id}" with '
995 996 u'{count_added} added, {count_removed} removed commits. '
996 997 u'Source of changes: {change_source}')
997 998 msg = msg.format(
998 999 source_commit_id=pull_request.source_ref_parts.commit_id,
999 1000 count_added=len(resp.changes.added),
1000 1001 count_removed=len(resp.changes.removed),
1001 1002 change_source=changed)
1002 1003 h.flash(msg, category='success')
1003 1004
1004 1005 channel = '/repo${}$/pr/{}'.format(
1005 1006 pull_request.target_repo.repo_name,
1006 1007 pull_request.pull_request_id)
1007 1008 message = msg + (
1008 1009 ' - <a onclick="window.location.reload()">'
1009 1010 '<strong>{}</strong></a>'.format(_('Reload page')))
1010 1011 channelstream.post_message(
1011 1012 channel, message, self._rhodecode_user.username,
1012 1013 registry=self.request.registry)
1013 1014 else:
1014 1015 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1015 1016 warning_reasons = [
1016 1017 UpdateFailureReason.NO_CHANGE,
1017 1018 UpdateFailureReason.WRONG_REF_TYPE,
1018 1019 ]
1019 1020 category = 'warning' if resp.reason in warning_reasons else 'error'
1020 1021 h.flash(msg, category=category)
1021 1022
1022 1023 @LoginRequired()
1023 1024 @NotAnonymous()
1024 1025 @HasRepoPermissionAnyDecorator(
1025 1026 'repository.read', 'repository.write', 'repository.admin')
1026 1027 @CSRFRequired()
1027 1028 @view_config(
1028 1029 route_name='pullrequest_merge', request_method='POST',
1029 1030 renderer='json_ext')
1030 1031 def pull_request_merge(self):
1031 1032 """
1032 1033 Merge will perform a server-side merge of the specified
1033 1034 pull request, if the pull request is approved and mergeable.
1034 1035 After successful merging, the pull request is automatically
1035 1036 closed, with a relevant comment.
1036 1037 """
1037 1038 pull_request = PullRequest.get_or_404(
1038 1039 self.request.matchdict['pull_request_id'])
1039 1040
1040 1041 self.load_default_context()
1041 1042 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1042 1043 translator=self.request.translate)
1043 1044 merge_possible = not check.failed
1044 1045
1045 1046 for err_type, error_msg in check.errors:
1046 1047 h.flash(error_msg, category=err_type)
1047 1048
1048 1049 if merge_possible:
1049 1050 log.debug("Pre-conditions checked, trying to merge.")
1050 1051 extras = vcs_operation_context(
1051 1052 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1052 1053 username=self._rhodecode_db_user.username, action='push',
1053 1054 scm=pull_request.target_repo.repo_type)
1054 1055 self._merge_pull_request(
1055 1056 pull_request, self._rhodecode_db_user, extras)
1056 1057 else:
1057 1058 log.debug("Pre-conditions failed, NOT merging.")
1058 1059
1059 1060 raise HTTPFound(
1060 1061 h.route_path('pullrequest_show',
1061 1062 repo_name=pull_request.target_repo.repo_name,
1062 1063 pull_request_id=pull_request.pull_request_id))
1063 1064
1064 1065 def _merge_pull_request(self, pull_request, user, extras):
1065 1066 _ = self.request.translate
1066 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1067 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1067 1068
1068 1069 if merge_resp.executed:
1069 1070 log.debug("The merge was successful, closing the pull request.")
1070 1071 PullRequestModel().close_pull_request(
1071 1072 pull_request.pull_request_id, user)
1072 1073 Session().commit()
1073 1074 msg = _('Pull request was successfully merged and closed.')
1074 1075 h.flash(msg, category='success')
1075 1076 else:
1076 1077 log.debug(
1077 1078 "The merge was not successful. Merge response: %s",
1078 1079 merge_resp)
1079 1080 msg = PullRequestModel().merge_status_message(
1080 1081 merge_resp.failure_reason)
1081 1082 h.flash(msg, category='error')
1082 1083
1083 1084 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1084 1085 _ = self.request.translate
1085 1086 get_default_reviewers_data, validate_default_reviewers = \
1086 1087 PullRequestModel().get_reviewer_functions()
1087 1088
1088 1089 try:
1089 1090 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1090 1091 except ValueError as e:
1091 1092 log.error('Reviewers Validation: {}'.format(e))
1092 1093 h.flash(e, category='error')
1093 1094 return
1094 1095
1095 1096 PullRequestModel().update_reviewers(
1096 1097 pull_request, reviewers, self._rhodecode_user)
1097 1098 h.flash(_('Pull request reviewers updated.'), category='success')
1098 1099 Session().commit()
1099 1100
1100 1101 @LoginRequired()
1101 1102 @NotAnonymous()
1102 1103 @HasRepoPermissionAnyDecorator(
1103 1104 'repository.read', 'repository.write', 'repository.admin')
1104 1105 @CSRFRequired()
1105 1106 @view_config(
1106 1107 route_name='pullrequest_delete', request_method='POST',
1107 1108 renderer='json_ext')
1108 1109 def pull_request_delete(self):
1109 1110 _ = self.request.translate
1110 1111
1111 1112 pull_request = PullRequest.get_or_404(
1112 1113 self.request.matchdict['pull_request_id'])
1113 1114 self.load_default_context()
1114 1115
1115 1116 pr_closed = pull_request.is_closed()
1116 1117 allowed_to_delete = PullRequestModel().check_user_delete(
1117 1118 pull_request, self._rhodecode_user) and not pr_closed
1118 1119
1119 1120 # only owner can delete it !
1120 1121 if allowed_to_delete:
1121 1122 PullRequestModel().delete(pull_request, self._rhodecode_user)
1122 1123 Session().commit()
1123 1124 h.flash(_('Successfully deleted pull request'),
1124 1125 category='success')
1125 1126 raise HTTPFound(h.route_path('pullrequest_show_all',
1126 1127 repo_name=self.db_repo_name))
1127 1128
1128 1129 log.warning('user %s tried to delete pull request without access',
1129 1130 self._rhodecode_user)
1130 1131 raise HTTPNotFound()
1131 1132
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_create', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_create(self):
        """
        Create a comment on a pull request.

        Two distinct flows are handled:
        * ``close_pull_request`` submitted -> close the pull request with a
          closing comment (requires update permission).
        * otherwise -> create a regular (possibly inline) comment, optionally
          with a review-status change (requires status-change permission).

        Returns a dict with the created comment data and its rendered HTML,
        consumed by the frontend. Raises HTTPForbidden when the pull request
        is closed or the user lacks permission to comment.
        """
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id

        # commenting on a closed pull request is never allowed
        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        allowed_to_comment = PullRequestModel().check_user_comment(
            pull_request, self._rhodecode_user)
        if not allowed_to_comment:
            log.debug(
                'comment: forbidden because pull request is from forbidden repo')
            raise HTTPForbidden()

        c = self.load_default_context()

        # raw form fields; `status` / `comment_type` semantics are defined by
        # ChangesetStatus / ChangesetComment models
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
        close_pull_request = self.request.POST.get('close_pull_request')

        # the logic here should work like following, if we submit close
        # pr comment, use `close_pull_request_with_comment` function
        # else handle regular comment logic

        if close_pull_request:
            # only owner or admin or person with write permissions
            allowed_to_close = PullRequestModel().check_user_update(
                pull_request, self._rhodecode_user)
            if not allowed_to_close:
                log.debug('comment: forbidden because not allowed to close '
                          'pull request %s', pull_request_id)
                raise HTTPForbidden()
            comment, status = PullRequestModel().close_pull_request_with_comment(
                pull_request, self._rhodecode_user, self.db_repo, message=text)
            Session().flush()
            events.trigger(
                events.PullRequestCommentEvent(pull_request, comment))

        else:
            # regular comment case, could be inline, or one with status.
            # for that one we check also permissions

            allowed_to_change_status = PullRequestModel().check_user_change_status(
                pull_request, self._rhodecode_user)

            if status and allowed_to_change_status:
                # default the comment body to an auto-generated status message
                # when the user left the text empty
                message = (_('Status change %(transition_icon)s %(status)s')
                           % {'transition_icon': '>',
                              'status': ChangesetStatus.get_status_lbl(status)})
                text = text or message

            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_user.user_id,
                pull_request=pull_request,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status and allowed_to_change_status else None),
                status_change_type=(status
                                    if status and allowed_to_change_status else None),
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            if allowed_to_change_status:
                # calculate old status before we change it
                old_calculated_status = pull_request.calculated_review_status()

                # get status if set !
                if status:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_user.user_id,
                        comment,
                        pull_request=pull_request
                    )

                Session().flush()
                # this is somehow required to get access to some relationship
                # loaded on comment
                Session().refresh(comment)

                events.trigger(
                    events.PullRequestCommentEvent(pull_request, comment))

                # we now calculate the status of pull request, and based on that
                # calculation we set the commits status
                calculated_status = pull_request.calculated_review_status()
                if old_calculated_status != calculated_status:
                    PullRequestModel()._trigger_pull_request_hook(
                        pull_request, self._rhodecode_user, 'review_status_change')

        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            # render the comment block server-side so the frontend can inject
            # it without a page reload
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data
1258 1259
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_delete', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_delete(self):
        """
        Delete a pull request comment.

        Allowed for super admins, the comment author, and repo admins of the
        repository the comment belongs to. Forbidden on closed pull requests.
        Returns True on success; re-triggers the review-status hook when
        deleting the comment changed the calculated review status.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])

        comment = ChangesetComment.get_or_404(
            self.request.matchdict['comment_id'])
        comment_id = comment.comment_id

        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        # NOTE(review): `get_or_404` above raises on a missing comment, so
        # this branch looks unreachable — confirm before removing.
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        if comment.pull_request.is_closed():
            # don't allow deleting comments on closed pull request
            raise HTTPForbidden()

        # permission matrix: super admin, comment owner, or admin of the repo
        # the comment was made on
        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = comment.author.user_id == self._rhodecode_user.user_id
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            # capture status before deletion; deleting a status-change comment
            # can alter the calculated review status
            old_calculated_status = comment.pull_request.calculated_review_status()
            CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
            Session().commit()
            calculated_status = comment.pull_request.calculated_review_status()
            if old_calculated_status != calculated_status:
                PullRequestModel()._trigger_pull_request_hook(
                    comment.pull_request, self._rhodecode_user, 'review_status_change')
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()
@@ -1,673 +1,674 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 23 It's implemented with basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import importlib
30 30 from functools import wraps
31 31 from StringIO import StringIO
32 32 from lxml import etree
33 33
34 34 import time
35 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 36
37 37 from pyramid.httpexceptions import (
38 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.authentication.base import (
43 43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
44 44 from rhodecode.lib import caches
45 45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
46 46 from rhodecode.lib.base import (
47 47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
48 48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
49 49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 50 from rhodecode.lib.middleware import appenlight
51 51 from rhodecode.lib.middleware.utils import scm_app_http
52 52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 55 from rhodecode.lib.vcs.backends import base
56 56
57 57 from rhodecode.model import meta
58 58 from rhodecode.model.db import User, Repository, PullRequest
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.pull_request import PullRequestModel
61 61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
def extract_svn_txn_id(acl_repo_name, data):
    """
    Extract the svn txn_id from XML data submitted during POST (MERGE)
    operations and combine it with the repo name into a cache key.

    Returns None (implicitly) when no txn_id can be found; any parsing
    error is logged and swallowed on purpose (best-effort helper).
    """
    try:
        txn_pattern = re.compile(r'/txn/(?P<txn_id>.*)')
        for source_el in etree.fromstring(data):
            if source_el.tag != '{DAV:}source':
                continue
            for href_el in source_el:
                if href_el.tag != '{DAV:}href':
                    continue
                found = txn_pattern.search(href_el.text)
                if not found:
                    continue
                # fold repo name + raw txn id into a stable cache key
                return caches.compute_key_from_params(
                    acl_repo_name, found.group('txn_id'))
    except Exception:
        log.exception('Failed to extract txn_id')
86 86
87 87
def initialize_generator(factory):
    """
    Initializes the returned generator by draining its first element.

    This can be used to give a generator an initializer, which is the code
    up to the first yield statement. This decorator enforces that the first
    produced element has the value ``"__init__"`` to make its special
    purpose very explicit in the using code.

    :param factory: generator function to wrap
    :raises ValueError: if the generator yields nothing, or its first
        element is not ``"__init__"``
    """

    @wraps(factory)
    def wrapper(*args, **kwargs):
        gen = factory(*args, **kwargs)
        try:
            # use the builtin next() instead of the Python-2-only
            # gen.next() method; works on both Python 2.6+ and Python 3
            init = next(gen)
        except StopIteration:
            raise ValueError('Generator must yield at least one element.')
        if init != "__init__":
            raise ValueError('First yielded element must be "__init__".')
        return gen
    return wrapper
109 109
110 110
class SimpleVCS(object):
    """Common functionality for SCM HTTP handlers."""

    # backend identifier, overridden by subclasses (e.g. 'hg', 'git', 'svn')
    SCM = 'unknown'

    # repo name used for permission (ACL) checks
    acl_repo_name = None
    # repo name as extracted from the request URL
    url_repo_name = None
    # repo name/path handed to the VCS backend (filesystem path for shadows)
    vcs_repo_name = None
    # extras injected into hook execution context
    rc_extras = {}

    # We have to handle requests to shadow repositories different than requests
    # to normal repositories. Therefore we have to distinguish them. To do this
    # we use this regex which will match only on URLs pointing to shadow
    # repositories.
    shadow_repo_re = re.compile(
        '(?P<groups>(?:{slug_pat}/)*)' # repo groups
        '(?P<target>{slug_pat})/' # target repo
        'pull-request/(?P<pr_id>\d+)/' # pull request
        'repository$' # shadow repo
        .format(slug_pat=SLUG_RE.pattern))
131 131
    def __init__(self, config, registry):
        """
        :param config: wsgi app settings dict (auth_ret_code, base_path, ...)
        :param registry: pyramid registry; gets rhodecode_settings attached
        """
        self.registry = registry
        self.config = config
        # re-populated by specialized middleware
        self.repo_vcs_config = base.Config()
        self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)

        registry.rhodecode_settings = self.rhodecode_settings
        # authenticate this VCS request using authfunc
        auth_ret_code_detection = \
            str2bool(self.config.get('auth_ret_code_detection', False))
        self.authenticate = BasicAuth(
            '', authenticate, registry, config.get('auth_ret_code'),
            auth_ret_code_detection)
        # placeholder; real client IP is resolved per-request
        self.ip_addr = '0.0.0.0'
147 147
    @LazyProperty
    def global_vcs_config(self):
        """Application-wide VCS ui settings as a config object.

        Best-effort: falls back to an empty ``base.Config()`` when the
        settings cannot be loaded (the broad ``except`` is deliberate).
        """
        try:
            return VcsSettingsModel().get_ui_settings_as_config_obj()
        except Exception:
            return base.Config()
154 154
155 155 @property
156 156 def base_path(self):
157 157 settings_path = self.repo_vcs_config.get(
158 158 *VcsSettingsModel.PATH_SETTING)
159 159
160 160 if not settings_path:
161 161 settings_path = self.global_vcs_config.get(
162 162 *VcsSettingsModel.PATH_SETTING)
163 163
164 164 if not settings_path:
165 165 # try, maybe we passed in explicitly as config option
166 166 settings_path = self.config.get('base_path')
167 167
168 168 if not settings_path:
169 169 raise ValueError('FATAL: base_path is empty')
170 170 return settings_path
171 171
    def set_repo_names(self, environ):
        """
        This will populate the attributes acl_repo_name, url_repo_name,
        vcs_repo_name and is_shadow_repo. In case of requests to normal (non
        shadow) repositories all names are equal. In case of requests to a
        shadow repository the acl-name points to the target repo of the pull
        request and the vcs-name points to the shadow repo file system path.
        The url-name is always the URL used by the vcs client program.

        Example in case of a shadow repo:
            acl_repo_name = RepoGroup/MyRepo
            url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
            vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
        """
        # First we set the repo name from URL for all attributes. This is the
        # default if handling normal (non shadow) repo requests.
        self.url_repo_name = self._get_repository_name(environ)
        self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
        self.is_shadow_repo = False

        # Check if this is a request to a shadow repository.
        match = self.shadow_repo_re.match(self.url_repo_name)
        if match:
            match_dict = match.groupdict()

            # Build acl repo name from regex match.
            acl_repo_name = safe_unicode('{groups}{target}'.format(
                groups=match_dict['groups'] or '',
                target=match_dict['target']))

            # Retrieve pull request instance by ID from regex match.
            pull_request = PullRequest.get(match_dict['pr_id'])

            # Only proceed if we got a pull request and if acl repo name from
            # URL equals the target repo name of the pull request.
            if pull_request and \
                    (acl_repo_name == pull_request.target_repo.repo_name):
                # numeric repo id of the target repo is part of the shadow
                # repository path
                repo_id = pull_request.target_repo.repo_id
                # Get file system path to shadow repository.
                workspace_id = PullRequestModel()._workspace_id(pull_request)
                target_vcs = pull_request.target_repo.scm_instance()
                vcs_repo_name = target_vcs._get_shadow_repository_path(
                    repo_id, workspace_id)

                # Store names for later usage.
                self.vcs_repo_name = vcs_repo_name
                self.acl_repo_name = acl_repo_name
                self.is_shadow_repo = True

        log.debug('Setting all VCS repository names: %s', {
            'acl_repo_name': self.acl_repo_name,
            'url_repo_name': self.url_repo_name,
            'vcs_repo_name': self.vcs_repo_name,
        })
225 226
    @property
    def scm_app(self):
        """Return the scm-app module that will serve the VCS request.

        Either the builtin HTTP implementation or a custom module named by
        the ``vcs.scm_app_implementation`` config option (imported by path).
        """
        custom_implementation = self.config['vcs.scm_app_implementation']
        if custom_implementation == 'http':
            log.info('Using HTTP implementation of scm app.')
            scm_app_impl = scm_app_http
        else:
            log.info('Using custom implementation of scm_app: "{}"'.format(
                custom_implementation))
            scm_app_impl = importlib.import_module(custom_implementation)
        return scm_app_impl
237 238
238 239 def _get_by_id(self, repo_name):
239 240 """
240 241 Gets a special pattern _<ID> from clone url and tries to replace it
241 242 with a repository_name for support of _<ID> non changeable urls
242 243 """
243 244
244 245 data = repo_name.split('/')
245 246 if len(data) >= 2:
246 247 from rhodecode.model.repo import RepoModel
247 248 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 249 if by_id_match:
249 250 data[1] = by_id_match.repo_name
250 251
251 252 return safe_str('/'.join(data))
252 253
    def _invalidate_cache(self, repo_name):
        """
        Sets cache for this repository for invalidation on next access

        :param repo_name: full repo name, also a cache key
        """
        ScmModel().mark_for_invalidation(repo_name)
260 261
    def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
        """Return True when `repo_name` exists in the DB, has the expected
        scm type, and is a valid repository on disk under `base_path`.

        :param repo_name: full repository name
        :param base_path: filesystem base path of all repositories
        :param scm_type: expected backend alias (e.g. 'hg', 'git')
        """
        db_repo = Repository.get_by_repo_name(repo_name)
        if not db_repo:
            log.debug('Repository `%s` not found inside the database.',
                      repo_name)
            return False

        if db_repo.repo_type != scm_type:
            log.warning(
                'Repository `%s` have incorrect scm_type, expected %s got %s',
                repo_name, db_repo.repo_type, scm_type)
            return False

        # enable largefiles extension so validation matches runtime behavior
        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        return is_valid_repo(
            repo_name, base_path,
            explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 280
280 281 def valid_and_active_user(self, user):
281 282 """
282 283 Checks if that user is not empty, and if it's actually object it checks
283 284 if he's active.
284 285
285 286 :param user: user object or None
286 287 :return: boolean
287 288 """
288 289 if user is None:
289 290 return False
290 291
291 292 elif user.active:
292 293 return True
293 294
294 295 return False
295 296
    @property
    def is_shadow_repo_dir(self):
        # True when the shadow repository directory actually exists on disk;
        # it may have been removed after a successful merge.
        return os.path.isdir(self.vcs_repo_name)
299 300
    def _check_permission(self, action, user, repo_name, ip_addr=None,
                          plugin_id='', plugin_cache_active=False, cache_ttl=0):
        """
        Checks permissions using action (push/pull) user and repository
        name. If plugin_cache and ttl is set it will use the plugin which
        authenticated the user to store the cached permissions result for N
        amount of seconds as in cache_ttl

        :param action: push or pull action
        :param user: user instance
        :param repo_name: repository name
        :param ip_addr: client IP, used for the IP-allow check
        :param plugin_id: id of the auth plugin, part of the cache key
        :param plugin_cache_active: whether to cache the permission result
        :param cache_ttl: cache lifetime in seconds
        :return: boolean permission result (possibly from cache)
        """

        # get instance of cache manager configured for a namespace
        cache_manager = get_perms_cache_manager(
            custom_ttl=cache_ttl, suffix=user.user_id)
        log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
                  plugin_id, plugin_cache_active, cache_ttl)

        # for environ based password can be empty, but then the validation is
        # on the server that fills in the env data needed for authentication
        _perm_calc_hash = caches.compute_key_from_params(
            plugin_id, action, user.user_id, repo_name, ip_addr)

        # _authenticate is a wrapper for .auth() method of plugin.
        # it checks if .auth() sends proper data.
        # For RhodeCodeExternalAuthPlugin it also maps users to
        # Database and maps the attributes returned from .auth()
        # to RhodeCode database. If this function returns data
        # then auth is correct.
        start = time.time()
        log.debug('Running plugin `%s` permissions check', plugin_id)

        def perm_func():
            """
            This function is used internally in Cache of Beaker to calculate
            Results
            """
            log.debug('auth: calculating permission access now...')
            # check IP
            inherit = user.inherit_default_permissions
            ip_allowed = AuthUser.check_ip_allowed(
                user.user_id, ip_addr, inherit_from_default=inherit)
            if ip_allowed:
                log.info('Access for IP:%s allowed', ip_addr)
            else:
                return False

            if action == 'push':
                # push requires write or admin permission
                perms = ('repository.write', 'repository.admin')
                if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
                    return False

            else:
                # any other action need at least read permission
                perms = (
                    'repository.read', 'repository.write', 'repository.admin')
                if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
                    return False

            return True

        if plugin_cache_active:
            log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
            perm_result = cache_manager.get(
                _perm_calc_hash, createfunc=perm_func)
        else:
            perm_result = perm_func()

        auth_time = time.time() - start
        log.debug('Permissions for plugin `%s` completed in %.3fs, '
                  'expiration time of fetched cache %.1fs.',
                  plugin_id, auth_time, cache_ttl)

        return perm_result
375 376
376 377 def _check_ssl(self, environ, start_response):
377 378 """
378 379 Checks the SSL check flag and returns False if SSL is not present
379 380 and required True otherwise
380 381 """
381 382 org_proto = environ['wsgi._org_proto']
382 383 # check if we have SSL required ! if not it's a bad request !
383 384 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 385 if require_ssl and org_proto == 'http':
385 386 log.debug(
386 387 'Bad request: detected protocol is `%s` and '
387 388 'SSL/HTTPS is required.', org_proto)
388 389 return False
389 390 return True
390 391
    def _get_default_cache_ttl(self):
        """Return (cache_active, cache_ttl) for anonymous permission checks.

        Falls back to (False, 0) when the plugin reports no TTL settings.
        """
        # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
        plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
        plugin_settings = plugin.get_settings()
        plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
            plugin_settings) or (False, 0)
        return plugin_cache_active, cache_ttl
398 399
    def __call__(self, environ, start_response):
        """WSGI entry point: delegate to `_handle_request`, turning any
        unhandled error into a 500 and always releasing the DB session."""
        try:
            return self._handle_request(environ, start_response)
        except Exception:
            log.exception("Exception while handling request")
            appenlight.track_exception(environ)
            return HTTPInternalServerError()(environ, start_response)
        finally:
            # always detach the scoped DB session for this request
            meta.Session.remove()
408 409
    def _handle_request(self, environ, start_response):
        """
        Core request pipeline: SSL check, repo-name validation, action
        detection, shadow-repo restrictions, anonymous/authenticated
        permission checks, and finally dispatch to the VCS backend app.
        """

        if not self._check_ssl(environ, start_response):
            reason = ('SSL required, while RhodeCode was unable '
                      'to detect this as SSL request')
            log.debug('User not allowed to proceed, %s', reason)
            return HTTPNotAcceptable(reason)(environ, start_response)

        if not self.url_repo_name:
            log.warning('Repository name is empty: %s', self.url_repo_name)
            # failed to get repo name, we fail now
            return HTTPNotFound()(environ, start_response)
        log.debug('Extracted repo name is %s', self.url_repo_name)

        ip_addr = get_ip_addr(environ)
        user_agent = get_user_agent(environ)
        username = None

        # skip passing error to error controller
        environ['pylons.status_code_redirect'] = True

        # ======================================================================
        # GET ACTION PULL or PUSH
        # ======================================================================
        action = self._get_action(environ)

        # ======================================================================
        # Check if this is a request to a shadow repository of a pull request.
        # In this case only pull action is allowed.
        # ======================================================================
        if self.is_shadow_repo and action != 'pull':
            reason = 'Only pull action is allowed for shadow repositories.'
            log.debug('User not allowed to proceed, %s', reason)
            return HTTPNotAcceptable(reason)(environ, start_response)

        # Check if the shadow repo actually exists, in case someone refers
        # to it, and it has been deleted because of successful merge.
        if self.is_shadow_repo and not self.is_shadow_repo_dir:
            log.debug(
                'Shadow repo detected, and shadow repo dir `%s` is missing',
                self.is_shadow_repo_dir)
            return HTTPNotFound()(environ, start_response)

        # ======================================================================
        # CHECK ANONYMOUS PERMISSION
        # ======================================================================
        if action in ['pull', 'push']:
            anonymous_user = User.get_default_user()
            username = anonymous_user.username
            if anonymous_user.active:
                plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
                # ONLY check permissions if the user is activated
                anonymous_perm = self._check_permission(
                    action, anonymous_user, self.acl_repo_name, ip_addr,
                    plugin_id='anonymous_access',
                    plugin_cache_active=plugin_cache_active,
                    cache_ttl=cache_ttl,
                )
            else:
                anonymous_perm = False

            if not anonymous_user.active or not anonymous_perm:
                if not anonymous_user.active:
                    log.debug('Anonymous access is disabled, running '
                              'authentication')

                if not anonymous_perm:
                    log.debug('Not enough credentials to access this '
                              'repository as anonymous user')

                username = None
                # ==============================================================
                # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
                # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
                # ==============================================================

                # try to auth based on environ, container auth methods
                log.debug('Running PRE-AUTH for container based authentication')
                pre_auth = authenticate(
                    '', '', environ, VCS_TYPE, registry=self.registry,
                    acl_repo_name=self.acl_repo_name)
                if pre_auth and pre_auth.get('username'):
                    username = pre_auth['username']
                log.debug('PRE-AUTH got %s as username', username)
                if pre_auth:
                    log.debug('PRE-AUTH successful from %s',
                              pre_auth.get('auth_data', {}).get('_plugin'))

                # If not authenticated by the container, running basic auth
                # before inject the calling repo_name for special scope checks
                self.authenticate.acl_repo_name = self.acl_repo_name

                plugin_cache_active, cache_ttl = False, 0
                plugin = None
                if not username:
                    self.authenticate.realm = self.authenticate.get_rc_realm()

                    try:
                        auth_result = self.authenticate(environ)
                    except (UserCreationError, NotAllowedToCreateUserError) as e:
                        log.error(e)
                        reason = safe_str(e)
                        return HTTPNotAcceptable(reason)(environ, start_response)

                    if isinstance(auth_result, dict):
                        AUTH_TYPE.update(environ, 'basic')
                        REMOTE_USER.update(environ, auth_result['username'])
                        username = auth_result['username']
                        plugin = auth_result.get('auth_data', {}).get('_plugin')
                        log.info(
                            'MAIN-AUTH successful for user `%s` from %s plugin',
                            username, plugin)

                        plugin_cache_active, cache_ttl = auth_result.get(
                            'auth_data', {}).get('_ttl_cache') or (False, 0)
                    else:
                        # non-dict result is a WSGI challenge response
                        return auth_result.wsgi_application(
                            environ, start_response)


                # ==============================================================
                # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
                # ==============================================================
                user = User.get_by_username(username)
                if not self.valid_and_active_user(user):
                    return HTTPForbidden()(environ, start_response)
                username = user.username
                user.update_lastactivity()
                meta.Session().commit()

                # check user attributes for password change flag
                user_obj = user
                if user_obj and user_obj.username != User.DEFAULT_USER and \
                        user_obj.user_data.get('force_password_change'):
                    reason = 'password change required'
                    log.debug('User not allowed to authenticate, %s', reason)
                    return HTTPNotAcceptable(reason)(environ, start_response)

                # check permissions for this repository
                perm = self._check_permission(
                    action, user, self.acl_repo_name, ip_addr,
                    plugin, plugin_cache_active, cache_ttl)
                if not perm:
                    return HTTPForbidden()(environ, start_response)

        # extras are injected into UI object and later available
        # in hooks executed by RhodeCode
        check_locking = _should_check_locking(environ.get('QUERY_STRING'))
        extras = vcs_operation_context(
            environ, repo_name=self.acl_repo_name, username=username,
            action=action, scm=self.SCM, check_locking=check_locking,
            is_shadow_repo=self.is_shadow_repo
        )

        # ======================================================================
        # REQUEST HANDLING
        # ======================================================================
        repo_path = os.path.join(
            safe_str(self.base_path), safe_str(self.vcs_repo_name))
        log.debug('Repository path is %s', repo_path)

        fix_PATH()

        log.info(
            '%s action on %s repo "%s" by "%s" from %s %s',
            action, self.SCM, safe_str(self.url_repo_name),
            safe_str(username), ip_addr, user_agent)

        return self._generate_vcs_response(
            environ, start_response, repo_path, extras, action)
579 580
    @initialize_generator
    def _generate_vcs_response(
            self, environ, start_response, repo_path, extras, action):
        """
        Returns a generator for the response content.

        This method is implemented as a generator, so that it can trigger
        the cache validation after all content sent back to the client. It
        also handles the locking exceptions which will be triggered when
        the first chunk is produced by the underlying WSGI application.
        """
        txn_id = ''
        if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
            # case for SVN, we want to re-use the callback daemon port
            # so we use the txn_id, for this we peek the body, and still save
            # it as wsgi.input
            data = environ['wsgi.input'].read()
            environ['wsgi.input'] = StringIO(data)
            txn_id = extract_svn_txn_id(self.acl_repo_name, data)

        callback_daemon, extras = self._prepare_callback_daemon(
            extras, environ, action, txn_id=txn_id)
        log.debug('HOOKS extras is %s', extras)

        config = self._create_config(extras, self.acl_repo_name)
        app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
        with callback_daemon:
            app.rc_extras = extras

            try:
                response = app(environ, start_response)
            finally:
                # This statement works together with the decorator
                # "initialize_generator" above. The decorator ensures that
                # we hit the first yield statement before the generator is
                # returned back to the WSGI server. This is needed to
                # ensure that the call to "app" above triggers the
                # needed callback to "start_response" before the
                # generator is actually used.
                yield "__init__"

            # iter content
            for chunk in response:
                yield chunk

            try:
                # invalidate cache on push
                if action == 'push':
                    self._invalidate_cache(self.url_repo_name)
            finally:
                meta.Session.remove()
631 632
    def _get_repository_name(self, environ):
        """Get repository name out of the environmnent

        :param environ: WSGI environment
        :raises NotImplementedError: abstract; subclasses must implement
        """
        raise NotImplementedError()
638 639
    def _get_action(self, environ):
        """Map request commands into a pull or push command.

        :param environ: WSGI environment
        :raises NotImplementedError: abstract; subclasses must implement
        """
        raise NotImplementedError()
645 646
    def _create_wsgi_app(self, repo_path, repo_name, config):
        """Return the WSGI app that will finally handle the request."""
        # abstract; each SCM-specific subclass provides its backend app
        raise NotImplementedError()
649 650
    def _create_config(self, extras, repo_name):
        """Create a safe config representation."""
        # abstract; subclasses serialize `extras` for their backend
        raise NotImplementedError()
653 654
    def _should_use_callback_daemon(self, extras, environ, action):
        """Hook for subclasses to skip the callback daemon; defaults to on."""
        return True
656 657
    def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
        """Return (callback_daemon, extras) prepared for hook execution.

        Falls back to direct hook calls when the daemon is not needed for
        this action (as decided by `_should_use_callback_daemon`).
        """
        direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
        if not self._should_use_callback_daemon(extras, environ, action):
            # disable callback daemon for actions that don't require it
            direct_calls = True

        return prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            use_direct_calls=direct_calls, txn_id=txn_id)
666 667
667 668
668 669 def _should_check_locking(query_string):
669 670 # this is kind of hacky, but due to how mercurial handles client-server
670 671 # server see all operation on commit; bookmarks, phases and
671 672 # obsolescence marker in different transaction, we don't want to check
672 673 # locking on those
673 674 return query_string not in ['cmd=listkeys']
@@ -1,1730 +1,1746 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34 import shutil
35 35
36 36 from zope.cachedescriptors.property import Lazy as LazyProperty
37 37
38 38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 39 from rhodecode.lib.vcs import connection
40 40 from rhodecode.lib.vcs.utils import author_name, author_email
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 47 RepositoryError)
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 FILEMODE_DEFAULT = 0100644
54 54 FILEMODE_EXECUTABLE = 0100755
55 55
56 56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 57 MergeResponse = collections.namedtuple(
58 58 'MergeResponse',
59 59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60 60
61 61
62 62 class MergeFailureReason(object):
63 63 """
64 64 Enumeration with all the reasons why the server side merge could fail.
65 65
66 66 DO NOT change the number of the reasons, as they may be stored in the
67 67 database.
68 68
69 69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 70 reasons.
71 71 """
72 72
73 73 # Everything went well.
74 74 NONE = 0
75 75
76 76 # An unexpected exception was raised. Check the logs for more details.
77 77 UNKNOWN = 1
78 78
79 79 # The merge was not successful, there are conflicts.
80 80 MERGE_FAILED = 2
81 81
82 82 # The merge succeeded but we could not push it to the target repository.
83 83 PUSH_FAILED = 3
84 84
85 85 # The specified target is not a head in the target repository.
86 86 TARGET_IS_NOT_HEAD = 4
87 87
88 88 # The source repository contains more branches than the target. Pushing
89 89 # the merge will create additional branches in the target.
90 90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 91
92 92 # The target reference has multiple heads. That does not allow to correctly
93 93 # identify the target location. This could only happen for mercurial
94 94 # branches.
95 95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 96
97 97 # The target repository is locked
98 98 TARGET_IS_LOCKED = 7
99 99
100 100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 101 # A involved commit could not be found.
102 102 _DEPRECATED_MISSING_COMMIT = 8
103 103
104 104 # The target repo reference is missing.
105 105 MISSING_TARGET_REF = 9
106 106
107 107 # The source repo reference is missing.
108 108 MISSING_SOURCE_REF = 10
109 109
110 110 # The merge was not successful, there are conflicts related to sub
111 111 # repositories.
112 112 SUBREPO_MERGE_FAILED = 11
113 113
114 114
115 115 class UpdateFailureReason(object):
116 116 """
117 117 Enumeration with all the reasons why the pull request update could fail.
118 118
119 119 DO NOT change the number of the reasons, as they may be stored in the
120 120 database.
121 121
122 122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 123 reasons.
124 124 """
125 125
126 126 # Everything went well.
127 127 NONE = 0
128 128
129 129 # An unexpected exception was raised. Check the logs for more details.
130 130 UNKNOWN = 1
131 131
132 132 # The pull request is up to date.
133 133 NO_CHANGE = 2
134 134
135 135 # The pull request has a reference type that is not supported for update.
136 136 WRONG_REF_TYPE = 3
137 137
138 138 # Update failed because the target reference is missing.
139 139 MISSING_TARGET_REF = 4
140 140
141 141 # Update failed because the source reference is missing.
142 142 MISSING_SOURCE_REF = 5
143 143
144 144
145 145 class BaseRepository(object):
146 146 """
147 147 Base Repository for final backends
148 148
149 149 .. attribute:: DEFAULT_BRANCH_NAME
150 150
151 151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
152 152
153 153 .. attribute:: commit_ids
154 154
155 155 list of all available commit ids, in ascending order
156 156
157 157 .. attribute:: path
158 158
159 159 absolute path to the repository
160 160
161 161 .. attribute:: bookmarks
162 162
163 163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 164 there are no bookmarks or the backend implementation does not support
165 165 bookmarks.
166 166
167 167 .. attribute:: tags
168 168
169 169 Mapping from name to :term:`Commit ID` of the tag.
170 170
171 171 """
172 172
173 173 DEFAULT_BRANCH_NAME = None
174 174 DEFAULT_CONTACT = u"Unknown"
175 175 DEFAULT_DESCRIPTION = u"unknown"
176 176 EMPTY_COMMIT_ID = '0' * 40
177 177
178 178 path = None
179 179 _remote = None
180 180
181 181 def __init__(self, repo_path, config=None, create=False, **kwargs):
182 182 """
183 183 Initializes repository. Raises RepositoryError if repository could
184 184 not be find at the given ``repo_path`` or directory at ``repo_path``
185 185 exists and ``create`` is set to True.
186 186
187 187 :param repo_path: local path of the repository
188 188 :param config: repository configuration
189 189 :param create=False: if set to True, would try to create repository.
190 190 :param src_url=None: if set, should be proper url from which repository
191 191 would be cloned; requires ``create`` parameter to be set to True -
192 192 raises RepositoryError if src_url is set and create evaluates to
193 193 False
194 194 """
195 195 raise NotImplementedError
196 196
197 197 def __repr__(self):
198 198 return '<%s at %s>' % (self.__class__.__name__, self.path)
199 199
200 200 def __len__(self):
201 201 return self.count()
202 202
203 203 def __eq__(self, other):
204 204 same_instance = isinstance(other, self.__class__)
205 205 return same_instance and other.path == self.path
206 206
207 207 def __ne__(self, other):
208 208 return not self.__eq__(other)
209 209
210 210 def get_create_shadow_cache_pr_path(self, db_repo):
211 211 path = db_repo.cached_diffs_dir
212 212 if not os.path.exists(path):
213 213 os.makedirs(path, 0755)
214 214 return path
215 215
216 216 @classmethod
217 217 def get_default_config(cls, default=None):
218 218 config = Config()
219 219 if default and isinstance(default, list):
220 220 for section, key, val in default:
221 221 config.set(section, key, val)
222 222 return config
223 223
224 224 @LazyProperty
225 225 def EMPTY_COMMIT(self):
226 226 return EmptyCommit(self.EMPTY_COMMIT_ID)
227 227
228 228 @LazyProperty
229 229 def alias(self):
230 230 for k, v in settings.BACKENDS.items():
231 231 if v.split('.')[-1] == str(self.__class__.__name__):
232 232 return k
233 233
234 234 @LazyProperty
235 235 def name(self):
236 236 return safe_unicode(os.path.basename(self.path))
237 237
238 238 @LazyProperty
239 239 def description(self):
240 240 raise NotImplementedError
241 241
242 242 def refs(self):
243 243 """
244 244 returns a `dict` with branches, bookmarks, tags, and closed_branches
245 245 for this repository
246 246 """
247 247 return dict(
248 248 branches=self.branches,
249 249 branches_closed=self.branches_closed,
250 250 tags=self.tags,
251 251 bookmarks=self.bookmarks
252 252 )
253 253
254 254 @LazyProperty
255 255 def branches(self):
256 256 """
257 257 A `dict` which maps branch names to commit ids.
258 258 """
259 259 raise NotImplementedError
260 260
261 261 @LazyProperty
262 262 def branches_closed(self):
263 263 """
264 264 A `dict` which maps closed branch names to commit ids.
265 265 """
266 266 raise NotImplementedError
267 267
268 268 @LazyProperty
269 269 def bookmarks(self):
270 270 """
271 271 A `dict` which maps bookmark names to commit ids.
272 272 """
273 273 raise NotImplementedError
274 274
275 275 @LazyProperty
276 276 def tags(self):
277 277 """
278 278 A `dict` which maps tag names to commit ids.
279 279 """
280 280 raise NotImplementedError
281 281
282 282 @LazyProperty
283 283 def size(self):
284 284 """
285 285 Returns combined size in bytes for all repository files
286 286 """
287 287 tip = self.get_commit()
288 288 return tip.size
289 289
290 290 def size_at_commit(self, commit_id):
291 291 commit = self.get_commit(commit_id)
292 292 return commit.size
293 293
294 294 def is_empty(self):
295 295 return not bool(self.commit_ids)
296 296
297 297 @staticmethod
298 298 def check_url(url, config):
299 299 """
300 300 Function will check given url and try to verify if it's a valid
301 301 link.
302 302 """
303 303 raise NotImplementedError
304 304
305 305 @staticmethod
306 306 def is_valid_repository(path):
307 307 """
308 308 Check if given `path` contains a valid repository of this backend
309 309 """
310 310 raise NotImplementedError
311 311
312 312 # ==========================================================================
313 313 # COMMITS
314 314 # ==========================================================================
315 315
316 316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
317 317 """
318 318 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
319 319 are both None, most recent commit is returned.
320 320
321 321 :param pre_load: Optional. List of commit attributes to load.
322 322
323 323 :raises ``EmptyRepositoryError``: if there are no commits
324 324 """
325 325 raise NotImplementedError
326 326
327 327 def __iter__(self):
328 328 for commit_id in self.commit_ids:
329 329 yield self.get_commit(commit_id=commit_id)
330 330
331 331 def get_commits(
332 332 self, start_id=None, end_id=None, start_date=None, end_date=None,
333 333 branch_name=None, show_hidden=False, pre_load=None):
334 334 """
335 335 Returns iterator of `BaseCommit` objects from start to end
336 336 not inclusive. This should behave just like a list, ie. end is not
337 337 inclusive.
338 338
339 339 :param start_id: None or str, must be a valid commit id
340 340 :param end_id: None or str, must be a valid commit id
341 341 :param start_date:
342 342 :param end_date:
343 343 :param branch_name:
344 344 :param show_hidden:
345 345 :param pre_load:
346 346 """
347 347 raise NotImplementedError
348 348
349 349 def __getitem__(self, key):
350 350 """
351 351 Allows index based access to the commit objects of this repository.
352 352 """
353 353 pre_load = ["author", "branch", "date", "message", "parents"]
354 354 if isinstance(key, slice):
355 355 return self._get_range(key, pre_load)
356 356 return self.get_commit(commit_idx=key, pre_load=pre_load)
357 357
358 358 def _get_range(self, slice_obj, pre_load):
359 359 for commit_id in self.commit_ids.__getitem__(slice_obj):
360 360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
361 361
362 362 def count(self):
363 363 return len(self.commit_ids)
364 364
365 365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
366 366 """
367 367 Creates and returns a tag for the given ``commit_id``.
368 368
369 369 :param name: name for new tag
370 370 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
371 371 :param commit_id: commit id for which new tag would be created
372 372 :param message: message of the tag's commit
373 373 :param date: date of tag's commit
374 374
375 375 :raises TagAlreadyExistError: if tag with same name already exists
376 376 """
377 377 raise NotImplementedError
378 378
379 379 def remove_tag(self, name, user, message=None, date=None):
380 380 """
381 381 Removes tag with the given ``name``.
382 382
383 383 :param name: name of the tag to be removed
384 384 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
385 385 :param message: message of the tag's removal commit
386 386 :param date: date of tag's removal commit
387 387
388 388 :raises TagDoesNotExistError: if tag with given name does not exists
389 389 """
390 390 raise NotImplementedError
391 391
392 392 def get_diff(
393 393 self, commit1, commit2, path=None, ignore_whitespace=False,
394 394 context=3, path1=None):
395 395 """
396 396 Returns (git like) *diff*, as plain text. Shows changes introduced by
397 397 `commit2` since `commit1`.
398 398
399 399 :param commit1: Entry point from which diff is shown. Can be
400 400 ``self.EMPTY_COMMIT`` - in this case, patch showing all
401 401 the changes since empty state of the repository until `commit2`
402 402 :param commit2: Until which commit changes should be shown.
403 403 :param path: Can be set to a path of a file to create a diff of that
404 404 file. If `path1` is also set, this value is only associated to
405 405 `commit2`.
406 406 :param ignore_whitespace: If set to ``True``, would not show whitespace
407 407 changes. Defaults to ``False``.
408 408 :param context: How many lines before/after changed lines should be
409 409 shown. Defaults to ``3``.
410 410 :param path1: Can be set to a path to associate with `commit1`. This
411 411 parameter works only for backends which support diff generation for
412 412 different paths. Other backends will raise a `ValueError` if `path1`
413 413 is set and has a different value than `path`.
414 414 :param file_path: filter this diff by given path pattern
415 415 """
416 416 raise NotImplementedError
417 417
418 418 def strip(self, commit_id, branch=None):
419 419 """
420 420 Strip given commit_id from the repository
421 421 """
422 422 raise NotImplementedError
423 423
424 424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
425 425 """
426 426 Return a latest common ancestor commit if one exists for this repo
427 427 `commit_id1` vs `commit_id2` from `repo2`.
428 428
429 429 :param commit_id1: Commit id from this repository to use as a
430 430 target for the comparison.
431 431 :param commit_id2: Source commit id to use for comparison.
432 432 :param repo2: Source repository to use for comparison.
433 433 """
434 434 raise NotImplementedError
435 435
436 436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
437 437 """
438 438 Compare this repository's revision `commit_id1` with `commit_id2`.
439 439
440 440 Returns a tuple(commits, ancestor) that would be merged from
441 441 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
442 442 will be returned as ancestor.
443 443
444 444 :param commit_id1: Commit id from this repository to use as a
445 445 target for the comparison.
446 446 :param commit_id2: Source commit id to use for comparison.
447 447 :param repo2: Source repository to use for comparison.
448 448 :param merge: If set to ``True`` will do a merge compare which also
449 449 returns the common ancestor.
450 450 :param pre_load: Optional. List of commit attributes to load.
451 451 """
452 452 raise NotImplementedError
453 453
454 def merge(self, target_ref, source_repo, source_ref, workspace_id,
454 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
455 455 user_name='', user_email='', message='', dry_run=False,
456 456 use_rebase=False, close_branch=False):
457 457 """
458 458 Merge the revisions specified in `source_ref` from `source_repo`
459 459 onto the `target_ref` of this repository.
460 460
461 461 `source_ref` and `target_ref` are named tuples with the following
462 462 fields `type`, `name` and `commit_id`.
463 463
464 464 Returns a MergeResponse named tuple with the following fields
465 465 'possible', 'executed', 'source_commit', 'target_commit',
466 466 'merge_commit'.
467 467
468 :param repo_id: `repo_id` target repo id.
469 :param workspace_id: `workspace_id` unique identifier.
468 470 :param target_ref: `target_ref` points to the commit on top of which
469 471 the `source_ref` should be merged.
470 472 :param source_repo: The repository that contains the commits to be
471 473 merged.
472 474 :param source_ref: `source_ref` points to the topmost commit from
473 475 the `source_repo` which should be merged.
474 :param workspace_id: `workspace_id` unique identifier.
475 476 :param user_name: Merge commit `user_name`.
476 477 :param user_email: Merge commit `user_email`.
477 478 :param message: Merge commit `message`.
478 479 :param dry_run: If `True` the merge will not take place.
479 480 :param use_rebase: If `True` commits from the source will be rebased
480 481 on top of the target instead of being merged.
481 482 :param close_branch: If `True` branch will be close before merging it
482 483 """
483 484 if dry_run:
484 485 message = message or 'dry_run_merge_message'
485 486 user_email = user_email or 'dry-run-merge@rhodecode.com'
486 487 user_name = user_name or 'Dry-Run User'
487 488 else:
488 489 if not user_name:
489 490 raise ValueError('user_name cannot be empty')
490 491 if not user_email:
491 492 raise ValueError('user_email cannot be empty')
492 493 if not message:
493 494 raise ValueError('message cannot be empty')
494 495
495 shadow_repository_path = self._maybe_prepare_merge_workspace(
496 workspace_id, target_ref, source_ref)
497
498 496 try:
499 497 return self._merge_repo(
500 shadow_repository_path, target_ref, source_repo,
498 repo_id, workspace_id, target_ref, source_repo,
501 499 source_ref, message, user_name, user_email, dry_run=dry_run,
502 500 use_rebase=use_rebase, close_branch=close_branch)
503 501 except RepositoryError:
504 502 log.exception(
505 503 'Unexpected failure when running merge, dry-run=%s',
506 504 dry_run)
507 505 return MergeResponse(
508 506 False, False, None, MergeFailureReason.UNKNOWN)
509 507
510 def _merge_repo(self, shadow_repository_path, target_ref,
508 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 509 source_repo, source_ref, merge_message,
512 510 merger_name, merger_email, dry_run=False,
513 511 use_rebase=False, close_branch=False):
514 512 """Internal implementation of merge."""
515 513 raise NotImplementedError
516 514
517 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
515 def _maybe_prepare_merge_workspace(
516 self, repo_id, workspace_id, target_ref, source_ref):
518 517 """
519 518 Create the merge workspace.
520 519
521 520 :param workspace_id: `workspace_id` unique identifier.
522 521 """
523 522 raise NotImplementedError
524 523
525 def _get_shadow_repository_path(self, workspace_id):
526 raise NotImplementedError
524 def _get_legacy_shadow_repository_path(self, workspace_id):
525 """
526 Legacy version that was used before. We still need it for
527 backward compat
528 """
529 return os.path.join(
530 os.path.dirname(self.path),
531 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
527 532
528 def cleanup_merge_workspace(self, workspace_id):
533 def _get_shadow_repository_path(self, repo_id, workspace_id):
534 # The name of the shadow repository must start with '.', so it is
535 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
536 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
537 if os.path.exists(legacy_repository_path):
538 return legacy_repository_path
539 else:
540 return os.path.join(
541 os.path.dirname(self.path),
542 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
543
544 def cleanup_merge_workspace(self, repo_id, workspace_id):
529 545 """
530 546 Remove merge workspace.
531 547
532 548 This function MUST not fail in case there is no workspace associated to
533 549 the given `workspace_id`.
534 550
535 551 :param workspace_id: `workspace_id` unique identifier.
536 552 """
537 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
553 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
538 554 shadow_repository_path_del = '{}.{}.delete'.format(
539 555 shadow_repository_path, time.time())
540 556
541 557 # move the shadow repo, so it never conflicts with the one used.
542 558 # we use this method because shutil.rmtree had some edge case problems
543 559 # removing symlinked repositories
544 560 if not os.path.isdir(shadow_repository_path):
545 561 return
546 562
547 563 shutil.move(shadow_repository_path, shadow_repository_path_del)
548 564 try:
549 565 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
550 566 except Exception:
551 567 log.exception('Failed to gracefully remove shadow repo under %s',
552 568 shadow_repository_path_del)
553 569 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
554 570
555 571 # ========== #
556 572 # COMMIT API #
557 573 # ========== #
558 574
559 575 @LazyProperty
560 576 def in_memory_commit(self):
561 577 """
562 578 Returns :class:`InMemoryCommit` object for this repository.
563 579 """
564 580 raise NotImplementedError
565 581
566 582 # ======================== #
567 583 # UTILITIES FOR SUBCLASSES #
568 584 # ======================== #
569 585
570 586 def _validate_diff_commits(self, commit1, commit2):
571 587 """
572 588 Validates that the given commits are related to this repository.
573 589
574 590 Intended as a utility for sub classes to have a consistent validation
575 591 of input parameters in methods like :meth:`get_diff`.
576 592 """
577 593 self._validate_commit(commit1)
578 594 self._validate_commit(commit2)
579 595 if (isinstance(commit1, EmptyCommit) and
580 596 isinstance(commit2, EmptyCommit)):
581 597 raise ValueError("Cannot compare two empty commits")
582 598
583 599 def _validate_commit(self, commit):
584 600 if not isinstance(commit, BaseCommit):
585 601 raise TypeError(
586 602 "%s is not of type BaseCommit" % repr(commit))
587 603 if commit.repository != self and not isinstance(commit, EmptyCommit):
588 604 raise ValueError(
589 605 "Commit %s must be a valid commit from this repository %s, "
590 606 "related to this repository instead %s." %
591 607 (commit, self, commit.repository))
592 608
593 609 def _validate_commit_id(self, commit_id):
594 610 if not isinstance(commit_id, basestring):
595 611 raise TypeError("commit_id must be a string value")
596 612
597 613 def _validate_commit_idx(self, commit_idx):
598 614 if not isinstance(commit_idx, (int, long)):
599 615 raise TypeError("commit_idx must be a numeric value")
600 616
601 617 def _validate_branch_name(self, branch_name):
602 618 if branch_name and branch_name not in self.branches_all:
603 619 msg = ("Branch %s not found in %s" % (branch_name, self))
604 620 raise BranchDoesNotExistError(msg)
605 621
606 622 #
607 623 # Supporting deprecated API parts
608 624 # TODO: johbo: consider to move this into a mixin
609 625 #
610 626
611 627 @property
612 628 def EMPTY_CHANGESET(self):
613 629 warnings.warn(
614 630 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
615 631 return self.EMPTY_COMMIT_ID
616 632
617 633 @property
618 634 def revisions(self):
619 635 warnings.warn("Use commits attribute instead", DeprecationWarning)
620 636 return self.commit_ids
621 637
622 638 @revisions.setter
623 639 def revisions(self, value):
624 640 warnings.warn("Use commits attribute instead", DeprecationWarning)
625 641 self.commit_ids = value
626 642
627 643 def get_changeset(self, revision=None, pre_load=None):
628 644 warnings.warn("Use get_commit instead", DeprecationWarning)
629 645 commit_id = None
630 646 commit_idx = None
631 647 if isinstance(revision, basestring):
632 648 commit_id = revision
633 649 else:
634 650 commit_idx = revision
635 651 return self.get_commit(
636 652 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
637 653
638 654 def get_changesets(
639 655 self, start=None, end=None, start_date=None, end_date=None,
640 656 branch_name=None, pre_load=None):
641 657 warnings.warn("Use get_commits instead", DeprecationWarning)
642 658 start_id = self._revision_to_commit(start)
643 659 end_id = self._revision_to_commit(end)
644 660 return self.get_commits(
645 661 start_id=start_id, end_id=end_id, start_date=start_date,
646 662 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
647 663
648 664 def _revision_to_commit(self, revision):
649 665 """
650 666 Translates a revision to a commit_id
651 667
652 668 Helps to support the old changeset based API which allows to use
653 669 commit ids and commit indices interchangeable.
654 670 """
655 671 if revision is None:
656 672 return revision
657 673
658 674 if isinstance(revision, basestring):
659 675 commit_id = revision
660 676 else:
661 677 commit_id = self.commit_ids[revision]
662 678 return commit_id
663 679
664 680 @property
665 681 def in_memory_changeset(self):
666 682 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
667 683 return self.in_memory_commit
668 684
669 685 def get_path_permissions(self, username):
670 686 """
671 687 Returns a path permission checker or None if not supported
672 688
673 689 :param username: session user name
674 690 :return: an instance of BasePathPermissionChecker or None
675 691 """
676 692 return None
677 693
678 694 def install_hooks(self, force=False):
679 695 return self._remote.install_hooks(force)
680 696
681 697
682 698 class BaseCommit(object):
683 699 """
684 700 Each backend should implement it's commit representation.
685 701
686 702 **Attributes**
687 703
688 704 ``repository``
689 705 repository object within which commit exists
690 706
691 707 ``id``
692 708 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
693 709 just ``tip``.
694 710
695 711 ``raw_id``
696 712 raw commit representation (i.e. full 40 length sha for git
697 713 backend)
698 714
699 715 ``short_id``
700 716 shortened (if apply) version of ``raw_id``; it would be simple
701 717 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
702 718 as ``raw_id`` for subversion
703 719
704 720 ``idx``
705 721 commit index
706 722
707 723 ``files``
708 724 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
709 725
710 726 ``dirs``
711 727 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
712 728
713 729 ``nodes``
714 730 combined list of ``Node`` objects
715 731
716 732 ``author``
717 733 author of the commit, as unicode
718 734
719 735 ``message``
720 736 message of the commit, as unicode
721 737
722 738 ``parents``
723 739 list of parent commits
724 740
725 741 """
726 742
727 743 branch = None
728 744 """
729 745 Depending on the backend this should be set to the branch name of the
730 746 commit. Backends not supporting branches on commits should leave this
731 747 value as ``None``.
732 748 """
733 749
734 750 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
735 751 """
736 752 This template is used to generate a default prefix for repository archives
737 753 if no prefix has been specified.
738 754 """
739 755
740 756 def __str__(self):
741 757 return '<%s at %s:%s>' % (
742 758 self.__class__.__name__, self.idx, self.short_id)
743 759
744 760 def __repr__(self):
745 761 return self.__str__()
746 762
747 763 def __unicode__(self):
748 764 return u'%s:%s' % (self.idx, self.short_id)
749 765
750 766 def __eq__(self, other):
751 767 same_instance = isinstance(other, self.__class__)
752 768 return same_instance and self.raw_id == other.raw_id
753 769
754 770 def __json__(self):
755 771 parents = []
756 772 try:
757 773 for parent in self.parents:
758 774 parents.append({'raw_id': parent.raw_id})
759 775 except NotImplementedError:
760 776 # empty commit doesn't have parents implemented
761 777 pass
762 778
763 779 return {
764 780 'short_id': self.short_id,
765 781 'raw_id': self.raw_id,
766 782 'revision': self.idx,
767 783 'message': self.message,
768 784 'date': self.date,
769 785 'author': self.author,
770 786 'parents': parents,
771 787 'branch': self.branch
772 788 }
773 789
774 790 def __getstate__(self):
775 791 d = self.__dict__.copy()
776 792 d.pop('_remote', None)
777 793 d.pop('repository', None)
778 794 return d
779 795
780 796 def _get_refs(self):
781 797 return {
782 798 'branches': [self.branch] if self.branch else [],
783 799 'bookmarks': getattr(self, 'bookmarks', []),
784 800 'tags': self.tags
785 801 }
786 802
787 803 @LazyProperty
788 804 def last(self):
789 805 """
790 806 ``True`` if this is last commit in repository, ``False``
791 807 otherwise; trying to access this attribute while there is no
792 808 commits would raise `EmptyRepositoryError`
793 809 """
794 810 if self.repository is None:
795 811 raise CommitError("Cannot check if it's most recent commit")
796 812 return self.raw_id == self.repository.commit_ids[-1]
797 813
798 814 @LazyProperty
799 815 def parents(self):
800 816 """
801 817 Returns list of parent commits.
802 818 """
803 819 raise NotImplementedError
804 820
805 821 @property
806 822 def merge(self):
807 823 """
808 824 Returns boolean if commit is a merge.
809 825 """
810 826 return len(self.parents) > 1
811 827
812 828 @LazyProperty
813 829 def children(self):
814 830 """
815 831 Returns list of child commits.
816 832 """
817 833 raise NotImplementedError
818 834
819 835 @LazyProperty
820 836 def id(self):
821 837 """
822 838 Returns string identifying this commit.
823 839 """
824 840 raise NotImplementedError
825 841
826 842 @LazyProperty
827 843 def raw_id(self):
828 844 """
829 845 Returns raw string identifying this commit.
830 846 """
831 847 raise NotImplementedError
832 848
833 849 @LazyProperty
834 850 def short_id(self):
835 851 """
836 852 Returns shortened version of ``raw_id`` attribute, as string,
837 853 identifying this commit, useful for presentation to users.
838 854 """
839 855 raise NotImplementedError
840 856
841 857 @LazyProperty
842 858 def idx(self):
843 859 """
844 860 Returns integer identifying this commit.
845 861 """
846 862 raise NotImplementedError
847 863
848 864 @LazyProperty
849 865 def committer(self):
850 866 """
851 867 Returns committer for this commit
852 868 """
853 869 raise NotImplementedError
854 870
855 871 @LazyProperty
856 872 def committer_name(self):
857 873 """
858 874 Returns committer name for this commit
859 875 """
860 876
861 877 return author_name(self.committer)
862 878
863 879 @LazyProperty
864 880 def committer_email(self):
865 881 """
866 882 Returns committer email address for this commit
867 883 """
868 884
869 885 return author_email(self.committer)
870 886
871 887 @LazyProperty
872 888 def author(self):
873 889 """
874 890 Returns author for this commit
875 891 """
876 892
877 893 raise NotImplementedError
878 894
879 895 @LazyProperty
880 896 def author_name(self):
881 897 """
882 898 Returns author name for this commit
883 899 """
884 900
885 901 return author_name(self.author)
886 902
887 903 @LazyProperty
888 904 def author_email(self):
889 905 """
890 906 Returns author email address for this commit
891 907 """
892 908
893 909 return author_email(self.author)
894 910
895 911 def get_file_mode(self, path):
896 912 """
897 913 Returns stat mode of the file at `path`.
898 914 """
899 915 raise NotImplementedError
900 916
901 917 def is_link(self, path):
902 918 """
903 919 Returns ``True`` if given `path` is a symlink
904 920 """
905 921 raise NotImplementedError
906 922
    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        Abstract - concrete backends must override.
        """
        raise NotImplementedError
912 928
    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        Abstract - concrete backends must override.
        """
        raise NotImplementedError
918 934
919 935 def get_file_commit(self, path, pre_load=None):
920 936 """
921 937 Returns last commit of the file at the given `path`.
922 938
923 939 :param pre_load: Optional. List of commit attributes to load.
924 940 """
925 941 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
926 942 if not commits:
927 943 raise RepositoryError(
928 944 'Failed to fetch history for path {}. '
929 945 'Please check if such path exists in your repository'.format(
930 946 path))
931 947 return commits[0]
932 948
    def get_file_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.
        Abstract - concrete backends must override.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
944 960
    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line.
        Abstract - concrete backends must override.

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
953 969
    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.
        Abstract - concrete backends must override.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError
963 979
    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.
        Abstract - concrete backends must override.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError
972 988
973 989 def get_largefile_node(self, path):
974 990 """
975 991 Returns the path to largefile from Mercurial/Git-lfs storage.
976 992 or None if it's not a largefile node
977 993 """
978 994 return None
979 995
    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: accepted but unused here - presumably consumed by
            the backend; TODO confirm intended semantics.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation; defaults
            to this commit's own date when not given.

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # fall back to the commit date for the archive timestamp
        mtime = mtime or time.mktime(self.date.timetuple())

        # collect (path, mode, is_link, raw content) for every file reachable
        # from this commit's root
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            # 0644 is a Python 2 octal literal (rw-r--r--)
            file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))

        # actual archive creation is delegated to the vcsserver connection
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1026 1042
1027 1043 def _validate_archive_prefix(self, prefix):
1028 1044 if prefix is None:
1029 1045 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1030 1046 repo_name=safe_str(self.repository.name),
1031 1047 short_id=self.short_id)
1032 1048 elif not isinstance(prefix, str):
1033 1049 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1034 1050 elif prefix.startswith('/'):
1035 1051 raise VCSError("Prefix cannot start with leading slash")
1036 1052 elif prefix.strip() == '':
1037 1053 raise VCSError("Prefix cannot be empty")
1038 1054 return prefix
1039 1055
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit (the node at the
        empty path).
        """
        return self.get_node('')
1046 1062
1047 1063 def next(self, branch=None):
1048 1064 """
1049 1065 Returns next commit from current, if branch is gives it will return
1050 1066 next commit belonging to this branch
1051 1067
1052 1068 :param branch: show commits within the given named branch
1053 1069 """
1054 1070 indexes = xrange(self.idx + 1, self.repository.count())
1055 1071 return self._find_next(indexes, branch)
1056 1072
1057 1073 def prev(self, branch=None):
1058 1074 """
1059 1075 Returns previous commit from current, if branch is gives it will
1060 1076 return previous commit belonging to this branch
1061 1077
1062 1078 :param branch: show commit within the given named branch
1063 1079 """
1064 1080 indexes = xrange(self.idx - 1, -1, -1)
1065 1081 return self._find_next(indexes, branch)
1066 1082
1067 1083 def _find_next(self, indexes, branch=None):
1068 1084 if branch and self.branch != branch:
1069 1085 raise VCSError('Branch option used on commit not belonging '
1070 1086 'to that branch')
1071 1087
1072 1088 for next_idx in indexes:
1073 1089 commit = self.repository.get_commit(commit_idx=next_idx)
1074 1090 if branch and branch != commit.branch:
1075 1091 continue
1076 1092 return commit
1077 1093 raise CommitDoesNotExistError
1078 1094
1079 1095 def diff(self, ignore_whitespace=True, context=3):
1080 1096 """
1081 1097 Returns a `Diff` object representing the change made by this commit.
1082 1098 """
1083 1099 parent = (
1084 1100 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1085 1101 diff = self.repository.get_diff(
1086 1102 parent, self,
1087 1103 ignore_whitespace=ignore_whitespace,
1088 1104 context=context)
1089 1105 return diff
1090 1106
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        Abstract - concrete backends must override.
        """
        raise NotImplementedError
1097 1113
    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        Abstract - concrete backends must override.
        """
        raise NotImplementedError
1104 1120
    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        Abstract - concrete backends must override.
        """
        raise NotImplementedError
1111 1127
1112 1128 @LazyProperty
1113 1129 def size(self):
1114 1130 """
1115 1131 Returns total number of bytes from contents of all filenodes.
1116 1132 """
1117 1133 return sum((node.size for node in self.get_filenodes_generator()))
1118 1134
1119 1135 def walk(self, topurl=''):
1120 1136 """
1121 1137 Similar to os.walk method. Insted of filesystem it walks through
1122 1138 commit starting at given ``topurl``. Returns generator of tuples
1123 1139 (topnode, dirnodes, filenodes).
1124 1140 """
1125 1141 topnode = self.get_node(topurl)
1126 1142 if not topnode.is_dir():
1127 1143 return
1128 1144 yield (topnode, topnode.dirs, topnode.files)
1129 1145 for dirnode in topnode.dirs:
1130 1146 for tup in self.walk(dirnode.path):
1131 1147 yield tup
1132 1148
1133 1149 def get_filenodes_generator(self):
1134 1150 """
1135 1151 Returns generator that yields *all* file nodes.
1136 1152 """
1137 1153 for topnode, dirs, files in self.walk():
1138 1154 for node in files:
1139 1155 yield node
1140 1156
1141 1157 #
1142 1158 # Utilities for sub classes to support consistent behavior
1143 1159 #
1144 1160
1145 1161 def no_node_at_path(self, path):
1146 1162 return NodeDoesNotExistError(
1147 1163 u"There is no file nor directory at the given path: "
1148 1164 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1149 1165
1150 1166 def _fix_path(self, path):
1151 1167 """
1152 1168 Paths are stored without trailing slash so we need to get rid off it if
1153 1169 needed.
1154 1170 """
1155 1171 return path.rstrip('/')
1156 1172
1157 1173 #
1158 1174 # Deprecated API based on changesets
1159 1175 #
1160 1176
1161 1177 @property
1162 1178 def revision(self):
1163 1179 warnings.warn("Use idx instead", DeprecationWarning)
1164 1180 return self.idx
1165 1181
1166 1182 @revision.setter
1167 1183 def revision(self, value):
1168 1184 warnings.warn("Use idx instead", DeprecationWarning)
1169 1185 self.idx = value
1170 1186
1171 1187 def get_file_changeset(self, path):
1172 1188 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1173 1189 return self.get_file_commit(path)
1174 1190
1175 1191
class BaseChangesetClass(type):
    # Metaclass making ``isinstance(x, BaseChangeset)`` succeed for any
    # ``BaseCommit`` instance, easing the changeset -> commit rename.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1180 1196
1181 1197
class BaseChangeset(BaseCommit):
    # Deprecated alias kept for backward compatibility; instantiating it
    # warns and otherwise behaves like ``BaseCommit``.

    __metaclass__ = BaseChangesetClass  # Python 2 style metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1190 1206
1191 1207
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
          latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
          marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
          marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
          marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
          commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            # re-raise with a message explaining why *change* cannot work
            # on an empty repository
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
          be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
          be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
          ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    # default to the repository head plus a None placeholder
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # NOTE(review): `node` is whatever the loop above iterated last,
            # not necessarily one of the missing nodes - the message may name
            # the wrong path; consider reporting `missing` instead.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`. Abstract - concrete backends must override.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1420 1436
1421 1437
class BaseInMemoryChangesetClass(type):
    # Metaclass making ``isinstance(x, BaseInMemoryChangeset)`` succeed for
    # any ``BaseInMemoryCommit`` instance.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1426 1442
1427 1443
class BaseInMemoryChangeset(BaseInMemoryCommit):
    # Deprecated alias kept for backward compatibility; instantiating it
    # warns and otherwise behaves like ``BaseInMemoryCommit``.

    __metaclass__ = BaseInMemoryChangesetClass  # Python 2 style metaclass hook

    def __new__(cls, *args, **kwargs):
        # NOTE(review): the warning text looks off - presumably it should
        # suggest BaseInMemoryCommit over BaseInMemoryChangeset; the runtime
        # string is left untouched here, confirm before changing it.
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1436 1452
1437 1453
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit, usable as a stand-in when a repository has no
    commits. A specific hash may be passed as ``commit_id`` on creation.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # epoch as a neutral default date
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # resolve the backend's default branch name from `alias`;
        # implicitly None when no alias was given
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        """Shortened (12 char) form of ``raw_id``."""
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_file_commit(self, path, pre_load=None):
        # FIX: accept `pre_load` to match BaseCommit.get_file_commit's
        # signature - callers passing it previously got a TypeError.
        # The empty commit is its own "last commit" for every path.
        return self

    def get_file_content(self, path):
        # no content exists in an empty commit
        return u''

    def get_file_size(self, path):
        return 0
1488 1504
1489 1505
class EmptyChangesetClass(type):
    # Metaclass making ``isinstance(x, EmptyChangeset)`` succeed for any
    # ``EmptyCommit`` instance.

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1494 1510
1495 1511
class EmptyChangeset(EmptyCommit):
    # Deprecated alias of ``EmptyCommit`` kept for the old changeset API;
    # maps the legacy constructor arguments onto EmptyCommit's.

    __metaclass__ = EmptyChangesetClass  # Python 2 style metaclass hook

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE(review): this calls super(EmptyCommit, ...) rather than
        # super(EmptyChangeset, ...), skipping EmptyCommit in the MRO;
        # harmless as long as no class in between defines __new__ - confirm.
        return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        # `requested_revision` is accepted only to warn legacy callers
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # deprecated alias for idx
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # deprecated alias for idx
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1524 1540
1525 1541
class EmptyRepository(BaseRepository):
    # Null-object repository: ignores all construction arguments and
    # produces empty diffs.
    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # local import keeps the git backend out of this module's
        # import-time dependencies
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1533 1549
1534 1550
class CollectionGenerator(object):
    """
    Lazy iterable over the commits identified by ``commit_ids``; each commit
    is only materialized (via the repository) when actually iterated.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository. Python 2 only; Python 3
        routes slicing through ``__getitem__`` instead.
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(self.repo, sliced_ids, pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (len(self))
1571 1587
1572 1588
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Returns an independent copy (each section dict is copied)."""
        duplicate = Config()
        for section_name, section_values in self._values.items():
            duplicate._values[section_name] = section_values.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterates (option, value) pairs of `section`; empty if missing."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Returns the value for `option` in `section`, or ``None``."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Stores `value` under `section`/`option`, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drops every option of `section`."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name in self._values:
            for option, value in self._values[section_name].items():
                serialized.append(
                    (safe_str(section_name), safe_str(option),
                     safe_str(value)))
        return serialized
1618 1634
1619 1635
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        Splits the diff into per-file ``diff --git a/file b/file`` chunks,
        returned as a generator of :class:`DiffChunk`.

        To make diffs consistent we must prepend a newline before splitting,
        and make sure we can detect the last chunk, as that one has a
        special rule (no newline is re-appended).
        """
        diff_parts = ('\n' + self.raw).split('\ndiff --git')
        # diff_parts[0] is any preamble before the first file section.
        # FIX: the previous `match = self._meta_re.match(header)` on that
        # preamble was a dead store (result never used) and was removed.

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1652 1668
1653 1669
class DiffChunk(object):
    """
    A single per-file section of a :class:`Diff`, as produced by splitting
    on ``diff --git``. The header is parsed with the parent diff's
    ``_header_re`` pattern.
    """

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk = chunk + '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1668 1684
1669 1685
class BasePathPermissionChecker(object):
    """
    Interface for path-level permission checks; build concrete instances
    via :meth:`create_from_patterns`.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Returns the cheapest checker covering the given include/exclude
        fnmatch-style pattern lists.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # FIX: was `raise NotImplemented()` - NotImplemented is a constant,
        # not an exception, so that line raised TypeError instead of the
        # intended abstract-method error.
        raise NotImplementedError()

    def has_access(self, path):
        # FIX: same NotImplemented() -> NotImplementedError() correction.
        raise NotImplementedError()
1687 1703
1688 1704
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1697 1713
1698 1714
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1707 1723
1708 1724
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Checker matching paths against fnmatch-style include/exclude patterns.
    Excludes win over includes; a path matching neither list is denied.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        for pattern in self.excludes_re:
            if pattern.match(path):
                return False
        for pattern in self.includes_re:
            if pattern.match(path):
                return True
        return False
@@ -1,1006 +1,1006 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
# NOTE(review): the bracket placement looks malformed -- as written this
# matches any string *starting* with 12 hex chars (or a '[').  The intent
# was likely r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$'; verify all callers
# before tightening, since _get_commit_id also checks commit_ids membership.
SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')

log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 update_after_clone=False, with_wire=None, bare=False):
        """
        Open (or create, when ``create`` is True) the git repository at
        ``repo_path``, wiring up the remote connection used for all
        low-level git operations.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        # every low-level git operation is delegated to this remote
        self._remote = connection.Git(
            self.path, self.config, with_wire=with_wire)

        self._init_repo(create, src_url, update_after_clone, bare)

        # caches
        self._commit_ids = {}
72 72
    @LazyProperty
    def bare(self):
        # Cached: asks the remote whether this repository is bare.
        return self._remote.bare()
76 76
    @LazyProperty
    def head(self):
        # Cached: HEAD as reported by the remote.
        return self._remote.head()
80 80
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the sha -> index lookup table in sync with this list
        self._rebuild_cache(commit_ids)
        return commit_ids
90 90
91 91 def _rebuild_cache(self, commit_ids):
92 92 self._commit_ids = dict((commit_id, index)
93 93 for index, commit_id in enumerate(commit_ids))
94 94
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed
        :param opts: env options to pass into Subprocess command
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        # callers that expect stderr noise (eg. progress output) can
        # silence the debug logging via skip_stderr_log
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
111 111
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that git will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip the tool prefix from schemes like git+http://
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
132 132
133 133 @staticmethod
134 134 def is_valid_repository(path):
135 135 if os.path.isdir(os.path.join(path, '.git')):
136 136 return True
137 137 # check case of bare repository
138 138 try:
139 139 GitRepository(path)
140 140 return True
141 141 except VCSError:
142 142 pass
143 143 return False
144 144
    def _init_repo(self, create, src_url=None, update_after_clone=False,
                   bare=False):
        """
        Create (optionally cloning from ``src_url``) or open the repository
        at ``self.path``.

        :raises RepositoryError: when creation is requested over an existing
            path, or when the path does not contain a git repository.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        try:
            if create and src_url:
                # validate the source before cloning to fail early
                GitRepository.check_url(src_url, self.config)
                self.clone(src_url, update_after_clone, bare)
            elif create:
                os.makedirs(self.path, mode=0755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()
            else:
                # opening an existing repo: just validate the path
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
172 172
    def _get_all_commit_ids(self, filters=None):
        """
        Return all commit ids via ``git rev-list`` (ascending, date order),
        optionally narrowed by ``filters`` ('since', 'until', 'branch_name').
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors
        try:
            self._remote.head()
        except KeyError:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # NOTE(review): branch filtering replaces '--branches' with
                # '--tags' plus the branch ref -- confirm this is intended
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
209 209
210 210 def _get_commit_id(self, commit_id_or_idx):
211 211 def is_null(value):
212 212 return len(value) == commit_id_or_idx.count('0')
213 213
214 214 if self.is_empty():
215 215 raise EmptyRepositoryError("There are no commits yet")
216 216
217 217 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
218 218 return self.commit_ids[-1]
219 219
220 220 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
221 221 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
222 222 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
223 223 try:
224 224 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
225 225 except Exception:
226 226 msg = "Commit %s does not exist for %s" % (
227 227 commit_id_or_idx, self)
228 228 raise CommitDoesNotExistError(msg)
229 229
230 230 elif is_bstr:
231 231 # check full path ref, eg. refs/heads/master
232 232 ref_id = self._refs.get(commit_id_or_idx)
233 233 if ref_id:
234 234 return ref_id
235 235
236 236 # check branch name
237 237 branch_ids = self.branches.values()
238 238 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
239 239 if ref_id:
240 240 return ref_id
241 241
242 242 # check tag name
243 243 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
244 244 if ref_id:
245 245 return ref_id
246 246
247 247 if (not SHA_PATTERN.match(commit_id_or_idx) or
248 248 commit_id_or_idx not in self.commit_ids):
249 249 msg = "Commit %s does not exist for %s" % (
250 250 commit_id_or_idx, self)
251 251 raise CommitDoesNotExistError(msg)
252 252
253 253 # Ensure we return full id
254 254 if not SHA_PATTERN.match(str(commit_id_or_idx)):
255 255 raise CommitDoesNotExistError(
256 256 "Given commit id %s not recognized" % commit_id_or_idx)
257 257 return commit_id_or_idx
258 258
259 259 def get_hook_location(self):
260 260 """
261 261 returns absolute path to location where hooks are stored
262 262 """
263 263 loc = os.path.join(self.path, 'hooks')
264 264 if not self.bare:
265 265 loc = os.path.join(self.path, '.git', 'hooks')
266 266 return loc
267 267
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to filesystem timestamps
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
279 279
280 280 def _get_fs_mtime(self):
281 281 idx_loc = '' if self.bare else '.git'
282 282 # fallback to filesystem
283 283 in_path = os.path.join(self.path, idx_loc, "index")
284 284 he_path = os.path.join(self.path, idx_loc, "HEAD")
285 285 if os.path.exists(in_path):
286 286 return os.stat(in_path).st_mtime
287 287 else:
288 288 return os.stat(he_path).st_mtime
289 289
    @LazyProperty
    def description(self):
        # repository description from the remote; falls back to the default
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
294 294
295 295 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
296 296 if self.is_empty():
297 297 return OrderedDict()
298 298
299 299 result = []
300 300 for ref, sha in self._refs.iteritems():
301 301 if ref.startswith(prefix):
302 302 ref_name = ref
303 303 if strip_prefix:
304 304 ref_name = ref[len(prefix):]
305 305 result.append((safe_unicode(ref_name), sha))
306 306
307 307 def get_name(entry):
308 308 return entry[0]
309 309
310 310 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
311 311
    def _get_branches(self):
        # branch name -> sha, derived from refs/heads/*
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
314 314
    @LazyProperty
    def branches(self):
        # cached mapping of branch name -> head sha
        return self._get_branches()
318 318
    @LazyProperty
    def branches_closed(self):
        # always empty for git; kept for cross-backend API compatibility
        return {}
322 322
    @LazyProperty
    def bookmarks(self):
        # always empty for git; kept for cross-backend API compatibility
        return {}
326 326
327 327 @LazyProperty
328 328 def branches_all(self):
329 329 all_branches = {}
330 330 all_branches.update(self.branches)
331 331 all_branches.update(self.branches_closed)
332 332 return all_branches
333 333
    @LazyProperty
    def tags(self):
        # cached mapping of tag name -> sha
        return self._get_tags()
337 337
    def _get_tags(self):
        # tag name -> sha from refs/tags/*, reverse-sorted by name
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
341 341
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        # only the ref is written; the message is not stored (see TODO above)
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # refresh cached refs/tags so the new tag is visible immediately
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit
366 366
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        # tags are stored as loose ref files; remove the file directly
        tagpath = vcspath.join(
            self._remote.get_refs_path(), 'refs', 'tags', name)
        try:
            os.remove(tagpath)
            # refresh cached refs/tags so the removal is visible immediately
            self._refs = self._get_refs()
            self.tags = self._get_tags()
        except OSError as e:
            raise RepositoryError(e.strerror)
388 388
    def _get_refs(self):
        # full refs mapping (ref path -> sha) from the remote
        return self._remote.get_refs()
391 391
    @LazyProperty
    def _refs(self):
        # cached refs; explicitly reassigned by mutating operations
        # such as tag(), remove_tag() and strip()
        return self._get_refs()
395 395
396 396 @property
397 397 def _ref_tree(self):
398 398 node = tree = {}
399 399 for ref, sha in self._refs.iteritems():
400 400 path = ref.split('/')
401 401 for bit in path[:-1]:
402 402 node = node.setdefault(bit, {})
403 403 node[path[-1]] = sha
404 404 node = tree
405 405 return tree
406 406
407 407 def get_remote_ref(self, ref_name):
408 408 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
409 409 try:
410 410 return self._refs[ref_key]
411 411 except Exception:
412 412 return
413 413
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            # an index is resolved like any other identifier below
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
433 433
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # slicing below is end-exclusive; +1 keeps `end` inclusive
        if end_pos is not None:
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #    filter_.append({'start': start_pos})
        #    filter_.append({'end': end_pos})

        if filter_:
            # any filter forces a fresh rev-list call instead of the cache
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
517 517
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: if given, must equal ``path``; diffing two different
          paths is not supported.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']
        if ignore_whitespace:
            flags.append('-w')

        # diffing against the empty state needs 'show' instead of 'diff'
        if commit1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit2.raw_id]
        else:
            cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

        if path:
            cmd.extend(['--', path])

        stdout, __ = self.run_git_command(cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit1 == self.EMPTY_COMMIT:
            lines = stdout.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            stdout = '\n'.join(lines[x:]) + '\n'
        return GitDiff(stdout)
565 565
    def strip(self, commit_id, branch_name):
        """
        Remove ``commit_id`` from ``branch_name`` by resetting the branch
        head to the commit's first parent; merge commits are refused.
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # refresh the cached commit list and the sha -> index mapping
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
577 577
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the common ancestor id of ``commit_id1`` (in this repo) and
        ``commit_id2`` (in ``repo2``), or None when none can be determined.
        """
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repo: walk the commits repo2 has that we are missing
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repo: git merge-base answers directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        return ancestor_id
600 600
601 601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 602 repo1 = self
603 603 ancestor_id = None
604 604
605 605 if commit_id1 == commit_id2:
606 606 commits = []
607 607 elif repo1 != repo2:
608 608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 609 repo2.path)
610 610 commits = [
611 611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 612 for commit_id in reversed(missing_ids)]
613 613 else:
614 614 output, __ = repo1.run_git_command(
615 615 ['log', '--reverse', '--pretty=format: %H', '-s',
616 616 '%s..%s' % (commit_id1, commit_id2)])
617 617 commits = [
618 618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620 620
621 621 return commits
622 622
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # cached by LazyProperty, so repeated access reuses one instance
        return GitInMemoryCommit(self)
629 629
    def clone(self, url, update_after_clone=True, bare=False):
        """
        Tries to clone commits from external location.

        :param update_after_clone: If set to ``False``, git won't checkout
           working directory
        :param bare: If set to ``True``, repository would be cloned into
           *bare* git repository (no working directory at all).
        """
        # init_bare and init expect empty dir created to proceed
        if not os.path.exists(self.path):
            os.mkdir(self.path)

        if bare:
            self._remote.init_bare()
        else:
            self._remote.init()

        # '^{}' is git's suffix marking peeled (dereferenced) tag refs
        deferred = '^{}'
        valid_refs = ('refs/heads', 'refs/tags', 'HEAD')

        return self._remote.clone(
            url, deferred, valid_refs, update_after_clone)
653 653
    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location. We use fetch here since
        pull in git does merges and we want to be compatible with hg backend so
        pull == fetch in this case
        """
        self.fetch(url, commit_ids=commit_ids)
661 661
662 662 def fetch(self, url, commit_ids=None):
663 663 """
664 664 Tries to fetch changes from external location.
665 665 """
666 666 refs = None
667 667
668 668 if commit_ids is not None:
669 669 remote_refs = self._remote.get_remote_refs(url)
670 670 refs = [
671 671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
672 672 self._remote.fetch(url, refs=refs)
673 673
    def push(self, url):
        # refs=None: the remote side decides which refs get pushed
        # NOTE(review): confirm against sync_push implementation
        refs = None
        self._remote.sync_push(url, refs=refs)
677 677
    def set_refs(self, ref_name, commit_id):
        # point ref_name at commit_id (creates or updates the ref)
        self._remote.set_refs(ref_name, commit_id)
680 680
    def remove_ref(self, ref_name):
        # delete the given ref via the remote
        self._remote.remove_ref(ref_name)
683 683
    def _update_server_info(self):
        """
        runs git's update-server-info command in this repo instance
        """
        self._remote.update_server_info()
689 689
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        # symbolic name HEAD points at, eg. 'master'
        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
705 705
706 706 def _checkout(self, branch_name, create=False, force=False):
707 707 """
708 708 Checkout a branch in the working directory.
709 709
710 710 It tries to create the branch if create is True, failing if the branch
711 711 already exists.
712 712
713 713 It only works for non bare repositories (i.e. repositories with a
714 714 working copy)
715 715 """
716 716 if self.bare:
717 717 raise RepositoryError('Cannot checkout branches in a bare git repo')
718 718
719 719 cmd = ['checkout']
720 720 if force:
721 721 cmd.append('-f')
722 722 if create:
723 723 cmd.append('-b')
724 724 cmd.append(branch_name)
725 725 self.run_git_command(cmd, fail_on_stderr=False)
726 726
    def _identify(self):
        """
        Return the current state of the working directory.
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        # full sha that HEAD currently resolves to
        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()
739 739
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.

        :param clone_path: destination path for the clone.
        :param branch_name: branch to check out in the clone.
        :param source_branch: optional extra branch fetched into the clone
            so later merge operations have both sides available.
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
759 759
    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.

        :param use_origin: when True, force-update this repo's local branch
            of the same name via the refspec
            ``+<branch>:refs/heads/<branch>``.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
775 775
776 776 def _local_reset(self, branch_name):
777 777 branch_name = '{}'.format(branch_name)
778 778 cmd = ['reset', '--hard', branch_name]
779 779 self.run_git_command(cmd, fail_on_stderr=False)
780 780
    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        # FETCH_HEAD location differs between bare and non-bare layouts
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if ' not-for-merge ' in line:
                    continue
                # keep only the sha: drop the tab-separated description
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads
802 802
    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        # enable_hooks is accepted for API symmetry but has no effect here
        return GitRepository(shadow_repository_path)
805 805
    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
        Pull a branch from a local repository.

        :param ff_only: when True only fast-forward pulls are allowed.
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch)
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull']
        if ff_only:
            cmd.append('--ff-only')
        cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)
822 822
823 823 def _local_merge(self, merge_message, user_name, user_email, heads):
824 824 """
825 825 Merge the given head into the checked out branch.
826 826
827 827 It will force a merge commit.
828 828
829 829 Currently it raises an error if the repo is empty, as it is not possible
830 830 to create a merge commit in an empty repo.
831 831
832 832 :param merge_message: The message to use for the merge commit.
833 833 :param heads: the heads to merge.
834 834 """
835 835 if self.bare:
836 836 raise RepositoryError('Cannot merge into a bare git repository')
837 837
838 838 if not heads:
839 839 return
840 840
841 841 if self.is_empty():
842 842 # TODO(skreft): do somehting more robust in this case.
843 843 raise RepositoryError(
844 844 'Do not know how to merge into empty repositories yet')
845 845
846 846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
847 847 # commit message. We also specify the user who is doing the merge.
848 848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
849 849 '-c', 'user.email=%s' % safe_str(user_email),
850 850 'merge', '--no-ff', '-m', safe_str(merge_message)]
851 851 cmd.extend(heads)
852 852 try:
853 853 output = self.run_git_command(cmd, fail_on_stderr=False)
854 854 except RepositoryError:
855 855 # Cleanup any merge leftovers
856 856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
857 857 raise
858 858
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # NOTE(review): presumably consumed by the RhodeCode hook
                # wrappers to skip hook execution -- verify
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
890 890
891 891 def _get_new_pr_branch(self, source_branch, target_branch):
892 892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
893 893 pr_branches = []
894 894 for branch in self.branches:
895 895 if branch.startswith(prefix):
896 896 pr_branches.append(int(branch[len(prefix):]))
897 897
898 898 if not pr_branches:
899 899 branch_id = 0
900 900 else:
901 901 branch_id = max(pr_branches) + 1
902 902
903 903 return '%s%d' % (prefix, branch_id)
904 904
905 def _merge_repo(self, shadow_repository_path, target_ref,
905 def _maybe_prepare_merge_workspace(
906 self, repo_id, workspace_id, target_ref, source_ref):
907 shadow_repository_path = self._get_shadow_repository_path(
908 repo_id, workspace_id)
909 if not os.path.exists(shadow_repository_path):
910 self._local_clone(
911 shadow_repository_path, target_ref.name, source_ref.name)
912 log.debug(
913 'Prepared shadow repository in %s', shadow_repository_path)
914
915 return shadow_repository_path
916
917 def _merge_repo(self, repo_id, workspace_id, target_ref,
906 918 source_repo, source_ref, merge_message,
907 919 merger_name, merger_email, dry_run=False,
908 920 use_rebase=False, close_branch=False):
909 921 if target_ref.commit_id != self.branches[target_ref.name]:
910 922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
911 923 target_ref.commit_id, self.branches[target_ref.name])
912 924 return MergeResponse(
913 925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
914 926
915 shadow_repo = GitRepository(shadow_repository_path)
927 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 repo_id, workspace_id, target_ref, source_ref)
929 shadow_repo = self._get_shadow_instance(shadow_repository_path)
930
916 931 # checkout source, if it's different. Otherwise we could not
917 932 # fetch proper commits for merge testing
918 933 if source_ref.name != target_ref.name:
919 934 if shadow_repo.get_remote_ref(source_ref.name):
920 935 shadow_repo._checkout(source_ref.name, force=True)
921 936
922 937 # checkout target, and fetch changes
923 938 shadow_repo._checkout(target_ref.name, force=True)
924 939
925 940 # fetch/reset pull the target, in case it is changed
926 941 # this handles even force changes
927 942 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
928 943 shadow_repo._local_reset(target_ref.name)
929 944
930 945 # Need to reload repo to invalidate the cache, or otherwise we cannot
931 946 # retrieve the last target commit.
932 shadow_repo = GitRepository(shadow_repository_path)
947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
933 948 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
934 949 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
935 950 target_ref, target_ref.commit_id,
936 951 shadow_repo.branches[target_ref.name])
937 952 return MergeResponse(
938 953 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
939 954
940 955 # calculate new branch
941 956 pr_branch = shadow_repo._get_new_pr_branch(
942 957 source_ref.name, target_ref.name)
943 958 log.debug('using pull-request merge branch: `%s`', pr_branch)
944 959 # checkout to temp branch, and fetch changes
945 960 shadow_repo._checkout(pr_branch, create=True)
946 961 try:
947 962 shadow_repo._local_fetch(source_repo.path, source_ref.name)
948 963 except RepositoryError:
949 964 log.exception('Failure when doing local fetch on git shadow repo')
950 965 return MergeResponse(
951 966 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
952 967
953 968 merge_ref = None
954 969 merge_failure_reason = MergeFailureReason.NONE
955 970 try:
956 971 shadow_repo._local_merge(merge_message, merger_name, merger_email,
957 972 [source_ref.commit_id])
958 973 merge_possible = True
959 974
960 975 # Need to reload repo to invalidate the cache, or otherwise we
961 976 # cannot retrieve the merge commit.
962 977 shadow_repo = GitRepository(shadow_repository_path)
963 978 merge_commit_id = shadow_repo.branches[pr_branch]
964 979
965 980 # Set a reference pointing to the merge commit. This reference may
966 981 # be used to easily identify the last successful merge commit in
967 982 # the shadow repository.
968 983 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
969 984 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
970 985 except RepositoryError:
971 986 log.exception('Failure when doing local merge on git shadow repo')
972 987 merge_possible = False
973 988 merge_failure_reason = MergeFailureReason.MERGE_FAILED
974 989
975 990 if merge_possible and not dry_run:
976 991 try:
977 992 shadow_repo._local_push(
978 993 pr_branch, self.path, target_ref.name, enable_hooks=True,
979 994 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
980 995 merge_succeeded = True
981 996 except RepositoryError:
982 997 log.exception(
983 998 'Failure when doing local push on git shadow repo')
984 999 merge_succeeded = False
985 1000 merge_failure_reason = MergeFailureReason.PUSH_FAILED
986 1001 else:
987 1002 merge_succeeded = False
988 1003
989 1004 return MergeResponse(
990 1005 merge_possible, merge_succeeded, merge_ref,
991 1006 merge_failure_reason)
992
993 def _get_shadow_repository_path(self, workspace_id):
994 # The name of the shadow repository must start with '.', so it is
995 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
996 return os.path.join(
997 os.path.dirname(self.path),
998 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
999
1000 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
1001 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
1002 if not os.path.exists(shadow_repository_path):
1003 self._local_clone(
1004 shadow_repository_path, target_ref.name, source_ref.name)
1005
1006 return shadow_repository_path
@@ -1,918 +1,915 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
59 59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 60 update_after_clone=False, with_wire=None):
61 61 """
62 62 Raises RepositoryError if repository could not be find at the given
63 63 ``repo_path``.
64 64
65 65 :param repo_path: local path of the repository
66 66 :param config: config object containing the repo configuration
67 67 :param create=False: if set to True, would try to create repository if
68 68 it does not exist rather than raising exception
69 69 :param src_url=None: would try to clone repository from given location
70 70 :param update_after_clone=False: sets update of working copy after
71 71 making a clone
72 72 """
73 73
74 74 self.path = safe_str(os.path.abspath(repo_path))
75 75 # mercurial since 4.4.X requires certain configuration to be present
76 76 # because sometimes we init the repos with config we need to meet
77 77 # special requirements
78 78 self.config = config if config else self.get_default_config(
79 79 default=[('extensions', 'largefiles', '1')])
80 80
81 81 self._remote = connection.Hg(
82 82 self.path, self.config, with_wire=with_wire)
83 83
84 84 self._init_repo(create, src_url, update_after_clone)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def commit_ids(self):
91 91 """
92 92 Returns list of commit ids, in ascending order. Being lazy
93 93 attribute allows external tools to inject shas from cache.
94 94 """
95 95 commit_ids = self._get_all_commit_ids()
96 96 self._rebuild_cache(commit_ids)
97 97 return commit_ids
98 98
99 99 def _rebuild_cache(self, commit_ids):
100 100 self._commit_ids = dict((commit_id, index)
101 101 for index, commit_id in enumerate(commit_ids))
102 102
103 103 @LazyProperty
104 104 def branches(self):
105 105 return self._get_branches()
106 106
107 107 @LazyProperty
108 108 def branches_closed(self):
109 109 return self._get_branches(active=False, closed=True)
110 110
111 111 @LazyProperty
112 112 def branches_all(self):
113 113 all_branches = {}
114 114 all_branches.update(self.branches)
115 115 all_branches.update(self.branches_closed)
116 116 return all_branches
117 117
118 118 def _get_branches(self, active=True, closed=False):
119 119 """
120 120 Gets branches for this repository
121 121 Returns only not closed active branches by default
122 122
123 123 :param active: return also active branches
124 124 :param closed: return also closed branches
125 125
126 126 """
127 127 if self.is_empty():
128 128 return {}
129 129
130 130 def get_name(ctx):
131 131 return ctx[0]
132 132
133 133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 134 self._remote.branches(active, closed).items()]
135 135
136 136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137 137
138 138 @LazyProperty
139 139 def tags(self):
140 140 """
141 141 Gets tags for this repository
142 142 """
143 143 return self._get_tags()
144 144
145 145 def _get_tags(self):
146 146 if self.is_empty():
147 147 return {}
148 148
149 149 def get_name(ctx):
150 150 return ctx[0]
151 151
152 152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 153 self._remote.tags().items()]
154 154
155 155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156 156
157 157 def tag(self, name, user, commit_id=None, message=None, date=None,
158 158 **kwargs):
159 159 """
160 160 Creates and returns a tag for the given ``commit_id``.
161 161
162 162 :param name: name for new tag
163 163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 164 :param commit_id: commit id for which new tag would be created
165 165 :param message: message of the tag's commit
166 166 :param date: date of tag's commit
167 167
168 168 :raises TagAlreadyExistError: if tag with same name already exists
169 169 """
170 170 if name in self.tags:
171 171 raise TagAlreadyExistError("Tag %s already exists" % name)
172 172 commit = self.get_commit(commit_id=commit_id)
173 173 local = kwargs.setdefault('local', False)
174 174
175 175 if message is None:
176 176 message = "Added tag %s for commit %s" % (name, commit.short_id)
177 177
178 178 date, tz = date_to_timestamp_plus_offset(date)
179 179
180 180 self._remote.tag(
181 181 name, commit.raw_id, message, local, user, date, tz)
182 182 self._remote.invalidate_vcs_cache()
183 183
184 184 # Reinitialize tags
185 185 self.tags = self._get_tags()
186 186 tag_id = self.tags[name]
187 187
188 188 return self.get_commit(commit_id=tag_id)
189 189
190 190 def remove_tag(self, name, user, message=None, date=None):
191 191 """
192 192 Removes tag with the given `name`.
193 193
194 194 :param name: name of the tag to be removed
195 195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 196 :param message: message of the tag's removal commit
197 197 :param date: date of tag's removal commit
198 198
199 199 :raises TagDoesNotExistError: if tag with given name does not exists
200 200 """
201 201 if name not in self.tags:
202 202 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 203 if message is None:
204 204 message = "Removed tag %s" % name
205 205 local = False
206 206
207 207 date, tz = date_to_timestamp_plus_offset(date)
208 208
209 209 self._remote.tag(name, nullid, message, local, user, date, tz)
210 210 self._remote.invalidate_vcs_cache()
211 211 self.tags = self._get_tags()
212 212
213 213 @LazyProperty
214 214 def bookmarks(self):
215 215 """
216 216 Gets bookmarks for this repository
217 217 """
218 218 return self._get_bookmarks()
219 219
220 220 def _get_bookmarks(self):
221 221 if self.is_empty():
222 222 return {}
223 223
224 224 def get_name(ctx):
225 225 return ctx[0]
226 226
227 227 _bookmarks = [
228 228 (safe_unicode(n), hexlify(h)) for n, h in
229 229 self._remote.bookmarks().items()]
230 230
231 231 return OrderedDict(sorted(_bookmarks, key=get_name))
232 232
233 233 def _get_all_commit_ids(self):
234 234 return self._remote.get_all_commit_ids('visible')
235 235
236 236 def get_diff(
237 237 self, commit1, commit2, path='', ignore_whitespace=False,
238 238 context=3, path1=None):
239 239 """
240 240 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 241 `commit2` since `commit1`.
242 242
243 243 :param commit1: Entry point from which diff is shown. Can be
244 244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 245 the changes since empty state of the repository until `commit2`
246 246 :param commit2: Until which commit changes should be shown.
247 247 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 248 changes. Defaults to ``False``.
249 249 :param context: How many lines before/after changed lines should be
250 250 shown. Defaults to ``3``.
251 251 """
252 252 self._validate_diff_commits(commit1, commit2)
253 253 if path1 is not None and path1 != path:
254 254 raise ValueError("Diff of two different paths not supported.")
255 255
256 256 if path:
257 257 file_filter = [self.path, path]
258 258 else:
259 259 file_filter = None
260 260
261 261 diff = self._remote.diff(
262 262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 263 opt_git=True, opt_ignorews=ignore_whitespace,
264 264 context=context)
265 265 return MercurialDiff(diff)
266 266
267 267 def strip(self, commit_id, branch=None):
268 268 self._remote.strip(commit_id, update=False, backup="none")
269 269
270 270 self._remote.invalidate_vcs_cache()
271 271 self.commit_ids = self._get_all_commit_ids()
272 272 self._rebuild_cache(self.commit_ids)
273 273
274 274 def verify(self):
275 275 verify = self._remote.verify()
276 276
277 277 self._remote.invalidate_vcs_cache()
278 278 return verify
279 279
280 280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 281 if commit_id1 == commit_id2:
282 282 return commit_id1
283 283
284 284 ancestors = self._remote.revs_from_revspec(
285 285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 286 other_path=repo2.path)
287 287 return repo2[ancestors[0]].raw_id if ancestors else None
288 288
289 289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 290 if commit_id1 == commit_id2:
291 291 commits = []
292 292 else:
293 293 if merge:
294 294 indexes = self._remote.revs_from_revspec(
295 295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 297 else:
298 298 indexes = self._remote.revs_from_revspec(
299 299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 300 commit_id1, other_path=repo2.path)
301 301
302 302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 303 for idx in indexes]
304 304
305 305 return commits
306 306
307 307 @staticmethod
308 308 def check_url(url, config):
309 309 """
310 310 Function will check given url and try to verify if it's a valid
311 311 link. Sometimes it may happened that mercurial will issue basic
312 312 auth request that can cause whole API to hang when used from python
313 313 or other external calls.
314 314
315 315 On failures it'll raise urllib2.HTTPError, exception is also thrown
316 316 when the return code is non 200
317 317 """
318 318 # check first if it's not an local url
319 319 if os.path.isdir(url) or url.startswith('file:'):
320 320 return True
321 321
322 322 # Request the _remote to verify the url
323 323 return connection.Hg.check_url(url, config.serialize())
324 324
325 325 @staticmethod
326 326 def is_valid_repository(path):
327 327 return os.path.isdir(os.path.join(path, '.hg'))
328 328
329 329 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 330 """
331 331 Function will check for mercurial repository in given path. If there
332 332 is no repository in that path it will raise an exception unless
333 333 `create` parameter is set to True - in that case repository would
334 334 be created.
335 335
336 336 If `src_url` is given, would try to clone repository from the
337 337 location at given clone_point. Additionally it'll make update to
338 338 working copy accordingly to `update_after_clone` flag.
339 339 """
340 340 if create and os.path.exists(self.path):
341 341 raise RepositoryError(
342 342 "Cannot create repository at %s, location already exist"
343 343 % self.path)
344 344
345 345 if src_url:
346 346 url = str(self._get_url(src_url))
347 347 MercurialRepository.check_url(url, self.config)
348 348
349 349 self._remote.clone(url, self.path, update_after_clone)
350 350
351 351 # Don't try to create if we've already cloned repo
352 352 create = False
353 353
354 354 if create:
355 355 os.makedirs(self.path, mode=0755)
356 356
357 357 self._remote.localrepository(create)
358 358
359 359 @LazyProperty
360 360 def in_memory_commit(self):
361 361 return MercurialInMemoryCommit(self)
362 362
363 363 @LazyProperty
364 364 def description(self):
365 365 description = self._remote.get_config_value(
366 366 'web', 'description', untrusted=True)
367 367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368 368
369 369 @LazyProperty
370 370 def contact(self):
371 371 contact = (
372 372 self._remote.get_config_value("web", "contact") or
373 373 self._remote.get_config_value("ui", "username"))
374 374 return safe_unicode(contact or self.DEFAULT_CONTACT)
375 375
376 376 @LazyProperty
377 377 def last_change(self):
378 378 """
379 379 Returns last change made on this repository as
380 380 `datetime.datetime` object.
381 381 """
382 382 try:
383 383 return self.get_commit().date
384 384 except RepositoryError:
385 385 tzoffset = makedate()[1]
386 386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387 387
388 388 def _get_fs_mtime(self):
389 389 # fallback to filesystem
390 390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 391 st_path = os.path.join(self.path, '.hg', "store")
392 392 if os.path.exists(cl_path):
393 393 return os.stat(cl_path).st_mtime
394 394 else:
395 395 return os.stat(st_path).st_mtime
396 396
397 397 def _get_url(self, url):
398 398 """
399 399 Returns normalized url. If schema is not given, would fall
400 400 to filesystem
401 401 (``file:///``) schema.
402 402 """
403 403 url = url.encode('utf8')
404 404 if url != 'default' and '://' not in url:
405 405 url = "file:" + urllib.pathname2url(url)
406 406 return url
407 407
408 408 def get_hook_location(self):
409 409 """
410 410 returns absolute path to location where hooks are stored
411 411 """
412 412 return os.path.join(self.path, '.hg', '.hgrc')
413 413
414 414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 415 """
416 416 Returns ``MercurialCommit`` object representing repository's
417 417 commit at the given `commit_id` or `commit_idx`.
418 418 """
419 419 if self.is_empty():
420 420 raise EmptyRepositoryError("There are no commits yet")
421 421
422 422 if commit_id is not None:
423 423 self._validate_commit_id(commit_id)
424 424 try:
425 425 idx = self._commit_ids[commit_id]
426 426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
427 427 except KeyError:
428 428 pass
429 429 elif commit_idx is not None:
430 430 self._validate_commit_idx(commit_idx)
431 431 try:
432 432 id_ = self.commit_ids[commit_idx]
433 433 if commit_idx < 0:
434 434 commit_idx += len(self.commit_ids)
435 435 return MercurialCommit(
436 436 self, id_, commit_idx, pre_load=pre_load)
437 437 except IndexError:
438 438 commit_id = commit_idx
439 439 else:
440 440 commit_id = "tip"
441 441
442 442 if isinstance(commit_id, unicode):
443 443 commit_id = safe_str(commit_id)
444 444
445 445 try:
446 446 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 447 except CommitDoesNotExistError:
448 448 msg = "Commit %s does not exist for %s" % (
449 449 commit_id, self)
450 450 raise CommitDoesNotExistError(msg)
451 451
452 452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453 453
454 454 def get_commits(
455 455 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 456 branch_name=None, show_hidden=False, pre_load=None):
457 457 """
458 458 Returns generator of ``MercurialCommit`` objects from start to end
459 459 (both are inclusive)
460 460
461 461 :param start_id: None, str(commit_id)
462 462 :param end_id: None, str(commit_id)
463 463 :param start_date: if specified, commits with commit date less than
464 464 ``start_date`` would be filtered out from returned set
465 465 :param end_date: if specified, commits with commit date greater than
466 466 ``end_date`` would be filtered out from returned set
467 467 :param branch_name: if specified, commits not reachable from given
468 468 branch would be filtered out from returned set
469 469 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 470 Mercurial evolve
471 471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
472 472 exist.
473 473 :raise CommitDoesNotExistError: If commit for given ``start`` or
474 474 ``end`` could not be found.
475 475 """
476 476 # actually we should check now if it's not an empty repo
477 477 branch_ancestors = False
478 478 if self.is_empty():
479 479 raise EmptyRepositoryError("There are no commits yet")
480 480 self._validate_branch_name(branch_name)
481 481
482 482 if start_id is not None:
483 483 self._validate_commit_id(start_id)
484 484 c_start = self.get_commit(commit_id=start_id)
485 485 start_pos = self._commit_ids[c_start.raw_id]
486 486 else:
487 487 start_pos = None
488 488
489 489 if end_id is not None:
490 490 self._validate_commit_id(end_id)
491 491 c_end = self.get_commit(commit_id=end_id)
492 492 end_pos = max(0, self._commit_ids[c_end.raw_id])
493 493 else:
494 494 end_pos = None
495 495
496 496 if None not in [start_id, end_id] and start_pos > end_pos:
497 497 raise RepositoryError(
498 498 "Start commit '%s' cannot be after end commit '%s'" %
499 499 (start_id, end_id))
500 500
501 501 if end_pos is not None:
502 502 end_pos += 1
503 503
504 504 commit_filter = []
505 505
506 506 if branch_name and not branch_ancestors:
507 507 commit_filter.append('branch("%s")' % (branch_name,))
508 508 elif branch_name and branch_ancestors:
509 509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
510 510
511 511 if start_date and not end_date:
512 512 commit_filter.append('date(">%s")' % (start_date,))
513 513 if end_date and not start_date:
514 514 commit_filter.append('date("<%s")' % (end_date,))
515 515 if start_date and end_date:
516 516 commit_filter.append(
517 517 'date(">%s") and date("<%s")' % (start_date, end_date))
518 518
519 519 if not show_hidden:
520 520 commit_filter.append('not obsolete()')
521 521 commit_filter.append('not hidden()')
522 522
523 523 # TODO: johbo: Figure out a simpler way for this solution
524 524 collection_generator = CollectionGenerator
525 525 if commit_filter:
526 526 commit_filter = ' and '.join(map(safe_str, commit_filter))
527 527 revisions = self._remote.rev_range([commit_filter])
528 528 collection_generator = MercurialIndexBasedCollectionGenerator
529 529 else:
530 530 revisions = self.commit_ids
531 531
532 532 if start_pos or end_pos:
533 533 revisions = revisions[start_pos:end_pos]
534 534
535 535 return collection_generator(self, revisions, pre_load=pre_load)
536 536
537 537 def pull(self, url, commit_ids=None):
538 538 """
539 539 Tries to pull changes from external location.
540 540
541 541 :param commit_ids: Optional. Can be set to a list of commit ids
542 542 which shall be pulled from the other repository.
543 543 """
544 544 url = self._get_url(url)
545 545 self._remote.pull(url, commit_ids=commit_ids)
546 546 self._remote.invalidate_vcs_cache()
547 547
548 548 def push(self, url):
549 549 url = self._get_url(url)
550 550 self._remote.sync_push(url)
551 551
552 552 def _local_clone(self, clone_path):
553 553 """
554 554 Create a local clone of the current repo.
555 555 """
556 556 self._remote.clone(self.path, clone_path, update_after_clone=True,
557 557 hooks=False)
558 558
559 559 def _update(self, revision, clean=False):
560 560 """
561 561 Update the working copy to the specified revision.
562 562 """
563 563 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
564 564 self._remote.update(revision, clean=clean)
565 565
566 566 def _identify(self):
567 567 """
568 568 Return the current state of the working directory.
569 569 """
570 570 return self._remote.identify().strip().rstrip('+')
571 571
572 572 def _heads(self, branch=None):
573 573 """
574 574 Return the commit ids of the repository heads.
575 575 """
576 576 return self._remote.heads(branch=branch).strip().split(' ')
577 577
578 578 def _ancestor(self, revision1, revision2):
579 579 """
580 580 Return the common ancestor of the two revisions.
581 581 """
582 582 return self._remote.ancestor(revision1, revision2)
583 583
584 584 def _local_push(
585 585 self, revision, repository_path, push_branches=False,
586 586 enable_hooks=False):
587 587 """
588 588 Push the given revision to the specified repository.
589 589
590 590 :param push_branches: allow to create branches in the target repo.
591 591 """
592 592 self._remote.push(
593 593 [revision], repository_path, hooks=enable_hooks,
594 594 push_branches=push_branches)
595 595
596 596 def _local_merge(self, target_ref, merge_message, user_name, user_email,
597 597 source_ref, use_rebase=False, dry_run=False):
598 598 """
599 599 Merge the given source_revision into the checked out revision.
600 600
601 601 Returns the commit id of the merge and a boolean indicating if the
602 602 commit needs to be pushed.
603 603 """
604 604 self._update(target_ref.commit_id)
605 605
606 606 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
607 607 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
608 608
609 609 if ancestor == source_ref.commit_id:
610 610 # Nothing to do, the changes were already integrated
611 611 return target_ref.commit_id, False
612 612
613 613 elif ancestor == target_ref.commit_id and is_the_same_branch:
614 614 # In this case we should force a commit message
615 615 return source_ref.commit_id, True
616 616
617 617 if use_rebase:
618 618 try:
619 619 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
620 620 target_ref.commit_id)
621 621 self.bookmark(bookmark_name, revision=source_ref.commit_id)
622 622 self._remote.rebase(
623 623 source=source_ref.commit_id, dest=target_ref.commit_id)
624 624 self._remote.invalidate_vcs_cache()
625 625 self._update(bookmark_name)
626 626 return self._identify(), True
627 627 except RepositoryError:
628 628 # The rebase-abort may raise another exception which 'hides'
629 629 # the original one, therefore we log it here.
630 630 log.exception('Error while rebasing shadow repo during merge.')
631 631
632 632 # Cleanup any rebase leftovers
633 633 self._remote.invalidate_vcs_cache()
634 634 self._remote.rebase(abort=True)
635 635 self._remote.invalidate_vcs_cache()
636 636 self._remote.update(clean=True)
637 637 raise
638 638 else:
639 639 try:
640 640 self._remote.merge(source_ref.commit_id)
641 641 self._remote.invalidate_vcs_cache()
642 642 self._remote.commit(
643 643 message=safe_str(merge_message),
644 644 username=safe_str('%s <%s>' % (user_name, user_email)))
645 645 self._remote.invalidate_vcs_cache()
646 646 return self._identify(), True
647 647 except RepositoryError:
648 648 # Cleanup any merge leftovers
649 649 self._remote.update(clean=True)
650 650 raise
651 651
652 652 def _local_close(self, target_ref, user_name, user_email,
653 653 source_ref, close_message=''):
654 654 """
655 655 Close the branch of the given source_revision
656 656
657 657 Returns the commit id of the close and a boolean indicating if the
658 658 commit needs to be pushed.
659 659 """
660 660 self._update(source_ref.commit_id)
661 661 message = close_message or "Closing branch: `{}`".format(source_ref.name)
662 662 try:
663 663 self._remote.commit(
664 664 message=safe_str(message),
665 665 username=safe_str('%s <%s>' % (user_name, user_email)),
666 666 close_branch=True)
667 667 self._remote.invalidate_vcs_cache()
668 668 return self._identify(), True
669 669 except RepositoryError:
670 670 # Cleanup any commit leftovers
671 671 self._remote.update(clean=True)
672 672 raise
673 673
674 674 def _is_the_same_branch(self, target_ref, source_ref):
675 675 return (
676 676 self._get_branch_name(target_ref) ==
677 677 self._get_branch_name(source_ref))
678 678
679 679 def _get_branch_name(self, ref):
680 680 if ref.type == 'branch':
681 681 return ref.name
682 682 return self._remote.ctx_branch(ref.commit_id)
683 683
684 def _get_shadow_repository_path(self, workspace_id):
685 # The name of the shadow repository must start with '.', so it is
686 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
687 return os.path.join(
688 os.path.dirname(self.path),
689 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
690
691 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
692 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
684 def _maybe_prepare_merge_workspace(
685 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
686 shadow_repository_path = self._get_shadow_repository_path(
687 repo_id, workspace_id)
693 688 if not os.path.exists(shadow_repository_path):
694 689 self._local_clone(shadow_repository_path)
695 690 log.debug(
696 691 'Prepared shadow repository in %s', shadow_repository_path)
697 692
698 693 return shadow_repository_path
699 694
700 def _merge_repo(self, shadow_repository_path, target_ref,
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
701 696 source_repo, source_ref, merge_message,
702 697 merger_name, merger_email, dry_run=False,
703 698 use_rebase=False, close_branch=False):
704 699
705 700 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
706 701 'rebase' if use_rebase else 'merge', dry_run)
707 702 if target_ref.commit_id not in self._heads():
708 703 return MergeResponse(
709 704 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
710 705
711 706 try:
712 707 if (target_ref.type == 'branch' and
713 708 len(self._heads(target_ref.name)) != 1):
714 709 return MergeResponse(
715 710 False, False, None,
716 711 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
717 712 except CommitDoesNotExistError:
718 713 log.exception('Failure when looking up branch heads on hg target')
719 714 return MergeResponse(
720 715 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
721 716
717 shadow_repository_path = self._maybe_prepare_merge_workspace(
718 repo_id, workspace_id, target_ref, source_ref)
722 719 shadow_repo = self._get_shadow_instance(shadow_repository_path)
723 720
724 721 log.debug('Pulling in target reference %s', target_ref)
725 722 self._validate_pull_reference(target_ref)
726 723 shadow_repo._local_pull(self.path, target_ref)
727 724 try:
728 725 log.debug('Pulling in source reference %s', source_ref)
729 726 source_repo._validate_pull_reference(source_ref)
730 727 shadow_repo._local_pull(source_repo.path, source_ref)
731 728 except CommitDoesNotExistError:
732 729 log.exception('Failure when doing local pull on hg shadow repo')
733 730 return MergeResponse(
734 731 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
735 732
736 733 merge_ref = None
737 734 merge_commit_id = None
738 735 close_commit_id = None
739 736 merge_failure_reason = MergeFailureReason.NONE
740 737
741 738 # enforce that close branch should be used only in case we source from
742 739 # an actual Branch
743 740 close_branch = close_branch and source_ref.type == 'branch'
744 741
745 742 # don't allow to close branch if source and target are the same
746 743 close_branch = close_branch and source_ref.name != target_ref.name
747 744
748 745 needs_push_on_close = False
749 746 if close_branch and not use_rebase and not dry_run:
750 747 try:
751 748 close_commit_id, needs_push_on_close = shadow_repo._local_close(
752 749 target_ref, merger_name, merger_email, source_ref)
753 750 merge_possible = True
754 751 except RepositoryError:
755 752 log.exception(
756 753 'Failure when doing close branch on hg shadow repo')
757 754 merge_possible = False
758 755 merge_failure_reason = MergeFailureReason.MERGE_FAILED
759 756 else:
760 757 merge_possible = True
761 758
762 759 needs_push = False
763 760 if merge_possible:
764 761 try:
765 762 merge_commit_id, needs_push = shadow_repo._local_merge(
766 763 target_ref, merge_message, merger_name, merger_email,
767 764 source_ref, use_rebase=use_rebase, dry_run=dry_run)
768 765 merge_possible = True
769 766
770 767 # read the state of the close action, if it
771 768 # maybe required a push
772 769 needs_push = needs_push or needs_push_on_close
773 770
774 771 # Set a bookmark pointing to the merge commit. This bookmark
775 772 # may be used to easily identify the last successful merge
776 773 # commit in the shadow repository.
777 774 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
778 775 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
779 776 except SubrepoMergeError:
780 777 log.exception(
781 778 'Subrepo merge error during local merge on hg shadow repo.')
782 779 merge_possible = False
783 780 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
784 781 needs_push = False
785 782 except RepositoryError:
786 783 log.exception('Failure when doing local merge on hg shadow repo')
787 784 merge_possible = False
788 785 merge_failure_reason = MergeFailureReason.MERGE_FAILED
789 786 needs_push = False
790 787
791 788 if merge_possible and not dry_run:
792 789 if needs_push:
793 790 # In case the target is a bookmark, update it, so after pushing
794 791 # the bookmarks is also updated in the target.
795 792 if target_ref.type == 'book':
796 793 shadow_repo.bookmark(
797 794 target_ref.name, revision=merge_commit_id)
798 795 try:
799 796 shadow_repo_with_hooks = self._get_shadow_instance(
800 797 shadow_repository_path,
801 798 enable_hooks=True)
802 799 # This is the actual merge action, we push from shadow
803 800 # into origin.
804 801 # Note: the push_branches option will push any new branch
805 802 # defined in the source repository to the target. This may
806 803 # be dangerous as branches are permanent in Mercurial.
807 804 # This feature was requested in issue #441.
808 805 shadow_repo_with_hooks._local_push(
809 806 merge_commit_id, self.path, push_branches=True,
810 807 enable_hooks=True)
811 808
812 809 # maybe we also need to push the close_commit_id
813 810 if close_commit_id:
814 811 shadow_repo_with_hooks._local_push(
815 812 close_commit_id, self.path, push_branches=True,
816 813 enable_hooks=True)
817 814 merge_succeeded = True
818 815 except RepositoryError:
819 816 log.exception(
820 817 'Failure when doing local push from the shadow '
821 818 'repository to the target repository.')
822 819 merge_succeeded = False
823 820 merge_failure_reason = MergeFailureReason.PUSH_FAILED
824 821 else:
825 822 merge_succeeded = True
826 823 else:
827 824 merge_succeeded = False
828 825
829 826 return MergeResponse(
830 827 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
831 828
832 829 def _get_shadow_instance(
833 830 self, shadow_repository_path, enable_hooks=False):
834 831 config = self.config.copy()
835 832 if not enable_hooks:
836 833 config.clear_section('hooks')
837 834 return MercurialRepository(shadow_repository_path, config)
838 835
839 836 def _validate_pull_reference(self, reference):
840 837 if not (reference.name in self.bookmarks or
841 838 reference.name in self.branches or
842 839 self.get_commit(reference.commit_id)):
843 840 raise CommitDoesNotExistError(
844 841 'Unknown branch, bookmark or commit id')
845 842
846 843 def _local_pull(self, repository_path, reference):
847 844 """
848 845 Fetch a branch, bookmark or commit from a local repository.
849 846 """
850 847 repository_path = os.path.abspath(repository_path)
851 848 if repository_path == self.path:
852 849 raise ValueError('Cannot pull from the same repository')
853 850
854 851 reference_type_to_option_name = {
855 852 'book': 'bookmark',
856 853 'branch': 'branch',
857 854 }
858 855 option_name = reference_type_to_option_name.get(
859 856 reference.type, 'revision')
860 857
861 858 if option_name == 'revision':
862 859 ref = reference.commit_id
863 860 else:
864 861 ref = reference.name
865 862
866 863 options = {option_name: [ref]}
867 864 self._remote.pull_cmd(repository_path, hooks=False, **options)
868 865 self._remote.invalidate_vcs_cache()
869 866
870 867 def bookmark(self, bookmark, revision=None):
871 868 if isinstance(bookmark, unicode):
872 869 bookmark = safe_str(bookmark)
873 870 self._remote.bookmark(bookmark, revision=revision)
874 871 self._remote.invalidate_vcs_cache()
875 872
876 873 def get_path_permissions(self, username):
877 874 hgacl_file = os.path.join(self.path, '.hg/hgacl')
878 875
879 876 def read_patterns(suffix):
880 877 svalue = None
881 878 try:
882 879 svalue = hgacl.get('narrowhgacl', username + suffix)
883 880 except configparser.NoOptionError:
884 881 try:
885 882 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
886 883 except configparser.NoOptionError:
887 884 pass
888 885 if not svalue:
889 886 return None
890 887 result = ['/']
891 888 for pattern in svalue.split():
892 889 result.append(pattern)
893 890 if '*' not in pattern and '?' not in pattern:
894 891 result.append(pattern + '/*')
895 892 return result
896 893
897 894 if os.path.exists(hgacl_file):
898 895 try:
899 896 hgacl = configparser.RawConfigParser()
900 897 hgacl.read(hgacl_file)
901 898
902 899 includes = read_patterns('.includes')
903 900 excludes = read_patterns('.excludes')
904 901 return BasePathPermissionChecker.create_from_patterns(
905 902 includes, excludes)
906 903 except BaseException as e:
907 904 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
908 905 hgacl_file, self.name, e)
909 906 raise exceptions.RepositoryRequirementError(msg)
910 907 else:
911 908 return None
912 909
913 910
914 911 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
915 912
916 913 def _commit_factory(self, commit_id):
917 914 return self.repo.get_commit(
918 915 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,4524 +1,4530 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from beaker.cache import cache_region
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52
53 53 from pyramid.threadlocal import get_current_request
54 54
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance
57 57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 58 from rhodecode.lib.utils2 import (
59 59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 61 glob2re, StrictAttributeDict, cleaned_uri)
62 62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 63 JsonRaw
64 64 from rhodecode.lib.ext_json import json
65 65 from rhodecode.lib.caching_query import FromCache
66 66 from rhodecode.lib.encrypt import AESCipher
67 67
68 68 from rhodecode.model.meta import Base, Session
69 69
70 70 URL_SEP = '/'
71 71 log = logging.getLogger(__name__)
72 72
73 73 # =============================================================================
74 74 # BASE CLASSES
75 75 # =============================================================================
76 76
77 77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 78 # beaker.session.secret if first is not set.
79 79 # and initialized at environment.py
80 80 ENCRYPTION_KEY = None
81 81
82 82 # used to sort permissions by types, '#' used here is not allowed to be in
83 83 # usernames, and it's very early in sorted string.printable table.
84 84 PERMISSION_TYPE_SORT = {
85 85 'admin': '####',
86 86 'write': '###',
87 87 'read': '##',
88 88 'none': '#',
89 89 }
90 90
91 91
92 92 def display_user_sort(obj):
93 93 """
94 94 Sort function used to sort permissions in .permissions() function of
95 95 Repository, RepoGroup, UserGroup. Also it put the default user in front
96 96 of all other resources
97 97 """
98 98
99 99 if obj.username == User.DEFAULT_USER:
100 100 return '#####'
101 101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 102 return prefix + obj.username
103 103
104 104
105 105 def display_user_group_sort(obj):
106 106 """
107 107 Sort function used to sort permissions in .permissions() function of
108 108 Repository, RepoGroup, UserGroup. Also it put the default user in front
109 109 of all other resources
110 110 """
111 111
112 112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 113 return prefix + obj.users_group_name
114 114
115 115
116 116 def _hash_key(k):
117 117 return md5_safe(k)
118 118
119 119
120 120 def in_filter_generator(qry, items, limit=500):
121 121 """
122 122 Splits IN() into multiple with OR
123 123 e.g.::
124 124 cnt = Repository.query().filter(
125 125 or_(
126 126 *in_filter_generator(Repository.repo_id, range(100000))
127 127 )).count()
128 128 """
129 129 if not items:
130 130 # empty list will cause empty query which might cause security issues
131 131 # this can lead to hidden unpleasant results
132 132 items = [-1]
133 133
134 134 parts = []
135 135 for chunk in xrange(0, len(items), limit):
136 136 parts.append(
137 137 qry.in_(items[chunk: chunk + limit])
138 138 )
139 139
140 140 return parts
141 141
142 142
143 143 class EncryptedTextValue(TypeDecorator):
144 144 """
145 145 Special column for encrypted long text data, use like::
146 146
147 147 value = Column("encrypted_value", EncryptedValue(), nullable=False)
148 148
149 149 This column is intelligent so if value is in unencrypted form it return
150 150 unencrypted form, but on save it always encrypts
151 151 """
152 152 impl = Text
153 153
154 154 def process_bind_param(self, value, dialect):
155 155 if not value:
156 156 return value
157 157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 158 # protect against double encrypting if someone manually starts
159 159 # doing
160 160 raise ValueError('value needs to be in unencrypted format, ie. '
161 161 'not starting with enc$aes')
162 162 return 'enc$aes_hmac$%s' % AESCipher(
163 163 ENCRYPTION_KEY, hmac=True).encrypt(value)
164 164
165 165 def process_result_value(self, value, dialect):
166 166 import rhodecode
167 167
168 168 if not value:
169 169 return value
170 170
171 171 parts = value.split('$', 3)
172 172 if not len(parts) == 3:
173 173 # probably not encrypted values
174 174 return value
175 175 else:
176 176 if parts[0] != 'enc':
177 177 # parts ok but without our header ?
178 178 return value
179 179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 180 'rhodecode.encrypted_values.strict') or True)
181 181 # at that stage we know it's our encryption
182 182 if parts[1] == 'aes':
183 183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 184 elif parts[1] == 'aes_hmac':
185 185 decrypted_data = AESCipher(
186 186 ENCRYPTION_KEY, hmac=True,
187 187 strict_verification=enc_strict_mode).decrypt(parts[2])
188 188 else:
189 189 raise ValueError(
190 190 'Encryption type part is wrong, must be `aes` '
191 191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 192 return decrypted_data
193 193
194 194
195 195 class BaseModel(object):
196 196 """
197 197 Base Model for all classes
198 198 """
199 199
200 200 @classmethod
201 201 def _get_keys(cls):
202 202 """return column names for this model """
203 203 return class_mapper(cls).c.keys()
204 204
205 205 def get_dict(self):
206 206 """
207 207 return dict with keys and values corresponding
208 208 to this model data """
209 209
210 210 d = {}
211 211 for k in self._get_keys():
212 212 d[k] = getattr(self, k)
213 213
214 214 # also use __json__() if present to get additional fields
215 215 _json_attr = getattr(self, '__json__', None)
216 216 if _json_attr:
217 217 # update with attributes from __json__
218 218 if callable(_json_attr):
219 219 _json_attr = _json_attr()
220 220 for k, val in _json_attr.iteritems():
221 221 d[k] = val
222 222 return d
223 223
224 224 def get_appstruct(self):
225 225 """return list with keys and values tuples corresponding
226 226 to this model data """
227 227
228 228 lst = []
229 229 for k in self._get_keys():
230 230 lst.append((k, getattr(self, k),))
231 231 return lst
232 232
233 233 def populate_obj(self, populate_dict):
234 234 """populate model with data from given populate_dict"""
235 235
236 236 for k in self._get_keys():
237 237 if k in populate_dict:
238 238 setattr(self, k, populate_dict[k])
239 239
240 240 @classmethod
241 241 def query(cls):
242 242 return Session().query(cls)
243 243
244 244 @classmethod
245 245 def get(cls, id_):
246 246 if id_:
247 247 return cls.query().get(id_)
248 248
249 249 @classmethod
250 250 def get_or_404(cls, id_):
251 251 from pyramid.httpexceptions import HTTPNotFound
252 252
253 253 try:
254 254 id_ = int(id_)
255 255 except (TypeError, ValueError):
256 256 raise HTTPNotFound()
257 257
258 258 res = cls.query().get(id_)
259 259 if not res:
260 260 raise HTTPNotFound()
261 261 return res
262 262
263 263 @classmethod
264 264 def getAll(cls):
265 265 # deprecated and left for backward compatibility
266 266 return cls.get_all()
267 267
268 268 @classmethod
269 269 def get_all(cls):
270 270 return cls.query().all()
271 271
272 272 @classmethod
273 273 def delete(cls, id_):
274 274 obj = cls.query().get(id_)
275 275 Session().delete(obj)
276 276
277 277 @classmethod
278 278 def identity_cache(cls, session, attr_name, value):
279 279 exist_in_session = []
280 280 for (item_cls, pkey), instance in session.identity_map.items():
281 281 if cls == item_cls and getattr(instance, attr_name) == value:
282 282 exist_in_session.append(instance)
283 283 if exist_in_session:
284 284 if len(exist_in_session) == 1:
285 285 return exist_in_session[0]
286 286 log.exception(
287 287 'multiple objects with attr %s and '
288 288 'value %s found with same name: %r',
289 289 attr_name, value, exist_in_session)
290 290
291 291 def __repr__(self):
292 292 if hasattr(self, '__unicode__'):
293 293 # python repr needs to return str
294 294 try:
295 295 return safe_str(self.__unicode__())
296 296 except UnicodeDecodeError:
297 297 pass
298 298 return '<DB:%s>' % (self.__class__.__name__)
299 299
300 300
301 301 class RhodeCodeSetting(Base, BaseModel):
302 302 __tablename__ = 'rhodecode_settings'
303 303 __table_args__ = (
304 304 UniqueConstraint('app_settings_name'),
305 305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 307 )
308 308
309 309 SETTINGS_TYPES = {
310 310 'str': safe_str,
311 311 'int': safe_int,
312 312 'unicode': safe_unicode,
313 313 'bool': str2bool,
314 314 'list': functools.partial(aslist, sep=',')
315 315 }
316 316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 317 GLOBAL_CONF_KEY = 'app_settings'
318 318
319 319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323 323
324 324 def __init__(self, key='', val='', type='unicode'):
325 325 self.app_settings_name = key
326 326 self.app_settings_type = type
327 327 self.app_settings_value = val
328 328
329 329 @validates('_app_settings_value')
330 330 def validate_settings_value(self, key, val):
331 331 assert type(val) == unicode
332 332 return val
333 333
334 334 @hybrid_property
335 335 def app_settings_value(self):
336 336 v = self._app_settings_value
337 337 _type = self.app_settings_type
338 338 if _type:
339 339 _type = self.app_settings_type.split('.')[0]
340 340 # decode the encrypted value
341 341 if 'encrypted' in self.app_settings_type:
342 342 cipher = EncryptedTextValue()
343 343 v = safe_unicode(cipher.process_result_value(v, None))
344 344
345 345 converter = self.SETTINGS_TYPES.get(_type) or \
346 346 self.SETTINGS_TYPES['unicode']
347 347 return converter(v)
348 348
349 349 @app_settings_value.setter
350 350 def app_settings_value(self, val):
351 351 """
352 352 Setter that will always make sure we use unicode in app_settings_value
353 353
354 354 :param val:
355 355 """
356 356 val = safe_unicode(val)
357 357 # encode the encrypted value
358 358 if 'encrypted' in self.app_settings_type:
359 359 cipher = EncryptedTextValue()
360 360 val = safe_unicode(cipher.process_bind_param(val, None))
361 361 self._app_settings_value = val
362 362
363 363 @hybrid_property
364 364 def app_settings_type(self):
365 365 return self._app_settings_type
366 366
367 367 @app_settings_type.setter
368 368 def app_settings_type(self, val):
369 369 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 370 raise Exception('type must be one of %s got %s'
371 371 % (self.SETTINGS_TYPES.keys(), val))
372 372 self._app_settings_type = val
373 373
374 374 def __unicode__(self):
375 375 return u"<%s('%s:%s[%s]')>" % (
376 376 self.__class__.__name__,
377 377 self.app_settings_name, self.app_settings_value,
378 378 self.app_settings_type
379 379 )
380 380
381 381
382 382 class RhodeCodeUi(Base, BaseModel):
383 383 __tablename__ = 'rhodecode_ui'
384 384 __table_args__ = (
385 385 UniqueConstraint('ui_key'),
386 386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 388 )
389 389
390 390 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 391 # HG
392 392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 393 HOOK_PULL = 'outgoing.pull_logger'
394 394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 396 HOOK_PUSH = 'changegroup.push_logger'
397 397 HOOK_PUSH_KEY = 'pushkey.key_push'
398 398
399 399 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 400 # git part is currently hardcoded.
401 401
402 402 # SVN PATTERNS
403 403 SVN_BRANCH_ID = 'vcs_svn_branch'
404 404 SVN_TAG_ID = 'vcs_svn_tag'
405 405
406 406 ui_id = Column(
407 407 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 408 primary_key=True)
409 409 ui_section = Column(
410 410 "ui_section", String(255), nullable=True, unique=None, default=None)
411 411 ui_key = Column(
412 412 "ui_key", String(255), nullable=True, unique=None, default=None)
413 413 ui_value = Column(
414 414 "ui_value", String(255), nullable=True, unique=None, default=None)
415 415 ui_active = Column(
416 416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417 417
418 418 def __repr__(self):
419 419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 420 self.ui_key, self.ui_value)
421 421
422 422
423 423 class RepoRhodeCodeSetting(Base, BaseModel):
424 424 __tablename__ = 'repo_rhodecode_settings'
425 425 __table_args__ = (
426 426 UniqueConstraint(
427 427 'app_settings_name', 'repository_id',
428 428 name='uq_repo_rhodecode_setting_name_repo_id'),
429 429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 431 )
432 432
433 433 repository_id = Column(
434 434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 435 nullable=False)
436 436 app_settings_id = Column(
437 437 "app_settings_id", Integer(), nullable=False, unique=True,
438 438 default=None, primary_key=True)
439 439 app_settings_name = Column(
440 440 "app_settings_name", String(255), nullable=True, unique=None,
441 441 default=None)
442 442 _app_settings_value = Column(
443 443 "app_settings_value", String(4096), nullable=True, unique=None,
444 444 default=None)
445 445 _app_settings_type = Column(
446 446 "app_settings_type", String(255), nullable=True, unique=None,
447 447 default=None)
448 448
449 449 repository = relationship('Repository')
450 450
451 451 def __init__(self, repository_id, key='', val='', type='unicode'):
452 452 self.repository_id = repository_id
453 453 self.app_settings_name = key
454 454 self.app_settings_type = type
455 455 self.app_settings_value = val
456 456
457 457 @validates('_app_settings_value')
458 458 def validate_settings_value(self, key, val):
459 459 assert type(val) == unicode
460 460 return val
461 461
462 462 @hybrid_property
463 463 def app_settings_value(self):
464 464 v = self._app_settings_value
465 465 type_ = self.app_settings_type
466 466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 468 return converter(v)
469 469
470 470 @app_settings_value.setter
471 471 def app_settings_value(self, val):
472 472 """
473 473 Setter that will always make sure we use unicode in app_settings_value
474 474
475 475 :param val:
476 476 """
477 477 self._app_settings_value = safe_unicode(val)
478 478
479 479 @hybrid_property
480 480 def app_settings_type(self):
481 481 return self._app_settings_type
482 482
483 483 @app_settings_type.setter
484 484 def app_settings_type(self, val):
485 485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 486 if val not in SETTINGS_TYPES:
487 487 raise Exception('type must be one of %s got %s'
488 488 % (SETTINGS_TYPES.keys(), val))
489 489 self._app_settings_type = val
490 490
491 491 def __unicode__(self):
492 492 return u"<%s('%s:%s:%s[%s]')>" % (
493 493 self.__class__.__name__, self.repository.repo_name,
494 494 self.app_settings_name, self.app_settings_value,
495 495 self.app_settings_type
496 496 )
497 497
498 498
499 499 class RepoRhodeCodeUi(Base, BaseModel):
500 500 __tablename__ = 'repo_rhodecode_ui'
501 501 __table_args__ = (
502 502 UniqueConstraint(
503 503 'repository_id', 'ui_section', 'ui_key',
504 504 name='uq_repo_rhodecode_ui_repository_id_section_key'),
505 505 {'extend_existing': True, 'mysql_engine': 'InnoDB',
506 506 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
507 507 )
508 508
509 509 repository_id = Column(
510 510 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
511 511 nullable=False)
512 512 ui_id = Column(
513 513 "ui_id", Integer(), nullable=False, unique=True, default=None,
514 514 primary_key=True)
515 515 ui_section = Column(
516 516 "ui_section", String(255), nullable=True, unique=None, default=None)
517 517 ui_key = Column(
518 518 "ui_key", String(255), nullable=True, unique=None, default=None)
519 519 ui_value = Column(
520 520 "ui_value", String(255), nullable=True, unique=None, default=None)
521 521 ui_active = Column(
522 522 "ui_active", Boolean(), nullable=True, unique=None, default=True)
523 523
524 524 repository = relationship('Repository')
525 525
526 526 def __repr__(self):
527 527 return '<%s[%s:%s]%s=>%s]>' % (
528 528 self.__class__.__name__, self.repository.repo_name,
529 529 self.ui_section, self.ui_key, self.ui_value)
530 530
531 531
532 532 class User(Base, BaseModel):
533 533 __tablename__ = 'users'
534 534 __table_args__ = (
535 535 UniqueConstraint('username'), UniqueConstraint('email'),
536 536 Index('u_username_idx', 'username'),
537 537 Index('u_email_idx', 'email'),
538 538 {'extend_existing': True, 'mysql_engine': 'InnoDB',
539 539 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
540 540 )
541 541 DEFAULT_USER = 'default'
542 542 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
543 543 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
544 544
545 545 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
546 546 username = Column("username", String(255), nullable=True, unique=None, default=None)
547 547 password = Column("password", String(255), nullable=True, unique=None, default=None)
548 548 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
549 549 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
550 550 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
551 551 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
552 552 _email = Column("email", String(255), nullable=True, unique=None, default=None)
553 553 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
554 554 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
555 555
556 556 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
557 557 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
558 558 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
559 559 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
560 560 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
561 561 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
562 562
563 563 user_log = relationship('UserLog')
564 564 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
565 565
566 566 repositories = relationship('Repository')
567 567 repository_groups = relationship('RepoGroup')
568 568 user_groups = relationship('UserGroup')
569 569
570 570 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
571 571 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
572 572
573 573 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
574 574 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
575 575 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
576 576
577 577 group_member = relationship('UserGroupMember', cascade='all')
578 578
579 579 notifications = relationship('UserNotification', cascade='all')
580 580 # notifications assigned to this user
581 581 user_created_notifications = relationship('Notification', cascade='all')
582 582 # comments created by this user
583 583 user_comments = relationship('ChangesetComment', cascade='all')
584 584 # user profile extra info
585 585 user_emails = relationship('UserEmailMap', cascade='all')
586 586 user_ip_map = relationship('UserIpMap', cascade='all')
587 587 user_auth_tokens = relationship('UserApiKeys', cascade='all')
588 588 user_ssh_keys = relationship('UserSshKeys', cascade='all')
589 589
590 590 # gists
591 591 user_gists = relationship('Gist', cascade='all')
592 592 # user pull requests
593 593 user_pull_requests = relationship('PullRequest', cascade='all')
594 594 # external identities
595 595 extenal_identities = relationship(
596 596 'ExternalIdentity',
597 597 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
598 598 cascade='all')
599 599 # review rules
600 600 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601 601
602 602 def __unicode__(self):
603 603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 604 self.user_id, self.username)
605 605
    @hybrid_property
    def email(self):
        # primary e-mail address, backed by the `_email` column
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower-case; falsy values are stored as None
        self._email = val.lower() if val else None
613 613
    @hybrid_property
    def first_name(self):
        # HTML-escaped first name; empty/None values pass through unescaped
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        # HTML-escaped last name; empty/None values pass through unescaped
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname
627 627
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user.

        Returns the plain token string of the first non-expired
        ROLE_ALL token, or None when the user has none.
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key; this is deprecated for now —
        # the setter intentionally discards `val`
        self._api_key = None
647 647
    @property
    def reviewer_pull_requests(self):
        # all reviewer rows for this user, with pull requests eagerly loaded
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()
654 654
    @property
    def firstname(self):
        # alias for future: raw (unescaped) first name column
        return self.name
659 659
    @property
    def emails(self):
        # primary e-mail first, then all alternate e-mails by insertion order
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]
667 667
    @property
    def auth_tokens(self):
        # plain token strings of all tokens (including expired ones)
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        # all UserApiKeys rows of this user, oldest first; no expiry filter
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
678 678
    @LazyProperty
    def feed_token(self):
        # cached-on-instance feed token (computed once per object lifetime)
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        """
        Return the first ROLE_FEED token of this user, or a sentinel
        string when none exists.

        :param cache: use the long-term SQL cache region for the lookup
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("long_term", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'
695 695
    @classmethod
    def get(cls, user_id, cache=False):
        """
        Fetch a User by primary key; returns None for falsy ids.

        :param cache: use the short SQL cache region keyed by user_id
        """
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)
706 706
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """
        Return all non-expired tokens of ``user``; when ``role`` is given,
        limit to that role (ROLE_ALL tokens always qualify).
        """
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
716 716
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check a plain-text ``auth_token`` against this user's non-expired
        tokens, honoring role and repo-scope restrictions.

        :param auth_token: plain-text token supplied by the caller
        :param roles: optional list of acceptable token roles; ROLE_ALL is
            always accepted in addition
        :param scope_repo_id: numeric repo id of the calling scope; tokens
            bound to a different repo are skipped
        :return: True when a matching token (plain or hashed) is found
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        crypto_backend = auth.crypto_backend()

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plain-text and hashed buckets
        plain_tokens = []
        hash_tokens = []

        for token in tokens_q.all():
            # verify scope first
            if token.repo_id:
                # token has a scope, we need to verify it
                if scope_repo_id != token.repo_id:
                    log.debug(
                        'Scope mismatch: token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        token.repo, scope_repo_id)
                    # token has a scope, and it doesn't match, skip token
                    continue

            # hashed tokens are recognized by the backend's known prefix
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                hash_tokens.append(token.api_key)
            else:
                plain_tokens.append(token.api_key)

        # cheap exact comparison against plain-text tokens first
        is_plain_match = auth_token in plain_tokens
        if is_plain_match:
            return True

        for hashed in hash_tokens:
            # TODO(marcink): this is expensive to calculate, but most secure
            match = crypto_backend.hash_check(auth_token, hashed)
            if match:
                return True

        return False
767 767
    @property
    def ip_addresses(self):
        # whitelisted IP addresses/ranges registered for this user
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]
772 772
773 773 @property
774 774 def username_and_name(self):
775 775 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
776 776
777 777 @property
778 778 def username_or_name_or_email(self):
779 779 full_name = self.full_name if self.full_name is not ' ' else None
780 780 return self.username or full_name or self.email
781 781
782 782 @property
783 783 def full_name(self):
784 784 return '%s %s' % (self.first_name, self.last_name)
785 785
786 786 @property
787 787 def full_name_or_username(self):
788 788 return ('%s %s' % (self.first_name, self.last_name)
789 789 if (self.first_name and self.last_name) else self.username)
790 790
791 791 @property
792 792 def full_contact(self):
793 793 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
794 794
795 795 @property
796 796 def short_contact(self):
797 797 return '%s %s' % (self.first_name, self.last_name)
798 798
    @property
    def is_admin(self):
        # super-admin flag, backed by the `admin` column
        return self.admin
802 802
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user.

        Extra ``kwargs`` are forwarded to the AuthUser constructor.
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
809 809
    @hybrid_property
    def user_data(self):
        # JSON-decoded view of the `_user_data` column; empty dict fallback
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            # non-string column content (e.g. buffer) — treat as empty
            return {}

    @user_data.setter
    def user_data(self, val):
        # store a dict as JSON; any serialization error is logged, not raised
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
828 828
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username.

        :param case_insensitive: compare lower-cased on both sides
        :param cache: use the short SQL cache region (exact-match key)
        :param identity_cache: prefer the SQLAlchemy identity-map based
            cache; falls through to a normal query on a miss
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
851 851
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """
        Resolve a user from a non-expired auth token (exact, plain-text
        match against the stored api_key). Returns None on no match.
        """
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user
865 865
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Fetch a user by e-mail; checks the primary address first, then
        the alternate-address map (UserEmailMap). Returns None on no match.
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            ret = getattr(q.scalar(), 'user', None)

        return ret
895 895
    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        Matches by the author's e-mail first, then by the author name as
        username (both case-insensitive). Returns None if nothing matches.

        :param author: raw commit author string, e.g. ``Name <email>``
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user
915 915
916 916 def update_userdata(self, **kwargs):
917 917 usr = self
918 918 old = usr.user_data
919 919 old.update(**kwargs)
920 920 usr.user_data = old
921 921 Session().add(usr)
922 922 log.debug('updated userdata with ', kwargs)
923 923
    def update_lastlogin(self):
        """Update user lastlogin to the current time and mark dirty."""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)
929 929
    def update_lastactivity(self):
        """Update user lastactivity to the current time and mark dirty."""
        self.last_activity = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user `%s` last activity', self.username)
935 935
    def update_password(self, new_password):
        # stores the crypt-hashed form of the new password; caller commits
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
941 941
    @classmethod
    def get_first_super_admin(cls):
        # first admin account by query order; raises when none exists,
        # since a RhodeCode instance without an admin is unusable
        user = User.query().filter(User.admin == true()).first()
        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user
948 948
    @classmethod
    def get_all_super_admins(cls):
        """
        Returns all admin accounts sorted by username
        """
        return User.query().filter(User.admin == true())\
            .order_by(User.username.asc()).all()
956 956
    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        # the built-in anonymous/default account; must always exist
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user
969 969
    def _get_default_perms(self, user, suffix=''):
        # shared helper: map the user's permission rows through PermissionModel
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        # default permissions of this user, optionally suffixed keys
        return self._get_default_perms(self, suffix)
976 976
    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
            by a placeholder value to prevent exposing this data by accident. In case
            this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
            the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        # mask auth tokens with a fixed-width placeholder by default
        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            # caller explicitly asked for real token values
            data['auth_tokens'] = user.auth_tokens
        return data
1020 1020
1021 1021 def __json__(self):
1022 1022 data = {
1023 1023 'full_name': self.full_name,
1024 1024 'full_name_or_username': self.full_name_or_username,
1025 1025 'short_contact': self.short_contact,
1026 1026 'full_contact': self.full_contact,
1027 1027 }
1028 1028 data.update(self.get_api_data())
1029 1029 return data
1030 1030
1031 1031
class UserApiKeys(Base, BaseModel):
    """
    Auth tokens of a user, optionally scoped to a repo or repo group and
    restricted to a role (web, vcs, api, feed, ...).
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is not part of ROLES — presumably
    # excluded from the user-selectable roles on purpose; confirm before
    # changing.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # unix timestamp; -1 means "never expires"
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        # note: exposes the real token value; get_api_data() masks it
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        # API payload; token is obfuscated unless secrets were requested
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # -1 encodes "never expires"
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # translated human-readable role label; unknown roles pass through
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; otherwise token is global
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            return repr(self.repo_group) + ' (recursive)'
        return 'global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 chars + mask; None when no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
1133 1133
1134 1134
class UserEmailMap(Base, BaseModel):
    """
    Additional (alternative) e-mail addresses attached to a user.

    Addresses are unique across the table and must not duplicate any
    user's primary e-mail.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one (i.e. some user's primary
        # address); reject with an error if it is
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # BUG FIX: message previously read 'is present is user table'
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower-case; falsy values are stored as None
        self._email = val.lower() if val else None
1165 1165
1166 1166
class UserIpMap(Base, BaseModel):
    """
    Per-user IP whitelist entries; each row stores one address or
    CIDR range.
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address/CIDR into its [first, last] address pair;
        # strict=False accepts host bits set in the network part
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1202 1202
1203 1203
class UserSshKeys(Base, BaseModel):
    """
    SSH public keys registered for a user; fingerprints are unique
    across the whole table.
    """
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # note: deliberately excludes the raw key data
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
1239 1239
1240 1240
class UserLog(Base, BaseModel):
    """
    Audit-log entries; user and repository references use SET NULL on
    delete so history survives entity removal (the denormalized
    username/repository_name columns keep the label).
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # schema versions of the json payload columns
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias for the primary key used by consumers of the audit log
        return self.user_log_id

    @property
    def action_as_day(self):
        # calendar day of the action, for grouping per-day
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1289 1289
1290 1290
1291 1291 class UserGroup(Base, BaseModel):
1292 1292 __tablename__ = 'users_groups'
1293 1293 __table_args__ = (
1294 1294 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1295 1295 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1296 1296 )
1297 1297
1298 1298 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1299 1299 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1300 1300 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1301 1301 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1302 1302 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1303 1303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1304 1304 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1305 1305 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1306 1306
1307 1307 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1308 1308 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1309 1309 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1310 1310 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1311 1311 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1312 1312 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1313 1313
1314 1314 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1315 1315 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1316 1316
1317 1317 @classmethod
1318 1318 def _load_group_data(cls, column):
1319 1319 if not column:
1320 1320 return {}
1321 1321
1322 1322 try:
1323 1323 return json.loads(column) or {}
1324 1324 except TypeError:
1325 1325 return {}
1326 1326
1327 1327 @hybrid_property
1328 1328 def description_safe(self):
1329 1329 from rhodecode.lib import helpers as h
1330 1330 return h.escape(self.user_group_description)
1331 1331
1332 1332 @hybrid_property
1333 1333 def group_data(self):
1334 1334 return self._load_group_data(self._group_data)
1335 1335
1336 1336 @group_data.expression
1337 1337 def group_data(self, **kwargs):
1338 1338 return self._group_data
1339 1339
1340 1340 @group_data.setter
1341 1341 def group_data(self, val):
1342 1342 try:
1343 1343 self._group_data = json.dumps(val)
1344 1344 except Exception:
1345 1345 log.error(traceback.format_exc())
1346 1346
1347 1347 @classmethod
1348 1348 def _load_sync(cls, group_data):
1349 1349 if group_data:
1350 1350 return group_data.get('extern_type')
1351 1351
1352 1352 @property
1353 1353 def sync(self):
1354 1354 return self._load_sync(self.group_data)
1355 1355
1356 1356 def __unicode__(self):
1357 1357 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1358 1358 self.users_group_id,
1359 1359 self.users_group_name)
1360 1360
1361 1361 @classmethod
1362 1362 def get_by_group_name(cls, group_name, cache=False,
1363 1363 case_insensitive=False):
1364 1364 if case_insensitive:
1365 1365 q = cls.query().filter(func.lower(cls.users_group_name) ==
1366 1366 func.lower(group_name))
1367 1367
1368 1368 else:
1369 1369 q = cls.query().filter(cls.users_group_name == group_name)
1370 1370 if cache:
1371 1371 q = q.options(
1372 1372 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1373 1373 return q.scalar()
1374 1374
1375 1375 @classmethod
1376 1376 def get(cls, user_group_id, cache=False):
1377 1377 if not user_group_id:
1378 1378 return
1379 1379
1380 1380 user_group = cls.query()
1381 1381 if cache:
1382 1382 user_group = user_group.options(
1383 1383 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1384 1384 return user_group.get(user_group_id)
1385 1385
1386 1386 def permissions(self, with_admins=True, with_owner=True):
1387 1387 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1388 1388 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1389 1389 joinedload(UserUserGroupToPerm.user),
1390 1390 joinedload(UserUserGroupToPerm.permission),)
1391 1391
1392 1392 # get owners and admins and permissions. We do a trick of re-writing
1393 1393 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1394 1394 # has a global reference and changing one object propagates to all
1395 1395 # others. This means if admin is also an owner admin_row that change
1396 1396 # would propagate to both objects
1397 1397 perm_rows = []
1398 1398 for _usr in q.all():
1399 1399 usr = AttributeDict(_usr.user.get_dict())
1400 1400 usr.permission = _usr.permission.permission_name
1401 1401 perm_rows.append(usr)
1402 1402
1403 1403 # filter the perm rows by 'default' first and then sort them by
1404 1404 # admin,write,read,none permissions sorted again alphabetically in
1405 1405 # each group
1406 1406 perm_rows = sorted(perm_rows, key=display_user_sort)
1407 1407
1408 1408 _admin_perm = 'usergroup.admin'
1409 1409 owner_row = []
1410 1410 if with_owner:
1411 1411 usr = AttributeDict(self.user.get_dict())
1412 1412 usr.owner_row = True
1413 1413 usr.permission = _admin_perm
1414 1414 owner_row.append(usr)
1415 1415
1416 1416 super_admin_rows = []
1417 1417 if with_admins:
1418 1418 for usr in User.get_all_super_admins():
1419 1419 # if this admin is also owner, don't double the record
1420 1420 if usr.user_id == owner_row[0].user_id:
1421 1421 owner_row[0].admin_row = True
1422 1422 else:
1423 1423 usr = AttributeDict(usr.get_dict())
1424 1424 usr.admin_row = True
1425 1425 usr.permission = _admin_perm
1426 1426 super_admin_rows.append(usr)
1427 1427
1428 1428 return super_admin_rows + owner_row + perm_rows
1429 1429
1430 1430 def permission_user_groups(self):
1431 1431 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1432 1432 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1433 1433 joinedload(UserGroupUserGroupToPerm.target_user_group),
1434 1434 joinedload(UserGroupUserGroupToPerm.permission),)
1435 1435
1436 1436 perm_rows = []
1437 1437 for _user_group in q.all():
1438 1438 usr = AttributeDict(_user_group.user_group.get_dict())
1439 1439 usr.permission = _user_group.permission.permission_name
1440 1440 perm_rows.append(usr)
1441 1441
1442 1442 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1443 1443 return perm_rows
1444 1444
1445 1445 def _get_default_perms(self, user_group, suffix=''):
1446 1446 from rhodecode.model.permission import PermissionModel
1447 1447 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1448 1448
1449 1449 def get_default_perms(self, suffix=''):
1450 1450 return self._get_default_perms(self, suffix)
1451 1451
    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        Return a json-serializable dict describing this user group.

        :param with_group_members: also serialize all member users into
            a ``users`` key
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                # members are association rows; unwrap to the actual User
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1477 1477
1478 1478
class UserGroupMember(Base, BaseModel):
    """Association row linking a single user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings although both columns are
        # integer FKs; callers are expected to always pass real ids
        self.users_group_id = gr_id
        self.user_id = u_id
1496 1496
1497 1497
class RepositoryField(Base, BaseModel):
    """Extra user-defined metadata field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied."""
        # FIX: use the single source of truth (cls.PREFIX) instead of the
        # previously hard-coded 'ex_' literal, so it cannot drift from
        # un_prefix_key(); behavior is unchanged (PREFIX == 'ex_')
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from *key* when present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for *key* on *repo*, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1534 1534
1535 1535
class Repository(Base, BaseModel):
    """Database model of a single repository (git/hg/svn)."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # clone URL templates; the _ID variant addresses the repo by numeric id
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in ``repo_state``
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # origins of a repository lock (stored in the lock reason slot)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # private storage for the ``repo_name`` hybrid property
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of the name, kept in sync by the repo_name setter
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # private storage for the ``landing_rev`` hybrid property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # serialized 'user_id:time:reason' triple, see the ``locked`` property
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    # relationships
    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")
1642 1642
1643 1643 def __unicode__(self):
1644 1644 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1645 1645 safe_unicode(self.repo_name))
1646 1646
1647 1647 @hybrid_property
1648 1648 def description_safe(self):
1649 1649 from rhodecode.lib import helpers as h
1650 1650 return h.escape(self.description)
1651 1651
1652 1652 @hybrid_property
1653 1653 def landing_rev(self):
1654 1654 # always should return [rev_type, rev]
1655 1655 if self._landing_revision:
1656 1656 _rev_info = self._landing_revision.split(':')
1657 1657 if len(_rev_info) < 2:
1658 1658 _rev_info.insert(0, 'rev')
1659 1659 return [_rev_info[0], _rev_info[1]]
1660 1660 return [None, None]
1661 1661
1662 1662 @landing_rev.setter
1663 1663 def landing_rev(self, val):
1664 1664 if ':' not in val:
1665 1665 raise ValueError('value must be delimited with `:` and consist '
1666 1666 'of <rev_type>:<rev>, got %s instead' % val)
1667 1667 self._landing_revision = val
1668 1668
1669 1669 @hybrid_property
1670 1670 def locked(self):
1671 1671 if self._locked:
1672 1672 user_id, timelocked, reason = self._locked.split(':')
1673 1673 lock_values = int(user_id), timelocked, reason
1674 1674 else:
1675 1675 lock_values = [None, None, None]
1676 1676 return lock_values
1677 1677
1678 1678 @locked.setter
1679 1679 def locked(self, val):
1680 1680 if val and isinstance(val, (list, tuple)):
1681 1681 self._locked = ':'.join(map(str, val))
1682 1682 else:
1683 1683 self._locked = None
1684 1684
1685 1685 @hybrid_property
1686 1686 def changeset_cache(self):
1687 1687 from rhodecode.lib.vcs.backends.base import EmptyCommit
1688 1688 dummy = EmptyCommit().__json__()
1689 1689 if not self._changeset_cache:
1690 1690 return dummy
1691 1691 try:
1692 1692 return json.loads(self._changeset_cache)
1693 1693 except TypeError:
1694 1694 return dummy
1695 1695 except Exception:
1696 1696 log.error(traceback.format_exc())
1697 1697 return dummy
1698 1698
1699 1699 @changeset_cache.setter
1700 1700 def changeset_cache(self, val):
1701 1701 try:
1702 1702 self._changeset_cache = json.dumps(val)
1703 1703 except Exception:
1704 1704 log.error(traceback.format_exc())
1705 1705
1706 1706 @hybrid_property
1707 1707 def repo_name(self):
1708 1708 return self._repo_name
1709 1709
1710 1710 @repo_name.setter
1711 1711 def repo_name(self, value):
1712 1712 self._repo_name = value
1713 1713 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1714 1714
1715 1715 @classmethod
1716 1716 def normalize_repo_name(cls, repo_name):
1717 1717 """
1718 1718 Normalizes os specific repo_name to the format internally stored inside
1719 1719 database using URL_SEP
1720 1720
1721 1721 :param cls:
1722 1722 :param repo_name:
1723 1723 """
1724 1724 return cls.NAME_SEP.join(repo_name.split(os.sep))
1725 1725
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Fetch a repository by its name.

        :param cache: use a cached lookup instead of always hitting the db
        :param identity_cache: prefer the sqlalchemy identity-map based
            cache; falls through to a plain (uncached) query on a miss
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                # dogpile-style sql result cache keyed by the hashed name
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1742 1742
1743 1743 @classmethod
1744 1744 def get_by_id_or_repo_name(cls, repoid):
1745 1745 if isinstance(repoid, (int, long)):
1746 1746 try:
1747 1747 repo = cls.get(repoid)
1748 1748 except ValueError:
1749 1749 repo = None
1750 1750 else:
1751 1751 repo = cls.get_by_repo_name(repoid)
1752 1752 return repo
1753 1753
1754 1754 @classmethod
1755 1755 def get_by_full_path(cls, repo_full_path):
1756 1756 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1757 1757 repo_name = cls.normalize_repo_name(repo_name)
1758 1758 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1759 1759
1760 1760 @classmethod
1761 1761 def get_repo_forks(cls, repo_id):
1762 1762 return cls.query().filter(Repository.fork_id == repo_id)
1763 1763
1764 1764 @classmethod
1765 1765 def base_path(cls):
1766 1766 """
1767 1767 Returns base path when all repos are stored
1768 1768
1769 1769 :param cls:
1770 1770 """
1771 1771 q = Session().query(RhodeCodeUi)\
1772 1772 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1773 1773 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1774 1774 return q.one().ui_value
1775 1775
1776 1776 @classmethod
1777 1777 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1778 1778 case_insensitive=True):
1779 1779 q = Repository.query()
1780 1780
1781 1781 if not isinstance(user_id, Optional):
1782 1782 q = q.filter(Repository.user_id == user_id)
1783 1783
1784 1784 if not isinstance(group_id, Optional):
1785 1785 q = q.filter(Repository.group_id == group_id)
1786 1786
1787 1787 if case_insensitive:
1788 1788 q = q.order_by(func.lower(Repository.repo_name))
1789 1789 else:
1790 1790 q = q.order_by(Repository.repo_name)
1791 1791 return q.all()
1792 1792
1793 1793 @property
1794 1794 def forks(self):
1795 1795 """
1796 1796 Return forks of this repo
1797 1797 """
1798 1798 return Repository.get_repo_forks(self.repo_id)
1799 1799
1800 1800 @property
1801 1801 def parent(self):
1802 1802 """
1803 1803 Returns fork parent
1804 1804 """
1805 1805 return self.fork
1806 1806
1807 1807 @property
1808 1808 def just_name(self):
1809 1809 return self.repo_name.split(self.NAME_SEP)[-1]
1810 1810
1811 1811 @property
1812 1812 def groups_with_parents(self):
1813 1813 groups = []
1814 1814 if self.group is None:
1815 1815 return groups
1816 1816
1817 1817 cur_gr = self.group
1818 1818 groups.insert(0, cur_gr)
1819 1819 while 1:
1820 1820 gr = getattr(cur_gr, 'parent_group', None)
1821 1821 cur_gr = cur_gr.parent_group
1822 1822 if gr is None:
1823 1823 break
1824 1824 groups.insert(0, gr)
1825 1825
1826 1826 return groups
1827 1827
1828 1828 @property
1829 1829 def groups_and_repo(self):
1830 1830 return self.groups_with_parents, self
1831 1831
1832 1832 @LazyProperty
1833 1833 def repo_path(self):
1834 1834 """
1835 1835 Returns base full path for that repository means where it actually
1836 1836 exists on a filesystem
1837 1837 """
1838 1838 q = Session().query(RhodeCodeUi).filter(
1839 1839 RhodeCodeUi.ui_key == self.NAME_SEP)
1840 1840 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1841 1841 return q.one().ui_value
1842 1842
1843 1843 @property
1844 1844 def repo_full_path(self):
1845 1845 p = [self.repo_path]
1846 1846 # we need to split the name by / since this is how we store the
1847 1847 # names in the database, but that eventually needs to be converted
1848 1848 # into a valid system path
1849 1849 p += self.repo_name.split(self.NAME_SEP)
1850 1850 return os.path.join(*map(safe_unicode, p))
1851 1851
1852 1852 @property
1853 1853 def cache_keys(self):
1854 1854 """
1855 1855 Returns associated cache keys for that repo
1856 1856 """
1857 1857 return CacheKey.query()\
1858 1858 .filter(CacheKey.cache_args == self.repo_name)\
1859 1859 .order_by(CacheKey.cache_key)\
1860 1860 .all()
1861 1861
1862 1862 @property
1863 1863 def cached_diffs_relative_dir(self):
1864 1864 """
1865 1865 Return a relative to the repository store path of cached diffs
1866 1866 used for safe display for users, who shouldn't know the absolute store
1867 1867 path
1868 1868 """
1869 1869 return os.path.join(
1870 1870 os.path.dirname(self.repo_name),
1871 1871 self.cached_diffs_dir.split(os.path.sep)[-1])
1872 1872
1873 1873 @property
1874 1874 def cached_diffs_dir(self):
1875 1875 path = self.repo_full_path
1876 1876 return os.path.join(
1877 1877 os.path.dirname(path),
1878 1878 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1879 1879
1880 1880 def cached_diffs(self):
1881 1881 diff_cache_dir = self.cached_diffs_dir
1882 1882 if os.path.isdir(diff_cache_dir):
1883 1883 return os.listdir(diff_cache_dir)
1884 1884 return []
1885 1885
    def shadow_repos(self):
        # list shadow-repo directory names belonging to this repository,
        # located next to the repo in the store and keyed by numeric repo id
        # NOTE(review): startswith() on '.__shadow_repo_<id>' may also match
        # shadow repos of another repository whose id shares this prefix
        # (e.g. id 1 vs 12) -- confirm the full shadow naming scheme includes
        # a separator after the id
        shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
        return [
            x for x in os.listdir(os.path.dirname(self.repo_full_path))
            if x.startswith(shadow_repos_pattern)]
1891
1886 1892 def get_new_name(self, repo_name):
1887 1893 """
1888 1894 returns new full repository name based on assigned group and new new
1889 1895
1890 1896 :param group_name:
1891 1897 """
1892 1898 path_prefix = self.group.full_path_splitted if self.group else []
1893 1899 return self.NAME_SEP.join(path_prefix + [repo_name])
1894 1900
1895 1901 @property
1896 1902 def _config(self):
1897 1903 """
1898 1904 Returns db based config object.
1899 1905 """
1900 1906 from rhodecode.lib.utils import make_db_config
1901 1907 return make_db_config(clear_session=False, repo=self)
1902 1908
1903 1909 def permissions(self, with_admins=True, with_owner=True):
1904 1910 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1905 1911 q = q.options(joinedload(UserRepoToPerm.repository),
1906 1912 joinedload(UserRepoToPerm.user),
1907 1913 joinedload(UserRepoToPerm.permission),)
1908 1914
1909 1915 # get owners and admins and permissions. We do a trick of re-writing
1910 1916 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1911 1917 # has a global reference and changing one object propagates to all
1912 1918 # others. This means if admin is also an owner admin_row that change
1913 1919 # would propagate to both objects
1914 1920 perm_rows = []
1915 1921 for _usr in q.all():
1916 1922 usr = AttributeDict(_usr.user.get_dict())
1917 1923 usr.permission = _usr.permission.permission_name
1918 1924 perm_rows.append(usr)
1919 1925
1920 1926 # filter the perm rows by 'default' first and then sort them by
1921 1927 # admin,write,read,none permissions sorted again alphabetically in
1922 1928 # each group
1923 1929 perm_rows = sorted(perm_rows, key=display_user_sort)
1924 1930
1925 1931 _admin_perm = 'repository.admin'
1926 1932 owner_row = []
1927 1933 if with_owner:
1928 1934 usr = AttributeDict(self.user.get_dict())
1929 1935 usr.owner_row = True
1930 1936 usr.permission = _admin_perm
1931 1937 owner_row.append(usr)
1932 1938
1933 1939 super_admin_rows = []
1934 1940 if with_admins:
1935 1941 for usr in User.get_all_super_admins():
1936 1942 # if this admin is also owner, don't double the record
1937 1943 if usr.user_id == owner_row[0].user_id:
1938 1944 owner_row[0].admin_row = True
1939 1945 else:
1940 1946 usr = AttributeDict(usr.get_dict())
1941 1947 usr.admin_row = True
1942 1948 usr.permission = _admin_perm
1943 1949 super_admin_rows.append(usr)
1944 1950
1945 1951 return super_admin_rows + owner_row + perm_rows
1946 1952
1947 1953 def permission_user_groups(self):
1948 1954 q = UserGroupRepoToPerm.query().filter(
1949 1955 UserGroupRepoToPerm.repository == self)
1950 1956 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1951 1957 joinedload(UserGroupRepoToPerm.users_group),
1952 1958 joinedload(UserGroupRepoToPerm.permission),)
1953 1959
1954 1960 perm_rows = []
1955 1961 for _user_group in q.all():
1956 1962 usr = AttributeDict(_user_group.users_group.get_dict())
1957 1963 usr.permission = _user_group.permission.permission_name
1958 1964 perm_rows.append(usr)
1959 1965
1960 1966 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1961 1967 return perm_rows
1962 1968
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # lock info is one serialized triple; unpack it for the payload
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        # only expose custom extra fields when the feature is enabled
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2011 2017
2012 2018 @classmethod
2013 2019 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2014 2020 if not lock_time:
2015 2021 lock_time = time.time()
2016 2022 if not lock_reason:
2017 2023 lock_reason = cls.LOCK_AUTOMATIC
2018 2024 repo.locked = [user_id, lock_time, lock_reason]
2019 2025 Session().add(repo)
2020 2026 Session().commit()
2021 2027
2022 2028 @classmethod
2023 2029 def unlock(cls, repo):
2024 2030 repo.locked = None
2025 2031 Session().add(repo)
2026 2032 Session().commit()
2027 2033
2028 2034 @classmethod
2029 2035 def getlock(cls, repo):
2030 2036 return repo.locked
2031 2037
2032 2038 def is_user_lock(self, user_id):
2033 2039 if self.lock[0]:
2034 2040 lock_user_id = safe_int(self.lock[0])
2035 2041 user_id = safe_int(user_id)
2036 2042 # both are ints, and they are equal
2037 2043 return all([lock_user_id, user_id]) and lock_user_id == user_id
2038 2044
2039 2045 return False
2040 2046
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'
        :param user_id: user performing the action
        :param only_when_enabled: when False, evaluate locking even if the
            repo has locking disabled
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            # NOTE(review): the log message below says 'read' but the check
            # above actually requires write/admin permission
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2103 2109
2104 2110 @property
2105 2111 def last_db_change(self):
2106 2112 return self.updated_on
2107 2113
2108 2114 @property
2109 2115 def clone_uri_hidden(self):
2110 2116 clone_uri = self.clone_uri
2111 2117 if clone_uri:
2112 2118 import urlobject
2113 2119 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2114 2120 if url_obj.password:
2115 2121 clone_uri = url_obj.with_password('*****')
2116 2122 return clone_uri
2117 2123
2118 2124 @property
2119 2125 def push_uri_hidden(self):
2120 2126 push_uri = self.push_uri
2121 2127 if push_uri:
2122 2128 import urlobject
2123 2129 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2124 2130 if url_obj.password:
2125 2131 push_uri = url_obj.with_password('*****')
2126 2132 return push_uri
2127 2133
2128 2134 def clone_url(self, **override):
2129 2135 from rhodecode.model.settings import SettingsModel
2130 2136
2131 2137 uri_tmpl = None
2132 2138 if 'with_id' in override:
2133 2139 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2134 2140 del override['with_id']
2135 2141
2136 2142 if 'uri_tmpl' in override:
2137 2143 uri_tmpl = override['uri_tmpl']
2138 2144 del override['uri_tmpl']
2139 2145
2140 2146 ssh = False
2141 2147 if 'ssh' in override:
2142 2148 ssh = True
2143 2149 del override['ssh']
2144 2150
2145 2151 # we didn't override our tmpl from **overrides
2146 2152 if not uri_tmpl:
2147 2153 rc_config = SettingsModel().get_all_settings(cache=True)
2148 2154 if ssh:
2149 2155 uri_tmpl = rc_config.get(
2150 2156 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2151 2157 else:
2152 2158 uri_tmpl = rc_config.get(
2153 2159 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2154 2160
2155 2161 request = get_current_request()
2156 2162 return get_clone_url(request=request,
2157 2163 uri_tmpl=uri_tmpl,
2158 2164 repo_name=self.repo_name,
2159 2165 repo_id=self.repo_id, **override)
2160 2166
2161 2167 def set_state(self, state):
2162 2168 self.repo_state = state
2163 2169 Session().add(self)
2164 2170 #==========================================================================
2165 2171 # SCM PROPERTIES
2166 2172 #==========================================================================
2167 2173
2168 2174 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2169 2175 return get_commit_safe(
2170 2176 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2171 2177
2172 2178 def get_changeset(self, rev=None, pre_load=None):
2173 2179 warnings.warn("Use get_commit", DeprecationWarning)
2174 2180 commit_id = None
2175 2181 commit_idx = None
2176 2182 if isinstance(rev, basestring):
2177 2183 commit_id = rev
2178 2184 else:
2179 2185 commit_idx = rev
2180 2186 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2181 2187 pre_load=pre_load)
2182 2188
2183 2189 def get_landing_commit(self):
2184 2190 """
2185 2191 Returns landing commit, or if that doesn't exist returns the tip
2186 2192 """
2187 2193 _rev_type, _rev = self.landing_rev
2188 2194 commit = self.get_commit(_rev)
2189 2195 if isinstance(commit, EmptyCommit):
2190 2196 return self.get_commit()
2191 2197 return commit
2192 2198
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache: pre-computed commit cache dict; when None it is
            read from a fresh (non-cached) scm instance
        :param config: optional vcs config passed to ``scm_instance``
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)
            if scm_repo:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # outdated when either the commit hash or its index changed
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            # persist immediately: other workers read this cache from the db
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
2239 2245
2240 2246 @property
2241 2247 def tip(self):
2242 2248 return self.get_commit('tip')
2243 2249
2244 2250 @property
2245 2251 def author(self):
2246 2252 return self.tip.author
2247 2253
2248 2254 @property
2249 2255 def last_change(self):
2250 2256 return self.scm_instance().last_change
2251 2257
2252 2258 def get_comments(self, revisions=None):
2253 2259 """
2254 2260 Returns comments for this repository grouped by revisions
2255 2261
2256 2262 :param revisions: filter query by revisions only
2257 2263 """
2258 2264 cmts = ChangesetComment.query()\
2259 2265 .filter(ChangesetComment.repo == self)
2260 2266 if revisions:
2261 2267 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2262 2268 grouped = collections.defaultdict(list)
2263 2269 for cmt in cmts.all():
2264 2270 grouped[cmt.revision].append(cmt)
2265 2271 return grouped
2266 2272
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository, grouped by revision as
        ``{rev: [status, status_label, pr_id, pr_repo_name]}``.

        :param revisions: list of revisions to get statuses for
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # real stored statuses intentionally override the synthetic
        # under-review defaults filled in above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped

    # ==========================================================================
    # SCM CACHE INSTANCE
    # ==========================================================================
2312 2318
2313 2319 def scm_instance(self, **kwargs):
2314 2320 import rhodecode
2315 2321
2316 2322 # Passing a config will not hit the cache currently only used
2317 2323 # for repo2dbmapper
2318 2324 config = kwargs.pop('config', None)
2319 2325 cache = kwargs.pop('cache', None)
2320 2326 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2321 2327 # if cache is NOT defined use default global, else we have a full
2322 2328 # control over cache behaviour
2323 2329 if cache is None and full_cache and not config:
2324 2330 return self._get_instance_cached()
2325 2331 return self._get_instance(cache=bool(cache), config=config)
2326 2332
    def _get_instance_cached(self):
        """
        Return a VCS backend instance memoized in the 'long_term' beaker
        cache region, with invalidation handled via ``CacheKey``.
        """
        @cache_region('long_term')
        def _get_repo(cache_key):
            return self._get_instance()

        # thread_scoped=True scopes the invalidation context to the current
        # thread; the third argument is None here — NOTE(review): presumably
        # an unused namespace/kwargs slot, confirm against repo_context_cache
        invalidator_context = CacheKey.repo_context_cache(
            _get_repo, self.repo_name, None, thread_scoped=True)

        with invalidator_context as context:
            context.invalidate()
            repo = context.compute()

        return repo
2340 2346
2341 2347 def _get_instance(self, cache=True, config=None):
2342 2348 config = config or self._config
2343 2349 custom_wire = {
2344 2350 'cache': cache # controls the vcs.remote cache
2345 2351 }
2346 2352 repo = get_vcs_instance(
2347 2353 repo_path=safe_str(self.repo_full_path),
2348 2354 config=config,
2349 2355 with_wire=custom_wire,
2350 2356 create=False,
2351 2357 _vcs_alias=self.repo_type)
2352 2358
2353 2359 return repo
2354 2360
2355 2361 def __json__(self):
2356 2362 return {'landing_rev': self.landing_rev}
2357 2363
2358 2364 def get_dict(self):
2359 2365
2360 2366 # Since we transformed `repo_name` to a hybrid property, we need to
2361 2367 # keep compatibility with the code which uses `repo_name` field.
2362 2368
2363 2369 result = super(Repository, self).get_dict()
2364 2370 result['repo_name'] = result.pop('_repo_name', None)
2365 2371 return result
2366 2372
2367 2373
class RepoGroup(Base, BaseModel):
    """
    Repository group ("folder"). Groups nest via ``group_parent_id``
    (self-referential FK) and contain repositories.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def description_safe(self):
        """HTML-escaped group description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def _generate_choice(cls, repo_group):
        """Build one ``(group_id, label)`` select2 choice for *repo_group*."""
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return select2 choices for the given groups (all groups by default),
        sorted by top-level path segment. ``show_empty_group`` prepends a
        ``(-1, '-- No parent --')`` entry.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        """Separator used in group paths/URLs."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """
        Fetch a single group by its full name.

        :param cache: use the short SQL cache region keyed by the name hash
        :param case_insensitive: compare names via lower-casing
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the personal repo group of *user_id*, or None for the
        default (anonymous) user.
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user).scalar()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        List repo groups, optionally filtered by owner ``user_id`` and/or
        parent ``group_id`` (Optional sentinels mean "no filter"), ordered
        by group name.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self):
        """
        List of ancestor groups, outermost first; walks at most 10 levels
        to guard against cyclic parent links.
        """
        parents_recursion_limit = 10
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error(('more than %s parents found for group %s, stopping '
                           'recursive parent fetching' % (parents_recursion_limit, self)))
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_db_change(self):
        """Timestamp of the last database-level change to this group."""
        return self.updated_on

    @property
    def children(self):
        """Query for the direct child groups of this group."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full group path (the stored group name)."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full group path split into its segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query for repositories directly inside this group."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Count of repositories in this group and all nested subgroups."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        """
        Depth-first list of this group plus all nested groups (and their
        repositories unless ``include_repos`` is False).
        """
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def permissions(self, with_admins=True, with_owner=True):
        """
        Return per-user permission rows for this group as AttributeDicts,
        optionally prepending super-admin and owner rows.
        """
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return per-user-group permission rows for this group."""
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data
2650 2656
2651 2657
class Permission(Base, BaseModel):
    """
    Catalog of all permission names known to the system, plus classmethod
    helpers that resolve a user's effective default permissions on repos,
    repo groups and user groups (both direct and via user-group membership).
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # full catalog of (permission_name, translated label) pairs
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row whose name equals *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        Direct user->repository permission rows for *user_id* as
        (UserRepoToPerm, Repository, Permission) tuples, optionally
        limited to a single repository.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permission rows that *user_id* receives through
        membership in active user groups, optionally limited to one repo.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """
        Direct user->repository-group permission rows for *user_id*,
        optionally limited to a single repo group.
        """
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repository-group permission rows that *user_id* receives through
        membership in active user groups, optionally limited to one group.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """
        Direct user->user-group permission rows for *user_id*, optionally
        limited to a single user group.
        """
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        User-group permission rows that *user_id* receives through
        membership in active user groups; the target user group is
        aliased to disambiguate the double join on UserGroup.
        """
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
2875 2881
2876 2882
class UserRepoToPerm(Base, BaseModel):
    """Permission of a single user on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new permission row, add it to the session, return it."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
2904 2910
2905 2911
class UserUserGroupToPerm(Base, BaseModel):
    """Permission of a single user on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new permission row, add it to the session, return it."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
2933 2939
2934 2940
class UserToPerm(Base, BaseModel):
    """Global (system-wide) permission assigned to a single user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly join the permission row whenever this object is loaded
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
2951 2957
2952 2958
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission of a user group on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new permission row, add it to the session, return it."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2980 2986
2981 2987
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission of one user group (``user_group``) on another
    (``target_user_group``); a group cannot grant on itself
    (see CheckConstraint).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # explicit primaryjoins disambiguate the two FKs onto users_groups
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new permission row, add it to the session, return it."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3010 3016
3011 3017
class UserGroupToPerm(Base, BaseModel):
    """Global (system-wide) permission assigned to a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3025 3031
3026 3032
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted to an individual User on a repository group.
    One row per (user, group, permission) triple.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # surrogate primary key
    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """
        Create and add (but not commit) a new permission entry binding
        `user` to `repository_group` with `permission`.

        :returns: the newly added, uncommitted instance
        """
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3053 3059
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted to a UserGroup on a repository group.
    One row per (user group, repository group) pair.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # surrogate primary key
    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """
        Create and add (but not commit) a new permission entry binding
        `user_group` to `repository_group` with `permission`.

        :returns: the newly added, uncommitted instance
        """
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3082 3088
3083 3089
class Statistics(Base, BaseModel):
    """
    Pre-computed per-repository statistics. Exactly one row per
    repository (repository_id is unique); the large binary columns hold
    serialized JSON payloads.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # one stats row per repository
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision these statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
3098 3104
3099 3105
class UserFollowing(Base, BaseModel):
    """
    Records that a user follows either a repository or another user.
    A row populates exactly one of `follows_repo_id` / `follows_user_id`
    (both columns are nullable and each is unique per follower).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # the follower
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    # followed repository (or None when following a user)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    # followed user (or None when following a repository)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all follow entries for repository `repo_id`."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3123 3129
3124 3130
class CacheKey(Base, BaseModel):
    """
    Cache invalidation marker. Each row tracks one generated cache key
    (built from instance prefix + repo name + cache type) together with
    an `cache_active` validity flag that can be flipped to invalidate
    cached content across instances.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # known cache type suffixes used when generating keys
    CACHE_TYPE_ATOM = 'ATOM'
    CACHE_TYPE_RSS = 'RSS'
    CACHE_TYPE_README = 'README'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full generated key, e.g. '<instance_prefix><repo_name>_<cache_type>'
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    # the repo name portion used to partition/invalidate per-repo entries
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    # False means the cached value is stale and must be recomputed
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        # new entries always start inactive (not yet validated)
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args (the repo name):
        # returns (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def get_cache_key(cls, repo_name, cache_type):
        """
        Generate a cache key for this process of RhodeCode instance.
        Prefix most likely will be process id or maybe explicitly set
        instance_id from .ini file.

        :param repo_name: repository name the key is scoped to
        :param cache_type: optional suffix, one of the CACHE_TYPE_* values
        """
        import rhodecode
        prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')

        repo_as_unicode = safe_unicode(repo_name)
        key = u'{}_{}'.format(repo_as_unicode, cache_type) \
            if cache_type else repo_as_unicode

        return u'{}{}'.format(prefix, key)

    @classmethod
    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param repo_name: repository whose cache entries are targeted
        :param delete: when True remove the rows instead of just
            flagging them inactive
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == repo_name)
            if delete:
                log.debug('cache objects deleted for repo %s',
                          safe_str(repo_name))
                qry.delete()
            else:
                log.debug('cache objects marked as invalid for repo %s',
                          safe_str(repo_name))
                qry.update({"cache_active": False})

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate
            log.exception(
                'Cache key invalidation failed for repository %s',
                safe_str(repo_name))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for `cache_key`, or None if absent."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def repo_context_cache(cls, compute_func, repo_name, cache_type,
                           thread_scoped=False):
        """
        @cache_region('long_term')
        def _heavy_calculation(cache_key):
            return 'result'

        cache_context = CacheKey.repo_context_cache(
            _heavy_calculation, repo_name, cache_type)

        with cache_context as context:
            context.invalidate()
            computed = context.compute()

        assert computed == 'result'
        """
        from rhodecode.lib import caches
        return caches.InvalidationContext(
            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3251 3257
3252 3258
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a single commit (via `revision`) or to
    a pull request (via `pull_request_id`), optionally inline on a file
    line (`f_path` + `line_no`).
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # display_state value marking a comment as outdated for the current PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # commit hash for commit comments; None for PR-only comments
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    # line_no + f_path set together for inline comments
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential link: a TODO comment can be resolved by another comment
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision: filter by commit hash
        :param pull_request_id: filter by pull request (used only when
            `revision` is not given)
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of `pr_version` within `versions`,
        or None if not found.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # True when the display state marks this comment as outdated
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # latest (unversioned) PR: any versioned comment is older
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # comment that resolved this one, or None
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments carry both a line number and a file path
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's 1-based version index within `versions`."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict of comment attributes for API serialization."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3374 3380
3375 3381
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set by a user on a commit,
    optionally in the context of a pull request. `version` tracks
    successive status changes for the same (repo, revision).
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # possible .status values; STATUS_NOT_REVIEWED is the default
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    # optional link to the comment that carried this status change
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for status `value`, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # translated label for this row's status
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict of status attributes for API serialization."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3438 3444
3439 3445
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared declarative mixin used by both PullRequest and
    PullRequestVersion; columns referencing other tables are declared
    via `declared_attr` so each subclass maps them onto its own table.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    # serialized Reference, 'type:name:commit_id'
    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    # JSON-mapped reviewer rule data; mutation-tracked so in-place edits persist
    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # JSON string form of reviewer_data
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a ':'-joined string; exposed as a list
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin built per-subclass since the FK column lives on each table
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    def unicode_to_reference(self, raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    def reference_to_unicode(self, ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Return a dict describing this pull request for API serialization.

        :param with_merge_state: when True compute the (potentially
            expensive) merge status; otherwise report 'not_available'
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
3649 3655
3650 3656
class PullRequest(Base, _PullRequestBase):
    """
    Concrete (current) pull request record; snapshots of earlier states
    live in PullRequestVersion.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around `pull_request_obj`,
        taking shadow-merge/reviewer data from `org_pull_request_obj`
        (the original PR when displaying an older version).

        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the attrs dict (defaults to ['versions'])
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # shadow-merge and reviewer info always come from the original PR
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        # unique workspace identifier used for shadow repository paths
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return the vcs instance for this PR's shadow repository, or None
        if the shadow repo directory does not exist on disk.
        """
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        # shadow path is keyed by the numeric target repo id + workspace id
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
3768 3774
3769 3775
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a PullRequest taken when the PR is updated;
    reviewers, versions and closed-state are delegated to the parent PR.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    # parent pull request this snapshot belongs to
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers are tracked only on the live PR
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
3807 3813
3808 3814
class PullRequestReviewers(Base, BaseModel):
    """Association of a user as reviewer of a pull request.

    Stores why the user was added (``reasons``), whether the vote is
    mandatory, and any review-rule metadata (``rule_data``).
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    @hybrid_property
    def reasons(self):
        """List of human-readable reasons this reviewer was added."""
        if not self._reasons:
            # normalize NULL/empty column value to an empty list
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # only plain strings are serializable into the JSON column
        if any(not isinstance(x, basestring) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON-backed list column, exposed via the `reasons` hybrid property above
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # arbitrary JSON payload produced by the review-rule engine
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
3867 3873
3868 3874
class Notification(Base, BaseModel):
    """A notification message fanned out to one or more users.

    Recipients are attached through the ``UserNotification`` association
    table; ``type_`` is one of the ``TYPE_*`` constants below.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # known notification types stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    # deleting a notification removes its per-user association rows as well
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        """Users this notification was delivered to, ordered by user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """Create a notification and fan it out to *recipients*.

        :param created_by: ``User`` instance authoring the notification
        :param subject: short subject line
        :param body: notification body text
        :param recipients: iterable of ``User`` objects to notify
        :param type_: one of the ``TYPE_*`` constants; defaults to
            ``TYPE_MESSAGE``
        :return: the new (session-added, not yet committed) notification
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True

            u.notifications.append(assoc)
        Session().add(notification)

        return notification
3926 3932
3927 3933
class UserNotification(Base, BaseModel):
    """Association row linking a user to a notification (with read state)."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    # whether the recipient has seen this notification
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this notification as read and stage the change in the session."""
        self.read = True
        Session().add(self)
3947 3953
3948 3954
class Gist(Base, BaseModel):
    """A code snippet (gist) backed by a small on-disk VCS repository.

    Gists are addressed by ``gist_access_id`` (a public token) rather than
    by their numeric primary key.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    # visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry timestamp stored as a float epoch value
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        """HTML-escaped gist description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch a gist by its *access id* or raise ``HTTPNotFound``.

        NOTE: despite the parameter name, the lookup uses
        ``gist_access_id``, not the numeric primary key.
        """
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        """Fetch a gist by its public access id, or ``None``."""
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Full URL of this gist as generated by ``GistModel``."""
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root storage path comes from the cached rhodecode ui settings
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """Return a vcs instance for the gist's backing repository."""
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False)
4044 4050
4045 4051
class ExternalIdentity(Base, BaseModel):
    """Mapping of a local user to an identity at an external auth provider.

    The composite primary key is (external_id, local_user_id,
    provider_name), so one local user may hold identities at several
    providers.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'})

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    # provider-issued credentials for this identity
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id: identity id at the external provider
        :param provider_name: name of the external provider
        :param local_user_id: optionally narrow to one local user
        :return: ExternalIdentity or None
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id: identity id at the external provider
        :param provider_name: name of the external provider
        :return: User or None
        """
        # join users against this table via the local_user_id link
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id: id of the local user
        :return: ExternalIdentity query (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query
4108 4114
4109 4115
class Integration(Base, BaseModel):
    """A configured integration (webhook, issue tracker, etc.).

    An integration is scoped either to a single repo, to a repo group
    (optionally only direct children), to root-level repos, or globally —
    see :attr:`scope`.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # for repo-group scope: apply only to direct child repos, not recursively
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # type-specific settings stored as mutable JSON
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable description of where this integration applies."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4152 4158
4153 4159
class RepoReviewRuleUser(Base, BaseModel):
    """A single user entry attached to a repository review rule."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    # if True, this reviewer's vote is required for the rule to pass
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        """Per-user rule payload merged into the reviewer entry."""
        return {
            'mandatory': self.mandatory
        }
4171 4177
4172 4178
class RepoReviewRuleUserGroup(Base, BaseModel):
    """A user-group entry attached to a repository review rule.

    ``vote_rule`` is the minimum number of member votes required;
    ``VOTE_RULE_ALL`` (-1) means every member must vote.
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )
    # sentinel: all group members must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Per-group rule payload merged into each member's reviewer entry."""
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Human-readable description of the group's voting requirement."""
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4200 4206
4201 4207
class RepoReviewRule(Base, BaseModel):
    """A per-repository rule that auto-assigns reviewers to pull requests.

    A rule matches on glob patterns for source branch, target branch and
    changed file paths; when it matches, its users and user-group members
    become (optionally mandatory) reviewers.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_glob(self, value):
        # raises re.error if the glob cannot be compiled into a regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/NULL pattern degrades to match-everything
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        # empty/NULL pattern degrades to match-everything
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_glob(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        # empty/NULL pattern degrades to match-everything
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_glob(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            # '*' short-circuits: no regex work for the default pattern
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                source_branch_regex = re.compile(
                    '^' + glob2re(self.source_branch_pattern) + '$')
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                target_branch_regex = re.compile(
                    '^' + glob2re(self.target_branch_pattern) + '$')
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            file_regex = re.compile(glob2re(self.file_pattern))
            for filename in files_changed:
                # a single matching file is enough
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        # keyed by username; direct-user entries win over group entries
        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # NOTE(review): dict keys are usernames, so comparing the
                # User object against them is always True here — the guard
                # is effectively a no-op; verify intended key type.
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self):
        """Return the list of user-group entries carrying voting rules."""
        rules = []
        if self.rule_user_groups:
            for user_group in self.rule_user_groups:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4349 4355
4350 4356
class ScheduleEntry(Base, BaseModel):
    """A persisted periodic-task (scheduler) definition.

    Each entry names a task by dotted path plus JSON-serialized
    args/kwargs, and carries a schedule of one of the allowed
    ``schedule_types``. ``task_uid`` is a content hash kept in sync by the
    ``before_insert``/``before_update`` listeners below.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        """One of :attr:`schedule_types`; validated on assignment."""
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # BUGFIX: the message previously had its format arguments
            # swapped ("must be one of `<val>` and got `<old type>`");
            # report the allowed set and the rejected value correctly.
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """Compute the stable content hash identifying a schedule entry.

        Raw JSON payloads are decoded first; malformed JSON degrades to
        empty args/kwargs rather than raising.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() on a string sorts its characters; this is
        # part of the persisted uid format, so it is deliberately left
        # unchanged to keep existing uids stable.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Fetch an entry by its unique name, or ``None``."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Fetch an entry by primary key, or ``None``."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        """Scheduler-native schedule object built from the stored definition."""
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        """Task positional args as a plain list (empty on decode failure)."""
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        """Task keyword args as a plain dict (empty on decode failure)."""
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        """De-coerce a mutable JSON value back to its raw JSON string."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4469 4475
4470 4476
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep the content-hash uid in sync whenever an entry is updated
    target.task_uid = ScheduleEntry.get_uid(target)
4474 4480
4475 4481
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the content-hash uid when an entry is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
4479 4485
4480 4486
class DbMigrateVersion(Base, BaseModel):
    """Bookkeeping table used by the DB migration tooling (schema version)."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
4490 4496
4491 4497
class DbSession(Base, BaseModel):
    """Database-backed web session storage (pickled session data)."""
    __tablename__ = 'db_session'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
4507 4513
4508 4514
4509 4515
class BeakerCache(Base, BaseModel):
    """Database-backed beaker cache storage (pickled cache payloads)."""
    __tablename__ = 'beaker_cache'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    def __repr__(self):
        # BUGFIX: previously reported 'DB:DbSession' — a copy-paste from
        # the DbSession model above; report the correct class name.
        return '<DB:BeakerCache({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now