##// END OF EJS Templates
pull-requests: fix way how pull-request calculates common ancestors....
marcink -
r4346:4dcd6440 default
parent child Browse files
Show More
@@ -0,0 +1,47 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 from sqlalchemy import *
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8 from sqlalchemy import BigInteger
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import init_model_encryption
12
13
14 log = logging.getLogger(__name__)
15
16
17 def upgrade(migrate_engine):
18 """
19 Upgrade operations go here.
20 Don't create your own engine; bind migrate_engine to your metadata
21 """
22 _reset_base(migrate_engine)
23 from rhodecode.lib.dbmigrate.schema import db_4_19_0_0 as db
24
25 init_model_encryption(db)
26
27 context = MigrationContext.configure(migrate_engine.connect())
28 op = Operations(context)
29
30 pull_requests = db.PullRequest.__table__
31 with op.batch_alter_table(pull_requests.name) as batch_op:
32 new_column = Column('common_ancestor_id', Unicode(255), nullable=True)
33 batch_op.add_column(new_column)
34
35 pull_request_version = db.PullRequestVersion.__table__
36 with op.batch_alter_table(pull_request_version.name) as batch_op:
37 new_column = Column('common_ancestor_id', Unicode(255), nullable=True)
38 batch_op.add_column(new_column)
39
40
41 def downgrade(migrate_engine):
42 meta = MetaData()
43 meta.bind = migrate_engine
44
45
46 def fixups(models, _SESSION):
47 pass
@@ -1,60 +1,60 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 from collections import OrderedDict
23 23
24 24 import sys
25 25 import platform
26 26
27 27 VERSION = tuple(open(os.path.join(
28 28 os.path.dirname(__file__), 'VERSION')).read().split('.'))
29 29
30 30 BACKENDS = OrderedDict()
31 31
32 32 BACKENDS['hg'] = 'Mercurial repository'
33 33 BACKENDS['git'] = 'Git repository'
34 34 BACKENDS['svn'] = 'Subversion repository'
35 35
36 36
37 37 CELERY_ENABLED = False
38 38 CELERY_EAGER = False
39 39
40 40 # link to config for pyramid
41 41 CONFIG = {}
42 42
43 43 # Populated with the settings dictionary from application init in
44 44 # rhodecode.conf.environment.load_pyramid_environment
45 45 PYRAMID_SETTINGS = {}
46 46
47 47 # Linked module for extensions
48 48 EXTENSIONS = {}
49 49
50 50 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
51 __dbversion__ = 106 # defines current db version for migrations
51 __dbversion__ = 107 # defines current db version for migrations
52 52 __platform__ = platform.system()
53 53 __license__ = 'AGPLv3, and Commercial License'
54 54 __author__ = 'RhodeCode GmbH'
55 55 __url__ = 'https://code.rhodecode.com'
56 56
57 57 is_windows = __platform__ in ['Windows']
58 58 is_unix = not is_windows
59 59 is_test = False
60 60 disable_error_handler = False
@@ -1,1018 +1,1018 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 @jsonrpc_method()
46 46 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
47 47 merge_state=Optional(False)):
48 48 """
49 49 Get a pull request based on the given ID.
50 50
51 51 :param apiuser: This is filled automatically from the |authtoken|.
52 52 :type apiuser: AuthUser
53 53 :param repoid: Optional, repository name or repository ID from where
54 54 the pull request was opened.
55 55 :type repoid: str or int
56 56 :param pullrequestid: ID of the requested pull request.
57 57 :type pullrequestid: int
58 58 :param merge_state: Optional calculate merge state for each repository.
59 59 This could result in longer time to fetch the data
60 60 :type merge_state: bool
61 61
62 62 Example output:
63 63
64 64 .. code-block:: bash
65 65
66 66 "id": <id_given_in_input>,
67 67 "result":
68 68 {
69 69 "pull_request_id": "<pull_request_id>",
70 70 "url": "<url>",
71 71 "title": "<title>",
72 72 "description": "<description>",
73 73 "status" : "<status>",
74 74 "created_on": "<date_time_created>",
75 75 "updated_on": "<date_time_updated>",
76 76 "versions": "<number_or_versions_of_pr>",
77 77 "commit_ids": [
78 78 ...
79 79 "<commit_id>",
80 80 "<commit_id>",
81 81 ...
82 82 ],
83 83 "review_status": "<review_status>",
84 84 "mergeable": {
85 85 "status": "<bool>",
86 86 "message": "<message>",
87 87 },
88 88 "source": {
89 89 "clone_url": "<clone_url>",
90 90 "repository": "<repository_name>",
91 91 "reference":
92 92 {
93 93 "name": "<name>",
94 94 "type": "<type>",
95 95 "commit_id": "<commit_id>",
96 96 }
97 97 },
98 98 "target": {
99 99 "clone_url": "<clone_url>",
100 100 "repository": "<repository_name>",
101 101 "reference":
102 102 {
103 103 "name": "<name>",
104 104 "type": "<type>",
105 105 "commit_id": "<commit_id>",
106 106 }
107 107 },
108 108 "merge": {
109 109 "clone_url": "<clone_url>",
110 110 "reference":
111 111 {
112 112 "name": "<name>",
113 113 "type": "<type>",
114 114 "commit_id": "<commit_id>",
115 115 }
116 116 },
117 117 "author": <user_obj>,
118 118 "reviewers": [
119 119 ...
120 120 {
121 121 "user": "<user_obj>",
122 122 "review_status": "<review_status>",
123 123 }
124 124 ...
125 125 ]
126 126 },
127 127 "error": null
128 128 """
129 129
130 130 pull_request = get_pull_request_or_error(pullrequestid)
131 131 if Optional.extract(repoid):
132 132 repo = get_repo_or_error(repoid)
133 133 else:
134 134 repo = pull_request.target_repo
135 135
136 136 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
137 137 raise JSONRPCError('repository `%s` or pull request `%s` '
138 138 'does not exist' % (repoid, pullrequestid))
139 139
140 140 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
141 141 # otherwise we can lock the repo on calculation of merge state while update/merge
142 142 # is happening.
143 143 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
144 144 merge_state = Optional.extract(merge_state, binary=True) and pr_created
145 145 data = pull_request.get_api_data(with_merge_state=merge_state)
146 146 return data
147 147
148 148
149 149 @jsonrpc_method()
150 150 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
151 151 merge_state=Optional(False)):
152 152 """
153 153 Get all pull requests from the repository specified in `repoid`.
154 154
155 155 :param apiuser: This is filled automatically from the |authtoken|.
156 156 :type apiuser: AuthUser
157 157 :param repoid: Optional repository name or repository ID.
158 158 :type repoid: str or int
159 159 :param status: Only return pull requests with the specified status.
160 160 Valid options are.
161 161 * ``new`` (default)
162 162 * ``open``
163 163 * ``closed``
164 164 :type status: str
165 165 :param merge_state: Optional calculate merge state for each repository.
166 166 This could result in longer time to fetch the data
167 167 :type merge_state: bool
168 168
169 169 Example output:
170 170
171 171 .. code-block:: bash
172 172
173 173 "id": <id_given_in_input>,
174 174 "result":
175 175 [
176 176 ...
177 177 {
178 178 "pull_request_id": "<pull_request_id>",
179 179 "url": "<url>",
180 180 "title" : "<title>",
181 181 "description": "<description>",
182 182 "status": "<status>",
183 183 "created_on": "<date_time_created>",
184 184 "updated_on": "<date_time_updated>",
185 185 "commit_ids": [
186 186 ...
187 187 "<commit_id>",
188 188 "<commit_id>",
189 189 ...
190 190 ],
191 191 "review_status": "<review_status>",
192 192 "mergeable": {
193 193 "status": "<bool>",
194 194 "message: "<message>",
195 195 },
196 196 "source": {
197 197 "clone_url": "<clone_url>",
198 198 "reference":
199 199 {
200 200 "name": "<name>",
201 201 "type": "<type>",
202 202 "commit_id": "<commit_id>",
203 203 }
204 204 },
205 205 "target": {
206 206 "clone_url": "<clone_url>",
207 207 "reference":
208 208 {
209 209 "name": "<name>",
210 210 "type": "<type>",
211 211 "commit_id": "<commit_id>",
212 212 }
213 213 },
214 214 "merge": {
215 215 "clone_url": "<clone_url>",
216 216 "reference":
217 217 {
218 218 "name": "<name>",
219 219 "type": "<type>",
220 220 "commit_id": "<commit_id>",
221 221 }
222 222 },
223 223 "author": <user_obj>,
224 224 "reviewers": [
225 225 ...
226 226 {
227 227 "user": "<user_obj>",
228 228 "review_status": "<review_status>",
229 229 }
230 230 ...
231 231 ]
232 232 }
233 233 ...
234 234 ],
235 235 "error": null
236 236
237 237 """
238 238 repo = get_repo_or_error(repoid)
239 239 if not has_superadmin_permission(apiuser):
240 240 _perms = (
241 241 'repository.admin', 'repository.write', 'repository.read',)
242 242 validate_repo_permissions(apiuser, repoid, repo, _perms)
243 243
244 244 status = Optional.extract(status)
245 245 merge_state = Optional.extract(merge_state, binary=True)
246 246 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
247 247 order_by='id', order_dir='desc')
248 248 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
249 249 return data
250 250
251 251
252 252 @jsonrpc_method()
253 253 def merge_pull_request(
254 254 request, apiuser, pullrequestid, repoid=Optional(None),
255 255 userid=Optional(OAttr('apiuser'))):
256 256 """
257 257 Merge the pull request specified by `pullrequestid` into its target
258 258 repository.
259 259
260 260 :param apiuser: This is filled automatically from the |authtoken|.
261 261 :type apiuser: AuthUser
262 262 :param repoid: Optional, repository name or repository ID of the
263 263 target repository to which the |pr| is to be merged.
264 264 :type repoid: str or int
265 265 :param pullrequestid: ID of the pull request which shall be merged.
266 266 :type pullrequestid: int
267 267 :param userid: Merge the pull request as this user.
268 268 :type userid: Optional(str or int)
269 269
270 270 Example output:
271 271
272 272 .. code-block:: bash
273 273
274 274 "id": <id_given_in_input>,
275 275 "result": {
276 276 "executed": "<bool>",
277 277 "failure_reason": "<int>",
278 278 "merge_status_message": "<str>",
279 279 "merge_commit_id": "<merge_commit_id>",
280 280 "possible": "<bool>",
281 281 "merge_ref": {
282 282 "commit_id": "<commit_id>",
283 283 "type": "<type>",
284 284 "name": "<name>"
285 285 }
286 286 },
287 287 "error": null
288 288 """
289 289 pull_request = get_pull_request_or_error(pullrequestid)
290 290 if Optional.extract(repoid):
291 291 repo = get_repo_or_error(repoid)
292 292 else:
293 293 repo = pull_request.target_repo
294 294 auth_user = apiuser
295 295 if not isinstance(userid, Optional):
296 296 if (has_superadmin_permission(apiuser) or
297 297 HasRepoPermissionAnyApi('repository.admin')(
298 298 user=apiuser, repo_name=repo.repo_name)):
299 299 apiuser = get_user_or_error(userid)
300 300 auth_user = apiuser.AuthUser()
301 301 else:
302 302 raise JSONRPCError('userid is not the same as your user')
303 303
304 304 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
305 305 raise JSONRPCError(
306 306 'Operation forbidden because pull request is in state {}, '
307 307 'only state {} is allowed.'.format(
308 308 pull_request.pull_request_state, PullRequest.STATE_CREATED))
309 309
310 310 with pull_request.set_state(PullRequest.STATE_UPDATING):
311 311 check = MergeCheck.validate(pull_request, auth_user=auth_user,
312 312 translator=request.translate)
313 313 merge_possible = not check.failed
314 314
315 315 if not merge_possible:
316 316 error_messages = []
317 317 for err_type, error_msg in check.errors:
318 318 error_msg = request.translate(error_msg)
319 319 error_messages.append(error_msg)
320 320
321 321 reasons = ','.join(error_messages)
322 322 raise JSONRPCError(
323 323 'merge not possible for following reasons: {}'.format(reasons))
324 324
325 325 target_repo = pull_request.target_repo
326 326 extras = vcs_operation_context(
327 327 request.environ, repo_name=target_repo.repo_name,
328 328 username=auth_user.username, action='push',
329 329 scm=target_repo.repo_type)
330 330 with pull_request.set_state(PullRequest.STATE_UPDATING):
331 331 merge_response = PullRequestModel().merge_repo(
332 332 pull_request, apiuser, extras=extras)
333 333 if merge_response.executed:
334 334 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
335 335
336 336 Session().commit()
337 337
338 338 # In previous versions the merge response directly contained the merge
339 339 # commit id. It is now contained in the merge reference object. To be
340 340 # backwards compatible we have to extract it again.
341 341 merge_response = merge_response.asdict()
342 342 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
343 343
344 344 return merge_response
345 345
346 346
347 347 @jsonrpc_method()
348 348 def get_pull_request_comments(
349 349 request, apiuser, pullrequestid, repoid=Optional(None)):
350 350 """
351 351 Get all comments of pull request specified with the `pullrequestid`
352 352
353 353 :param apiuser: This is filled automatically from the |authtoken|.
354 354 :type apiuser: AuthUser
355 355 :param repoid: Optional repository name or repository ID.
356 356 :type repoid: str or int
357 357 :param pullrequestid: The pull request ID.
358 358 :type pullrequestid: int
359 359
360 360 Example output:
361 361
362 362 .. code-block:: bash
363 363
364 364 id : <id_given_in_input>
365 365 result : [
366 366 {
367 367 "comment_author": {
368 368 "active": true,
369 369 "full_name_or_username": "Tom Gore",
370 370 "username": "admin"
371 371 },
372 372 "comment_created_on": "2017-01-02T18:43:45.533",
373 373 "comment_f_path": null,
374 374 "comment_id": 25,
375 375 "comment_lineno": null,
376 376 "comment_status": {
377 377 "status": "under_review",
378 378 "status_lbl": "Under Review"
379 379 },
380 380 "comment_text": "Example text",
381 381 "comment_type": null,
382 382 "pull_request_version": null,
383 383 "comment_commit_id": None,
384 384 "comment_pull_request_id": <pull_request_id>
385 385 }
386 386 ],
387 387 error : null
388 388 """
389 389
390 390 pull_request = get_pull_request_or_error(pullrequestid)
391 391 if Optional.extract(repoid):
392 392 repo = get_repo_or_error(repoid)
393 393 else:
394 394 repo = pull_request.target_repo
395 395
396 396 if not PullRequestModel().check_user_read(
397 397 pull_request, apiuser, api=True):
398 398 raise JSONRPCError('repository `%s` or pull request `%s` '
399 399 'does not exist' % (repoid, pullrequestid))
400 400
401 401 (pull_request_latest,
402 402 pull_request_at_ver,
403 403 pull_request_display_obj,
404 404 at_version) = PullRequestModel().get_pr_version(
405 405 pull_request.pull_request_id, version=None)
406 406
407 407 versions = pull_request_display_obj.versions()
408 408 ver_map = {
409 409 ver.pull_request_version_id: cnt
410 410 for cnt, ver in enumerate(versions, 1)
411 411 }
412 412
413 413 # GENERAL COMMENTS with versions #
414 414 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
415 415 q = q.order_by(ChangesetComment.comment_id.asc())
416 416 general_comments = q.all()
417 417
418 418 # INLINE COMMENTS with versions #
419 419 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
420 420 q = q.order_by(ChangesetComment.comment_id.asc())
421 421 inline_comments = q.all()
422 422
423 423 data = []
424 424 for comment in inline_comments + general_comments:
425 425 full_data = comment.get_api_data()
426 426 pr_version_id = None
427 427 if comment.pull_request_version_id:
428 428 pr_version_id = 'v{}'.format(
429 429 ver_map[comment.pull_request_version_id])
430 430
431 431 # sanitize some entries
432 432
433 433 full_data['pull_request_version'] = pr_version_id
434 434 full_data['comment_author'] = {
435 435 'username': full_data['comment_author'].username,
436 436 'full_name_or_username': full_data['comment_author'].full_name_or_username,
437 437 'active': full_data['comment_author'].active,
438 438 }
439 439
440 440 if full_data['comment_status']:
441 441 full_data['comment_status'] = {
442 442 'status': full_data['comment_status'][0].status,
443 443 'status_lbl': full_data['comment_status'][0].status_lbl,
444 444 }
445 445 else:
446 446 full_data['comment_status'] = {}
447 447
448 448 data.append(full_data)
449 449 return data
450 450
451 451
452 452 @jsonrpc_method()
453 453 def comment_pull_request(
454 454 request, apiuser, pullrequestid, repoid=Optional(None),
455 455 message=Optional(None), commit_id=Optional(None), status=Optional(None),
456 456 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
457 457 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
458 458 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
459 459 """
460 460 Comment on the pull request specified with the `pullrequestid`,
461 461 in the |repo| specified by the `repoid`, and optionally change the
462 462 review status.
463 463
464 464 :param apiuser: This is filled automatically from the |authtoken|.
465 465 :type apiuser: AuthUser
466 466 :param repoid: Optional repository name or repository ID.
467 467 :type repoid: str or int
468 468 :param pullrequestid: The pull request ID.
469 469 :type pullrequestid: int
470 470 :param commit_id: Specify the commit_id for which to set a comment. If
471 471 given commit_id is different than latest in the PR status
472 472 change won't be performed.
473 473 :type commit_id: str
474 474 :param message: The text content of the comment.
475 475 :type message: str
476 476 :param status: (**Optional**) Set the approval status of the pull
477 477 request. One of: 'not_reviewed', 'approved', 'rejected',
478 478 'under_review'
479 479 :type status: str
480 480 :param comment_type: Comment type, one of: 'note', 'todo'
481 481 :type comment_type: Optional(str), default: 'note'
482 482 :param resolves_comment_id: id of comment which this one will resolve
483 483 :type resolves_comment_id: Optional(int)
484 484 :param extra_recipients: list of user ids or usernames to add
485 485 notifications for this comment. Acts like a CC for notification
486 486 :type extra_recipients: Optional(list)
487 487 :param userid: Comment on the pull request as this user
488 488 :type userid: Optional(str or int)
489 489 :param send_email: Define if this comment should also send email notification
490 490 :type send_email: Optional(bool)
491 491
492 492 Example output:
493 493
494 494 .. code-block:: bash
495 495
496 496 id : <id_given_in_input>
497 497 result : {
498 498 "pull_request_id": "<Integer>",
499 499 "comment_id": "<Integer>",
500 500 "status": {"given": <given_status>,
501 501 "was_changed": <bool status_was_actually_changed> },
502 502 },
503 503 error : null
504 504 """
505 505 pull_request = get_pull_request_or_error(pullrequestid)
506 506 if Optional.extract(repoid):
507 507 repo = get_repo_or_error(repoid)
508 508 else:
509 509 repo = pull_request.target_repo
510 510
511 511 auth_user = apiuser
512 512 if not isinstance(userid, Optional):
513 513 if (has_superadmin_permission(apiuser) or
514 514 HasRepoPermissionAnyApi('repository.admin')(
515 515 user=apiuser, repo_name=repo.repo_name)):
516 516 apiuser = get_user_or_error(userid)
517 517 auth_user = apiuser.AuthUser()
518 518 else:
519 519 raise JSONRPCError('userid is not the same as your user')
520 520
521 521 if pull_request.is_closed():
522 522 raise JSONRPCError(
523 523 'pull request `%s` comment failed, pull request is closed' % (
524 524 pullrequestid,))
525 525
526 526 if not PullRequestModel().check_user_read(
527 527 pull_request, apiuser, api=True):
528 528 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
529 529 message = Optional.extract(message)
530 530 status = Optional.extract(status)
531 531 commit_id = Optional.extract(commit_id)
532 532 comment_type = Optional.extract(comment_type)
533 533 resolves_comment_id = Optional.extract(resolves_comment_id)
534 534 extra_recipients = Optional.extract(extra_recipients)
535 535 send_email = Optional.extract(send_email, binary=True)
536 536
537 537 if not message and not status:
538 538 raise JSONRPCError(
539 539 'Both message and status parameters are missing. '
540 540 'At least one is required.')
541 541
542 542 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
543 543 status is not None):
544 544 raise JSONRPCError('Unknown comment status: `%s`' % status)
545 545
546 546 if commit_id and commit_id not in pull_request.revisions:
547 547 raise JSONRPCError(
548 548 'Invalid commit_id `%s` for this pull request.' % commit_id)
549 549
550 550 allowed_to_change_status = PullRequestModel().check_user_change_status(
551 551 pull_request, apiuser)
552 552
553 553 # if commit_id is passed re-validated if user is allowed to change status
554 554 # based on latest commit_id from the PR
555 555 if commit_id:
556 556 commit_idx = pull_request.revisions.index(commit_id)
557 557 if commit_idx != 0:
558 558 allowed_to_change_status = False
559 559
560 560 if resolves_comment_id:
561 561 comment = ChangesetComment.get(resolves_comment_id)
562 562 if not comment:
563 563 raise JSONRPCError(
564 564 'Invalid resolves_comment_id `%s` for this pull request.'
565 565 % resolves_comment_id)
566 566 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
567 567 raise JSONRPCError(
568 568 'Comment `%s` is wrong type for setting status to resolved.'
569 569 % resolves_comment_id)
570 570
571 571 text = message
572 572 status_label = ChangesetStatus.get_status_lbl(status)
573 573 if status and allowed_to_change_status:
574 574 st_message = ('Status change %(transition_icon)s %(status)s'
575 575 % {'transition_icon': '>', 'status': status_label})
576 576 text = message or st_message
577 577
578 578 rc_config = SettingsModel().get_all_settings()
579 579 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
580 580
581 581 status_change = status and allowed_to_change_status
582 582 comment = CommentsModel().create(
583 583 text=text,
584 584 repo=pull_request.target_repo.repo_id,
585 585 user=apiuser.user_id,
586 586 pull_request=pull_request.pull_request_id,
587 587 f_path=None,
588 588 line_no=None,
589 589 status_change=(status_label if status_change else None),
590 590 status_change_type=(status if status_change else None),
591 591 closing_pr=False,
592 592 renderer=renderer,
593 593 comment_type=comment_type,
594 594 resolves_comment_id=resolves_comment_id,
595 595 auth_user=auth_user,
596 596 extra_recipients=extra_recipients,
597 597 send_email=send_email
598 598 )
599 599
600 600 if allowed_to_change_status and status:
601 601 old_calculated_status = pull_request.calculated_review_status()
602 602 ChangesetStatusModel().set_status(
603 603 pull_request.target_repo.repo_id,
604 604 status,
605 605 apiuser.user_id,
606 606 comment,
607 607 pull_request=pull_request.pull_request_id
608 608 )
609 609 Session().flush()
610 610
611 611 Session().commit()
612 612
613 613 PullRequestModel().trigger_pull_request_hook(
614 614 pull_request, apiuser, 'comment',
615 615 data={'comment': comment})
616 616
617 617 if allowed_to_change_status and status:
618 618 # we now calculate the status of pull request, and based on that
619 619 # calculation we set the commits status
620 620 calculated_status = pull_request.calculated_review_status()
621 621 if old_calculated_status != calculated_status:
622 622 PullRequestModel().trigger_pull_request_hook(
623 623 pull_request, apiuser, 'review_status_change',
624 624 data={'status': calculated_status})
625 625
626 626 data = {
627 627 'pull_request_id': pull_request.pull_request_id,
628 628 'comment_id': comment.comment_id if comment else None,
629 629 'status': {'given': status, 'was_changed': status_change},
630 630 }
631 631 return data
632 632
633 633
634 634 @jsonrpc_method()
635 635 def create_pull_request(
636 636 request, apiuser, source_repo, target_repo, source_ref, target_ref,
637 637 owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
638 638 description_renderer=Optional(''), reviewers=Optional(None)):
639 639 """
640 640 Creates a new pull request.
641 641
642 642 Accepts refs in the following formats:
643 643
644 644 * branch:<branch_name>:<sha>
645 645 * branch:<branch_name>
646 646 * bookmark:<bookmark_name>:<sha> (Mercurial only)
647 647 * bookmark:<bookmark_name> (Mercurial only)
648 648
649 649 :param apiuser: This is filled automatically from the |authtoken|.
650 650 :type apiuser: AuthUser
651 651 :param source_repo: Set the source repository name.
652 652 :type source_repo: str
653 653 :param target_repo: Set the target repository name.
654 654 :type target_repo: str
655 655 :param source_ref: Set the source ref name.
656 656 :type source_ref: str
657 657 :param target_ref: Set the target ref name.
658 658 :type target_ref: str
659 659 :param owner: user_id or username
660 660 :type owner: Optional(str)
661 661 :param title: Optionally Set the pull request title, it's generated otherwise
662 662 :type title: str
663 663 :param description: Set the pull request description.
664 664 :type description: Optional(str)
665 665 :type description_renderer: Optional(str)
666 666 :param description_renderer: Set pull request renderer for the description.
667 667 It should be 'rst', 'markdown' or 'plain'. If not give default
668 668 system renderer will be used
669 669 :param reviewers: Set the new pull request reviewers list.
670 670 Reviewer defined by review rules will be added automatically to the
671 671 defined list.
672 672 :type reviewers: Optional(list)
673 673 Accepts username strings or objects of the format:
674 674
675 675 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
676 676 """
677 677
678 678 source_db_repo = get_repo_or_error(source_repo)
679 679 target_db_repo = get_repo_or_error(target_repo)
680 680 if not has_superadmin_permission(apiuser):
681 681 _perms = ('repository.admin', 'repository.write', 'repository.read',)
682 682 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
683 683
684 684 owner = validate_set_owner_permissions(apiuser, owner)
685 685
686 686 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
687 687 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
688 688
689 source_scm = source_db_repo.scm_instance()
690 target_scm = target_db_repo.scm_instance()
691
692 689 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
693 690 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
694 691
695 ancestor = source_scm.get_common_ancestor(
696 source_commit.raw_id, target_commit.raw_id, target_scm)
697 if not ancestor:
698 raise JSONRPCError('no common ancestor found')
699
700 # recalculate target ref based on ancestor
701 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
702 full_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
703
704 commit_ranges = target_scm.compare(
705 target_commit.raw_id, source_commit.raw_id, source_scm,
706 merge=True, pre_load=[])
707
708 if not commit_ranges:
709 raise JSONRPCError('no commits found')
710
711 692 reviewer_objects = Optional.extract(reviewers) or []
712 693
713 694 # serialize and validate passed in given reviewers
714 695 if reviewer_objects:
715 696 schema = ReviewerListSchema()
716 697 try:
717 698 reviewer_objects = schema.deserialize(reviewer_objects)
718 699 except Invalid as err:
719 700 raise JSONRPCValidationError(colander_exc=err)
720 701
721 702 # validate users
722 703 for reviewer_object in reviewer_objects:
723 704 user = get_user_or_error(reviewer_object['username'])
724 705 reviewer_object['user_id'] = user.user_id
725 706
726 707 get_default_reviewers_data, validate_default_reviewers = \
727 708 PullRequestModel().get_reviewer_functions()
728 709
729 710 # recalculate reviewers logic, to make sure we can validate this
730 reviewer_rules = get_default_reviewers_data(
711 default_reviewers_data = get_default_reviewers_data(
731 712 owner, source_db_repo,
732 713 source_commit, target_db_repo, target_commit)
733 714
734 715 # now MERGE our given with the calculated
735 reviewer_objects = reviewer_rules['reviewers'] + reviewer_objects
716 reviewer_objects = default_reviewers_data['reviewers'] + reviewer_objects
736 717
737 718 try:
738 719 reviewers = validate_default_reviewers(
739 reviewer_objects, reviewer_rules)
720 reviewer_objects, default_reviewers_data)
740 721 except ValueError as e:
741 722 raise JSONRPCError('Reviewers Validation: {}'.format(e))
742 723
743 724 title = Optional.extract(title)
744 725 if not title:
745 726 title_source_ref = source_ref.split(':', 2)[1]
746 727 title = PullRequestModel().generate_pullrequest_title(
747 728 source=source_repo,
748 729 source_ref=title_source_ref,
749 730 target=target_repo
750 731 )
732
733 diff_info = default_reviewers_data['diff_info']
734 common_ancestor_id = diff_info['ancestor']
735 commits = diff_info['commits']
736
737 if not common_ancestor_id:
738 raise JSONRPCError('no common ancestor found')
739
740 if not commits:
741 raise JSONRPCError('no commits found')
742
743 # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
744 revisions = [commit.raw_id for commit in reversed(commits)]
745
746 # recalculate target ref based on ancestor
747 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
748 full_target_ref = ':'.join((target_ref_type, target_ref_name, common_ancestor_id))
749
751 750 # fetch renderer, if set fallback to plain in case of PR
752 751 rc_config = SettingsModel().get_all_settings()
753 752 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
754 753 description = Optional.extract(description)
755 754 description_renderer = Optional.extract(description_renderer) or default_system_renderer
756 755
757 756 pull_request = PullRequestModel().create(
758 757 created_by=owner.user_id,
759 758 source_repo=source_repo,
760 759 source_ref=full_source_ref,
761 760 target_repo=target_repo,
762 761 target_ref=full_target_ref,
763 revisions=[commit.raw_id for commit in reversed(commit_ranges)],
762 common_ancestor_id=common_ancestor_id,
763 revisions=revisions,
764 764 reviewers=reviewers,
765 765 title=title,
766 766 description=description,
767 767 description_renderer=description_renderer,
768 reviewer_data=reviewer_rules,
768 reviewer_data=default_reviewers_data,
769 769 auth_user=apiuser
770 770 )
771 771
772 772 Session().commit()
773 773 data = {
774 774 'msg': 'Created new pull request `{}`'.format(title),
775 775 'pull_request_id': pull_request.pull_request_id,
776 776 }
777 777 return data
778 778
779 779
780 780 @jsonrpc_method()
781 781 def update_pull_request(
782 782 request, apiuser, pullrequestid, repoid=Optional(None),
783 783 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
784 784 reviewers=Optional(None), update_commits=Optional(None)):
785 785 """
786 786 Updates a pull request.
787 787
788 788 :param apiuser: This is filled automatically from the |authtoken|.
789 789 :type apiuser: AuthUser
790 790 :param repoid: Optional repository name or repository ID.
791 791 :type repoid: str or int
792 792 :param pullrequestid: The pull request ID.
793 793 :type pullrequestid: int
794 794 :param title: Set the pull request title.
795 795 :type title: str
796 796 :param description: Update pull request description.
797 797 :type description: Optional(str)
798 798 :type description_renderer: Optional(str)
799 799 :param description_renderer: Update pull request renderer for the description.
800 800 It should be 'rst', 'markdown' or 'plain'
801 801 :param reviewers: Update pull request reviewers list with new value.
802 802 :type reviewers: Optional(list)
803 803 Accepts username strings or objects of the format:
804 804
805 805 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
806 806
807 807 :param update_commits: Trigger update of commits for this pull request
808 808 :type: update_commits: Optional(bool)
809 809
810 810 Example output:
811 811
812 812 .. code-block:: bash
813 813
814 814 id : <id_given_in_input>
815 815 result : {
816 816 "msg": "Updated pull request `63`",
817 817 "pull_request": <pull_request_object>,
818 818 "updated_reviewers": {
819 819 "added": [
820 820 "username"
821 821 ],
822 822 "removed": []
823 823 },
824 824 "updated_commits": {
825 825 "added": [
826 826 "<sha1_hash>"
827 827 ],
828 828 "common": [
829 829 "<sha1_hash>",
830 830 "<sha1_hash>",
831 831 ],
832 832 "removed": []
833 833 }
834 834 }
835 835 error : null
836 836 """
837 837
838 838 pull_request = get_pull_request_or_error(pullrequestid)
839 839 if Optional.extract(repoid):
840 840 repo = get_repo_or_error(repoid)
841 841 else:
842 842 repo = pull_request.target_repo
843 843
844 844 if not PullRequestModel().check_user_update(
845 845 pull_request, apiuser, api=True):
846 846 raise JSONRPCError(
847 847 'pull request `%s` update failed, no permission to update.' % (
848 848 pullrequestid,))
849 849 if pull_request.is_closed():
850 850 raise JSONRPCError(
851 851 'pull request `%s` update failed, pull request is closed' % (
852 852 pullrequestid,))
853 853
854 854 reviewer_objects = Optional.extract(reviewers) or []
855 855
856 856 if reviewer_objects:
857 857 schema = ReviewerListSchema()
858 858 try:
859 859 reviewer_objects = schema.deserialize(reviewer_objects)
860 860 except Invalid as err:
861 861 raise JSONRPCValidationError(colander_exc=err)
862 862
863 863 # validate users
864 864 for reviewer_object in reviewer_objects:
865 865 user = get_user_or_error(reviewer_object['username'])
866 866 reviewer_object['user_id'] = user.user_id
867 867
868 868 get_default_reviewers_data, get_validated_reviewers = \
869 869 PullRequestModel().get_reviewer_functions()
870 870
871 871 # re-use stored rules
872 872 reviewer_rules = pull_request.reviewer_data
873 873 try:
874 874 reviewers = get_validated_reviewers(
875 875 reviewer_objects, reviewer_rules)
876 876 except ValueError as e:
877 877 raise JSONRPCError('Reviewers Validation: {}'.format(e))
878 878 else:
879 879 reviewers = []
880 880
881 881 title = Optional.extract(title)
882 882 description = Optional.extract(description)
883 883 description_renderer = Optional.extract(description_renderer)
884 884
885 885 if title or description:
886 886 PullRequestModel().edit(
887 887 pull_request,
888 888 title or pull_request.title,
889 889 description or pull_request.description,
890 890 description_renderer or pull_request.description_renderer,
891 891 apiuser)
892 892 Session().commit()
893 893
894 894 commit_changes = {"added": [], "common": [], "removed": []}
895 895 if str2bool(Optional.extract(update_commits)):
896 896
897 897 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
898 898 raise JSONRPCError(
899 899 'Operation forbidden because pull request is in state {}, '
900 900 'only state {} is allowed.'.format(
901 901 pull_request.pull_request_state, PullRequest.STATE_CREATED))
902 902
903 903 with pull_request.set_state(PullRequest.STATE_UPDATING):
904 904 if PullRequestModel().has_valid_update_type(pull_request):
905 905 db_user = apiuser.get_instance()
906 906 update_response = PullRequestModel().update_commits(
907 907 pull_request, db_user)
908 908 commit_changes = update_response.changes or commit_changes
909 909 Session().commit()
910 910
911 911 reviewers_changes = {"added": [], "removed": []}
912 912 if reviewers:
913 913 old_calculated_status = pull_request.calculated_review_status()
914 914 added_reviewers, removed_reviewers = \
915 915 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
916 916
917 917 reviewers_changes['added'] = sorted(
918 918 [get_user_or_error(n).username for n in added_reviewers])
919 919 reviewers_changes['removed'] = sorted(
920 920 [get_user_or_error(n).username for n in removed_reviewers])
921 921 Session().commit()
922 922
923 923 # trigger status changed if change in reviewers changes the status
924 924 calculated_status = pull_request.calculated_review_status()
925 925 if old_calculated_status != calculated_status:
926 926 PullRequestModel().trigger_pull_request_hook(
927 927 pull_request, apiuser, 'review_status_change',
928 928 data={'status': calculated_status})
929 929
930 930 data = {
931 931 'msg': 'Updated pull request `{}`'.format(
932 932 pull_request.pull_request_id),
933 933 'pull_request': pull_request.get_api_data(),
934 934 'updated_commits': commit_changes,
935 935 'updated_reviewers': reviewers_changes
936 936 }
937 937
938 938 return data
939 939
940 940
941 941 @jsonrpc_method()
942 942 def close_pull_request(
943 943 request, apiuser, pullrequestid, repoid=Optional(None),
944 944 userid=Optional(OAttr('apiuser')), message=Optional('')):
945 945 """
946 946 Close the pull request specified by `pullrequestid`.
947 947
948 948 :param apiuser: This is filled automatically from the |authtoken|.
949 949 :type apiuser: AuthUser
950 950 :param repoid: Repository name or repository ID to which the pull
951 951 request belongs.
952 952 :type repoid: str or int
953 953 :param pullrequestid: ID of the pull request to be closed.
954 954 :type pullrequestid: int
955 955 :param userid: Close the pull request as this user.
956 956 :type userid: Optional(str or int)
957 957 :param message: Optional message to close the Pull Request with. If not
958 958 specified it will be generated automatically.
959 959 :type message: Optional(str)
960 960
961 961 Example output:
962 962
963 963 .. code-block:: bash
964 964
965 965 "id": <id_given_in_input>,
966 966 "result": {
967 967 "pull_request_id": "<int>",
968 968 "close_status": "<str:status_lbl>,
969 969 "closed": "<bool>"
970 970 },
971 971 "error": null
972 972
973 973 """
974 974 _ = request.translate
975 975
976 976 pull_request = get_pull_request_or_error(pullrequestid)
977 977 if Optional.extract(repoid):
978 978 repo = get_repo_or_error(repoid)
979 979 else:
980 980 repo = pull_request.target_repo
981 981
982 982 if not isinstance(userid, Optional):
983 983 if (has_superadmin_permission(apiuser) or
984 984 HasRepoPermissionAnyApi('repository.admin')(
985 985 user=apiuser, repo_name=repo.repo_name)):
986 986 apiuser = get_user_or_error(userid)
987 987 else:
988 988 raise JSONRPCError('userid is not the same as your user')
989 989
990 990 if pull_request.is_closed():
991 991 raise JSONRPCError(
992 992 'pull request `%s` is already closed' % (pullrequestid,))
993 993
994 994 # only owner or admin or person with write permissions
995 995 allowed_to_close = PullRequestModel().check_user_update(
996 996 pull_request, apiuser, api=True)
997 997
998 998 if not allowed_to_close:
999 999 raise JSONRPCError(
1000 1000 'pull request `%s` close failed, no permission to close.' % (
1001 1001 pullrequestid,))
1002 1002
1003 1003 # message we're using to close the PR, else it's automatically generated
1004 1004 message = Optional.extract(message)
1005 1005
1006 1006 # finally close the PR, with proper message comment
1007 1007 comment, status = PullRequestModel().close_pull_request_with_comment(
1008 1008 pull_request, apiuser, repo, message=message, auth_user=apiuser)
1009 1009 status_lbl = ChangesetStatus.get_status_lbl(status)
1010 1010
1011 1011 Session().commit()
1012 1012
1013 1013 data = {
1014 1014 'pull_request_id': pull_request.pull_request_id,
1015 1015 'close_status': status_lbl,
1016 1016 'closed': True,
1017 1017 }
1018 1018 return data
@@ -1,666 +1,667 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import lxml.html
24 24
25 25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 26 from rhodecode.tests import assert_session_flash
27 27 from rhodecode.tests.utils import AssertResponse, commit_change
28 28
29 29
30 30 def route_path(name, params=None, **kwargs):
31 31 import urllib
32 32
33 33 base_url = {
34 34 'repo_compare_select': '/{repo_name}/compare',
35 35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
36 36 }[name].format(**kwargs)
37 37
38 38 if params:
39 39 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
40 40 return base_url
41 41
42 42
43 43 @pytest.mark.usefixtures("autologin_user", "app")
44 44 class TestCompareView(object):
45 45
46 46 def test_compare_index_is_reached_at_least_once(self, backend):
47 47 repo = backend.repo
48 48 self.app.get(
49 49 route_path('repo_compare_select', repo_name=repo.repo_name))
50 50
51 51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 53 # Preparing the following repository structure:
54 54 #
55 55 # Origin repository has two commits:
56 56 #
57 57 # 0 1
58 58 # A -- D
59 59 #
60 60 # The fork of it has a few more commits and "D" has a commit index
61 61 # which does not exist in origin.
62 62 #
63 63 # 0 1 2 3 4
64 64 # A -- -- -- D -- E
65 65 # \- B -- C
66 66 #
67 67
68 68 fork = backend.create_repo()
69 69
70 70 # prepare fork
71 71 commit0 = commit_change(
72 72 fork.repo_name, filename='file1', content='A',
73 73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74 74
75 75 commit1 = commit_change(
76 76 fork.repo_name, filename='file1', content='B',
77 77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78 78
79 79 commit_change( # commit 2
80 80 fork.repo_name, filename='file1', content='C',
81 81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82 82
83 83 commit3 = commit_change(
84 84 fork.repo_name, filename='file1', content='D',
85 85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86 86
87 87 commit4 = commit_change(
88 88 fork.repo_name, filename='file1', content='E',
89 89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90 90
91 91 # prepare origin repository, taking just the history up to D
92 92 origin = backend.create_repo()
93 93
94 94 origin_repo = origin.scm_instance(cache=False)
95 95 origin_repo.config.clear_section('hooks')
96 96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97 97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
98 98
99 99 # Verify test fixture setup
100 100 # This does not work for git
101 101 if backend.alias != 'git':
102 102 assert 5 == len(fork.scm_instance().commit_ids)
103 103 assert 2 == len(origin_repo.commit_ids)
104 104
105 105 # Comparing the revisions
106 106 response = self.app.get(
107 107 route_path('repo_compare',
108 108 repo_name=origin.repo_name,
109 109 source_ref_type="rev", source_ref=commit3.raw_id,
110 110 target_ref_type="rev", target_ref=commit4.raw_id,
111 111 params=dict(merge='1', target_repo=fork.repo_name)
112 112 ))
113 113
114 114 compare_page = ComparePage(response)
115 115 compare_page.contains_commits([commit4])
116 116
117 117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
118 118 def test_compare_forks_on_branch_extra_commits(self, backend):
119 119 repo1 = backend.create_repo()
120 120
121 121 # commit something !
122 122 commit0 = commit_change(
123 123 repo1.repo_name, filename='file1', content='line1\n',
124 124 message='commit1', vcs_type=backend.alias, parent=None,
125 125 newfile=True)
126 126
127 127 # fork this repo
128 128 repo2 = backend.create_fork()
129 129
130 130 # add two extra commit into fork
131 131 commit1 = commit_change(
132 132 repo2.repo_name, filename='file1', content='line1\nline2\n',
133 133 message='commit2', vcs_type=backend.alias, parent=commit0)
134 134
135 135 commit2 = commit_change(
136 136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
137 137 message='commit3', vcs_type=backend.alias, parent=commit1)
138 138
139 139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
140 140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
141 141
142 142 response = self.app.get(
143 143 route_path('repo_compare',
144 144 repo_name=repo1.repo_name,
145 145 source_ref_type="branch", source_ref=commit_id2,
146 146 target_ref_type="branch", target_ref=commit_id1,
147 147 params=dict(merge='1', target_repo=repo2.repo_name)
148 148 ))
149 149
150 150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
151 151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
152 152
153 153 compare_page = ComparePage(response)
154 154 compare_page.contains_change_summary(1, 2, 0)
155 155 compare_page.contains_commits([commit1, commit2])
156 156
157 157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
158 158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
159 159
160 160 # Swap is removed when comparing branches since it's a PR feature and
161 161 # it is then a preview mode
162 162 compare_page.swap_is_hidden()
163 163 compare_page.target_source_are_disabled()
164 164
165 165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
167 167 repo1 = backend.create_repo()
168 168
169 169 # commit something !
170 170 commit0 = commit_change(
171 171 repo1.repo_name, filename='file1', content='line1\n',
172 172 message='commit1', vcs_type=backend.alias, parent=None,
173 173 newfile=True)
174 174
175 175 # fork this repo
176 176 repo2 = backend.create_fork()
177 177
178 178 # now commit something to origin repo
179 179 commit_change(
180 180 repo1.repo_name, filename='file2', content='line1file2\n',
181 181 message='commit2', vcs_type=backend.alias, parent=commit0,
182 182 newfile=True)
183 183
184 184 # add two extra commit into fork
185 185 commit1 = commit_change(
186 186 repo2.repo_name, filename='file1', content='line1\nline2\n',
187 187 message='commit2', vcs_type=backend.alias, parent=commit0)
188 188
189 189 commit2 = commit_change(
190 190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
191 191 message='commit3', vcs_type=backend.alias, parent=commit1)
192 192
193 193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
194 194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
195 195
196 196 response = self.app.get(
197 197 route_path('repo_compare',
198 198 repo_name=repo1.repo_name,
199 199 source_ref_type="branch", source_ref=commit_id2,
200 200 target_ref_type="branch", target_ref=commit_id1,
201 201 params=dict(merge='1', target_repo=repo2.repo_name),
202 202 ))
203 203
204 204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
205 205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
206 206
207 207 compare_page = ComparePage(response)
208 208 compare_page.contains_change_summary(1, 2, 0)
209 209 compare_page.contains_commits([commit1, commit2])
210 210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
211 211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
212 212
213 213 # Swap is removed when comparing branches since it's a PR feature and
214 214 # it is then a preview mode
215 215 compare_page.swap_is_hidden()
216 216 compare_page.target_source_are_disabled()
217 217
218 218 @pytest.mark.xfail_backends("svn")
 219 219 # TODO(marcink): no svn support for comparing two separate repos
220 220 def test_compare_of_unrelated_forks(self, backend):
221 221 orig = backend.create_repo(number_of_commits=1)
222 222 fork = backend.create_repo(number_of_commits=1)
223 223
224 224 response = self.app.get(
225 225 route_path('repo_compare',
226 226 repo_name=orig.repo_name,
227 227 source_ref_type="rev", source_ref="tip",
228 228 target_ref_type="rev", target_ref="tip",
229 229 params=dict(merge='1', target_repo=fork.repo_name),
230 230 ),
231 231 status=302)
232 232 response = response.follow()
233 233 response.mustcontain("Repositories unrelated.")
234 234
235 235 @pytest.mark.xfail_backends("svn")
236 236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
237 237
238 238 # repo1:
239 239 # commit0:
240 240 # commit1:
241 241 # repo1-fork- in which we will cherry pick bottom commits
242 242 # commit0:
243 243 # commit1:
244 244 # commit2: x
245 245 # commit3: x
246 246 # commit4: x
247 247 # commit5:
248 248 # make repo1, and commit1+commit2
249 249
250 250 repo1 = backend.create_repo()
251 251
252 252 # commit something !
253 253 commit0 = commit_change(
254 254 repo1.repo_name, filename='file1', content='line1\n',
255 255 message='commit1', vcs_type=backend.alias, parent=None,
256 256 newfile=True)
257 257 commit1 = commit_change(
258 258 repo1.repo_name, filename='file1', content='line1\nline2\n',
259 259 message='commit2', vcs_type=backend.alias, parent=commit0)
260 260
261 261 # fork this repo
262 262 repo2 = backend.create_fork()
263 263
264 264 # now make commit3-6
265 265 commit2 = commit_change(
266 266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
267 267 message='commit3', vcs_type=backend.alias, parent=commit1)
268 268 commit3 = commit_change(
269 269 repo1.repo_name, filename='file1',
270 270 content='line1\nline2\nline3\nline4\n', message='commit4',
271 271 vcs_type=backend.alias, parent=commit2)
272 272 commit4 = commit_change(
273 273 repo1.repo_name, filename='file1',
274 274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
275 275 vcs_type=backend.alias, parent=commit3)
276 276 commit_change( # commit 5
277 277 repo1.repo_name, filename='file1',
278 278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
279 279 message='commit6', vcs_type=backend.alias, parent=commit4)
280 280
281 281 response = self.app.get(
282 282 route_path('repo_compare',
283 283 repo_name=repo2.repo_name,
284 284 # parent of commit2, in target repo2
285 285 source_ref_type="rev", source_ref=commit1.raw_id,
286 286 target_ref_type="rev", target_ref=commit4.raw_id,
287 287 params=dict(merge='1', target_repo=repo1.repo_name),
288 288 ))
289 289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
290 290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
291 291
292 292 # files
293 293 compare_page = ComparePage(response)
294 294 compare_page.contains_change_summary(1, 3, 0)
295 295 compare_page.contains_commits([commit2, commit3, commit4])
296 296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
297 297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
298 298
299 299 @pytest.mark.xfail_backends("svn")
300 300 def test_compare_cherry_pick_commits_from_top(self, backend):
301 301 # repo1:
302 302 # commit0:
303 303 # commit1:
304 304 # repo1-fork- in which we will cherry pick bottom commits
305 305 # commit0:
306 306 # commit1:
307 307 # commit2:
308 308 # commit3: x
309 309 # commit4: x
310 310 # commit5: x
311 311
312 312 # make repo1, and commit1+commit2
313 313 repo1 = backend.create_repo()
314 314
315 315 # commit something !
316 316 commit0 = commit_change(
317 317 repo1.repo_name, filename='file1', content='line1\n',
318 318 message='commit1', vcs_type=backend.alias, parent=None,
319 319 newfile=True)
320 320 commit1 = commit_change(
321 321 repo1.repo_name, filename='file1', content='line1\nline2\n',
322 322 message='commit2', vcs_type=backend.alias, parent=commit0)
323 323
324 324 # fork this repo
325 325 backend.create_fork()
326 326
327 327 # now make commit3-6
328 328 commit2 = commit_change(
329 329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
330 330 message='commit3', vcs_type=backend.alias, parent=commit1)
331 331 commit3 = commit_change(
332 332 repo1.repo_name, filename='file1',
333 333 content='line1\nline2\nline3\nline4\n', message='commit4',
334 334 vcs_type=backend.alias, parent=commit2)
335 335 commit4 = commit_change(
336 336 repo1.repo_name, filename='file1',
337 337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
338 338 vcs_type=backend.alias, parent=commit3)
339 339 commit5 = commit_change(
340 340 repo1.repo_name, filename='file1',
341 341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
342 342 message='commit6', vcs_type=backend.alias, parent=commit4)
343 343
344 344 response = self.app.get(
345 345 route_path('repo_compare',
346 346 repo_name=repo1.repo_name,
347 347 # parent of commit3, not in source repo2
348 348 source_ref_type="rev", source_ref=commit2.raw_id,
349 349 target_ref_type="rev", target_ref=commit5.raw_id,
350 350 params=dict(merge='1'),))
351 351
352 352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
353 353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
354 354
355 355 compare_page = ComparePage(response)
356 356 compare_page.contains_change_summary(1, 3, 0)
357 357 compare_page.contains_commits([commit3, commit4, commit5])
358 358
359 359 # files
360 360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
361 361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
362 362
363 363 @pytest.mark.xfail_backends("svn")
364 364 def test_compare_remote_branches(self, backend):
365 365 repo1 = backend.repo
366 366 repo2 = backend.create_fork()
367 367
368 368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
370 370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
372 372
373 373 response = self.app.get(
374 374 route_path('repo_compare',
375 375 repo_name=repo1.repo_name,
376 376 source_ref_type="rev", source_ref=commit_id1,
377 377 target_ref_type="rev", target_ref=commit_id2,
378 378 params=dict(merge='1', target_repo=repo2.repo_name),
379 379 ))
380 380
381 381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383 383
384 384 compare_page = ComparePage(response)
385 385
386 386 # outgoing commits between those commits
387 387 compare_page.contains_commits(
388 388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389 389
390 390 # files
391 391 compare_page.contains_file_links_and_anchors([
392 392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
393 393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
394 394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
395 395 ])
396 396
397 397 @pytest.mark.xfail_backends("svn")
398 398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 399 repo1 = backend.create_repo()
400 400 r1_name = repo1.repo_name
401 401
402 402 commit0 = commit_change(
403 403 repo=r1_name, filename='file1',
404 404 content='line1', message='commit1', vcs_type=backend.alias,
405 405 newfile=True)
406 406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407 407
408 408 # fork the repo1
409 409 repo2 = backend.create_fork()
410 410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411 411
412 412 self.r2_id = repo2.repo_id
413 413 r2_name = repo2.repo_name
414 414
415 415 commit1 = commit_change(
416 416 repo=r2_name, filename='file1-fork',
417 417 content='file1-line1-from-fork', message='commit1-fork',
418 418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 419 newfile=True)
420 420
421 421 commit2 = commit_change(
422 422 repo=r2_name, filename='file2-fork',
423 423 content='file2-line1-from-fork', message='commit2-fork',
424 424 vcs_type=backend.alias, parent=commit1,
425 425 newfile=True)
426 426
427 427 commit_change( # commit 3
428 428 repo=r2_name, filename='file3-fork',
429 429 content='file3-line1-from-fork', message='commit3-fork',
430 430 vcs_type=backend.alias, parent=commit2, newfile=True)
431 431
432 432 # compare !
433 433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435 435
436 436 response = self.app.get(
437 437 route_path('repo_compare',
438 438 repo_name=r2_name,
439 439 source_ref_type="branch", source_ref=commit_id1,
440 440 target_ref_type="branch", target_ref=commit_id2,
441 441 params=dict(merge='1', target_repo=r1_name),
442 442 ))
443 443
444 444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
445 445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
446 446 response.mustcontain('No files')
447 447 response.mustcontain('No commits in this compare')
448 448
449 449 commit0 = commit_change(
450 450 repo=r1_name, filename='file2',
451 451 content='line1-added-after-fork', message='commit2-parent',
452 452 vcs_type=backend.alias, parent=None, newfile=True)
453 453
454 454 # compare !
455 455 response = self.app.get(
456 456 route_path('repo_compare',
457 457 repo_name=r2_name,
458 458 source_ref_type="branch", source_ref=commit_id1,
459 459 target_ref_type="branch", target_ref=commit_id2,
460 460 params=dict(merge='1', target_repo=r1_name),
461 461 ))
462 462
463 463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
464 464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
465 465
466 466 response.mustcontain("""commit2-parent""")
467 467 response.mustcontain("""line1-added-after-fork""")
468 468 compare_page = ComparePage(response)
469 469 compare_page.contains_change_summary(1, 1, 0)
470 470
471 471 @pytest.mark.xfail_backends("svn")
472 472 def test_compare_commits(self, backend, xhr_header):
473 473 commit0 = backend.repo.get_commit(commit_idx=0)
474 474 commit1 = backend.repo.get_commit(commit_idx=1)
475 475
476 476 response = self.app.get(
477 477 route_path('repo_compare',
478 478 repo_name=backend.repo_name,
479 479 source_ref_type="rev", source_ref=commit0.raw_id,
480 480 target_ref_type="rev", target_ref=commit1.raw_id,
481 481 params=dict(merge='1')
482 482 ),
483 483 extra_environ=xhr_header, )
484 484
485 485 # outgoing commits between those commits
486 486 compare_page = ComparePage(response)
487 compare_page.contains_commits(commits=[commit1], ancestors=[commit0])
487 compare_page.contains_commits(commits=[commit1])
488 488
489 489 def test_errors_when_comparing_unknown_source_repo(self, backend):
490 490 repo = backend.repo
491 491 badrepo = 'badrepo'
492 492
493 493 response = self.app.get(
494 494 route_path('repo_compare',
495 495 repo_name=badrepo,
496 496 source_ref_type="rev", source_ref='tip',
497 497 target_ref_type="rev", target_ref='tip',
498 498 params=dict(merge='1', target_repo=repo.repo_name)
499 499 ),
500 500 status=404)
501 501
502 502 def test_errors_when_comparing_unknown_target_repo(self, backend):
503 503 repo = backend.repo
504 504 badrepo = 'badrepo'
505 505
506 506 response = self.app.get(
507 507 route_path('repo_compare',
508 508 repo_name=repo.repo_name,
509 509 source_ref_type="rev", source_ref='tip',
510 510 target_ref_type="rev", target_ref='tip',
511 511 params=dict(merge='1', target_repo=badrepo),
512 512 ),
513 513 status=302)
514 514 redirected = response.follow()
515 515 redirected.mustcontain(
516 516 'Could not find the target repo: `{}`'.format(badrepo))
517 517
518 518 def test_compare_not_in_preview_mode(self, backend_stub):
519 519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
520 520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
521 521
522 522 response = self.app.get(
523 523 route_path('repo_compare',
524 524 repo_name=backend_stub.repo_name,
525 525 source_ref_type="rev", source_ref=commit0.raw_id,
526 526 target_ref_type="rev", target_ref=commit1.raw_id,
527 527 ))
528 528
529 529 # outgoing commits between those commits
530 530 compare_page = ComparePage(response)
531 531 compare_page.swap_is_visible()
532 532 compare_page.target_source_are_enabled()
533 533
534 534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
535 535 orig = backend_hg.create_repo(number_of_commits=1)
536 536 fork = backend_hg.create_fork()
537 537
538 538 settings_util.create_repo_rhodecode_ui(
539 539 orig, 'extensions', value='', key='largefiles', active=False)
540 540 settings_util.create_repo_rhodecode_ui(
541 541 fork, 'extensions', value='', key='largefiles', active=True)
542 542
543 543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
544 544 'MercurialRepository.compare')
545 545 with mock.patch(compare_module) as compare_mock:
546 546 compare_mock.side_effect = RepositoryRequirementError()
547 547
548 548 response = self.app.get(
549 549 route_path('repo_compare',
550 550 repo_name=orig.repo_name,
551 551 source_ref_type="rev", source_ref="tip",
552 552 target_ref_type="rev", target_ref="tip",
553 553 params=dict(merge='1', target_repo=fork.repo_name),
554 554 ),
555 555 status=302)
556 556
557 557 assert_session_flash(
558 558 response,
559 559 'Could not compare repos with different large file settings')
560 560
561 561
562 562 @pytest.mark.usefixtures("autologin_user")
563 563 class TestCompareControllerSvn(object):
564 564
565 565 def test_supports_references_with_path(self, app, backend_svn):
566 566 repo = backend_svn['svn-simple-layout']
567 567 commit_id = repo.get_commit(commit_idx=-1).raw_id
568 568 response = app.get(
569 569 route_path('repo_compare',
570 570 repo_name=repo.repo_name,
571 571 source_ref_type="tag",
572 572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
573 573 target_ref_type="tag",
574 574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
575 575 params=dict(merge='1'),
576 576 ),
577 577 status=200)
578 578
579 579 # Expecting no commits, since both paths are at the same revision
580 580 response.mustcontain('No commits in this compare')
581 581
582 582 # Should find only one file changed when comparing those two tags
583 583 response.mustcontain('example.py')
584 584 compare_page = ComparePage(response)
585 585 compare_page.contains_change_summary(1, 5, 1)
586 586
587 587 def test_shows_commits_if_different_ids(self, app, backend_svn):
588 588 repo = backend_svn['svn-simple-layout']
589 589 source_id = repo.get_commit(commit_idx=-6).raw_id
590 590 target_id = repo.get_commit(commit_idx=-1).raw_id
591 591 response = app.get(
592 592 route_path('repo_compare',
593 593 repo_name=repo.repo_name,
594 594 source_ref_type="tag",
595 595 source_ref="%s@%s" % ('tags/v0.1', source_id),
596 596 target_ref_type="tag",
597 597 target_ref="%s@%s" % ('tags/v0.2', target_id),
598 598 params=dict(merge='1')
599 599 ),
600 600 status=200)
601 601
602 602 # It should show commits
603 603 assert 'No commits in this compare' not in response.body
604 604
605 605 # Should find only one file changed when comparing those two tags
606 606 response.mustcontain('example.py')
607 607 compare_page = ComparePage(response)
608 608 compare_page.contains_change_summary(1, 5, 1)
609 609
610 610
611 611 class ComparePage(AssertResponse):
612 612 """
613 613 Abstracts the page template from the tests
614 614 """
615 615
616 616 def contains_file_links_and_anchors(self, files):
617 617 doc = lxml.html.fromstring(self.response.body)
618 618 for filename, file_id in files:
619 619 self.contains_one_anchor(file_id)
620 620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
621 621 assert len(diffblock) == 2
622 622 assert len(diffblock[0].cssselect('a[href="#%s"]' % file_id)) == 1
623 623
624 624 def contains_change_summary(self, files_changed, inserted, deleted):
625 625 template = (
626 626 '{files_changed} file{plural} changed: '
627 627 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
628 628 self.response.mustcontain(template.format(
629 629 files_changed=files_changed,
630 630 plural="s" if files_changed > 1 else "",
631 631 inserted=inserted,
632 632 deleted=deleted))
633 633
634 634 def contains_commits(self, commits, ancestors=None):
635 635 response = self.response
636 636
637 637 for commit in commits:
638 638 # Expecting to see the commit message in an element which
639 639 # has the ID "c-{commit.raw_id}"
640 640 self.element_contains('#c-' + commit.raw_id, commit.message)
641 641 self.contains_one_link(
642 642 'r%s:%s' % (commit.idx, commit.short_id),
643 643 self._commit_url(commit))
644
644 645 if ancestors:
645 646 response.mustcontain('Ancestor')
646 647 for ancestor in ancestors:
647 648 self.contains_one_link(
648 649 ancestor.short_id, self._commit_url(ancestor))
649 650
650 651 def _commit_url(self, commit):
651 652 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
652 653
653 654 def swap_is_hidden(self):
654 655 assert '<a id="btn-swap"' not in self.response.text
655 656
656 657 def swap_is_visible(self):
657 658 assert '<a id="btn-swap"' in self.response.text
658 659
659 660 def target_source_are_disabled(self):
660 661 response = self.response
661 662 response.mustcontain("var enable_fields = false;")
662 663 response.mustcontain('.select2("enable", enable_fields)')
663 664
664 665 def target_source_are_enabled(self):
665 666 response = self.response
666 667 response.mustcontain("var enable_fields = true;")
@@ -1,79 +1,87 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.lib import helpers as h
22 22 from rhodecode.lib.utils2 import safe_int
23 from rhodecode.model.pull_request import get_diff_info
24
25 REVIEWER_API_VERSION = 'V3'
23 26
24 27
25 28 def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
26 29 """
27 30 Returns json struct of a reviewer for frontend
28 31
29 32 :param user: the reviewer
30 33 :param reasons: list of strings of why they are reviewers
31 34 :param mandatory: bool, to set user as mandatory
32 35 """
33 36
34 37 return {
35 38 'user_id': user.user_id,
36 39 'reasons': reasons or [],
37 40 'rules': rules or [],
38 41 'mandatory': mandatory,
39 42 'user_group': user_group,
40 43 'username': user.username,
41 44 'first_name': user.first_name,
42 45 'last_name': user.last_name,
43 46 'user_link': h.link_to_user(user),
44 47 'gravatar_link': h.gravatar_url(user.email, 14),
45 48 }
46 49
47 50
48 51 def get_default_reviewers_data(
49 52 current_user, source_repo, source_commit, target_repo, target_commit):
53 """
54 Return json for default reviewers of a repository
55 """
50 56
51 """ Return json for default reviewers of a repository """
57 diff_info = get_diff_info(
58 source_repo, source_commit.raw_id, target_repo, target_commit.raw_id)
52 59
53 60 reasons = ['Default reviewer', 'Repository owner']
54 61 json_reviewers = [reviewer_as_json(
55 62 user=target_repo.user, reasons=reasons, mandatory=False, rules=None)]
56 63
57 64 return {
58 'api_ver': 'v1', # define version for later possible schema upgrade
65 'api_ver': REVIEWER_API_VERSION, # define version for later possible schema upgrade
66 'diff_info': diff_info,
59 67 'reviewers': json_reviewers,
60 68 'rules': {},
61 69 'rules_data': {},
62 70 }
63 71
64 72
65 73 def validate_default_reviewers(review_members, reviewer_rules):
66 74 """
67 75 Function to validate submitted reviewers against the saved rules
68 76
69 77 """
70 78 reviewers = []
71 79 reviewer_by_id = {}
72 80 for r in review_members:
73 81 reviewer_user_id = safe_int(r['user_id'])
74 82 entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['rules'])
75 83
76 84 reviewer_by_id[reviewer_user_id] = entry
77 85 reviewers.append(entry)
78 86
79 87 return reviewers
@@ -1,1512 +1,1520 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, EmptyRepositoryError)
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 ChangesetComment, ChangesetStatus, Repository)
47 from rhodecode.model.db import (
48 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat., we use for OLD PRs a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.pull_request_state,
112 112 pr.work_in_progress, pr.target_repo.repo_name),
113 113 'name_raw': pr.pull_request_id,
114 114 'status': _render('pullrequest_status',
115 115 pr.calculated_review_status()),
116 116 'title': _render('pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'state': pr.pull_request_state,
125 125 'author': _render('pullrequest_author',
126 126 pr.author.full_contact, ),
127 127 'author_raw': pr.author.full_name,
128 128 'comments': _render('pullrequest_comments', len(comments)),
129 129 'comments_raw': len(comments),
130 130 'closed': pr.is_closed(),
131 131 })
132 132
133 133 data = ({
134 134 'draw': draw,
135 135 'data': data,
136 136 'recordsTotal': pull_requests_total_count,
137 137 'recordsFiltered': pull_requests_total_count,
138 138 })
139 139 return data
140 140
141 141 @LoginRequired()
142 142 @HasRepoPermissionAnyDecorator(
143 143 'repository.read', 'repository.write', 'repository.admin')
144 144 @view_config(
145 145 route_name='pullrequest_show_all', request_method='GET',
146 146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 147 def pull_request_list(self):
148 148 c = self.load_default_context()
149 149
150 150 req_get = self.request.GET
151 151 c.source = str2bool(req_get.get('source'))
152 152 c.closed = str2bool(req_get.get('closed'))
153 153 c.my = str2bool(req_get.get('my'))
154 154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156 156
157 157 c.active = 'open'
158 158 if c.my:
159 159 c.active = 'my'
160 160 if c.closed:
161 161 c.active = 'closed'
162 162 if c.awaiting_review and not c.source:
163 163 c.active = 'awaiting'
164 164 if c.source and not c.awaiting_review:
165 165 c.active = 'source'
166 166 if c.awaiting_my_review:
167 167 c.active = 'awaiting_my'
168 168
169 169 return self._get_template_context(c)
170 170
171 171 @LoginRequired()
172 172 @HasRepoPermissionAnyDecorator(
173 173 'repository.read', 'repository.write', 'repository.admin')
174 174 @view_config(
175 175 route_name='pullrequest_show_all_data', request_method='GET',
176 176 renderer='json_ext', xhr=True)
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 ancestor_commit,
213 214 source_ref_id, target_ref_id,
214 215 target_commit, source_commit, diff_limit, file_limit,
215 216 fulldiff, hide_whitespace_changes, diff_context):
216 217
218 target_ref_id = ancestor_commit.raw_id
217 219 vcs_diff = PullRequestModel().get_diff(
218 220 source_repo, source_ref_id, target_ref_id,
219 221 hide_whitespace_changes, diff_context)
220 222
221 223 diff_processor = diffs.DiffProcessor(
222 224 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 225 file_limit=file_limit, show_full_diff=fulldiff)
224 226
225 227 _parsed = diff_processor.prepare()
226 228
227 229 diffset = codeblocks.DiffSet(
228 230 repo_name=self.db_repo_name,
229 231 source_repo_name=source_repo_name,
230 232 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 233 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 234 )
233 235 diffset = self.path_filter.render_patchset_filtered(
234 236 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235 237
236 238 return diffset
237 239
238 240 def _get_range_diffset(self, source_scm, source_repo,
239 241 commit1, commit2, diff_limit, file_limit,
240 242 fulldiff, hide_whitespace_changes, diff_context):
241 243 vcs_diff = source_scm.get_diff(
242 244 commit1, commit2,
243 245 ignore_whitespace=hide_whitespace_changes,
244 246 context=diff_context)
245 247
246 248 diff_processor = diffs.DiffProcessor(
247 249 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 250 file_limit=file_limit, show_full_diff=fulldiff)
249 251
250 252 _parsed = diff_processor.prepare()
251 253
252 254 diffset = codeblocks.DiffSet(
253 255 repo_name=source_repo.repo_name,
254 256 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 257 target_node_getter=codeblocks.diffset_node_getter(commit2))
256 258
257 259 diffset = self.path_filter.render_patchset_filtered(
258 260 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259 261
260 262 return diffset
261 263
262 264 @LoginRequired()
263 265 @HasRepoPermissionAnyDecorator(
264 266 'repository.read', 'repository.write', 'repository.admin')
265 267 @view_config(
266 268 route_name='pullrequest_show', request_method='GET',
267 269 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 270 def pull_request_show(self):
269 271 _ = self.request.translate
270 272 c = self.load_default_context()
271 273
272 274 pull_request = PullRequest.get_or_404(
273 275 self.request.matchdict['pull_request_id'])
274 276 pull_request_id = pull_request.pull_request_id
275 277
276 278 c.state_progressing = pull_request.is_state_changing()
277 279
278 280 _new_state = {
279 281 'created': PullRequest.STATE_CREATED,
280 282 }.get(self.request.GET.get('force_state'))
283
281 284 if c.is_super_admin and _new_state:
282 285 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 286 h.flash(
284 287 _('Pull Request state was force changed to `{}`').format(_new_state),
285 288 category='success')
286 289 Session().commit()
287 290
288 291 raise HTTPFound(h.route_path(
289 292 'pullrequest_show', repo_name=self.db_repo_name,
290 293 pull_request_id=pull_request_id))
291 294
292 295 version = self.request.GET.get('version')
293 296 from_version = self.request.GET.get('from_version') or version
294 297 merge_checks = self.request.GET.get('merge_checks')
295 298 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296 299
297 300 # fetch global flags of ignore ws or context lines
298 301 diff_context = diffs.get_diff_context(self.request)
299 302 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300 303
301 304 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302 305
303 306 (pull_request_latest,
304 307 pull_request_at_ver,
305 308 pull_request_display_obj,
306 309 at_version) = PullRequestModel().get_pr_version(
307 310 pull_request_id, version=version)
308 311 pr_closed = pull_request_latest.is_closed()
309 312
310 313 if pr_closed and (version or from_version):
311 314 # not allow to browse versions
312 315 raise HTTPFound(h.route_path(
313 316 'pullrequest_show', repo_name=self.db_repo_name,
314 317 pull_request_id=pull_request_id))
315 318
316 319 versions = pull_request_display_obj.versions()
317 320 # used to store per-commit range diffs
318 321 c.changes = collections.OrderedDict()
319 322 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320 323
321 324 c.at_version = at_version
322 325 c.at_version_num = (at_version
323 326 if at_version and at_version != 'latest'
324 327 else None)
325 328 c.at_version_pos = ChangesetComment.get_index_from_version(
326 329 c.at_version_num, versions)
327 330
328 331 (prev_pull_request_latest,
329 332 prev_pull_request_at_ver,
330 333 prev_pull_request_display_obj,
331 334 prev_at_version) = PullRequestModel().get_pr_version(
332 335 pull_request_id, version=from_version)
333 336
334 337 c.from_version = prev_at_version
335 338 c.from_version_num = (prev_at_version
336 339 if prev_at_version and prev_at_version != 'latest'
337 340 else None)
338 341 c.from_version_pos = ChangesetComment.get_index_from_version(
339 342 c.from_version_num, versions)
340 343
341 344 # define if we're in COMPARE mode or VIEW at version mode
342 345 compare = at_version != prev_at_version
343 346
344 347 # pull_requests repo_name we opened it against
345 348 # ie. target_repo must match
346 349 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 350 raise HTTPNotFound()
348 351
349 352 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 353 pull_request_at_ver)
351 354
352 355 c.pull_request = pull_request_display_obj
353 356 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 357 c.pull_request_latest = pull_request_latest
355 358
356 359 if compare or (at_version and not at_version == 'latest'):
357 360 c.allowed_to_change_status = False
358 361 c.allowed_to_update = False
359 362 c.allowed_to_merge = False
360 363 c.allowed_to_delete = False
361 364 c.allowed_to_comment = False
362 365 c.allowed_to_close = False
363 366 else:
364 367 can_change_status = PullRequestModel().check_user_change_status(
365 368 pull_request_at_ver, self._rhodecode_user)
366 369 c.allowed_to_change_status = can_change_status and not pr_closed
367 370
368 371 c.allowed_to_update = PullRequestModel().check_user_update(
369 372 pull_request_latest, self._rhodecode_user) and not pr_closed
370 373 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 374 pull_request_latest, self._rhodecode_user) and not pr_closed
372 375 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 376 pull_request_latest, self._rhodecode_user) and not pr_closed
374 377 c.allowed_to_comment = not pr_closed
375 378 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376 379
377 380 c.forbid_adding_reviewers = False
378 381 c.forbid_author_to_review = False
379 382 c.forbid_commit_author_to_review = False
380 383
381 384 if pull_request_latest.reviewer_data and \
382 385 'rules' in pull_request_latest.reviewer_data:
383 386 rules = pull_request_latest.reviewer_data['rules'] or {}
384 387 try:
385 388 c.forbid_adding_reviewers = rules.get(
386 389 'forbid_adding_reviewers')
387 390 c.forbid_author_to_review = rules.get(
388 391 'forbid_author_to_review')
389 392 c.forbid_commit_author_to_review = rules.get(
390 393 'forbid_commit_author_to_review')
391 394 except Exception:
392 395 pass
393 396
394 397 # check merge capabilities
395 398 _merge_check = MergeCheck.validate(
396 399 pull_request_latest, auth_user=self._rhodecode_user,
397 400 translator=self.request.translate,
398 401 force_shadow_repo_refresh=force_refresh)
399 402
400 403 c.pr_merge_errors = _merge_check.error_details
401 404 c.pr_merge_possible = not _merge_check.failed
402 405 c.pr_merge_message = _merge_check.merge_msg
403 406 c.pr_merge_source_commit = _merge_check.source_commit
404 407 c.pr_merge_target_commit = _merge_check.target_commit
405 408
406 409 c.pr_merge_info = MergeCheck.get_merge_conditions(
407 410 pull_request_latest, translator=self.request.translate)
408 411
409 412 c.pull_request_review_status = _merge_check.review_status
410 413 if merge_checks:
411 414 self.request.override_renderer = \
412 415 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
413 416 return self._get_template_context(c)
414 417
415 418 comments_model = CommentsModel()
416 419
417 420 # reviewers and statuses
418 421 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
419 422 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
420 423
421 424 # GENERAL COMMENTS with versions #
422 425 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
423 426 q = q.order_by(ChangesetComment.comment_id.asc())
424 427 general_comments = q
425 428
426 429 # pick comments we want to render at current version
427 430 c.comment_versions = comments_model.aggregate_comments(
428 431 general_comments, versions, c.at_version_num)
429 432 c.comments = c.comment_versions[c.at_version_num]['until']
430 433
431 434 # INLINE COMMENTS with versions #
432 435 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
433 436 q = q.order_by(ChangesetComment.comment_id.asc())
434 437 inline_comments = q
435 438
436 439 c.inline_versions = comments_model.aggregate_comments(
437 440 inline_comments, versions, c.at_version_num, inline=True)
438 441
439 442 # TODOs
440 443 c.unresolved_comments = CommentsModel() \
441 444 .get_pull_request_unresolved_todos(pull_request)
442 445 c.resolved_comments = CommentsModel() \
443 446 .get_pull_request_resolved_todos(pull_request)
444 447
445 448 # inject latest version
446 449 latest_ver = PullRequest.get_pr_display_object(
447 450 pull_request_latest, pull_request_latest)
448 451
449 452 c.versions = versions + [latest_ver]
450 453
451 454 # if we use version, then do not show later comments
452 455 # than current version
453 456 display_inline_comments = collections.defaultdict(
454 457 lambda: collections.defaultdict(list))
455 458 for co in inline_comments:
456 459 if c.at_version_num:
457 460 # pick comments that are at least UPTO given version, so we
458 461 # don't render comments for higher version
459 462 should_render = co.pull_request_version_id and \
460 463 co.pull_request_version_id <= c.at_version_num
461 464 else:
462 465 # showing all, for 'latest'
463 466 should_render = True
464 467
465 468 if should_render:
466 469 display_inline_comments[co.f_path][co.line_no].append(co)
467 470
468 471 # load diff data into template context, if we use compare mode then
469 472 # diff is calculated based on changes between versions of PR
470 473
471 474 source_repo = pull_request_at_ver.source_repo
472 475 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
473 476
474 477 target_repo = pull_request_at_ver.target_repo
475 478 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
476 479
477 480 if compare:
478 481 # in compare switch the diff base to latest commit from prev version
479 482 target_ref_id = prev_pull_request_display_obj.revisions[0]
480 483
481 484 # despite opening commits for bookmarks/branches/tags, we always
482 485 # convert this to rev to prevent changes after bookmark or branch change
483 486 c.source_ref_type = 'rev'
484 487 c.source_ref = source_ref_id
485 488
486 489 c.target_ref_type = 'rev'
487 490 c.target_ref = target_ref_id
488 491
489 492 c.source_repo = source_repo
490 493 c.target_repo = target_repo
491 494
492 495 c.commit_ranges = []
493 496 source_commit = EmptyCommit()
494 497 target_commit = EmptyCommit()
495 498 c.missing_requirements = False
496 499
497 500 source_scm = source_repo.scm_instance()
498 501 target_scm = target_repo.scm_instance()
499 502
500 503 shadow_scm = None
501 504 try:
502 505 shadow_scm = pull_request_latest.get_shadow_repo()
503 506 except Exception:
504 507 log.debug('Failed to get shadow repo', exc_info=True)
505 508 # try first the existing source_repo, and then shadow
506 509 # repo if we can obtain one
507 510 commits_source_repo = source_scm
508 511 if shadow_scm:
509 512 commits_source_repo = shadow_scm
510 513
511 514 c.commits_source_repo = commits_source_repo
512 515 c.ancestor = None # set it to None, to hide it from PR view
513 516
514 517 # empty version means latest, so we keep this to prevent
515 518 # double caching
516 519 version_normalized = version or 'latest'
517 520 from_version_normalized = from_version or 'latest'
518 521
519 522 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
520 523 cache_file_path = diff_cache_exist(
521 524 cache_path, 'pull_request', pull_request_id, version_normalized,
522 525 from_version_normalized, source_ref_id, target_ref_id,
523 526 hide_whitespace_changes, diff_context, c.fulldiff)
524 527
525 528 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
526 529 force_recache = self.get_recache_flag()
527 530
528 531 cached_diff = None
529 532 if caching_enabled:
530 533 cached_diff = load_cached_diff(cache_file_path)
531 534
532 535 has_proper_commit_cache = (
533 536 cached_diff and cached_diff.get('commits')
534 537 and len(cached_diff.get('commits', [])) == 5
535 538 and cached_diff.get('commits')[0]
536 539 and cached_diff.get('commits')[3])
537 540
538 541 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
539 542 diff_commit_cache = \
540 543 (ancestor_commit, commit_cache, missing_requirements,
541 544 source_commit, target_commit) = cached_diff['commits']
542 545 else:
543 546 # NOTE(marcink): we reach potentially unreachable errors when a PR has
544 547 # merge errors resulting in potentially hidden commits in the shadow repo.
545 548 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
546 549 and _merge_check.merge_response
547 550 maybe_unreachable = maybe_unreachable \
548 551 and _merge_check.merge_response.metadata.get('unresolved_files')
549 552 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
550 553 diff_commit_cache = \
551 554 (ancestor_commit, commit_cache, missing_requirements,
552 555 source_commit, target_commit) = self.get_commits(
553 556 commits_source_repo,
554 557 pull_request_at_ver,
555 558 source_commit,
556 559 source_ref_id,
557 560 source_scm,
558 561 target_commit,
559 562 target_ref_id,
560 target_scm, maybe_unreachable=maybe_unreachable)
563 target_scm,
564 maybe_unreachable=maybe_unreachable)
561 565
562 566 # register our commit range
563 567 for comm in commit_cache.values():
564 568 c.commit_ranges.append(comm)
565 569
566 570 c.missing_requirements = missing_requirements
567 571 c.ancestor_commit = ancestor_commit
568 572 c.statuses = source_repo.statuses(
569 573 [x.raw_id for x in c.commit_ranges])
570 574
571 575 # auto collapse if we have more than limit
572 576 collapse_limit = diffs.DiffProcessor._collapse_commits_over
573 577 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
574 578 c.compare_mode = compare
575 579
576 580 # diff_limit is the old behavior, will cut off the whole diff
577 581 # if the limit is applied otherwise will just hide the
578 582 # big files from the front-end
579 583 diff_limit = c.visual.cut_off_limit_diff
580 584 file_limit = c.visual.cut_off_limit_file
581 585
582 586 c.missing_commits = False
583 587 if (c.missing_requirements
584 588 or isinstance(source_commit, EmptyCommit)
585 589 or source_commit == target_commit):
586 590
587 591 c.missing_commits = True
588 592 else:
589 593 c.inline_comments = display_inline_comments
590 594
591 595 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
592 596 if not force_recache and has_proper_diff_cache:
593 597 c.diffset = cached_diff['diff']
594 (ancestor_commit, commit_cache, missing_requirements,
595 source_commit, target_commit) = cached_diff['commits']
596 598 else:
597 599 c.diffset = self._get_diffset(
598 600 c.source_repo.repo_name, commits_source_repo,
601 c.ancestor_commit,
599 602 source_ref_id, target_ref_id,
600 603 target_commit, source_commit,
601 604 diff_limit, file_limit, c.fulldiff,
602 605 hide_whitespace_changes, diff_context)
603 606
604 607 # save cached diff
605 608 if caching_enabled:
606 609 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
607 610
608 611 c.limited_diff = c.diffset.limited_diff
609 612
610 613 # calculate removed files that are bound to comments
611 614 comment_deleted_files = [
612 615 fname for fname in display_inline_comments
613 616 if fname not in c.diffset.file_stats]
614 617
615 618 c.deleted_files_comments = collections.defaultdict(dict)
616 619 for fname, per_line_comments in display_inline_comments.items():
617 620 if fname in comment_deleted_files:
618 621 c.deleted_files_comments[fname]['stats'] = 0
619 622 c.deleted_files_comments[fname]['comments'] = list()
620 623 for lno, comments in per_line_comments.items():
621 624 c.deleted_files_comments[fname]['comments'].extend(comments)
622 625
623 626 # maybe calculate the range diff
624 627 if c.range_diff_on:
625 628 # TODO(marcink): set whitespace/context
626 629 context_lcl = 3
627 630 ign_whitespace_lcl = False
628 631
629 632 for commit in c.commit_ranges:
630 633 commit2 = commit
631 634 commit1 = commit.first_parent
632 635
633 636 range_diff_cache_file_path = diff_cache_exist(
634 637 cache_path, 'diff', commit.raw_id,
635 638 ign_whitespace_lcl, context_lcl, c.fulldiff)
636 639
637 640 cached_diff = None
638 641 if caching_enabled:
639 642 cached_diff = load_cached_diff(range_diff_cache_file_path)
640 643
641 644 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
642 645 if not force_recache and has_proper_diff_cache:
643 646 diffset = cached_diff['diff']
644 647 else:
645 648 diffset = self._get_range_diffset(
646 649 commits_source_repo, source_repo,
647 650 commit1, commit2, diff_limit, file_limit,
648 651 c.fulldiff, ign_whitespace_lcl, context_lcl
649 652 )
650 653
651 654 # save cached diff
652 655 if caching_enabled:
653 656 cache_diff(range_diff_cache_file_path, diffset, None)
654 657
655 658 c.changes[commit.raw_id] = diffset
656 659
657 660 # this is a hack to properly display links, when creating PR, the
658 661 # compare view and others uses different notation, and
659 662 # compare_commits.mako renders links based on the target_repo.
660 663 # We need to swap that here to generate it properly on the html side
661 664 c.target_repo = c.source_repo
662 665
663 666 c.commit_statuses = ChangesetStatus.STATUSES
664 667
665 668 c.show_version_changes = not pr_closed
666 669 if c.show_version_changes:
667 670 cur_obj = pull_request_at_ver
668 671 prev_obj = prev_pull_request_at_ver
669 672
670 673 old_commit_ids = prev_obj.revisions
671 674 new_commit_ids = cur_obj.revisions
672 675 commit_changes = PullRequestModel()._calculate_commit_id_changes(
673 676 old_commit_ids, new_commit_ids)
674 677 c.commit_changes_summary = commit_changes
675 678
676 679 # calculate the diff for commits between versions
677 680 c.commit_changes = []
678 mark = lambda cs, fw: list(
679 h.itertools.izip_longest([], cs, fillvalue=fw))
681
682 def mark(cs, fw):
683 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
684
680 685 for c_type, raw_id in mark(commit_changes.added, 'a') \
681 686 + mark(commit_changes.removed, 'r') \
682 687 + mark(commit_changes.common, 'c'):
683 688
684 689 if raw_id in commit_cache:
685 690 commit = commit_cache[raw_id]
686 691 else:
687 692 try:
688 693 commit = commits_source_repo.get_commit(raw_id)
689 694 except CommitDoesNotExistError:
690 695 # in case we fail extracting still use "dummy" commit
691 696 # for display in commit diff
692 697 commit = h.AttributeDict(
693 698 {'raw_id': raw_id,
694 699 'message': 'EMPTY or MISSING COMMIT'})
695 700 c.commit_changes.append([c_type, commit])
696 701
697 702 # current user review statuses for each version
698 703 c.review_versions = {}
699 704 if self._rhodecode_user.user_id in allowed_reviewers:
700 705 for co in general_comments:
701 706 if co.author.user_id == self._rhodecode_user.user_id:
702 707 status = co.status_change
703 708 if status:
704 709 _ver_pr = status[0].comment.pull_request_version_id
705 710 c.review_versions[_ver_pr] = status[0]
706 711
707 712 return self._get_template_context(c)
708 713
709 714 def get_commits(
710 715 self, commits_source_repo, pull_request_at_ver, source_commit,
711 716 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
712 717 maybe_unreachable=False):
713 718
714 719 commit_cache = collections.OrderedDict()
715 720 missing_requirements = False
716 721
717 722 try:
718 723 pre_load = ["author", "date", "message", "branch", "parents"]
719 724
720 725 pull_request_commits = pull_request_at_ver.revisions
721 726 log.debug('Loading %s commits from %s',
722 727 len(pull_request_commits), commits_source_repo)
723 728
724 729 for rev in pull_request_commits:
725 730 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
726 731 maybe_unreachable=maybe_unreachable)
727 732 commit_cache[comm.raw_id] = comm
728 733
729 734 # Order here matters, we first need to get target, and then
730 735 # the source
731 736 target_commit = commits_source_repo.get_commit(
732 737 commit_id=safe_str(target_ref_id))
733 738
734 739 source_commit = commits_source_repo.get_commit(
735 740 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
736 741 except CommitDoesNotExistError:
737 742 log.warning('Failed to get commit from `{}` repo'.format(
738 743 commits_source_repo), exc_info=True)
739 744 except RepositoryRequirementError:
740 745 log.warning('Failed to get all required data from repo', exc_info=True)
741 746 missing_requirements = True
742 ancestor_commit = None
747
748 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
749
743 750 try:
744 ancestor_id = source_scm.get_common_ancestor(
745 source_commit.raw_id, target_commit.raw_id, target_scm)
746 ancestor_commit = source_scm.get_commit(ancestor_id)
751 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
747 752 except Exception:
748 753 ancestor_commit = None
754
749 755 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
750 756
751 757 def assure_not_empty_repo(self):
752 758 _ = self.request.translate
753 759
754 760 try:
755 761 self.db_repo.scm_instance().get_commit()
756 762 except EmptyRepositoryError:
757 763 h.flash(h.literal(_('There are no commits yet')),
758 764 category='warning')
759 765 raise HTTPFound(
760 766 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
761 767
762 768 @LoginRequired()
763 769 @NotAnonymous()
764 770 @HasRepoPermissionAnyDecorator(
765 771 'repository.read', 'repository.write', 'repository.admin')
766 772 @view_config(
767 773 route_name='pullrequest_new', request_method='GET',
768 774 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
769 775 def pull_request_new(self):
770 776 _ = self.request.translate
771 777 c = self.load_default_context()
772 778
773 779 self.assure_not_empty_repo()
774 780 source_repo = self.db_repo
775 781
776 782 commit_id = self.request.GET.get('commit')
777 783 branch_ref = self.request.GET.get('branch')
778 784 bookmark_ref = self.request.GET.get('bookmark')
779 785
780 786 try:
781 787 source_repo_data = PullRequestModel().generate_repo_data(
782 788 source_repo, commit_id=commit_id,
783 789 branch=branch_ref, bookmark=bookmark_ref,
784 790 translator=self.request.translate)
785 791 except CommitDoesNotExistError as e:
786 792 log.exception(e)
787 793 h.flash(_('Commit does not exist'), 'error')
788 794 raise HTTPFound(
789 795 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
790 796
791 797 default_target_repo = source_repo
792 798
793 799 if source_repo.parent and c.has_origin_repo_read_perm:
794 800 parent_vcs_obj = source_repo.parent.scm_instance()
795 801 if parent_vcs_obj and not parent_vcs_obj.is_empty():
796 802 # change default if we have a parent repo
797 803 default_target_repo = source_repo.parent
798 804
799 805 target_repo_data = PullRequestModel().generate_repo_data(
800 806 default_target_repo, translator=self.request.translate)
801 807
802 808 selected_source_ref = source_repo_data['refs']['selected_ref']
803 809 title_source_ref = ''
804 810 if selected_source_ref:
805 811 title_source_ref = selected_source_ref.split(':', 2)[1]
806 812 c.default_title = PullRequestModel().generate_pullrequest_title(
807 813 source=source_repo.repo_name,
808 814 source_ref=title_source_ref,
809 815 target=default_target_repo.repo_name
810 816 )
811 817
812 818 c.default_repo_data = {
813 819 'source_repo_name': source_repo.repo_name,
814 820 'source_refs_json': json.dumps(source_repo_data),
815 821 'target_repo_name': default_target_repo.repo_name,
816 822 'target_refs_json': json.dumps(target_repo_data),
817 823 }
818 824 c.default_source_ref = selected_source_ref
819 825
820 826 return self._get_template_context(c)
821 827
822 828 @LoginRequired()
823 829 @NotAnonymous()
824 830 @HasRepoPermissionAnyDecorator(
825 831 'repository.read', 'repository.write', 'repository.admin')
826 832 @view_config(
827 833 route_name='pullrequest_repo_refs', request_method='GET',
828 834 renderer='json_ext', xhr=True)
829 835 def pull_request_repo_refs(self):
830 836 self.load_default_context()
831 837 target_repo_name = self.request.matchdict['target_repo_name']
832 838 repo = Repository.get_by_repo_name(target_repo_name)
833 839 if not repo:
834 840 raise HTTPNotFound()
835 841
836 842 target_perm = HasRepoPermissionAny(
837 843 'repository.read', 'repository.write', 'repository.admin')(
838 844 target_repo_name)
839 845 if not target_perm:
840 846 raise HTTPNotFound()
841 847
842 848 return PullRequestModel().generate_repo_data(
843 849 repo, translator=self.request.translate)
844 850
845 851 @LoginRequired()
846 852 @NotAnonymous()
847 853 @HasRepoPermissionAnyDecorator(
848 854 'repository.read', 'repository.write', 'repository.admin')
849 855 @view_config(
850 856 route_name='pullrequest_repo_targets', request_method='GET',
851 857 renderer='json_ext', xhr=True)
852 858 def pullrequest_repo_targets(self):
853 859 _ = self.request.translate
854 860 filter_query = self.request.GET.get('query')
855 861
856 862 # get the parents
857 863 parent_target_repos = []
858 864 if self.db_repo.parent:
859 865 parents_query = Repository.query() \
860 866 .order_by(func.length(Repository.repo_name)) \
861 867 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
862 868
863 869 if filter_query:
864 870 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
865 871 parents_query = parents_query.filter(
866 872 Repository.repo_name.ilike(ilike_expression))
867 873 parents = parents_query.limit(20).all()
868 874
869 875 for parent in parents:
870 876 parent_vcs_obj = parent.scm_instance()
871 877 if parent_vcs_obj and not parent_vcs_obj.is_empty():
872 878 parent_target_repos.append(parent)
873 879
874 880 # get other forks, and repo itself
875 881 query = Repository.query() \
876 882 .order_by(func.length(Repository.repo_name)) \
877 883 .filter(
878 884 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
879 885 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
880 886 ) \
881 887 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
882 888
883 889 if filter_query:
884 890 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
885 891 query = query.filter(Repository.repo_name.ilike(ilike_expression))
886 892
887 893 limit = max(20 - len(parent_target_repos), 5) # not less then 5
888 894 target_repos = query.limit(limit).all()
889 895
890 896 all_target_repos = target_repos + parent_target_repos
891 897
892 898 repos = []
893 899 # This checks permissions to the repositories
894 900 for obj in ScmModel().get_repos(all_target_repos):
895 901 repos.append({
896 902 'id': obj['name'],
897 903 'text': obj['name'],
898 904 'type': 'repo',
899 905 'repo_id': obj['dbrepo']['repo_id'],
900 906 'repo_type': obj['dbrepo']['repo_type'],
901 907 'private': obj['dbrepo']['private'],
902 908
903 909 })
904 910
905 911 data = {
906 912 'more': False,
907 913 'results': [{
908 914 'text': _('Repositories'),
909 915 'children': repos
910 916 }] if repos else []
911 917 }
912 918 return data
913 919
914 920 @LoginRequired()
915 921 @NotAnonymous()
916 922 @HasRepoPermissionAnyDecorator(
917 923 'repository.read', 'repository.write', 'repository.admin')
918 924 @CSRFRequired()
919 925 @view_config(
920 926 route_name='pullrequest_create', request_method='POST',
921 927 renderer=None)
922 928 def pull_request_create(self):
923 929 _ = self.request.translate
924 930 self.assure_not_empty_repo()
925 931 self.load_default_context()
926 932
927 933 controls = peppercorn.parse(self.request.POST.items())
928 934
929 935 try:
930 936 form = PullRequestForm(
931 937 self.request.translate, self.db_repo.repo_id)()
932 938 _form = form.to_python(controls)
933 939 except formencode.Invalid as errors:
934 940 if errors.error_dict.get('revisions'):
935 941 msg = 'Revisions: %s' % errors.error_dict['revisions']
936 942 elif errors.error_dict.get('pullrequest_title'):
937 943 msg = errors.error_dict.get('pullrequest_title')
938 944 else:
939 945 msg = _('Error creating pull request: {}').format(errors)
940 946 log.exception(msg)
941 947 h.flash(msg, 'error')
942 948
943 949 # would rather just go back to form ...
944 950 raise HTTPFound(
945 951 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
946 952
947 953 source_repo = _form['source_repo']
948 954 source_ref = _form['source_ref']
949 955 target_repo = _form['target_repo']
950 956 target_ref = _form['target_ref']
951 957 commit_ids = _form['revisions'][::-1]
958 common_ancestor_id = _form['common_ancestor']
952 959
953 960 # find the ancestor for this pr
954 961 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
955 962 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
956 963
957 964 if not (source_db_repo or target_db_repo):
958 965 h.flash(_('source_repo or target repo not found'), category='error')
959 966 raise HTTPFound(
960 967 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
961 968
962 969 # re-check permissions again here
963 970 # source_repo we must have read permissions
964 971
965 972 source_perm = HasRepoPermissionAny(
966 973 'repository.read', 'repository.write', 'repository.admin')(
967 974 source_db_repo.repo_name)
968 975 if not source_perm:
969 976 msg = _('Not Enough permissions to source repo `{}`.'.format(
970 977 source_db_repo.repo_name))
971 978 h.flash(msg, category='error')
972 979 # copy the args back to redirect
973 980 org_query = self.request.GET.mixed()
974 981 raise HTTPFound(
975 982 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
976 983 _query=org_query))
977 984
978 985 # target repo we must have read permissions, and also later on
979 986 # we want to check branch permissions here
980 987 target_perm = HasRepoPermissionAny(
981 988 'repository.read', 'repository.write', 'repository.admin')(
982 989 target_db_repo.repo_name)
983 990 if not target_perm:
984 991 msg = _('Not Enough permissions to target repo `{}`.'.format(
985 992 target_db_repo.repo_name))
986 993 h.flash(msg, category='error')
987 994 # copy the args back to redirect
988 995 org_query = self.request.GET.mixed()
989 996 raise HTTPFound(
990 997 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
991 998 _query=org_query))
992 999
993 1000 source_scm = source_db_repo.scm_instance()
994 1001 target_scm = target_db_repo.scm_instance()
995 1002
996 1003 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
997 1004 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
998 1005
999 1006 ancestor = source_scm.get_common_ancestor(
1000 1007 source_commit.raw_id, target_commit.raw_id, target_scm)
1001 1008
1002 1009 # recalculate target ref based on ancestor
1003 1010 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1004 1011 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1005 1012
1006 1013 get_default_reviewers_data, validate_default_reviewers = \
1007 1014 PullRequestModel().get_reviewer_functions()
1008 1015
1009 1016 # recalculate reviewers logic, to make sure we can validate this
1010 1017 reviewer_rules = get_default_reviewers_data(
1011 1018 self._rhodecode_db_user, source_db_repo,
1012 1019 source_commit, target_db_repo, target_commit)
1013 1020
1014 1021 given_reviewers = _form['review_members']
1015 1022 reviewers = validate_default_reviewers(
1016 1023 given_reviewers, reviewer_rules)
1017 1024
1018 1025 pullrequest_title = _form['pullrequest_title']
1019 1026 title_source_ref = source_ref.split(':', 2)[1]
1020 1027 if not pullrequest_title:
1021 1028 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1022 1029 source=source_repo,
1023 1030 source_ref=title_source_ref,
1024 1031 target=target_repo
1025 1032 )
1026 1033
1027 1034 description = _form['pullrequest_desc']
1028 1035 description_renderer = _form['description_renderer']
1029 1036
1030 1037 try:
1031 1038 pull_request = PullRequestModel().create(
1032 1039 created_by=self._rhodecode_user.user_id,
1033 1040 source_repo=source_repo,
1034 1041 source_ref=source_ref,
1035 1042 target_repo=target_repo,
1036 1043 target_ref=target_ref,
1037 1044 revisions=commit_ids,
1045 common_ancestor_id=common_ancestor_id,
1038 1046 reviewers=reviewers,
1039 1047 title=pullrequest_title,
1040 1048 description=description,
1041 1049 description_renderer=description_renderer,
1042 1050 reviewer_data=reviewer_rules,
1043 1051 auth_user=self._rhodecode_user
1044 1052 )
1045 1053 Session().commit()
1046 1054
1047 1055 h.flash(_('Successfully opened new pull request'),
1048 1056 category='success')
1049 1057 except Exception:
1050 1058 msg = _('Error occurred during creation of this pull request.')
1051 1059 log.exception(msg)
1052 1060 h.flash(msg, category='error')
1053 1061
1054 1062 # copy the args back to redirect
1055 1063 org_query = self.request.GET.mixed()
1056 1064 raise HTTPFound(
1057 1065 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1058 1066 _query=org_query))
1059 1067
1060 1068 raise HTTPFound(
1061 1069 h.route_path('pullrequest_show', repo_name=target_repo,
1062 1070 pull_request_id=pull_request.pull_request_id))
1063 1071
1064 1072 @LoginRequired()
1065 1073 @NotAnonymous()
1066 1074 @HasRepoPermissionAnyDecorator(
1067 1075 'repository.read', 'repository.write', 'repository.admin')
1068 1076 @CSRFRequired()
1069 1077 @view_config(
1070 1078 route_name='pullrequest_update', request_method='POST',
1071 1079 renderer='json_ext')
1072 1080 def pull_request_update(self):
1073 1081 pull_request = PullRequest.get_or_404(
1074 1082 self.request.matchdict['pull_request_id'])
1075 1083 _ = self.request.translate
1076 1084
1077 1085 self.load_default_context()
1078 1086 redirect_url = None
1079 1087
1080 1088 if pull_request.is_closed():
1081 1089 log.debug('update: forbidden because pull request is closed')
1082 1090 msg = _(u'Cannot update closed pull requests.')
1083 1091 h.flash(msg, category='error')
1084 1092 return {'response': True,
1085 1093 'redirect_url': redirect_url}
1086 1094
1087 1095 is_state_changing = pull_request.is_state_changing()
1088 1096
1089 1097 # only owner or admin can update it
1090 1098 allowed_to_update = PullRequestModel().check_user_update(
1091 1099 pull_request, self._rhodecode_user)
1092 1100 if allowed_to_update:
1093 1101 controls = peppercorn.parse(self.request.POST.items())
1094 1102 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1095 1103
1096 1104 if 'review_members' in controls:
1097 1105 self._update_reviewers(
1098 1106 pull_request, controls['review_members'],
1099 1107 pull_request.reviewer_data)
1100 1108 elif str2bool(self.request.POST.get('update_commits', 'false')):
1101 1109 if is_state_changing:
1102 1110 log.debug('commits update: forbidden because pull request is in state %s',
1103 1111 pull_request.pull_request_state)
1104 1112 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1105 1113 u'Current state is: `{}`').format(
1106 1114 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1107 1115 h.flash(msg, category='error')
1108 1116 return {'response': True,
1109 1117 'redirect_url': redirect_url}
1110 1118
1111 1119 self._update_commits(pull_request)
1112 1120 if force_refresh:
1113 1121 redirect_url = h.route_path(
1114 1122 'pullrequest_show', repo_name=self.db_repo_name,
1115 1123 pull_request_id=pull_request.pull_request_id,
1116 1124 _query={"force_refresh": 1})
1117 1125 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1118 1126 self._edit_pull_request(pull_request)
1119 1127 else:
1120 1128 raise HTTPBadRequest()
1121 1129
1122 1130 return {'response': True,
1123 1131 'redirect_url': redirect_url}
1124 1132 raise HTTPForbidden()
1125 1133
1126 1134 def _edit_pull_request(self, pull_request):
1127 1135 _ = self.request.translate
1128 1136
1129 1137 try:
1130 1138 PullRequestModel().edit(
1131 1139 pull_request,
1132 1140 self.request.POST.get('title'),
1133 1141 self.request.POST.get('description'),
1134 1142 self.request.POST.get('description_renderer'),
1135 1143 self._rhodecode_user)
1136 1144 except ValueError:
1137 1145 msg = _(u'Cannot update closed pull requests.')
1138 1146 h.flash(msg, category='error')
1139 1147 return
1140 1148 else:
1141 1149 Session().commit()
1142 1150
1143 1151 msg = _(u'Pull request title & description updated.')
1144 1152 h.flash(msg, category='success')
1145 1153 return
1146 1154
1147 1155 def _update_commits(self, pull_request):
1148 1156 _ = self.request.translate
1149 1157
1150 1158 with pull_request.set_state(PullRequest.STATE_UPDATING):
1151 1159 resp = PullRequestModel().update_commits(
1152 1160 pull_request, self._rhodecode_db_user)
1153 1161
1154 1162 if resp.executed:
1155 1163
1156 1164 if resp.target_changed and resp.source_changed:
1157 1165 changed = 'target and source repositories'
1158 1166 elif resp.target_changed and not resp.source_changed:
1159 1167 changed = 'target repository'
1160 1168 elif not resp.target_changed and resp.source_changed:
1161 1169 changed = 'source repository'
1162 1170 else:
1163 1171 changed = 'nothing'
1164 1172
1165 1173 msg = _(u'Pull request updated to "{source_commit_id}" with '
1166 1174 u'{count_added} added, {count_removed} removed commits. '
1167 1175 u'Source of changes: {change_source}')
1168 1176 msg = msg.format(
1169 1177 source_commit_id=pull_request.source_ref_parts.commit_id,
1170 1178 count_added=len(resp.changes.added),
1171 1179 count_removed=len(resp.changes.removed),
1172 1180 change_source=changed)
1173 1181 h.flash(msg, category='success')
1174 1182
1175 1183 channel = '/repo${}$/pr/{}'.format(
1176 1184 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1177 1185 message = msg + (
1178 1186 ' - <a onclick="window.location.reload()">'
1179 1187 '<strong>{}</strong></a>'.format(_('Reload page')))
1180 1188 channelstream.post_message(
1181 1189 channel, message, self._rhodecode_user.username,
1182 1190 registry=self.request.registry)
1183 1191 else:
1184 1192 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1185 1193 warning_reasons = [
1186 1194 UpdateFailureReason.NO_CHANGE,
1187 1195 UpdateFailureReason.WRONG_REF_TYPE,
1188 1196 ]
1189 1197 category = 'warning' if resp.reason in warning_reasons else 'error'
1190 1198 h.flash(msg, category=category)
1191 1199
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_merge', request_method='POST',
        renderer='json_ext')
    def pull_request_merge(self):
        """
        Merge will perform a server-side merge of the specified
        pull request, if the pull request is approved and mergeable.
        After successful merging, the pull request is automatically
        closed, with a relevant comment.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        # refuse to merge while another state transition is in flight
        if pull_request.is_state_changing():
            log.debug('show: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot merge pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                      pull_request.pull_request_state)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('pullrequest_show',
                             repo_name=pull_request.target_repo.repo_name,
                             pull_request_id=pull_request.pull_request_id))

        self.load_default_context()

        # run the merge pre-conditions check under the UPDATING state
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            check = MergeCheck.validate(
                pull_request, auth_user=self._rhodecode_user,
                translator=self.request.translate)
        merge_possible = not check.failed

        # surface every individual check failure to the user
        for err_type, error_msg in check.errors:
            h.flash(error_msg, category=err_type)

        if merge_possible:
            log.debug("Pre-conditions checked, trying to merge.")
            # build the hook/extras context as if this was a push
            extras = vcs_operation_context(
                self.request.environ, repo_name=pull_request.target_repo.repo_name,
                username=self._rhodecode_db_user.username, action='push',
                scm=pull_request.target_repo.repo_type)
            # perform the actual merge (and close-on-success) under the
            # UPDATING state again
            with pull_request.set_state(PullRequest.STATE_UPDATING):
                self._merge_pull_request(
                    pull_request, self._rhodecode_db_user, extras)
        else:
            log.debug("Pre-conditions failed, NOT merging.")

        # always land back on the PR page; outcome was communicated via flash
        raise HTTPFound(
            h.route_path('pullrequest_show',
                         repo_name=pull_request.target_repo.repo_name,
                         pull_request_id=pull_request.pull_request_id))
1250 1258
1251 1259 def _merge_pull_request(self, pull_request, user, extras):
1252 1260 _ = self.request.translate
1253 1261 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1254 1262
1255 1263 if merge_resp.executed:
1256 1264 log.debug("The merge was successful, closing the pull request.")
1257 1265 PullRequestModel().close_pull_request(
1258 1266 pull_request.pull_request_id, user)
1259 1267 Session().commit()
1260 1268 msg = _('Pull request was successfully merged and closed.')
1261 1269 h.flash(msg, category='success')
1262 1270 else:
1263 1271 log.debug(
1264 1272 "The merge was not successful. Merge response: %s", merge_resp)
1265 1273 msg = merge_resp.merge_status_message
1266 1274 h.flash(msg, category='error')
1267 1275
1268 1276 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1269 1277 _ = self.request.translate
1270 1278
1271 1279 get_default_reviewers_data, validate_default_reviewers = \
1272 1280 PullRequestModel().get_reviewer_functions()
1273 1281
1274 1282 try:
1275 1283 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1276 1284 except ValueError as e:
1277 1285 log.error('Reviewers Validation: {}'.format(e))
1278 1286 h.flash(e, category='error')
1279 1287 return
1280 1288
1281 1289 old_calculated_status = pull_request.calculated_review_status()
1282 1290 PullRequestModel().update_reviewers(
1283 1291 pull_request, reviewers, self._rhodecode_user)
1284 1292 h.flash(_('Pull request reviewers updated.'), category='success')
1285 1293 Session().commit()
1286 1294
1287 1295 # trigger status changed if change in reviewers changes the status
1288 1296 calculated_status = pull_request.calculated_review_status()
1289 1297 if old_calculated_status != calculated_status:
1290 1298 PullRequestModel().trigger_pull_request_hook(
1291 1299 pull_request, self._rhodecode_user, 'review_status_change',
1292 1300 data={'status': calculated_status})
1293 1301
1294 1302 @LoginRequired()
1295 1303 @NotAnonymous()
1296 1304 @HasRepoPermissionAnyDecorator(
1297 1305 'repository.read', 'repository.write', 'repository.admin')
1298 1306 @CSRFRequired()
1299 1307 @view_config(
1300 1308 route_name='pullrequest_delete', request_method='POST',
1301 1309 renderer='json_ext')
1302 1310 def pull_request_delete(self):
1303 1311 _ = self.request.translate
1304 1312
1305 1313 pull_request = PullRequest.get_or_404(
1306 1314 self.request.matchdict['pull_request_id'])
1307 1315 self.load_default_context()
1308 1316
1309 1317 pr_closed = pull_request.is_closed()
1310 1318 allowed_to_delete = PullRequestModel().check_user_delete(
1311 1319 pull_request, self._rhodecode_user) and not pr_closed
1312 1320
1313 1321 # only owner can delete it !
1314 1322 if allowed_to_delete:
1315 1323 PullRequestModel().delete(pull_request, self._rhodecode_user)
1316 1324 Session().commit()
1317 1325 h.flash(_('Successfully deleted pull request'),
1318 1326 category='success')
1319 1327 raise HTTPFound(h.route_path('pullrequest_show_all',
1320 1328 repo_name=self.db_repo_name))
1321 1329
1322 1330 log.warning('user %s tried to delete pull request without access',
1323 1331 self._rhodecode_user)
1324 1332 raise HTTPNotFound()
1325 1333
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_create', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_create(self):
        """
        Create a comment on a pull request (POST view, JSON response).

        Handles three POST shapes driven by the submitted form fields:

        * ``close_pull_request`` set -> close the PR with a closing comment
          via ``close_pull_request_with_comment`` (which also triggers the
          ``review_status_change`` hook internally).
        * ``changeset_status`` set (and user may change status) -> regular
          comment that also records a review-status vote.
        * otherwise -> plain (possibly inline) comment.

        :return: dict with the comment payload plus ``rendered_text`` (HTML
            of the comment block template) and ``target_id`` for the UI.
        :raises HTTPForbidden: PR closed, user not allowed to comment, or
            user tried to close without update permission.
        """
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id

        # commenting on a closed PR is never allowed
        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        allowed_to_comment = PullRequestModel().check_user_comment(
            pull_request, self._rhodecode_user)
        if not allowed_to_comment:
            log.debug(
                'comment: forbidden because pull request is from forbidden repo')
            raise HTTPForbidden()

        c = self.load_default_context()

        # raw form fields; all optional except implicitly `text`
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
        close_pull_request = self.request.POST.get('close_pull_request')

        # the logic here should work like following, if we submit close
        # pr comment, use `close_pull_request_with_comment` function
        # else handle regular comment logic

        if close_pull_request:
            # only owner or admin or person with write permissions
            allowed_to_close = PullRequestModel().check_user_update(
                pull_request, self._rhodecode_user)
            if not allowed_to_close:
                log.debug('comment: forbidden because not allowed to close '
                          'pull request %s', pull_request_id)
                raise HTTPForbidden()

            # This also triggers `review_status_change`
            comment, status = PullRequestModel().close_pull_request_with_comment(
                pull_request, self._rhodecode_user, self.db_repo, message=text,
                auth_user=self._rhodecode_user)
            # flush (not commit) so the hook below sees the new comment;
            # the final commit happens once at the end of the view
            Session().flush()

            PullRequestModel().trigger_pull_request_hook(
                pull_request, self._rhodecode_user, 'comment',
                data={'comment': comment})

        else:
            # regular comment case, could be inline, or one with status.
            # for that one we check also permissions
            allowed_to_change_status = PullRequestModel().check_user_change_status(
                pull_request, self._rhodecode_user)

            if status and allowed_to_change_status:
                message = (_('Status change %(transition_icon)s %(status)s')
                           % {'transition_icon': '>',
                              'status': ChangesetStatus.get_status_lbl(status)})
                # status-change comments get a default message if none given
                text = text or message

            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_user.user_id,
                pull_request=pull_request,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                # only record a status change when one was requested AND the
                # user actually has permission to change it
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status and allowed_to_change_status else None),
                status_change_type=(status
                                    if status and allowed_to_change_status else None),
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            if allowed_to_change_status:
                # calculate old status before we change it
                old_calculated_status = pull_request.calculated_review_status()

                # get status if set !
                if status:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_user.user_id,
                        comment,
                        pull_request=pull_request
                    )

                Session().flush()
                # this is somehow required to get access to some relationship
                # loaded on comment
                Session().refresh(comment)

                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'comment',
                    data={'comment': comment})

                # we now calculate the status of pull request, and based on that
                # calculation we set the commits status
                calculated_status = pull_request.calculated_review_status()
                if old_calculated_status != calculated_status:
                    PullRequestModel().trigger_pull_request_hook(
                        pull_request, self._rhodecode_user, 'review_status_change',
                        data={'status': calculated_status})

        # single commit for whichever branch ran above
        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data
1459 1467
1460 1468 @LoginRequired()
1461 1469 @NotAnonymous()
1462 1470 @HasRepoPermissionAnyDecorator(
1463 1471 'repository.read', 'repository.write', 'repository.admin')
1464 1472 @CSRFRequired()
1465 1473 @view_config(
1466 1474 route_name='pullrequest_comment_delete', request_method='POST',
1467 1475 renderer='json_ext')
1468 1476 def pull_request_comment_delete(self):
1469 1477 pull_request = PullRequest.get_or_404(
1470 1478 self.request.matchdict['pull_request_id'])
1471 1479
1472 1480 comment = ChangesetComment.get_or_404(
1473 1481 self.request.matchdict['comment_id'])
1474 1482 comment_id = comment.comment_id
1475 1483
1476 1484 if comment.immutable:
1477 1485 # don't allow deleting comments that are immutable
1478 1486 raise HTTPForbidden()
1479 1487
1480 1488 if pull_request.is_closed():
1481 1489 log.debug('comment: forbidden because pull request is closed')
1482 1490 raise HTTPForbidden()
1483 1491
1484 1492 if not comment:
1485 1493 log.debug('Comment with id:%s not found, skipping', comment_id)
1486 1494 # comment already deleted in another call probably
1487 1495 return True
1488 1496
1489 1497 if comment.pull_request.is_closed():
1490 1498 # don't allow deleting comments on closed pull request
1491 1499 raise HTTPForbidden()
1492 1500
1493 1501 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1494 1502 super_admin = h.HasPermissionAny('hg.admin')()
1495 1503 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1496 1504 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1497 1505 comment_repo_admin = is_repo_admin and is_repo_comment
1498 1506
1499 1507 if super_admin or comment_owner or comment_repo_admin:
1500 1508 old_calculated_status = comment.pull_request.calculated_review_status()
1501 1509 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1502 1510 Session().commit()
1503 1511 calculated_status = comment.pull_request.calculated_review_status()
1504 1512 if old_calculated_status != calculated_status:
1505 1513 PullRequestModel().trigger_pull_request_hook(
1506 1514 comment.pull_request, self._rhodecode_user, 'review_status_change',
1507 1515 data={'status': calculated_status})
1508 1516 return True
1509 1517 else:
1510 1518 log.warning('No permissions for user %s to delete comment_id: %s',
1511 1519 self._rhodecode_db_user, comment_id)
1512 1520 raise HTTPNotFound()
@@ -1,61 +1,73 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24
25 25 from rhodecode.apps._base import RepoAppView
26 26 from rhodecode.apps.repository.utils import get_default_reviewers_data
27 27 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
28 28 from rhodecode.model.db import Repository
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
class RepoReviewRulesView(RepoAppView):
    """Views for the per-repository review rules admin page and the JSON
    endpoint that computes default reviewers for a prospective pull request."""

    def load_default_context(self):
        # minimal template context; no extra data needed for these views
        c = self._get_local_tmpl_context()
        return c

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.admin')
    @view_config(
        route_name='repo_reviewers', request_method='GET',
        renderer='rhodecode:templates/admin/repos/repo_edit.mako')
    def repo_review_rules(self):
        """Render the 'reviewers' tab of the repo admin settings page."""
        c = self.load_default_context()
        c.active = 'reviewers'

        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_default_reviewers_data', request_method='GET',
        renderer='json_ext')
    def repo_default_reviewers_data(self):
        """
        Return default-reviewer data (JSON) for a source/target ref pair.

        GET params: ``target_repo`` (optional, defaults to this repo),
        ``source_ref`` and ``target_ref`` (both required — a missing one
        raises KeyError, surfacing as a server error; NOTE(review): confirm
        callers always send both).
        """
        self.load_default_context()

        request = self.request
        source_repo = self.db_repo
        source_repo_name = source_repo.repo_name
        # same-repo PRs are the default when no explicit target is given
        target_repo_name = request.GET.get('target_repo', source_repo_name)
        target_repo = Repository.get_by_repo_name(target_repo_name)

        source_ref = request.GET['source_ref']
        target_ref = request.GET['target_ref']
        # resolve refs to commit objects so reviewer rules can inspect them
        source_commit = source_repo.get_commit(source_ref)
        target_commit = target_repo.get_commit(target_ref)

        current_user = request.user.get_instance()
        review_data = get_default_reviewers_data(
            current_user, source_repo, source_commit, target_repo, target_commit)

        return review_data
@@ -1,1029 +1,1034 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else self.get_default_config()
66 66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 67
68 68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def _remote(self):
75 75 repo_id = self.path
76 76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
78 78 @LazyProperty
79 79 def bare(self):
80 80 return self._remote.bare()
81 81
82 82 @LazyProperty
83 83 def head(self):
84 84 return self._remote.head()
85 85
86 86 @CachedProperty
87 87 def commit_ids(self):
88 88 """
89 89 Returns list of commit ids, in ascending order. Being lazy
90 90 attribute allows external tools to inject commit ids from cache.
91 91 """
92 92 commit_ids = self._get_all_commit_ids()
93 93 self._rebuild_cache(commit_ids)
94 94 return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
100 100 def run_git_command(self, cmd, **opts):
101 101 """
102 102 Runs given ``cmd`` as git command and returns tuple
103 103 (stdout, stderr).
104 104
105 105 :param cmd: git command to be executed
106 106 :param opts: env options to pass into Subprocess command
107 107 """
108 108 if not isinstance(cmd, list):
109 109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 110
111 111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 112 out, err = self._remote.run_git_command(cmd, **opts)
113 113 if err and not skip_stderr_log:
114 114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 115 return out, err
116 116
117 117 @staticmethod
118 118 def check_url(url, config):
119 119 """
120 120 Function will check given url and try to verify if it's a valid
121 121 link. Sometimes it may happened that git will issue basic
122 122 auth request that can cause whole API to hang when used from python
123 123 or other external calls.
124 124
125 125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 126 when the return code is non 200
127 127 """
128 128 # check first if it's not an url
129 129 if os.path.isdir(url) or url.startswith('file:'):
130 130 return True
131 131
132 132 if '+' in url.split('://', 1)[0]:
133 133 url = url.split('+', 1)[1]
134 134
135 135 # Request the _remote to verify the url
136 136 return connection.Git.check_url(url, config.serialize())
137 137
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
150 150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 151 bare=False):
152 152 if create and os.path.exists(self.path):
153 153 raise RepositoryError(
154 154 "Cannot create repository at %s, location already exist"
155 155 % self.path)
156 156
157 157 if bare and do_workspace_checkout:
158 158 raise RepositoryError("Cannot update a bare repository")
159 159 try:
160 160
161 161 if src_url:
162 162 # check URL before any actions
163 163 GitRepository.check_url(src_url, self.config)
164 164
165 165 if create:
166 166 os.makedirs(self.path, mode=0o755)
167 167
168 168 if bare:
169 169 self._remote.init_bare()
170 170 else:
171 171 self._remote.init()
172 172
173 173 if src_url and bare:
174 174 # bare repository only allows a fetch and checkout is not allowed
175 175 self.fetch(src_url, commit_ids=None)
176 176 elif src_url:
177 177 self.pull(src_url, commit_ids=None,
178 178 update_after=do_workspace_checkout)
179 179
180 180 else:
181 181 if not self._remote.assert_correct_path():
182 182 raise RepositoryError(
183 183 'Path "%s" does not contain a Git repository' %
184 184 (self.path,))
185 185
186 186 # TODO: johbo: check if we have to translate the OSError here
187 187 except OSError as err:
188 188 raise RepositoryError(err)
189 189
190 190 def _get_all_commit_ids(self):
191 191 return self._remote.get_all_commit_ids()
192 192
193 193 def _get_commit_ids(self, filters=None):
194 194 # we must check if this repo is not empty, since later command
195 195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 196 # errors
197 197
198 198 head = self._remote.head(show_exc=False)
199 199
200 200 if not head:
201 201 return []
202 202
203 203 rev_filter = ['--branches', '--tags']
204 204 extra_filter = []
205 205
206 206 if filters:
207 207 if filters.get('since'):
208 208 extra_filter.append('--since=%s' % (filters['since']))
209 209 if filters.get('until'):
210 210 extra_filter.append('--until=%s' % (filters['until']))
211 211 if filters.get('branch_name'):
212 212 rev_filter = []
213 213 extra_filter.append(filters['branch_name'])
214 214 rev_filter.extend(extra_filter)
215 215
216 216 # if filters.get('start') or filters.get('end'):
217 217 # # skip is offset, max-count is limit
218 218 # if filters.get('start'):
219 219 # extra_filter += ' --skip=%s' % filters['start']
220 220 # if filters.get('end'):
221 221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 222
223 223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 224 try:
225 225 output, __ = self.run_git_command(cmd)
226 226 except RepositoryError:
227 227 # Can be raised for empty repositories
228 228 return []
229 229 return output.splitlines()
230 230
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
        """
        Resolve a commit reference (full/short sha, numeric index, tag/ref
        name, or a 'tip'-style alias) to a full commit id string.

        :param commit_id_or_idx: sha string, numeric index (int or digit
            string shorter than 12 chars), ref name, or one of
            ``None``/``''``/``'tip'``/``'HEAD'``/``'head'``/``-1`` meaning
            the latest commit.
        :param maybe_unreachable: passed through to the remote lookup to
            also resolve commits not reachable from any ref.
        :raises CommitDoesNotExistError: when the reference cannot be
            resolved or the result is not a recognizable sha.
        """
        def is_null(value):
            # true when `value` is entirely zeros of the same length
            # (the git "null sha" convention)
            return len(value) == commit_id_or_idx.count('0')

        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        # NOTE: `unicode` — this module is Python 2 code
        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        # short digit strings (< 12 chars) are treated as indexes, not shas;
        # 12+ hex chars are assumed to be an (abbreviated) sha
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                raise CommitDoesNotExistError(commit_missing_err)

        elif is_bstr:
            # Need to call remote to translate id for tagging scenario
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
            except (CommitDoesNotExistError,):
                raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
263 263
264 264 def get_hook_location(self):
265 265 """
266 266 returns absolute path to location where hooks are stored
267 267 """
268 268 loc = os.path.join(self.path, 'hooks')
269 269 if not self.bare:
270 270 loc = os.path.join(self.path, '.git', 'hooks')
271 271 return loc
272 272
273 273 @LazyProperty
274 274 def last_change(self):
275 275 """
276 276 Returns last change made on this repository as
277 277 `datetime.datetime` object.
278 278 """
279 279 try:
280 280 return self.get_commit().date
281 281 except RepositoryError:
282 282 tzoffset = makedate()[1]
283 283 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
284 284
285 285 def _get_fs_mtime(self):
286 286 idx_loc = '' if self.bare else '.git'
287 287 # fallback to filesystem
288 288 in_path = os.path.join(self.path, idx_loc, "index")
289 289 he_path = os.path.join(self.path, idx_loc, "HEAD")
290 290 if os.path.exists(in_path):
291 291 return os.stat(in_path).st_mtime
292 292 else:
293 293 return os.stat(he_path).st_mtime
294 294
295 295 @LazyProperty
296 296 def description(self):
297 297 description = self._remote.get_description()
298 298 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
299 299
300 300 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
301 301 if self.is_empty():
302 302 return OrderedDict()
303 303
304 304 result = []
305 305 for ref, sha in self._refs.iteritems():
306 306 if ref.startswith(prefix):
307 307 ref_name = ref
308 308 if strip_prefix:
309 309 ref_name = ref[len(prefix):]
310 310 result.append((safe_unicode(ref_name), sha))
311 311
312 312 def get_name(entry):
313 313 return entry[0]
314 314
315 315 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
316 316
317 317 def _get_branches(self):
318 318 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
319 319
320 320 @CachedProperty
321 321 def branches(self):
322 322 return self._get_branches()
323 323
324 324 @CachedProperty
325 325 def branches_closed(self):
326 326 return {}
327 327
328 328 @CachedProperty
329 329 def bookmarks(self):
330 330 return {}
331 331
332 332 @CachedProperty
333 333 def branches_all(self):
334 334 all_branches = {}
335 335 all_branches.update(self.branches)
336 336 all_branches.update(self.branches_closed)
337 337 return all_branches
338 338
339 339 @CachedProperty
340 340 def tags(self):
341 341 return self._get_tags()
342 342
343 343 def _get_tags(self):
344 344 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
345 345
346 346 def tag(self, name, user, commit_id=None, message=None, date=None,
347 347 **kwargs):
348 348 # TODO: fix this method to apply annotated tags correct with message
349 349 """
350 350 Creates and returns a tag for the given ``commit_id``.
351 351
352 352 :param name: name for new tag
353 353 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
354 354 :param commit_id: commit id for which new tag would be created
355 355 :param message: message of the tag's commit
356 356 :param date: date of tag's commit
357 357
358 358 :raises TagAlreadyExistError: if tag with same name already exists
359 359 """
360 360 if name in self.tags:
361 361 raise TagAlreadyExistError("Tag %s already exists" % name)
362 362 commit = self.get_commit(commit_id=commit_id)
363 363 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
364 364
365 365 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
366 366
367 367 self._invalidate_prop_cache('tags')
368 368 self._invalidate_prop_cache('_refs')
369 369
370 370 return commit
371 371
372 372 def remove_tag(self, name, user, message=None, date=None):
373 373 """
374 374 Removes tag with the given ``name``.
375 375
376 376 :param name: name of the tag to be removed
377 377 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
378 378 :param message: message of the tag's removal commit
379 379 :param date: date of tag's removal commit
380 380
381 381 :raises TagDoesNotExistError: if tag with given name does not exists
382 382 """
383 383 if name not in self.tags:
384 384 raise TagDoesNotExistError("Tag %s does not exist" % name)
385 385
386 386 self._remote.tag_remove(name)
387 387 self._invalidate_prop_cache('tags')
388 388 self._invalidate_prop_cache('_refs')
389 389
390 390 def _get_refs(self):
391 391 return self._remote.get_refs()
392 392
393 393 @CachedProperty
394 394 def _refs(self):
395 395 return self._get_refs()
396 396
397 397 @property
398 398 def _ref_tree(self):
399 399 node = tree = {}
400 400 for ref, sha in self._refs.iteritems():
401 401 path = ref.split('/')
402 402 for bit in path[:-1]:
403 403 node = node.setdefault(bit, {})
404 404 node[path[-1]] = sha
405 405 node = tree
406 406 return tree
407 407
408 408 def get_remote_ref(self, ref_name):
409 409 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
410 410 try:
411 411 return self._refs[ref_key]
412 412 except Exception:
413 413 return
414 414
415 415 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
416 416 translate_tag=True, maybe_unreachable=False):
417 417 """
418 418 Returns `GitCommit` object representing commit from git repository
419 419 at the given `commit_id` or head (most recent commit) if None given.
420 420 """
421 421 if self.is_empty():
422 422 raise EmptyRepositoryError("There are no commits yet")
423 423
424 424 if commit_id is not None:
425 425 self._validate_commit_id(commit_id)
426 426 try:
427 427 # we have cached idx, use it without contacting the remote
428 428 idx = self._commit_ids[commit_id]
429 429 return GitCommit(self, commit_id, idx, pre_load=pre_load)
430 430 except KeyError:
431 431 pass
432 432
433 433 elif commit_idx is not None:
434 434 self._validate_commit_idx(commit_idx)
435 435 try:
436 436 _commit_id = self.commit_ids[commit_idx]
437 437 if commit_idx < 0:
438 438 commit_idx = self.commit_ids.index(_commit_id)
439 439 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
440 440 except IndexError:
441 441 commit_id = commit_idx
442 442 else:
443 443 commit_id = "tip"
444 444
445 445 if translate_tag:
446 446 commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)
447 447
448 448 try:
449 449 idx = self._commit_ids[commit_id]
450 450 except KeyError:
451 451 idx = -1
452 452
453 453 return GitCommit(self, commit_id, idx, pre_load=pre_load)
454 454
455 455 def get_commits(
456 456 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
458 458 """
459 459 Returns generator of `GitCommit` objects from start to end (both
460 460 are inclusive), in ascending date order.
461 461
462 462 :param start_id: None, str(commit_id)
463 463 :param end_id: None, str(commit_id)
464 464 :param start_date: if specified, commits with commit date less than
465 465 ``start_date`` would be filtered out from returned set
466 466 :param end_date: if specified, commits with commit date greater than
467 467 ``end_date`` would be filtered out from returned set
468 468 :param branch_name: if specified, commits not reachable from given
469 469 branch would be filtered out from returned set
470 470 :param show_hidden: Show hidden commits such as obsolete or hidden from
471 471 Mercurial evolve
472 472 :raise BranchDoesNotExistError: If given `branch_name` does not
473 473 exist.
474 474 :raise CommitDoesNotExistError: If commits for given `start` or
475 475 `end` could not be found.
476 476
477 477 """
478 478 if self.is_empty():
479 479 raise EmptyRepositoryError("There are no commits yet")
480 480
481 481 self._validate_branch_name(branch_name)
482 482
483 483 if start_id is not None:
484 484 self._validate_commit_id(start_id)
485 485 if end_id is not None:
486 486 self._validate_commit_id(end_id)
487 487
488 488 start_raw_id = self._lookup_commit(start_id)
489 489 start_pos = self._commit_ids[start_raw_id] if start_id else None
490 490 end_raw_id = self._lookup_commit(end_id)
491 491 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
492 492
493 493 if None not in [start_id, end_id] and start_pos > end_pos:
494 494 raise RepositoryError(
495 495 "Start commit '%s' cannot be after end commit '%s'" %
496 496 (start_id, end_id))
497 497
498 498 if end_pos is not None:
499 499 end_pos += 1
500 500
501 501 filter_ = []
502 502 if branch_name:
503 503 filter_.append({'branch_name': branch_name})
504 504 if start_date and not end_date:
505 505 filter_.append({'since': start_date})
506 506 if end_date and not start_date:
507 507 filter_.append({'until': end_date})
508 508 if start_date and end_date:
509 509 filter_.append({'since': start_date})
510 510 filter_.append({'until': end_date})
511 511
512 512 # if start_pos or end_pos:
513 513 # filter_.append({'start': start_pos})
514 514 # filter_.append({'end': end_pos})
515 515
516 516 if filter_:
517 517 revfilters = {
518 518 'branch_name': branch_name,
519 519 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
520 520 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
521 521 'start': start_pos,
522 522 'end': end_pos,
523 523 }
524 524 commit_ids = self._get_commit_ids(filters=revfilters)
525 525
526 526 else:
527 527 commit_ids = self.commit_ids
528 528
529 529 if start_pos or end_pos:
530 530 commit_ids = commit_ids[start_pos: end_pos]
531 531
532 532 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
533 533 translate_tag=translate_tags)
534 534
535 535 def get_diff(
536 536 self, commit1, commit2, path='', ignore_whitespace=False,
537 537 context=3, path1=None):
538 538 """
539 539 Returns (git like) *diff*, as plain text. Shows changes introduced by
540 540 ``commit2`` since ``commit1``.
541 541
542 542 :param commit1: Entry point from which diff is shown. Can be
543 543 ``self.EMPTY_COMMIT`` - in this case, patch showing all
544 544 the changes since empty state of the repository until ``commit2``
545 545 :param commit2: Until which commits changes should be shown.
546 546 :param ignore_whitespace: If set to ``True``, would not show whitespace
547 547 changes. Defaults to ``False``.
548 548 :param context: How many lines before/after changed lines should be
549 549 shown. Defaults to ``3``.
550 550 """
551 551 self._validate_diff_commits(commit1, commit2)
552 552 if path1 is not None and path1 != path:
553 553 raise ValueError("Diff of two different paths not supported.")
554 554
555 555 if path:
556 556 file_filter = path
557 557 else:
558 558 file_filter = None
559 559
560 560 diff = self._remote.diff(
561 561 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
562 562 opt_ignorews=ignore_whitespace,
563 563 context=context)
564 564 return GitDiff(diff)
565 565
566 566 def strip(self, commit_id, branch_name):
567 567 commit = self.get_commit(commit_id=commit_id)
568 568 if commit.merge:
569 569 raise Exception('Cannot reset to merge commit')
570 570
571 571 # parent is going to be the new head now
572 572 commit = commit.parents[0]
573 573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574 574
575 575 # clear cached properties
576 576 self._invalidate_prop_cache('commit_ids')
577 577 self._invalidate_prop_cache('_refs')
578 578 self._invalidate_prop_cache('branches')
579 579
580 580 return len(self.commit_ids)
581 581
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the common-ancestor commit id of ``commit_id1`` (in this
        repo) and ``commit_id2`` (in ``repo2``), or ``None`` when the
        cross-repo heuristic finds a rootless first missing commit.

        Same-repo case delegates to ``git merge-base``; cross-repo case
        approximates the ancestor as the first parent of the oldest commit
        missing from ``repo2`` (full merge-base across two object stores is
        not directly available here).
        """
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        # identical ids are trivially their own ancestor
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # commits present between the two ids but missing in repo2,
            # oldest last — presumably ordered newest-first; TODO confirm
            # against get_missing_revs semantics
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    # root commit with no parents: no ancestor exists
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repository: git can compute the merge base exactly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        log.debug('Found common ancestor with sha: %s', ancestor_id)

        return ancestor_id
604 609
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits reachable from ``commit_id2`` but not
        from ``commit_id1``, in ascending (oldest-first) order.

        :param repo2: repository holding ``commit_id2``; may be this repo.
        :param merge: accepted for interface compatibility; not used here.
        :param pre_load: attribute names to eagerly load on each commit.
        """
        repo1 = self
        ancestor_id = None  # NOTE(review): assigned but never used here

        if commit_id1 == commit_id2:
            # identical endpoints: nothing in between
            commits = []
        elif repo1 != repo2:
            # cross-repo: ask the remote which revs repo2 is missing,
            # then materialize them from repo2 (reversed -> oldest first)
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repo: plain `git log commit_id1..commit_id2`
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits
626 631
627 632 @LazyProperty
628 633 def in_memory_commit(self):
629 634 """
630 635 Returns ``GitInMemoryCommit`` object for this repository.
631 636 """
632 637 return GitInMemoryCommit(self)
633 638
634 639 def pull(self, url, commit_ids=None, update_after=False):
635 640 """
636 641 Pull changes from external location. Pull is different in GIT
637 642 that fetch since it's doing a checkout
638 643
639 644 :param commit_ids: Optional. Can be set to a list of commit ids
640 645 which shall be pulled from the other repository.
641 646 """
642 647 refs = None
643 648 if commit_ids is not None:
644 649 remote_refs = self._remote.get_remote_refs(url)
645 650 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
646 651 self._remote.pull(url, refs=refs, update_after=update_after)
647 652 self._remote.invalidate_vcs_cache()
648 653
649 654 def fetch(self, url, commit_ids=None):
650 655 """
651 656 Fetch all git objects from external location.
652 657 """
653 658 self._remote.sync_fetch(url, refs=commit_ids)
654 659 self._remote.invalidate_vcs_cache()
655 660
656 661 def push(self, url):
657 662 refs = None
658 663 self._remote.sync_push(url, refs=refs)
659 664
660 665 def set_refs(self, ref_name, commit_id):
661 666 self._remote.set_refs(ref_name, commit_id)
662 667 self._invalidate_prop_cache('_refs')
663 668
664 669 def remove_ref(self, ref_name):
665 670 self._remote.remove_ref(ref_name)
666 671 self._invalidate_prop_cache('_refs')
667 672
668 673 def run_gc(self, prune=True):
669 674 cmd = ['gc', '--aggressive']
670 675 if prune:
671 676 cmd += ['--prune=now']
672 677 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
673 678 return stderr
674 679
675 680 def _update_server_info(self):
676 681 """
677 682 runs gits update-server-info command in this repo instance
678 683 """
679 684 self._remote.update_server_info()
680 685
681 686 def _current_branch(self):
682 687 """
683 688 Return the name of the current branch.
684 689
685 690 It only works for non bare repositories (i.e. repositories with a
686 691 working copy)
687 692 """
688 693 if self.bare:
689 694 raise RepositoryError('Bare git repos do not have active branches')
690 695
691 696 if self.is_empty():
692 697 return None
693 698
694 699 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
695 700 return stdout.strip()
696 701
697 702 def _checkout(self, branch_name, create=False, force=False):
698 703 """
699 704 Checkout a branch in the working directory.
700 705
701 706 It tries to create the branch if create is True, failing if the branch
702 707 already exists.
703 708
704 709 It only works for non bare repositories (i.e. repositories with a
705 710 working copy)
706 711 """
707 712 if self.bare:
708 713 raise RepositoryError('Cannot checkout branches in a bare git repo')
709 714
710 715 cmd = ['checkout']
711 716 if force:
712 717 cmd.append('-f')
713 718 if create:
714 719 cmd.append('-b')
715 720 cmd.append(branch_name)
716 721 self.run_git_command(cmd, fail_on_stderr=False)
717 722
718 723 def _create_branch(self, branch_name, commit_id):
719 724 """
720 725 creates a branch in a GIT repo
721 726 """
722 727 self._remote.create_branch(branch_name, commit_id)
723 728
724 729 def _identify(self):
725 730 """
726 731 Return the current state of the working directory.
727 732 """
728 733 if self.bare:
729 734 raise RepositoryError('Bare git repos do not have active branches')
730 735
731 736 if self.is_empty():
732 737 return None
733 738
734 739 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
735 740 return stdout.strip()
736 741
737 742 def _local_clone(self, clone_path, branch_name, source_branch=None):
738 743 """
739 744 Create a local clone of the current repo.
740 745 """
741 746 # N.B.(skreft): the --branch option is required as otherwise the shallow
742 747 # clone will only fetch the active branch.
743 748 cmd = ['clone', '--branch', branch_name,
744 749 self.path, os.path.abspath(clone_path)]
745 750
746 751 self.run_git_command(cmd, fail_on_stderr=False)
747 752
748 753 # if we get the different source branch, make sure we also fetch it for
749 754 # merge conditions
750 755 if source_branch and source_branch != branch_name:
751 756 # check if the ref exists.
752 757 shadow_repo = GitRepository(os.path.abspath(clone_path))
753 758 if shadow_repo.get_remote_ref(source_branch):
754 759 cmd = ['fetch', self.path, source_branch]
755 760 self.run_git_command(cmd, fail_on_stderr=False)
756 761
757 762 def _local_fetch(self, repository_path, branch_name, use_origin=False):
758 763 """
759 764 Fetch a branch from a local repository.
760 765 """
761 766 repository_path = os.path.abspath(repository_path)
762 767 if repository_path == self.path:
763 768 raise ValueError('Cannot fetch from the same repository')
764 769
765 770 if use_origin:
766 771 branch_name = '+{branch}:refs/heads/{branch}'.format(
767 772 branch=branch_name)
768 773
769 774 cmd = ['fetch', '--no-tags', '--update-head-ok',
770 775 repository_path, branch_name]
771 776 self.run_git_command(cmd, fail_on_stderr=False)
772 777
773 778 def _local_reset(self, branch_name):
774 779 branch_name = '{}'.format(branch_name)
775 780 cmd = ['reset', '--hard', branch_name, '--']
776 781 self.run_git_command(cmd, fail_on_stderr=False)
777 782
778 783 def _last_fetch_heads(self):
779 784 """
780 785 Return the last fetched heads that need merging.
781 786
782 787 The algorithm is defined at
783 788 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
784 789 """
785 790 if not self.bare:
786 791 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
787 792 else:
788 793 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
789 794
790 795 heads = []
791 796 with open(fetch_heads_path) as f:
792 797 for line in f:
793 798 if ' not-for-merge ' in line:
794 799 continue
795 800 line = re.sub('\t.*', '', line, flags=re.DOTALL)
796 801 heads.append(line)
797 802
798 803 return heads
799 804
800 805 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
801 806 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
802 807
803 808 def _local_pull(self, repository_path, branch_name, ff_only=True):
804 809 """
805 810 Pull a branch from a local repository.
806 811 """
807 812 if self.bare:
808 813 raise RepositoryError('Cannot pull into a bare git repository')
809 814 # N.B.(skreft): The --ff-only option is to make sure this is a
810 815 # fast-forward (i.e., we are only pulling new changes and there are no
811 816 # conflicts with our current branch)
812 817 # Additionally, that option needs to go before --no-tags, otherwise git
813 818 # pull complains about it being an unknown flag.
814 819 cmd = ['pull']
815 820 if ff_only:
816 821 cmd.append('--ff-only')
817 822 cmd.extend(['--no-tags', repository_path, branch_name])
818 823 self.run_git_command(cmd, fail_on_stderr=False)
819 824
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not possible
        to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param user_name: author name recorded on the merge commit.
        :param user_email: author email recorded on the merge commit.
        :param heads: the heads to merge.
        :raises RepositoryError: bare/empty repo, or a non-conflict failure.
        :raises UnresolvedFilesInRepo: merge produced conflicted files.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        # Nothing to merge: silently succeed.
        if not heads:
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # commit message. We also specify the user who is doing the merge.
        cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
               '-c', 'user.email=%s' % safe_str(user_email),
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            # Collect conflicted paths ('U' = unmerged) before aborting,
            # since `merge --abort` wipes the conflict state.
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add U notation for consistent with HG backend output
            unresolved = ['U {}'.format(f) for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                # Failure was not a conflict; propagate the original error.
                raise
867 872
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # NOTE(review): presumably consumed by RhodeCode's git hook
                # scripts to skip execution — verify against the hooks code.
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
899 904
900 905 def _get_new_pr_branch(self, source_branch, target_branch):
901 906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
902 907 pr_branches = []
903 908 for branch in self.branches:
904 909 if branch.startswith(prefix):
905 910 pr_branches.append(int(branch[len(prefix):]))
906 911
907 912 if not pr_branches:
908 913 branch_id = 0
909 914 else:
910 915 branch_id = max(pr_branches) + 1
911 916
912 917 return '%s%d' % (prefix, branch_id)
913 918
914 919 def _maybe_prepare_merge_workspace(
915 920 self, repo_id, workspace_id, target_ref, source_ref):
916 921 shadow_repository_path = self._get_shadow_repository_path(
917 922 self.path, repo_id, workspace_id)
918 923 if not os.path.exists(shadow_repository_path):
919 924 self._local_clone(
920 925 shadow_repository_path, target_ref.name, source_ref.name)
921 926 log.debug('Prepared %s shadow repository in %s',
922 927 self.alias, shadow_repository_path)
923 928
924 929 return shadow_repository_path
925 930
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Test-merge ``source_ref`` of ``source_repo`` into ``target_ref`` of
        this repository inside a shadow clone; push the result back unless
        ``dry_run`` is set. Returns a ``MergeResponse``.

        NOTE(review): ``use_rebase`` and ``close_branch`` only affect logging
        here — this git implementation always merges with --no-ff. Verify
        whether callers expect a real rebase strategy.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # Bail out when the caller's view of the target head is stale.
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                # Push the merged pr-branch back into the real repository,
                # running the target repo hooks this time.
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
@@ -1,979 +1,986 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param with_wire: options passed to the vcsserver connection wire
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        # May clone or create the on-disk repository as a side effect.
        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only not closed active branches by default
127 127
128 128 :param active: return also active branches
129 129 :param closed: return also closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        # 'local' may be passed via kwargs; defaults to a regular (shared) tag.
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        # Normalize the date into (timestamp, tz-offset) as hg expects.
        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193 193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # Re-tagging a name to the null revision removes the tag in hg.
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
302
300 303 if commit_id1 == commit_id2:
301 304 return commit_id1
302 305
303 306 ancestors = self._remote.revs_from_revspec(
304 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
305 308 other_path=repo2.path)
306 return repo2[ancestors[0]].raw_id if ancestors else None
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
307 314
308 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
309 316 if commit_id1 == commit_id2:
310 317 commits = []
311 318 else:
312 319 if merge:
313 320 indexes = self._remote.revs_from_revspec(
314 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
315 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
316 323 else:
317 324 indexes = self._remote.revs_from_revspec(
318 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
319 326 commit_id1, other_path=repo2.path)
320 327
321 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
322 329 for idx in indexes]
323 330
324 331 return commits
325 332
326 333 @staticmethod
327 334 def check_url(url, config):
328 335 """
329 336 Function will check given url and try to verify if it's a valid
330 337 link. Sometimes it may happened that mercurial will issue basic
331 338 auth request that can cause whole API to hang when used from python
332 339 or other external calls.
333 340
334 341 On failures it'll raise urllib2.HTTPError, exception is also thrown
335 342 when the return code is non 200
336 343 """
337 344 # check first if it's not an local url
338 345 if os.path.isdir(url) or url.startswith('file:'):
339 346 return True
340 347
341 348 # Request the _remote to verify the url
342 349 return connection.Hg.check_url(url, config.serialize())
343 350
344 351 @staticmethod
345 352 def is_valid_repository(path):
346 353 return os.path.isdir(os.path.join(path, '.hg'))
347 354
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        # Creating on top of an existing path is refused explicitly.
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            # Validate before cloning to avoid hanging on auth prompts.
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
376 383
377 384 @LazyProperty
378 385 def in_memory_commit(self):
379 386 return MercurialInMemoryCommit(self)
380 387
381 388 @LazyProperty
382 389 def description(self):
383 390 description = self._remote.get_config_value(
384 391 'web', 'description', untrusted=True)
385 392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
386 393
387 394 @LazyProperty
388 395 def contact(self):
389 396 contact = (
390 397 self._remote.get_config_value("web", "contact") or
391 398 self._remote.get_config_value("ui", "username"))
392 399 return safe_unicode(contact or self.DEFAULT_CONTACT)
393 400
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # Empty repository: fall back to on-disk mtime of hg metadata.
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
405 412
406 413 def _get_fs_mtime(self):
407 414 # fallback to filesystem
408 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
409 416 st_path = os.path.join(self.path, '.hg', "store")
410 417 if os.path.exists(cl_path):
411 418 return os.stat(cl_path).st_mtime
412 419 else:
413 420 return os.stat(st_path).st_mtime
414 421
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE(review): Python-2 style — the url is encoded to bytes before
        # inspection, and ``urllib.pathname2url`` moved to
        # ``urllib.request`` in Python 3; do not modernize in isolation.
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
425 432
426 433 def get_hook_location(self):
427 434 """
428 435 returns absolute path to location where hooks are stored
429 436 """
430 437 return os.path.join(self.path, '.hg', '.hgrc')
431 438
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash (or, after the fallbacks below, any
            identifier mercurial's `lookup` understands)
        :param commit_idx: numeric index into the commit list; may be
            negative, in which case it is normalized to a positive index
        :raises EmptyRepositoryError: if the repository has no commits
        :raises CommitDoesNotExistError: if neither id nor index resolves
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # fall through: let the remote try to resolve the raw value
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # py2 codebase: mercurial expects byte strings
        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
474 481
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # range end is inclusive, python slicing is exclusive
            end_pos += 1

        # build a mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # filtered queries return numeric indexes -> use the
            # index-based generator
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
557 564
558 565 def pull(self, url, commit_ids=None):
559 566 """
560 567 Pull changes from external location.
561 568
562 569 :param commit_ids: Optional. Can be set to a list of commit ids
563 570 which shall be pulled from the other repository.
564 571 """
565 572 url = self._get_url(url)
566 573 self._remote.pull(url, commit_ids=commit_ids)
567 574 self._remote.invalidate_vcs_cache()
568 575
    def fetch(self, url, commit_ids=None):
        """
        Alias for :meth:`pull`, kept for backward compatibility with the
        GIT backend where ``fetch`` and ``pull`` are distinct operations.
        """
        return self.pull(url, commit_ids=commit_ids)
574 581
575 582 def push(self, url):
576 583 url = self._get_url(url)
577 584 self._remote.sync_push(url)
578 585
579 586 def _local_clone(self, clone_path):
580 587 """
581 588 Create a local clone of the current repo.
582 589 """
583 590 self._remote.clone(self.path, clone_path, update_after_clone=True,
584 591 hooks=False)
585 592
586 593 def _update(self, revision, clean=False):
587 594 """
588 595 Update the working copy to the specified revision.
589 596 """
590 597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
591 598 self._remote.update(revision, clean=clean)
592 599
593 600 def _identify(self):
594 601 """
595 602 Return the current state of the working directory.
596 603 """
597 604 return self._remote.identify().strip().rstrip('+')
598 605
599 606 def _heads(self, branch=None):
600 607 """
601 608 Return the commit ids of the repository heads.
602 609 """
603 610 return self._remote.heads(branch=branch).strip().split(' ')
604 611
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions, as reported by
        the mercurial backend.
        """
        return self._remote.ancestor(revision1, revision2)
610 617
611 618 def _local_push(
612 619 self, revision, repository_path, push_branches=False,
613 620 enable_hooks=False):
614 621 """
615 622 Push the given revision to the specified repository.
616 623
617 624 :param push_branches: allow to create branches in the target repo.
618 625 """
619 626 self._remote.push(
620 627 [revision], repository_path, hooks=enable_hooks,
621 628 push_branches=push_branches)
622 629
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :raises UnresolvedFilesInRepo: when the merge/rebase hit conflicts
        :raises RepositoryError: for any other merge/rebase failure
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark so we can check out the rebased head
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
695 702
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            # the close always creates a new commit, so it always needs a push
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
717 724
718 725 def _is_the_same_branch(self, target_ref, source_ref):
719 726 return (
720 727 self._get_branch_name(target_ref) ==
721 728 self._get_branch_name(source_ref))
722 729
723 730 def _get_branch_name(self, ref):
724 731 if ref.type == 'branch':
725 732 return ref.name
726 733 return self._remote.ctx_branch(ref.commit_id)
727 734
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Ensure a shadow repository exists for the given merge workspace and
        return its filesystem path; it is cloned lazily on first use.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
738 745
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository via a shadow clone, optionally rebasing and optionally
        closing the source branch.  Returns a ``MergeResponse``.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # target must be a current head, otherwise the merge result would
        # create a new head
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
889 896
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        """
        Return a ``MercurialRepository`` for the shadow repo at the given
        path, with the hooks config section stripped unless *enable_hooks*.
        """
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
895 902
896 903 def _validate_pull_reference(self, reference):
897 904 if not (reference.name in self.bookmarks or
898 905 reference.name in self.branches or
899 906 self.get_commit(reference.commit_id)):
900 907 raise CommitDoesNotExistError(
901 908 'Unknown branch, bookmark or commit id')
902 909
903 910 def _local_pull(self, repository_path, reference):
904 911 """
905 912 Fetch a branch, bookmark or commit from a local repository.
906 913 """
907 914 repository_path = os.path.abspath(repository_path)
908 915 if repository_path == self.path:
909 916 raise ValueError('Cannot pull from the same repository')
910 917
911 918 reference_type_to_option_name = {
912 919 'book': 'bookmark',
913 920 'branch': 'branch',
914 921 }
915 922 option_name = reference_type_to_option_name.get(
916 923 reference.type, 'revision')
917 924
918 925 if option_name == 'revision':
919 926 ref = reference.commit_id
920 927 else:
921 928 ref = reference.name
922 929
923 930 options = {option_name: [ref]}
924 931 self._remote.pull_cmd(repository_path, hooks=False, **options)
925 932 self._remote.invalidate_vcs_cache()
926 933
    def bookmark(self, bookmark, revision=None):
        """
        Set mercurial bookmark *bookmark*, optionally at *revision*.
        """
        if isinstance(bookmark, unicode):
            # py2 codebase: mercurial expects byte strings
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()
932 939
    def get_path_permissions(self, username):
        """
        Build a path permission checker for *username* from the optional
        ``.hg/hgacl`` file; returns ``None`` when no ACL file exists.

        :raises exceptions.RepositoryRequirementError: when the ACL file
            exists but cannot be parsed
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Look the option up from most specific (this user) to least
            # specific (default), in both supported section names.
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
                    ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # literal (non-glob) patterns also match everything below them
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
973 980
974 981
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Commit collection generator fed with numeric revision indexes
    (e.g. the result of a revset query) instead of commit hashes.
    """

    def _commit_factory(self, commit_id):
        # `commit_id` here is actually a numeric index into the history
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,5591 +1,5593 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
# NOTE(review): presumably the separator for repo/repo-group paths — confirm at call sites
URL_SEP = '/'
log = logging.getLogger(__name__)

# =============================================================================
# BASE CLASSES
# =============================================================================

# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
94 94
95 95
def display_user_sort(obj):
    """
    Sort key for user permission entries returned by ``.permissions()`` of
    Repository, RepoGroup and UserGroup; the default user always sorts in
    front of all other entries.
    """
    if obj.username == User.DEFAULT_USER:
        return '#####'
    permission_level = obj.permission.split('.')[-1]
    prefix = PERMISSION_TYPE_SORT.get(permission_level, '')
    return prefix + obj.username
107 107
108 108
def display_user_group_sort(obj):
    """
    Sort key for user-group permission entries returned by
    ``.permissions()`` of Repository, RepoGroup and UserGroup.
    """
    permission_level = obj.permission.split('.')[-1]
    prefix = PERMISSION_TYPE_SORT.get(permission_level, '')
    return prefix + obj.users_group_name
118 118
119 119
def _hash_key(k):
    """Return the sha1 digest of *k* (via the project's safe sha1 helper)."""
    return sha1_safe(k)
122 122
123 123
def in_filter_generator(qry, items, limit=500):
    """
    Split a large IN() clause into several smaller ones to be OR-ed
    together, e.g.::

        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # An empty IN() produces an empty query, which might cause security
        # issues / hidden unpleasant results — use a value that cannot match.
        items = [-1]

    return [
        qry.in_(items[start: start + limit])
        for start in xrange(0, len(items), limit)
    ]
145 145
146 146
# common SQLAlchemy __table_args__ shared by all model tables in this module
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
153 153
154 154
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value: encrypts *value* with the configured
        algorithm ('aes' by default, or 'fernet').

        :raises ValueError: if *value* is already encrypted or the
            configured algorithm is unknown
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: this ValueError was previously constructed but never
            # raised, silently returning None for unknown algorithms
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value: decrypts *value* with the configured
        algorithm.

        :raises ValueError: if the configured algorithm is unknown
        """

        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: this ValueError was previously constructed but never
            # raised; execution fell through to `return decrypted_data` and
            # crashed with NameError for unknown algorithms
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
207 207
208 208
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only column names present in populate_dict are updated
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        """Return a new SQLAlchemy query for this model class."""
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Fetch instance by primary key; returns None for falsy id."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch instance by primary key or raise pyramid HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            # non-numeric id can never match a primary key
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        """Return all instances of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Mark the instance with the given primary key deleted in the session."""
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Return the already-loaded instance from the session identity map
        whose *attr_name* equals *value*, if exactly one exists.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
313 313
314 314
class RhodeCodeSetting(Base, BaseModel):
    """
    Application-wide key/value setting.

    Values are stored as strings together with a type name and converted
    back on read via ``SETTINGS_TYPES``; values whose type string contains
    ``encrypted`` are transparently encrypted on write / decrypted on read.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # type name -> converter applied when reading the raw stored value
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        # order matters: the value setter below consults app_settings_type
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # NOTE(review): `assert` is stripped under `python -O`, so this
        # validation silently disappears in optimized runs
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        """Stored value converted to its declared python type, decrypting
        first when the type is marked ``encrypted``."""
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # type may carry a suffix, e.g. 'unicode.encrypted'
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the part before the first '.' must be a known type, so
        # compound values like 'unicode.encrypted' are accepted
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        """Return all settings whose name starts with *prefix*."""
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
399 399
400 400
class RhodeCodeUi(Base, BaseModel):
    """
    Application-wide VCS "ui" configuration entries (hg/git/svn hooks and
    related section/key/value settings).
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks shipped with the application (as opposed to user-defined ones)
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
448 448
449 449
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository key/value setting; repository-scoped counterpart of
    :class:`RhodeCodeSetting`, reusing its ``SETTINGS_TYPES`` converters.
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # NOTE(review): `assert` is stripped under `python -O`; validation
        # would silently disappear in optimized runs
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        """Stored value converted to its declared python type."""
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # NOTE(review): unlike RhodeCodeSetting this checks the full value,
        # so dotted types like 'unicode.encrypted' are rejected here
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
523 523
524 524
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository VCS "ui" configuration entry; repository-scoped
    counterpart of :class:`RhodeCodeUi`.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
555 555
556 556
class User(Base, BaseModel):
    """
    Application user account.

    Besides the mapped columns this class carries the relationships to most
    user-owned objects (repos, gists, pull requests, tokens, emails, ...)
    and helpers for auth-token based authentication and API serialization.
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    external_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are normalized to lowercase; empty values become NULL
        self._email = val.lower() if val else None

    @hybrid_property
    def first_name(self):
        """HTML-escaped first name (raw ``name`` column if empty)."""
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        """HTML-escaped last name (raw ``lastname`` column if empty)."""
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname

    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        """All pull-request-reviewer rows for this user, with PRs eagerly loaded."""
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        """Primary email followed by all alternative emails, uncached."""
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    def emails_cached(self):
        """Same as :attr:`emails` but served from the short SQL cache region."""
        emails = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc())

        emails = emails.options(
            FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
        )

        return [self.email] + [x.email for x in emails]

    @property
    def auth_tokens(self):
        """Plain token strings of all of this user's auth tokens."""
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        """All :class:`UserApiKeys` rows of this user, oldest first."""
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()

    @LazyProperty
    def feed_token(self):
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        """First feed-role token of this user, or a sentinel string if none."""
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'

    @LazyProperty
    def artifact_token(self):
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        """First artifact-download token of this user, or a sentinel string."""
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'

    @classmethod
    def get(cls, user_id, cache=False):
        """Fetch a user by id, optionally through the short SQL cache region."""
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """Non-expired tokens of *user*, optionally restricted to *role* (+ALL)."""
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check *auth_token* against this user's non-expired tokens.

        Tokens matching one of *roles* (ROLE_ALL is always included) are
        compared either as plain strings or via the crypto backend for
        encrypted tokens. A repo-scoped token only authenticates when its
        ``repo_id`` equals *scope_repo_id*. Returns True/False.
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False

    @property
    def ip_addresses(self):
        """All whitelisted IP addresses registered for this user."""
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)

    @property
    def username_or_name_or_email(self):
        # NOTE(review): `is not ' '` compares identity against a fresh str
        # literal, which is always True, so full_name is used as fallback
        # even when it is just a single space — likely `!= ' '` was intended
        full_name = self.full_name if self.full_name is not ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def full_name_or_username(self):
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def is_admin(self):
        return self.admin

    @property
    def language(self):
        return self.user_data.get('language')

    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        """Extra user data deserialized from the JSON blob column (dict)."""
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            # serialization failure is logged, not raised
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username; optionally case-insensitive, cached in the
        short SQL cache region, or resolved from the session identity map.
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """Resolve the owning user of a non-expired auth token, or ``None``."""
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """Fetch a user by primary email, falling back to alternative emails."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author: commit author string, e.g. ``Full Name <mail@host>``
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

    def update_userdata(self, **kwargs):
        """Merge *kwargs* into the user's JSON data blob and stage the change."""
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_password(self, new_password):
        """Store *new_password* hashed with the configured crypt backend."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        """Return the oldest super-admin account; raise if none exists."""
        user = User.query()\
            .filter(User.admin == true()) \
            .order_by(User.user_id.asc()) \
            .first()

        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls, only_active=False):
        """
        Returns all admin accounts sorted by username
        """
        qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
        if only_active:
            qry = qry.filter(User.active == true())
        return qry.all()

    @classmethod
    def get_all_user_ids(cls, only_active=True):
        """
        Returns all users IDs
        """
        qry = Session().query(User.user_id)

        if only_active:
            qry = qry.filter(User.active == true())
        return [x.user_id for x in qry]

    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """Return the anonymous/default account; raise if it is missing."""
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user

    @classmethod
    def get_default_user_id(cls):
        # the id is stored in the global config at startup
        import rhodecode
        return rhodecode.CONFIG['default_user_id']

    def _get_default_perms(self, user, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
           by a placeholder value to prevent exposing this data by accident. In case
           this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
           the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'description': user.description,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data

    def __json__(self):
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data
1117 1117
1118 1118
class UserApiKeys(Base, BaseModel):
    """
    Auth token of a user, with an expiry timestamp, a role restricting what
    it can be used for, and an optional repository / repo-group scope.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # roles selectable by users (ROLE_PASSWORD_RESET is internal-only)
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # unix timestamp; -1 means the token never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API representation; the token itself is obfuscated unless
        *include_secrets* is True."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # -1 marks a token that never expires
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # translated human-readable label; unknown roles fall through as-is
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; no scope means global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 chars only, for safe display; None stays None
        if self.api_key:
            return self.api_key[:4] + "****"
1222 1222
class UserEmailMap(Base, BaseModel):
    """
    Extra email addresses attached to a user account.

    The primary address lives on the ``users`` table; rows here are
    additional, globally unique addresses mapped back to their owner.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # an address that is already some user's primary email must not be
        # duplicated here
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fix: grammar of the error message ("is present is" -> "is present in")
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # stored lower-cased for case-insensitive matching; falsy values -> None
        self._email = val.lower() if val else None
1252 1252
1253 1253
class UserIpMap(Base, BaseModel):
    # Per-user IP address entries; `ip_addr` may be a single address or a
    # CIDR network (see _get_ip_range). Presumably used as an access
    # allow-list -- confirm with callers.
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return ``[first, last]`` addresses of the network *ip_addr* covers.

        ``strict=False`` accepts host bits set in the network notation.
        """
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        """Serializable dict representation of this entry."""
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1288 1288
1289 1289
class UserSshKeys(Base, BaseModel):
    # SSH public keys registered for a user; the fingerprint is globally unique.
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # raw public key material -- presumably the content of an id_*.pub file;
    # confirm against the SSH key management code
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        """Serializable dict representation (excludes the key material)."""
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        """API payload; currently identical to ``__json__``."""
        data = self.__json__()
        return data
1324 1324
1325 1325
class UserLog(Base, BaseModel):
    # Audit-log entry for user actions. User/repository FKs are nullable and
    # set to NULL on delete so the history row survives removal of its subject.
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # journal entry format versions; `version` below records which one a row uses
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    # denormalized copies kept so the entry stays meaningful after the
    # referenced user/repository is deleted
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        """Serializable dict representation of this log entry."""
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias of the primary key
        return self.user_log_id

    @property
    def action_as_day(self):
        """Calendar date (no time component) the action happened on."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1374 1374
1375 1375
class UserGroup(Base, BaseModel):
    """
    A named collection of users, with its own permission handling and
    optional JSON metadata (``group_data``).
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Decode the raw JSON ``group_data`` column; empty dict on failure."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # serialization failures are logged, not raised
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # external-sync marker stored inside group_data
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        """External sync source type, or None for a local group."""
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a user group by name, optionally case-insensitive/cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))
        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by id, optionally through the SQL cache."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups.

        Returns rows for the owner (optional), super-admins (optional),
        explicit per-user permissions, and - optionally - members of user
        groups that hold a permission on this group.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record.
                # fix: guard owner_row - it is empty when with_owner=False,
                # which previously raised IndexError here
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            # (same owner_row guard as above)
            if (owner_row and usr.user_id == owner_row[0].user_id) \
                    or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups holding a permission on this group, as AttributeDicts."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1581 1581
1582 1582
class UserGroupMember(Base, BaseModel):
    # Association row linking a user to a user group.
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings although the columns are
        # integers -- presumably callers always pass both ids; confirm before
        # tightening
        self.users_group_id = gr_id
        self.user_id = u_id
1599 1599
1600 1600
class RepositoryField(Base, BaseModel):
    """
    Custom key/value field attached to a repository (one value per key).
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied."""
        # fix: use the PREFIX constant instead of a duplicated 'ex_' literal,
        # so it cannot silently diverge from un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from *key* when present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for *key* on *repo*, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1637 1637
1638 1638
class Repository(Base, BaseModel):
    """
    A source code repository managed by RhodeCode (git/hg/svn).
    """
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates for building clone URLs shown to users
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # values of the repo_state column
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # how a repository lock came to be (stored via the `locked` property)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of repo_name, kept in sync by the repo_name setter
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # '<rev_type>:<rev>' string, accessed via the landing_rev property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # '<user_id>:<timestamp>:<reason>' string, accessed via the locked property
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1748 1748
1749 1749 def __unicode__(self):
1750 1750 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1751 1751 safe_unicode(self.repo_name))
1752 1752
1753 1753 @hybrid_property
1754 1754 def description_safe(self):
1755 1755 from rhodecode.lib import helpers as h
1756 1756 return h.escape(self.description)
1757 1757
1758 1758 @hybrid_property
1759 1759 def landing_rev(self):
1760 1760 # always should return [rev_type, rev]
1761 1761 if self._landing_revision:
1762 1762 _rev_info = self._landing_revision.split(':')
1763 1763 if len(_rev_info) < 2:
1764 1764 _rev_info.insert(0, 'rev')
1765 1765 return [_rev_info[0], _rev_info[1]]
1766 1766 return [None, None]
1767 1767
1768 1768 @landing_rev.setter
1769 1769 def landing_rev(self, val):
1770 1770 if ':' not in val:
1771 1771 raise ValueError('value must be delimited with `:` and consist '
1772 1772 'of <rev_type>:<rev>, got %s instead' % val)
1773 1773 self._landing_revision = val
1774 1774
1775 1775 @hybrid_property
1776 1776 def locked(self):
1777 1777 if self._locked:
1778 1778 user_id, timelocked, reason = self._locked.split(':')
1779 1779 lock_values = int(user_id), timelocked, reason
1780 1780 else:
1781 1781 lock_values = [None, None, None]
1782 1782 return lock_values
1783 1783
1784 1784 @locked.setter
1785 1785 def locked(self, val):
1786 1786 if val and isinstance(val, (list, tuple)):
1787 1787 self._locked = ':'.join(map(str, val))
1788 1788 else:
1789 1789 self._locked = None
1790 1790
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Decode the JSON changeset cache; fall back to an EmptyCommit payload
        when the raw value is missing or unparsable.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through JSON so the result matches the decoded form
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # NOTE(review): this fallback lacks 'source_repo_id', unlike the
            # empty-raw branch above -- confirm that is intentional
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
1806 1806
    @hybrid_property
    def changeset_cache(self):
        """Decoded changeset-cache dict (see ``_load_changeset_cache``)."""
        return self._load_changeset_cache(self.repo_id, self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        """Store *val* JSON-encoded; serialization errors are logged, not raised."""
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
1817 1817
1818 1818 @hybrid_property
1819 1819 def repo_name(self):
1820 1820 return self._repo_name
1821 1821
1822 1822 @repo_name.setter
1823 1823 def repo_name(self, value):
1824 1824 self._repo_name = value
1825 1825 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1826 1826
1827 1827 @classmethod
1828 1828 def normalize_repo_name(cls, repo_name):
1829 1829 """
1830 1830 Normalizes os specific repo_name to the format internally stored inside
1831 1831 database using URL_SEP
1832 1832
1833 1833 :param cls:
1834 1834 :param repo_name:
1835 1835 """
1836 1836 return cls.NAME_SEP.join(repo_name.split(os.sep))
1837 1837
1838 1838 @classmethod
1839 1839 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1840 1840 session = Session()
1841 1841 q = session.query(cls).filter(cls.repo_name == repo_name)
1842 1842
1843 1843 if cache:
1844 1844 if identity_cache:
1845 1845 val = cls.identity_cache(session, 'repo_name', repo_name)
1846 1846 if val:
1847 1847 return val
1848 1848 else:
1849 1849 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1850 1850 q = q.options(
1851 1851 FromCache("sql_cache_short", cache_key))
1852 1852
1853 1853 return q.scalar()
1854 1854
1855 1855 @classmethod
1856 1856 def get_by_id_or_repo_name(cls, repoid):
1857 1857 if isinstance(repoid, (int, long)):
1858 1858 try:
1859 1859 repo = cls.get(repoid)
1860 1860 except ValueError:
1861 1861 repo = None
1862 1862 else:
1863 1863 repo = cls.get_by_repo_name(repoid)
1864 1864 return repo
1865 1865
1866 1866 @classmethod
1867 1867 def get_by_full_path(cls, repo_full_path):
1868 1868 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1869 1869 repo_name = cls.normalize_repo_name(repo_name)
1870 1870 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1871 1871
1872 1872 @classmethod
1873 1873 def get_repo_forks(cls, repo_id):
1874 1874 return cls.query().filter(Repository.fork_id == repo_id)
1875 1875
1876 1876 @classmethod
1877 1877 def base_path(cls):
1878 1878 """
1879 1879 Returns base path when all repos are stored
1880 1880
1881 1881 :param cls:
1882 1882 """
1883 1883 q = Session().query(RhodeCodeUi)\
1884 1884 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1885 1885 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1886 1886 return q.one().ui_value
1887 1887
1888 1888 @classmethod
1889 1889 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1890 1890 case_insensitive=True, archived=False):
1891 1891 q = Repository.query()
1892 1892
1893 1893 if not archived:
1894 1894 q = q.filter(Repository.archived.isnot(true()))
1895 1895
1896 1896 if not isinstance(user_id, Optional):
1897 1897 q = q.filter(Repository.user_id == user_id)
1898 1898
1899 1899 if not isinstance(group_id, Optional):
1900 1900 q = q.filter(Repository.group_id == group_id)
1901 1901
1902 1902 if case_insensitive:
1903 1903 q = q.order_by(func.lower(Repository.repo_name))
1904 1904 else:
1905 1905 q = q.order_by(Repository.repo_name)
1906 1906
1907 1907 return q.all()
1908 1908
1909 1909 @property
1910 1910 def repo_uid(self):
1911 1911 return '_{}'.format(self.repo_id)
1912 1912
1913 1913 @property
1914 1914 def forks(self):
1915 1915 """
1916 1916 Return forks of this repo
1917 1917 """
1918 1918 return Repository.get_repo_forks(self.repo_id)
1919 1919
1920 1920 @property
1921 1921 def parent(self):
1922 1922 """
1923 1923 Returns fork parent
1924 1924 """
1925 1925 return self.fork
1926 1926
1927 1927 @property
1928 1928 def just_name(self):
1929 1929 return self.repo_name.split(self.NAME_SEP)[-1]
1930 1930
1931 1931 @property
1932 1932 def groups_with_parents(self):
1933 1933 groups = []
1934 1934 if self.group is None:
1935 1935 return groups
1936 1936
1937 1937 cur_gr = self.group
1938 1938 groups.insert(0, cur_gr)
1939 1939 while 1:
1940 1940 gr = getattr(cur_gr, 'parent_group', None)
1941 1941 cur_gr = cur_gr.parent_group
1942 1942 if gr is None:
1943 1943 break
1944 1944 groups.insert(0, gr)
1945 1945
1946 1946 return groups
1947 1947
1948 1948 @property
1949 1949 def groups_and_repo(self):
1950 1950 return self.groups_with_parents, self
1951 1951
1952 1952 @LazyProperty
1953 1953 def repo_path(self):
1954 1954 """
1955 1955 Returns base full path for that repository means where it actually
1956 1956 exists on a filesystem
1957 1957 """
1958 1958 q = Session().query(RhodeCodeUi).filter(
1959 1959 RhodeCodeUi.ui_key == self.NAME_SEP)
1960 1960 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1961 1961 return q.one().ui_value
1962 1962
1963 1963 @property
1964 1964 def repo_full_path(self):
1965 1965 p = [self.repo_path]
1966 1966 # we need to split the name by / since this is how we store the
1967 1967 # names in the database, but that eventually needs to be converted
1968 1968 # into a valid system path
1969 1969 p += self.repo_name.split(self.NAME_SEP)
1970 1970 return os.path.join(*map(safe_unicode, p))
1971 1971
1972 1972 @property
1973 1973 def cache_keys(self):
1974 1974 """
1975 1975 Returns associated cache keys for that repo
1976 1976 """
1977 1977 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1978 1978 repo_id=self.repo_id)
1979 1979 return CacheKey.query()\
1980 1980 .filter(CacheKey.cache_args == invalidation_namespace)\
1981 1981 .order_by(CacheKey.cache_key)\
1982 1982 .all()
1983 1983
1984 1984 @property
1985 1985 def cached_diffs_relative_dir(self):
1986 1986 """
1987 1987 Return a relative to the repository store path of cached diffs
1988 1988 used for safe display for users, who shouldn't know the absolute store
1989 1989 path
1990 1990 """
1991 1991 return os.path.join(
1992 1992 os.path.dirname(self.repo_name),
1993 1993 self.cached_diffs_dir.split(os.path.sep)[-1])
1994 1994
1995 1995 @property
1996 1996 def cached_diffs_dir(self):
1997 1997 path = self.repo_full_path
1998 1998 return os.path.join(
1999 1999 os.path.dirname(path),
2000 2000 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2001 2001
2002 2002 def cached_diffs(self):
2003 2003 diff_cache_dir = self.cached_diffs_dir
2004 2004 if os.path.isdir(diff_cache_dir):
2005 2005 return os.listdir(diff_cache_dir)
2006 2006 return []
2007 2007
2008 2008 def shadow_repos(self):
2009 2009 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2010 2010 return [
2011 2011 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2012 2012 if x.startswith(shadow_repos_pattern)]
2013 2013
2014 2014 def get_new_name(self, repo_name):
2015 2015 """
2016 2016 returns new full repository name based on assigned group and new new
2017 2017
2018 2018 :param group_name:
2019 2019 """
2020 2020 path_prefix = self.group.full_path_splitted if self.group else []
2021 2021 return self.NAME_SEP.join(path_prefix + [repo_name])
2022 2022
2023 2023 @property
2024 2024 def _config(self):
2025 2025 """
2026 2026 Returns db based config object.
2027 2027 """
2028 2028 from rhodecode.lib.utils import make_db_config
2029 2029 return make_db_config(clear_session=False, repo=self)
2030 2030
2031 2031 def permissions(self, with_admins=True, with_owner=True,
2032 2032 expand_from_user_groups=False):
2033 2033 """
2034 2034 Permissions for repositories
2035 2035 """
2036 2036 _admin_perm = 'repository.admin'
2037 2037
2038 2038 owner_row = []
2039 2039 if with_owner:
2040 2040 usr = AttributeDict(self.user.get_dict())
2041 2041 usr.owner_row = True
2042 2042 usr.permission = _admin_perm
2043 2043 usr.permission_id = None
2044 2044 owner_row.append(usr)
2045 2045
2046 2046 super_admin_ids = []
2047 2047 super_admin_rows = []
2048 2048 if with_admins:
2049 2049 for usr in User.get_all_super_admins():
2050 2050 super_admin_ids.append(usr.user_id)
2051 2051 # if this admin is also owner, don't double the record
2052 2052 if usr.user_id == owner_row[0].user_id:
2053 2053 owner_row[0].admin_row = True
2054 2054 else:
2055 2055 usr = AttributeDict(usr.get_dict())
2056 2056 usr.admin_row = True
2057 2057 usr.permission = _admin_perm
2058 2058 usr.permission_id = None
2059 2059 super_admin_rows.append(usr)
2060 2060
2061 2061 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2062 2062 q = q.options(joinedload(UserRepoToPerm.repository),
2063 2063 joinedload(UserRepoToPerm.user),
2064 2064 joinedload(UserRepoToPerm.permission),)
2065 2065
2066 2066 # get owners and admins and permissions. We do a trick of re-writing
2067 2067 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2068 2068 # has a global reference and changing one object propagates to all
2069 2069 # others. This means if admin is also an owner admin_row that change
2070 2070 # would propagate to both objects
2071 2071 perm_rows = []
2072 2072 for _usr in q.all():
2073 2073 usr = AttributeDict(_usr.user.get_dict())
2074 2074 # if this user is also owner/admin, mark as duplicate record
2075 2075 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2076 2076 usr.duplicate_perm = True
2077 2077 # also check if this permission is maybe used by branch_permissions
2078 2078 if _usr.branch_perm_entry:
2079 2079 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2080 2080
2081 2081 usr.permission = _usr.permission.permission_name
2082 2082 usr.permission_id = _usr.repo_to_perm_id
2083 2083 perm_rows.append(usr)
2084 2084
2085 2085 # filter the perm rows by 'default' first and then sort them by
2086 2086 # admin,write,read,none permissions sorted again alphabetically in
2087 2087 # each group
2088 2088 perm_rows = sorted(perm_rows, key=display_user_sort)
2089 2089
2090 2090 user_groups_rows = []
2091 2091 if expand_from_user_groups:
2092 2092 for ug in self.permission_user_groups(with_members=True):
2093 2093 for user_data in ug.members:
2094 2094 user_groups_rows.append(user_data)
2095 2095
2096 2096 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2097 2097
2098 2098 def permission_user_groups(self, with_members=True):
2099 2099 q = UserGroupRepoToPerm.query()\
2100 2100 .filter(UserGroupRepoToPerm.repository == self)
2101 2101 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2102 2102 joinedload(UserGroupRepoToPerm.users_group),
2103 2103 joinedload(UserGroupRepoToPerm.permission),)
2104 2104
2105 2105 perm_rows = []
2106 2106 for _user_group in q.all():
2107 2107 entry = AttributeDict(_user_group.users_group.get_dict())
2108 2108 entry.permission = _user_group.permission.permission_name
2109 2109 if with_members:
2110 2110 entry.members = [x.user.get_dict()
2111 2111 for x in _user_group.users_group.members]
2112 2112 perm_rows.append(entry)
2113 2113
2114 2114 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2115 2115 return perm_rows
2116 2116
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # `locked` unpacks into (user_id, lock_time, lock_reason)
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        # custom extra fields are only exposed when globally enabled
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2165 2165
2166 2166 @classmethod
2167 2167 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2168 2168 if not lock_time:
2169 2169 lock_time = time.time()
2170 2170 if not lock_reason:
2171 2171 lock_reason = cls.LOCK_AUTOMATIC
2172 2172 repo.locked = [user_id, lock_time, lock_reason]
2173 2173 Session().add(repo)
2174 2174 Session().commit()
2175 2175
2176 2176 @classmethod
2177 2177 def unlock(cls, repo):
2178 2178 repo.locked = None
2179 2179 Session().add(repo)
2180 2180 Session().commit()
2181 2181
2182 2182 @classmethod
2183 2183 def getlock(cls, repo):
2184 2184 return repo.locked
2185 2185
2186 2186 def is_user_lock(self, user_id):
2187 2187 if self.lock[0]:
2188 2188 lock_user_id = safe_int(self.lock[0])
2189 2189 user_id = safe_int(user_id)
2190 2190 # both are ints, and they are equal
2191 2191 return all([lock_user_id, user_id]) and lock_user_id == user_id
2192 2192
2193 2193 return False
2194 2194
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'; anything else raises ValueError
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate lock state even if the
            repository has locking disabled
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2257 2257
2258 2258 @property
2259 2259 def last_commit_cache_update_diff(self):
2260 2260 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2261 2261
2262 2262 @classmethod
2263 2263 def _load_commit_change(cls, last_commit_cache):
2264 2264 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2265 2265 empty_date = datetime.datetime.fromtimestamp(0)
2266 2266 date_latest = last_commit_cache.get('date', empty_date)
2267 2267 try:
2268 2268 return parse_datetime(date_latest)
2269 2269 except Exception:
2270 2270 return empty_date
2271 2271
2272 2272 @property
2273 2273 def last_commit_change(self):
2274 2274 return self._load_commit_change(self.changeset_cache)
2275 2275
2276 2276 @property
2277 2277 def last_db_change(self):
2278 2278 return self.updated_on
2279 2279
2280 2280 @property
2281 2281 def clone_uri_hidden(self):
2282 2282 clone_uri = self.clone_uri
2283 2283 if clone_uri:
2284 2284 import urlobject
2285 2285 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2286 2286 if url_obj.password:
2287 2287 clone_uri = url_obj.with_password('*****')
2288 2288 return clone_uri
2289 2289
2290 2290 @property
2291 2291 def push_uri_hidden(self):
2292 2292 push_uri = self.push_uri
2293 2293 if push_uri:
2294 2294 import urlobject
2295 2295 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2296 2296 if url_obj.password:
2297 2297 push_uri = url_obj.with_password('*****')
2298 2298 return push_uri
2299 2299
    def clone_url(self, **override):
        """
        Render a clone URL for this repository.

        Recognized override keys (each is consumed, i.e. removed from
        ``override`` before the remainder is passed to ``get_clone_url``):
        ``with_id`` (use the id-based template), ``uri_tmpl`` (explicit
        template string), ``ssh`` (use the SSH template from settings).
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        # explicit template wins over with_id
        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer per-request config when available, else load settings
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
2339 2339
2340 2340 def set_state(self, state):
2341 2341 self.repo_state = state
2342 2342 Session().add(self)
2343 2343 #==========================================================================
2344 2344 # SCM PROPERTIES
2345 2345 #==========================================================================
2346 2346
2347 2347 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
2348 2348 return get_commit_safe(
2349 2349 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2350 2350 maybe_unreachable=maybe_unreachable)
2351 2351
2352 2352 def get_changeset(self, rev=None, pre_load=None):
2353 2353 warnings.warn("Use get_commit", DeprecationWarning)
2354 2354 commit_id = None
2355 2355 commit_idx = None
2356 2356 if isinstance(rev, compat.string_types):
2357 2357 commit_id = rev
2358 2358 else:
2359 2359 commit_idx = rev
2360 2360 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2361 2361 pre_load=pre_load)
2362 2362
2363 2363 def get_landing_commit(self):
2364 2364 """
2365 2365 Returns landing commit, or if that doesn't exist returns the tip
2366 2366 """
2367 2367 _rev_type, _rev = self.landing_rev
2368 2368 commit = self.get_commit(_rev)
2369 2369 if isinstance(commit, EmptyCommit):
2370 2370 return self.get_commit()
2371 2371 return commit
2372 2372
    def flush_commit_cache(self):
        # write a dummy raw_id first so the follow-up full refresh is
        # guaranteed to be detected as outdated and recomputed
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
2376 2376
2377 2377 def update_commit_cache(self, cs_cache=None, config=None):
2378 2378 """
2379 2379 Update cache of last commit for repository
2380 2380 cache_keys should be::
2381 2381
2382 2382 source_repo_id
2383 2383 short_id
2384 2384 raw_id
2385 2385 revision
2386 2386 parents
2387 2387 message
2388 2388 date
2389 2389 author
2390 2390 updated_on
2391 2391
2392 2392 """
2393 2393 from rhodecode.lib.vcs.backends.base import BaseChangeset
2394 2394 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2395 2395 empty_date = datetime.datetime.fromtimestamp(0)
2396 2396
2397 2397 if cs_cache is None:
2398 2398 # use no-cache version here
2399 2399 try:
2400 2400 scm_repo = self.scm_instance(cache=False, config=config)
2401 2401 except VCSError:
2402 2402 scm_repo = None
2403 2403 empty = scm_repo is None or scm_repo.is_empty()
2404 2404
2405 2405 if not empty:
2406 2406 cs_cache = scm_repo.get_commit(
2407 2407 pre_load=["author", "date", "message", "parents", "branch"])
2408 2408 else:
2409 2409 cs_cache = EmptyCommit()
2410 2410
2411 2411 if isinstance(cs_cache, BaseChangeset):
2412 2412 cs_cache = cs_cache.__json__()
2413 2413
2414 2414 def is_outdated(new_cs_cache):
2415 2415 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2416 2416 new_cs_cache['revision'] != self.changeset_cache['revision']):
2417 2417 return True
2418 2418 return False
2419 2419
2420 2420 # check if we have maybe already latest cached revision
2421 2421 if is_outdated(cs_cache) or not self.changeset_cache:
2422 2422 _current_datetime = datetime.datetime.utcnow()
2423 2423 last_change = cs_cache.get('date') or _current_datetime
2424 2424 # we check if last update is newer than the new value
2425 2425 # if yes, we use the current timestamp instead. Imagine you get
2426 2426 # old commit pushed 1y ago, we'd set last update 1y to ago.
2427 2427 last_change_timestamp = datetime_to_time(last_change)
2428 2428 current_timestamp = datetime_to_time(last_change)
2429 2429 if last_change_timestamp > current_timestamp and not empty:
2430 2430 cs_cache['date'] = _current_datetime
2431 2431
2432 2432 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2433 2433 cs_cache['updated_on'] = time.time()
2434 2434 self.changeset_cache = cs_cache
2435 2435 self.updated_on = last_change
2436 2436 Session().add(self)
2437 2437 Session().commit()
2438 2438
2439 2439 else:
2440 2440 if empty:
2441 2441 cs_cache = EmptyCommit().__json__()
2442 2442 else:
2443 2443 cs_cache = self.changeset_cache
2444 2444
2445 2445 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2446 2446
2447 2447 cs_cache['updated_on'] = time.time()
2448 2448 self.changeset_cache = cs_cache
2449 2449 self.updated_on = _date_latest
2450 2450 Session().add(self)
2451 2451 Session().commit()
2452 2452
2453 2453 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2454 2454 self.repo_name, cs_cache, _date_latest)
2455 2455
2456 2456 @property
2457 2457 def tip(self):
2458 2458 return self.get_commit('tip')
2459 2459
2460 2460 @property
2461 2461 def author(self):
2462 2462 return self.tip.author
2463 2463
2464 2464 @property
2465 2465 def last_change(self):
2466 2466 return self.scm_instance().last_change
2467 2467
2468 2468 def get_comments(self, revisions=None):
2469 2469 """
2470 2470 Returns comments for this repository grouped by revisions
2471 2471
2472 2472 :param revisions: filter query by revisions only
2473 2473 """
2474 2474 cmts = ChangesetComment.query()\
2475 2475 .filter(ChangesetComment.repo == self)
2476 2476 if revisions:
2477 2477 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2478 2478 grouped = collections.defaultdict(list)
2479 2479 for cmt in cmts.all():
2480 2480 grouped[cmt.revision].append(cmt)
2481 2481 return grouped
2482 2482
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict of revision -> [status, status label, pr id, pr repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses overwrite the "under review" defaults set above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2524 2524
2525 2525 # ==========================================================================
2526 2526 # SCM CACHE INSTANCE
2527 2527 # ==========================================================================
2528 2528
    def scm_instance(self, **kwargs):
        """
        Return a vcs backend instance for this repository, using the
        long-term instance cache unless a config/cache override disables it.
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows override global config
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
2550 2550
    def _get_instance_cached(self):
        """
        Return a vcs instance through the dogpile 'cache_repo_longterm'
        region, re-computing it when the invalidation namespace signals so.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            # args exist purely to shape the cache key; the instance itself
            # is built from self
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2581 2581
2582 2582 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2583 2583 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2584 2584 self.repo_type, self.repo_path, cache)
2585 2585 config = config or self._config
2586 2586 custom_wire = {
2587 2587 'cache': cache, # controls the vcs.remote cache
2588 2588 'repo_state_uid': repo_state_uid
2589 2589 }
2590 2590 repo = get_vcs_instance(
2591 2591 repo_path=safe_str(self.repo_full_path),
2592 2592 config=config,
2593 2593 with_wire=custom_wire,
2594 2594 create=False,
2595 2595 _vcs_alias=self.repo_type)
2596 2596 if repo is not None:
2597 2597 repo.count() # cache rebuild
2598 2598 return repo
2599 2599
2600 2600 def get_shadow_repository_path(self, workspace_id):
2601 2601 from rhodecode.lib.vcs.backends.base import BaseRepository
2602 2602 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2603 2603 self.repo_full_path, self.repo_id, workspace_id)
2604 2604 return shadow_repo_path
2605 2605
2606 2606 def __json__(self):
2607 2607 return {'landing_rev': self.landing_rev}
2608 2608
2609 2609 def get_dict(self):
2610 2610
2611 2611 # Since we transformed `repo_name` to a hybrid property, we need to
2612 2612 # keep compatibility with the code which uses `repo_name` field.
2613 2613
2614 2614 result = super(Repository, self).get_dict()
2615 2615 result['repo_name'] = result.pop('_repo_name', None)
2616 2616 return result
2617 2617
2618 2618
class RepoGroup(Base, BaseModel):
    """Nested grouping container for repositories (``groups`` table)."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups

    # identity / naming
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    # self-referencing parent for nesting
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    # owner
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2649 2649
    def __init__(self, group_name='', parent_group=None):
        # assigning group_name goes through the hybrid-property setter,
        # which also refreshes group_name_hash
        self.group_name = group_name
        self.parent_group = parent_group
2653 2653
    def __unicode__(self):
        # e.g. <RepoGroup('id:1:some/group')>
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)
2657 2657
    @hybrid_property
    def group_name(self):
        # public name, backed by the _group_name column
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the derived hash in sync whenever the name changes
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)
2666 2666
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """Deserialize cached-commit JSON, falling back to an EmptyCommit dict."""
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through json to normalize the dict into plain
            # JSON-compatible values
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # raw value was not a string/bytes
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
2682 2682
    @hybrid_property
    def changeset_cache(self):
        # '' is passed as the repo_id placeholder for the dummy fallback
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            # never let a serialization problem break the model; keep old value
            log.error(traceback.format_exc())
2693 2693
2694 2694 @validates('group_parent_id')
2695 2695 def validate_group_parent_id(self, key, val):
2696 2696 """
2697 2697 Check cycle references for a parent group to self
2698 2698 """
2699 2699 if self.group_id and val:
2700 2700 assert val != self.group_id
2701 2701
2702 2702 return val
2703 2703
2704 2704 @hybrid_property
2705 2705 def description_safe(self):
2706 2706 from rhodecode.lib import helpers as h
2707 2707 return h.escape(self.group_description)
2708 2708
2709 2709 @classmethod
2710 2710 def hash_repo_group_name(cls, repo_group_name):
2711 2711 val = remove_formatting(repo_group_name)
2712 2712 val = safe_str(val).lower()
2713 2713 chars = []
2714 2714 for c in val:
2715 2715 if c not in string.ascii_letters:
2716 2716 c = str(ord(c))
2717 2717 chars.append(c)
2718 2718
2719 2719 return ''.join(chars)
2720 2720
2721 2721 @classmethod
2722 2722 def _generate_choice(cls, repo_group):
2723 2723 from webhelpers2.html import literal as _literal
2724 2724 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2725 2725 return repo_group.group_id, _name(repo_group.full_path_splitted)
2726 2726
2727 2727 @classmethod
2728 2728 def groups_choices(cls, groups=None, show_empty_group=True):
2729 2729 if not groups:
2730 2730 groups = cls.query().all()
2731 2731
2732 2732 repo_groups = []
2733 2733 if show_empty_group:
2734 2734 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2735 2735
2736 2736 repo_groups.extend([cls._generate_choice(x) for x in groups])
2737 2737
2738 2738 repo_groups = sorted(
2739 2739 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2740 2740 return repo_groups
2741 2741
2742 2742 @classmethod
2743 2743 def url_sep(cls):
2744 2744 return URL_SEP
2745 2745
2746 2746 @classmethod
2747 2747 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2748 2748 if case_insensitive:
2749 2749 gr = cls.query().filter(func.lower(cls.group_name)
2750 2750 == func.lower(group_name))
2751 2751 else:
2752 2752 gr = cls.query().filter(cls.group_name == group_name)
2753 2753 if cache:
2754 2754 name_key = _hash_key(group_name)
2755 2755 gr = gr.options(
2756 2756 FromCache("sql_cache_short", "get_group_%s" % name_key))
2757 2757 return gr.scalar()
2758 2758
2759 2759 @classmethod
2760 2760 def get_user_personal_repo_group(cls, user_id):
2761 2761 user = User.get(user_id)
2762 2762 if user.username == User.DEFAULT_USER:
2763 2763 return None
2764 2764
2765 2765 return cls.query()\
2766 2766 .filter(cls.personal == true()) \
2767 2767 .filter(cls.user == user) \
2768 2768 .order_by(cls.group_id.asc()) \
2769 2769 .first()
2770 2770
2771 2771 @classmethod
2772 2772 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2773 2773 case_insensitive=True):
2774 2774 q = RepoGroup.query()
2775 2775
2776 2776 if not isinstance(user_id, Optional):
2777 2777 q = q.filter(RepoGroup.user_id == user_id)
2778 2778
2779 2779 if not isinstance(group_id, Optional):
2780 2780 q = q.filter(RepoGroup.group_parent_id == group_id)
2781 2781
2782 2782 if case_insensitive:
2783 2783 q = q.order_by(func.lower(RepoGroup.group_name))
2784 2784 else:
2785 2785 q = q.order_by(RepoGroup.group_name)
2786 2786 return q.all()
2787 2787
    @property
    def parents(self, parents_recursion_limit=10):
        """
        Return the chain of ancestor groups, outermost (root-most) first.

        NOTE(review): since this is a property, callers can never actually
        pass ``parents_recursion_limit`` — it acts as a hard-coded cap on how
        deep the ancestor walk may go before bailing out.
        """
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        # the immediate parent is always included, even when the limit hits
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            # peek at the grandparent before advancing the cursor
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            # prepend so the final list reads root -> ... -> direct parent
            groups.insert(0, gr)
        return groups
2810 2810
2811 2811 @property
2812 2812 def last_commit_cache_update_diff(self):
2813 2813 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2814 2814
2815 2815 @classmethod
2816 2816 def _load_commit_change(cls, last_commit_cache):
2817 2817 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2818 2818 empty_date = datetime.datetime.fromtimestamp(0)
2819 2819 date_latest = last_commit_cache.get('date', empty_date)
2820 2820 try:
2821 2821 return parse_datetime(date_latest)
2822 2822 except Exception:
2823 2823 return empty_date
2824 2824
    @property
    def last_commit_change(self):
        """Datetime of the newest cached commit known for this group."""
        return self._load_commit_change(self.changeset_cache)
2828 2828
    @property
    def last_db_change(self):
        """Timestamp of the last database update of this row."""
        return self.updated_on
2832 2832
    @property
    def children(self):
        """Query over the direct child groups of this group (not recursive)."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
2836 2836
2837 2837 @property
2838 2838 def name(self):
2839 2839 return self.group_name.split(RepoGroup.url_sep())[-1]
2840 2840
    @property
    def full_path(self):
        """Full group name including all parent path segments."""
        return self.group_name
2844 2844
    @property
    def full_path_splitted(self):
        """Full group name split into its path segments."""
        return self.group_name.split(RepoGroup.url_sep())
2848 2848
2849 2849 @property
2850 2850 def repositories(self):
2851 2851 return Repository.query()\
2852 2852 .filter(Repository.group == self)\
2853 2853 .order_by(Repository.repo_name)
2854 2854
2855 2855 @property
2856 2856 def repositories_recursive_count(self):
2857 2857 cnt = self.repositories.count()
2858 2858
2859 2859 def children_count(group):
2860 2860 cnt = 0
2861 2861 for child in group.children:
2862 2862 cnt += child.repositories.count()
2863 2863 cnt += children_count(child)
2864 2864 return cnt
2865 2865
2866 2866 return cnt + children_count(self)
2867 2867
2868 2868 def _recursive_objects(self, include_repos=True, include_groups=True):
2869 2869 all_ = []
2870 2870
2871 2871 def _get_members(root_gr):
2872 2872 if include_repos:
2873 2873 for r in root_gr.repositories:
2874 2874 all_.append(r)
2875 2875 childs = root_gr.children.all()
2876 2876 if childs:
2877 2877 for gr in childs:
2878 2878 if include_groups:
2879 2879 all_.append(gr)
2880 2880 _get_members(gr)
2881 2881
2882 2882 root_group = []
2883 2883 if include_groups:
2884 2884 root_group = [self]
2885 2885
2886 2886 _get_members(self)
2887 2887 return root_group + all_
2888 2888
    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()
2894 2894
    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)
2900 2900
    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)
2906 2906
2907 2907 def get_new_name(self, group_name):
2908 2908 """
2909 2909 returns new full group name based on parent and new name
2910 2910
2911 2911 :param group_name:
2912 2912 """
2913 2913 path_prefix = (self.parent_group.full_path_splitted if
2914 2914 self.parent_group else [])
2915 2915 return RepoGroup.url_sep().join(path_prefix + [group_name])
2916 2916
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        Scans the direct repositories and direct child groups (whose caches
        are assumed to already aggregate their own subtrees), keeps the
        newest changeset cache, and persists it on this group.
        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # direct repositories first, then the direct child groups
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            # keep the cache entry with the strictly newer commit date
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    # a Repository: record where the commit came from
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    # a child RepoGroup: propagate its recorded source repo
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        # NOTE(review): when no entry won, 'date' is missing and the epoch is
        # re-parsed here — presumably parse_datetime accepts datetime input too
        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)
2964 2964
2965 2965 def permissions(self, with_admins=True, with_owner=True,
2966 2966 expand_from_user_groups=False):
2967 2967 """
2968 2968 Permissions for repository groups
2969 2969 """
2970 2970 _admin_perm = 'group.admin'
2971 2971
2972 2972 owner_row = []
2973 2973 if with_owner:
2974 2974 usr = AttributeDict(self.user.get_dict())
2975 2975 usr.owner_row = True
2976 2976 usr.permission = _admin_perm
2977 2977 owner_row.append(usr)
2978 2978
2979 2979 super_admin_ids = []
2980 2980 super_admin_rows = []
2981 2981 if with_admins:
2982 2982 for usr in User.get_all_super_admins():
2983 2983 super_admin_ids.append(usr.user_id)
2984 2984 # if this admin is also owner, don't double the record
2985 2985 if usr.user_id == owner_row[0].user_id:
2986 2986 owner_row[0].admin_row = True
2987 2987 else:
2988 2988 usr = AttributeDict(usr.get_dict())
2989 2989 usr.admin_row = True
2990 2990 usr.permission = _admin_perm
2991 2991 super_admin_rows.append(usr)
2992 2992
2993 2993 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2994 2994 q = q.options(joinedload(UserRepoGroupToPerm.group),
2995 2995 joinedload(UserRepoGroupToPerm.user),
2996 2996 joinedload(UserRepoGroupToPerm.permission),)
2997 2997
2998 2998 # get owners and admins and permissions. We do a trick of re-writing
2999 2999 # objects from sqlalchemy to named-tuples due to sqlalchemy session
3000 3000 # has a global reference and changing one object propagates to all
3001 3001 # others. This means if admin is also an owner admin_row that change
3002 3002 # would propagate to both objects
3003 3003 perm_rows = []
3004 3004 for _usr in q.all():
3005 3005 usr = AttributeDict(_usr.user.get_dict())
3006 3006 # if this user is also owner/admin, mark as duplicate record
3007 3007 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3008 3008 usr.duplicate_perm = True
3009 3009 usr.permission = _usr.permission.permission_name
3010 3010 perm_rows.append(usr)
3011 3011
3012 3012 # filter the perm rows by 'default' first and then sort them by
3013 3013 # admin,write,read,none permissions sorted again alphabetically in
3014 3014 # each group
3015 3015 perm_rows = sorted(perm_rows, key=display_user_sort)
3016 3016
3017 3017 user_groups_rows = []
3018 3018 if expand_from_user_groups:
3019 3019 for ug in self.permission_user_groups(with_members=True):
3020 3020 for user_data in ug.members:
3021 3021 user_groups_rows.append(user_data)
3022 3022
3023 3023 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3024 3024
3025 3025 def permission_user_groups(self, with_members=False):
3026 3026 q = UserGroupRepoGroupToPerm.query()\
3027 3027 .filter(UserGroupRepoGroupToPerm.group == self)
3028 3028 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3029 3029 joinedload(UserGroupRepoGroupToPerm.users_group),
3030 3030 joinedload(UserGroupRepoGroupToPerm.permission),)
3031 3031
3032 3032 perm_rows = []
3033 3033 for _user_group in q.all():
3034 3034 entry = AttributeDict(_user_group.users_group.get_dict())
3035 3035 entry.permission = _user_group.permission.permission_name
3036 3036 if with_members:
3037 3037 entry.members = [x.user.get_dict()
3038 3038 for x in _user_group.users_group.members]
3039 3039 perm_rows.append(entry)
3040 3040
3041 3041 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3042 3042 return perm_rows
3043 3043
3044 3044 def get_api_data(self):
3045 3045 """
3046 3046 Common function for generating api data
3047 3047
3048 3048 """
3049 3049 group = self
3050 3050 data = {
3051 3051 'group_id': group.group_id,
3052 3052 'group_name': group.group_name,
3053 3053 'group_description': group.description_safe,
3054 3054 'parent_group': group.parent_group.group_name if group.parent_group else None,
3055 3055 'repositories': [x.repo_name for x in group.repositories],
3056 3056 'owner': group.user.username,
3057 3057 }
3058 3058 return data
3059 3059
3060 3060 def get_dict(self):
3061 3061 # Since we transformed `group_name` to a hybrid property, we need to
3062 3062 # keep compatibility with the code which uses `group_name` field.
3063 3063 result = super(RepoGroup, self).get_dict()
3064 3064 result['group_name'] = result.pop('_group_name', None)
3065 3065 return result
3066 3066
3067 3067
class Permission(Base, BaseModel):
    """
    Catalogue of all permission names known to the system, plus query
    helpers that resolve effective permissions for a user (directly or via
    user-group membership) on repositories, repo groups, user groups and
    branches.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # (permission_name, human readable description) — the full closed set of
    # permissions; seeded into the table on setup
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important.
    # The higher the number, the more important the permission — used when
    # multiple permissions apply and the strongest must win.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row for *key* (a permission_name), or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        Direct per-user repository permissions for *user_id*, optionally
        narrowed to one repository. Yields (UserRepoToPerm, Repository,
        Permission) tuples.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Direct per-user branch permission rules for *user_id*, ordered by
        rule precedence; optionally narrowed to one repository.
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions *user_id* inherits through membership in
        active user groups; optionally narrowed to one repository.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Branch permission rules *user_id* inherits through membership in
        active user groups, ordered by rule precedence; optionally narrowed
        to one repository.
        """
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """
        Direct per-user repo-group permissions for *user_id*, optionally
        narrowed to one repo group.
        """
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repo-group permissions *user_id* inherits through membership in
        active user groups; optionally narrowed to one repo group.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """
        Direct per-user user-group permissions for *user_id*, optionally
        narrowed to one user group.
        """
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        User-group permissions *user_id* inherits through membership in
        active user groups; the alias distinguishes the *target* user group
        (the object of the permission) from the *granting* user group.
        """
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3348 3348
3349 3349
class UserRepoToPerm(Base, BaseModel):
    """Permission granted to a single user on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules hanging off this grant; removed together with it
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Add (not commit) a new user->repository permission grant."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3379 3379
3380 3380
class UserUserGroupToPerm(Base, BaseModel):
    """Permission granted to a single user on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Add (not commit) a new user->user-group permission grant."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3408 3408
3409 3409
class UserToPerm(Base, BaseModel):
    """Global (system-wide) permission assigned to a single user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded: permission name is needed whenever the row is read
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3426 3426
3427 3427
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission granted to a user group on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch rules hanging off this grant; deleted together with it
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Add (not commit) a new user-group->repository permission grant."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3456 3456
3457 3457
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission one user group holds over another (target) user group; a
    group may not target itself (enforced by the check constraint).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both relationships point at UserGroup; explicit primaryjoins keep the
    # two foreign keys apart
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Add (not commit) a new user-group->user-group permission grant."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3486 3486
3487 3487
class UserGroupToPerm(Base, BaseModel):
    """Global (system-wide) permission assigned to a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3501 3501
3502 3502
class UserRepoGroupToPerm(Base, BaseModel):
    """Permission granted to a single user on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Add (not commit) a new user->repo-group permission grant."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3527 3527
3528 3528
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted to a user group on a repository group.

    NOTE(review): the unique constraint covers only (users_group_id,
    group_id), without permission_id — unlike the sibling tables; presumably
    intentional (one permission per pair), verify before changing.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Add (not commit) a new user-group->repo-group permission grant."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3556 3556
3557 3557
class Statistics(Base, BaseModel):
    """Cached per-repository commit statistics (one row per repository)."""
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # unique FK: at most one statistics row per repository
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision these statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
3572 3572
3573 3573
class UserFollowing(Base, BaseModel):
    """
    Records a user following either a repository or another user.

    Exactly one of ``follows_repo_id`` / ``follows_user_id`` is expected to
    be set per row (both columns are nullable; uniqueness is enforced per
    pair via the two UniqueConstraints below).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all followings that target repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3596 3596
3597 3597
class CacheKey(Base, BaseModel):
    """
    Cache invalidation bookkeeping.

    Each row ties a ``cache_key`` to a namespace (``cache_args``) and a
    ``cache_state_uid``; marking rows inactive / rotating the state uid is
    how other processes learn their cached data is stale.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        """
        :param cache_key: unique key identifying the cached entity
        :param cache_args: namespace string the key belongs to
        :param cache_state_uid: explicit state uid; generated when omitted
        """
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # str.partition on cache_args splits cache_key into the text before,
        # the namespace itself, and the text after it
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """Return a new state uid: deterministic (uuid5) when *based_on* is
        given, random (uuid4) otherwise."""
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: namespace (``cache_args``) whose entries to touch
        :param delete: when True remove the rows instead of deactivating them
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # rotating cache_state_uid signals other workers to refresh
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: invalidation failure is logged, not propagated
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for *cache_key*, or None when missing."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return a dict of ``cache_key -> CacheKey`` for *namespace*."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3705 3705
3706 3706
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a commit (``revision``) or to a pull
    request (``pull_request_id``), optionally inline on a file/line.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state marker for comments left behind by older PR versions
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values
    OP_IMMUTABLE = u'immutable'
    OP_CHANGEABLE = u'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential pair: a TODO comment and the comment that resolves it
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of *pr_version* within *versions*,
        or None when it is not present.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # None means "latest": any versioned comment is older
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # the comment that resolved this TODO, if any
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # truthy only when both a line number and a file path are set
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's 1-based version index within *versions*."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a plain dict representation used by the API layer."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3841 3841
3842 3842
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) of a single revision, versioned
    per (repo, revision) so history of status changes is kept.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order is the display order
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the human readable label for a status *value* (or None)."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a plain dict representation used by the API layer."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3905 3905
3906 3906
3907 3907 class _SetState(object):
3908 3908 """
3909 3909 Context processor allowing changing state for sensitive operation such as
3910 3910 pull request update or merge
3911 3911 """
3912 3912
3913 3913 def __init__(self, pull_request, pr_state, back_state=None):
3914 3914 self._pr = pull_request
3915 3915 self._org_state = back_state or pull_request.pull_request_state
3916 3916 self._pr_state = pr_state
3917 3917 self._current_state = None
3918 3918
3919 3919 def __enter__(self):
3920 3920 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
3921 3921 self._pr, self._pr_state)
3922 3922 self.set_pr_state(self._pr_state)
3923 3923 return self
3924 3924
3925 3925 def __exit__(self, exc_type, exc_val, exc_tb):
3926 3926 if exc_val is not None:
3927 3927 log.error(traceback.format_exc(exc_tb))
3928 3928 return None
3929 3929
3930 3930 self.set_pr_state(self._org_state)
3931 3931 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
3932 3932 self._pr, self._org_state)
3933 3933
3934 3934 @property
3935 3935 def state(self):
3936 3936 return self._current_state
3937 3937
3938 3938 def set_pr_state(self, pr_state):
3939 3939 try:
3940 3940 self._pr.pull_request_state = pr_state
3941 3941 Session().add(self._pr)
3942 3942 Session().commit()
3943 3943 self._current_state = pr_state
3944 3944 except Exception:
3945 3945 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3946 3946 raise
3947 3947
3948 3948
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Mixed into both ``PullRequest`` and ``PullRequestVersion``;
    ``declared_attr`` is used for columns/relationships that must be
    re-created per subclass.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    # commit id of the common ancestor used when the PR was calculated
    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # references are stored as "type:name:commit_id"
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # references are stored as "type:name:commit_id"
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        """Reviewer data serialized to a JSON string."""
        return json.dumps(self.reviewer_data)

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a single colon-joined string; exposed as a list
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = u':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    @staticmethod
    def reference_to_unicode(ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Return a plain dict representation used by the API layer.

        :param with_merge_state: when True, the (potentially expensive)
            merge status is computed; otherwise a 'not_available' stub
            is returned in its place.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_unicode(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state:
        :param final_state:

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4223 4225
4224 4226
class PullRequest(Base, _PullRequestBase):
    """
    The live (current) pull request record; historical snapshots are kept
    in ``PullRequestVersion``.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around *pull_request_obj*
        (possibly a version), borrowing shadow-merge/reviewer data from
        *org_pull_request_obj* (the live PR).
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # shadow-merge/reviewer data always comes from the live PR object
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # any state other than 'created' means an operation is in flight
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """Return the shadow repo vcs instance, or None when it does not
        exist on disk."""
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1
4351 4353
4352 4354
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request, created on each update.
    Most dynamic state is delegated back to the live ``PullRequest``
    referenced by ``pull_request_id``.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            # not yet flushed; fall back to object identity
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers always come from the live pull request
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # delegated: a stored version never changes state on its own
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4392 4394
4393 4395
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a reviewer (user) with a pull request, including why
    the reviewer was added (``reasons``) and any voting rule data that
    came from a repo review rule.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # normalize NULL/empty column value to an empty list for callers
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of human-readable reasons; exposed via the `reasons` hybrid
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON blob with voting-rule metadata copied from the review rule
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer,
        or None when no vote rule is attached.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4451 4453
4452 4454
class Notification(Base, BaseModel):
    """
    A notification message; delivery to individual recipients is
    tracked through the ``UserNotification`` association table.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # known notification type markers, stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        # recipient users, ordered by user_id for deterministic output
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in `recipients`.

        :param created_by: user object authoring the notification
        :param subject: short subject line
        :param body: message body
        :param recipients: iterable of user objects to notify
        :param type_: one of the TYPE_* markers, defaults to TYPE_MESSAGE
        :return: the new, session-added (uncommitted) Notification
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4512 4514
4513 4515
class UserNotification(Base, BaseModel):
    """
    Per-recipient delivery record of a ``Notification``: tracks the
    read flag and send time for one user.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # sets the flag and stages the row; caller commits the session
        self.read = True
        Session().add(self)
4533 4535
4534 4536
class UserNotice(Base, BaseModel):
    """
    A personal notice (message or warning banner) addressed to a single
    user, shown until marked as read.
    """
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): the physical column is named 'gist_id' — this looks
    # like a copy/paste leftover from the Gist model; renaming requires
    # a schema migration, so it is only documented here.
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for `user`.

        Silently does nothing when `notice_level` is not one of the
        NOTIFICATION_LEVEL_* values, or when an identical unread notice
        already exists and `allow_duplicate` is False.
        """

        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # duplicate == same user + same body that is still unread
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
4592 4594
4593 4595
class Gist(Base, BaseModel):
    """
    A code snippet (gist) stored as a small VCS repository under the
    gist store; can be public or private with its own ACL level.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # visibility types stored in `gist_type`
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels stored in `acl_level`
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Return the gist for `id_` (access id) or raise HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Full URL of this gist, built by the gist model."""
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4696 4698
4697 4699
class ExternalIdentity(Base, BaseModel):
    """
    Links a local user account to an identity at an external
    authentication provider, together with the provider tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional, narrows the search to one local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load and return the EE auth plugin registered as `plugin_id`."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4763 4765
4764 4766
class Integration(Base, BaseModel):
    """
    Stored configuration of an integration, scoped to a repository,
    a repository group, or globally (both foreign keys NULL).
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # integration-type specific settings blob
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable description of where this integration applies."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4806 4808
4807 4809
class RepoReviewRuleUser(Base, BaseModel):
    """Single user entry attached to a ``RepoReviewRule``."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        # shape mirrors RepoReviewRuleUserGroup.rule_data()
        return {
            'mandatory': self.mandatory
        }
4824 4826
4825 4827
class RepoReviewRuleUserGroup(Base, BaseModel):
    """User-group entry attached to a ``RepoReviewRule``, carrying a
    minimum-votes rule for the group."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning: every member of the group must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # NOTE: a vote_rule of 0 or None also renders as "all must vote"
        # because of the falsy check below
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4853 4855
4854 4856
class RepoReviewRule(Base, BaseModel):
    """
    Per-repository reviewer rule: matches pull requests by source/target
    branch and changed-file patterns (glob by default, raw regex when
    prefixed with ``re:``) and supplies the users / user groups that
    should review them.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error when the glob does not translate to a valid regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/NULL normalizes to '*' (match everything)
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    # explicit regex: strip the `re:` marker, use as-is
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                # a single matching file is enough
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # NOTE(review): this compares a User object against
                # username keys, so it is always True; harmless because
                # the assignment below is keyed by username anyway
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """Return the group-rule entries whose user group contains `user_id`."""
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
5016 5018
5017 5019
5018 5020 class ScheduleEntry(Base, BaseModel):
5019 5021 __tablename__ = 'schedule_entries'
5020 5022 __table_args__ = (
5021 5023 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5022 5024 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5023 5025 base_table_args,
5024 5026 )
5025 5027
5026 5028 schedule_types = ['crontab', 'timedelta', 'integer']
5027 5029 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5028 5030
5029 5031 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5030 5032 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5031 5033 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5032 5034
5033 5035 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5034 5036 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5035 5037
5036 5038 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5037 5039 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5038 5040
5039 5041 # task
5040 5042 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5041 5043 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5042 5044 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5043 5045 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5044 5046
5045 5047 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5046 5048 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5047 5049
5048 5050 @hybrid_property
5049 5051 def schedule_type(self):
5050 5052 return self._schedule_type
5051 5053
5052 5054 @schedule_type.setter
5053 5055 def schedule_type(self, val):
5054 5056 if val not in self.schedule_types:
5055 5057 raise ValueError('Value must be on of `{}` and got `{}`'.format(
5056 5058 val, self.schedule_type))
5057 5059
5058 5060 self._schedule_type = val
5059 5061
5060 5062 @classmethod
5061 5063 def get_uid(cls, obj):
5062 5064 args = obj.task_args
5063 5065 kwargs = obj.task_kwargs
5064 5066 if isinstance(args, JsonRaw):
5065 5067 try:
5066 5068 args = json.loads(args)
5067 5069 except ValueError:
5068 5070 args = tuple()
5069 5071
5070 5072 if isinstance(kwargs, JsonRaw):
5071 5073 try:
5072 5074 kwargs = json.loads(kwargs)
5073 5075 except ValueError:
5074 5076 kwargs = dict()
5075 5077
5076 5078 dot_notation = obj.task_dot_notation
5077 5079 val = '.'.join(map(safe_str, [
5078 5080 sorted(dot_notation), args, sorted(kwargs.items())]))
5079 5081 return hashlib.sha1(val).hexdigest()
5080 5082
5081 5083 @classmethod
5082 5084 def get_by_schedule_name(cls, schedule_name):
5083 5085 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5084 5086
5085 5087 @classmethod
5086 5088 def get_by_schedule_id(cls, schedule_id):
5087 5089 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5088 5090
5089 5091 @property
5090 5092 def task(self):
5091 5093 return self.task_dot_notation
5092 5094
5093 5095 @property
5094 5096 def schedule(self):
5095 5097 from rhodecode.lib.celerylib.utils import raw_2_schedule
5096 5098 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5097 5099 return schedule
5098 5100
5099 5101 @property
5100 5102 def args(self):
5101 5103 try:
5102 5104 return list(self.task_args or [])
5103 5105 except ValueError:
5104 5106 return list()
5105 5107
5106 5108 @property
5107 5109 def kwargs(self):
5108 5110 try:
5109 5111 return dict(self.task_kwargs or {})
5110 5112 except ValueError:
5111 5113 return dict()
5112 5114
5113 5115 def _as_raw(self, val):
5114 5116 if hasattr(val, 'de_coerce'):
5115 5117 val = val.de_coerce()
5116 5118 if val:
5117 5119 val = json.dumps(val)
5118 5120
5119 5121 return val
5120 5122
    @property
    def schedule_definition_raw(self):
        # JSON-serialized schedule definition, for UI/API display
        return self._as_raw(self.schedule_definition)
5124 5126
    @property
    def args_raw(self):
        # JSON-serialized positional args, for UI/API display
        return self._as_raw(self.task_args)
5128 5130
    @property
    def kwargs_raw(self):
        # JSON-serialized keyword args, for UI/API display
        return self._as_raw(self.task_kwargs)
5132 5134
    def __repr__(self):
        # e.g. <DB:ScheduleEntry(42:my-schedule)>
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
5136 5138
5137 5139
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with task/args/kwargs whenever a row is updated
    target.task_uid = ScheduleEntry.get_uid(target)
5141 5143
5142 5144
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the initial task_uid when a row is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
5146 5148
5147 5149
class _BaseBranchPerms(BaseModel):
    """Shared behaviour for branch-permission rules: glob pattern storage,
    pattern hashing for indexed lookups, and branch-name matching."""

    @classmethod
    def compute_hash(cls, value):
        """Stable sha1 of a branch pattern, kept alongside for fast lookup."""
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # unset pattern means "match everything"
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises if the glob translates into a broken regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # keep the lookup hash in sync with the stored pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """
        if not branch:
            # no branch information -> the rule applies
            return True
        branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
        return bool(branch_regex.search(branch))
5186 5188
5187 5189
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Branch-permission rule binding a single *user*'s repo permission entry
    to a branch glob pattern (matching logic lives in _BaseBranchPerms).
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # the per-user repo permission row this branch rule refines
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # lower order evaluated first — presumably; confirm against rule engine
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5212 5214
5213 5215
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Branch-permission rule binding a *user group*'s repo permission entry
    to a branch glob pattern (matching logic lives in _BaseBranchPerms).
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # the per-user-group repo permission row this branch rule refines
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # bugfix: the repr previously said 'UserBranchPermission' (copy-paste
        # from the user-level rule class), misidentifying instances in logs
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5238 5240
5239 5241
class UserBookmark(Base, BaseModel):
    """
    Per-user quick-access bookmark pointing at either a repository or a
    repository group, with a fixed display position and optional redirect URL.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        # a user can bookmark a given repo/repo-group once, and each
        # position slot is unique per user
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # exactly one of these two targets is expected to be set — confirm callers
    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        """Return the bookmark at *position* for *user_id*, or None."""
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """Return all bookmarks of a user ordered by position.

        Eager-loads the target repo/repo-group; optionally served from the
        short SQL cache region.
        """
        bookmarks = cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
            )

        return bookmarks.all()

    def __unicode__(self):
        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5287 5289
5288 5290
class FileStore(Base, BaseModel):
    """
    Stored artifact/attachment record: file identity (uid, names, sha256,
    size), access counters, and optional ACL scoping to a user, user group,
    repository or repository group.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    # opaque storage key used to locate the file in the backing store
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    # original filename at upload time
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_by_store_uid(cls, file_store_uid):
        """Return the FileStore row with the given storage uid, or None."""
        return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """Build an (unpersisted) FileStore entry — caller adds/commits it."""

        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to a stored file.

        :param args: iterable of (section, key, value, value_type) tuples
        :raises ArtifactMetadataDuplicate: when a section/key pair already
            exists (checked up-front, and again via the unique constraint)
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            # silently no-op for unknown ids — presumably intentional; confirm
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # unique constraint is the authoritative duplicate guard
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        """Atomically increment the download counter and stamp access time."""
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        """API/JSON representation of this artifact."""
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5438 5440
5439 5441
class FileStoreMetadata(Base, BaseModel):
    """
    Typed key/value metadata attached to a FileStore artifact, grouped into
    sections. Values are stored as text and converted back via SETTINGS_TYPES;
    a ``.encrypted`` suffix on the value type adds transparent encryption.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        # hashes stand in for the (long) section/key texts in the uniqueness check
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # converters applied when reading values back from their text storage
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined')

    @classmethod
    def valid_value_type(cls, value):
        """Raise ArtifactMetadataBadValueType unless the base type is known."""
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        # keep the indexed hash column in sync with the section text
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        # keep the indexed hash column in sync with the key text
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        """Decrypt (when needed) and convert the stored text to its real type."""
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_unicode(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        # NOTE: set file_store_meta_value_type *before* the value, since the
        # type decides whether the value gets encrypted here
        val = safe_unicode(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        """API/JSON representation of a single metadata entry."""
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
5556 5558
5557 5559
class DbMigrateVersion(Base, BaseModel):
    """Bookkeeping table used by the sqlalchemy-migrate machinery to track
    the current schema version per migration repository."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5576 5578
5577 5579
class DbSession(Base, BaseModel):
    """Server-side HTTP session storage backed by the database; session
    payload is pickled into the ``data`` column."""
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
@@ -1,1933 +1,2067 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 from rhodecode.lib.vcs.nodes import FileNode
38 39 from rhodecode.translation import lazy_ugettext
39 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 41 from rhodecode.lib import audit_logger
41 42 from rhodecode.lib.compat import OrderedDict
42 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 44 from rhodecode.lib.markup_renderer import (
44 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 46 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe, AttributeDict, safe_int
46 47 from rhodecode.lib.vcs.backends.base import (
47 48 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
48 49 TargetRefMissing, SourceRefMissing)
49 50 from rhodecode.lib.vcs.conf import settings as vcs_settings
50 51 from rhodecode.lib.vcs.exceptions import (
51 52 CommitDoesNotExistError, EmptyRepositoryError)
52 53 from rhodecode.model import BaseModel
53 54 from rhodecode.model.changeset_status import ChangesetStatusModel
54 55 from rhodecode.model.comment import CommentsModel
55 56 from rhodecode.model.db import (
56 57 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
57 58 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
58 59 from rhodecode.model.meta import Session
59 60 from rhodecode.model.notification import NotificationModel, \
60 61 EmailNotificationModel
61 62 from rhodecode.model.scm import ScmModel
62 63 from rhodecode.model.settings import VcsSettingsModel
63 64
64 65
65 66 log = logging.getLogger(__name__)
66 67
67 68
68 69 # Data structure to hold the response data when updating commits during a pull
69 70 # request update.
class UpdateResponse(object):
    """Immutable-by-convention value object describing the outcome of a
    pull-request commits update (what ran, why, and what changed)."""

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # outcome flags
        self.executed = executed
        self.reason = reason
        self.source_changed = source_changed
        self.target_changed = target_changed
        # commit references before/after the update
        self.new = new
        self.old = old
        self.common_ancestor_id = common_ancestor_id
        # note: exposed under a shorter attribute name than the parameter
        self.changes = commit_changes
83 84
84 85
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: repo object the changes come from
    :param source_ref: commit id of the source side of the diff
    :param target_repo: repo object the changes would be merged into
    :param target_ref: commit id of the target side of the diff
    :param get_authors: also annotate changed lines to count their original
        authors (expensive — runs annotation per changed file)
    :param get_commit_authors: collect distinct RhodeCode users matching the
        incoming commits' author strings
    :return: dict with commits, per-file stats, ancestor id, and author info
    :raises ValueError: when no common ancestor exists between the refs
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    # collect per-file stats and the old line numbers touched in each file
    all_files = []
    all_files_changes = []
    changed_lines = {}
    stats = [0, 0]  # [added, deleted] across all files
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                # only deleted/modified lines have a meaningful old lineno
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        commits = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author"])

        for commit in commits:
            user = User.get_from_cs_author(commit.author)
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        # annotate each changed file at the ancestor to find who last
        # touched the lines this diff modifies
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():
            try:
                node = target_commit.get_node(fname)
            except Exception:
                # file may not exist at the ancestor (e.g. newly added)
                continue

            if not isinstance(node, FileNode):
                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    # lazily resolve and cache annotated commits
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user] = user_counts.get(user, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
204
205
85 206 class PullRequestModel(BaseModel):
86 207
87 208 cls = PullRequest
88 209
89 210 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
90 211
91 212 UPDATE_STATUS_MESSAGES = {
92 213 UpdateFailureReason.NONE: lazy_ugettext(
93 214 'Pull request update successful.'),
94 215 UpdateFailureReason.UNKNOWN: lazy_ugettext(
95 216 'Pull request update failed because of an unknown error.'),
96 217 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
97 218 'No update needed because the source and target have not changed.'),
98 219 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
99 220 'Pull request cannot be updated because the reference type is '
100 221 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
101 222 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 223 'This pull request cannot be updated because the target '
103 224 'reference is missing.'),
104 225 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 226 'This pull request cannot be updated because the source '
106 227 'reference is missing.'),
107 228 }
108 229 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
109 230 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
110 231
    def __get_pull_request(self, pull_request):
        # accepts an id, a PullRequest or a PullRequestVersion instance
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
114 235
115 236 def _check_perms(self, perms, pull_request, user, api=False):
116 237 if not api:
117 238 return h.HasRepoPermissionAny(*perms)(
118 239 user=user, repo_name=pull_request.target_repo.repo_name)
119 240 else:
120 241 return h.HasRepoPermissionAnyApi(*perms)(
121 242 user=user, repo_name=pull_request.target_repo.repo_name)
122 243
    def check_user_read(self, pull_request, user, api=False):
        # read access == any repository permission on the target repo
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        return self._check_perms(_perms, pull_request, user, api)
126 247
    def check_user_merge(self, pull_request, user, api=False):
        # merging requires write (or admin) level access on the target repo
        _perms = ('repository.admin', 'repository.write', 'hg.admin',)
        return self._check_perms(_perms, pull_request, user, api)
130 251
    def check_user_update(self, pull_request, user, api=False):
        # the PR owner may always update; otherwise merge-level access needed
        owner = user.user_id == pull_request.user_id
        return self.check_user_merge(pull_request, user, api) or owner
134 255
    def check_user_delete(self, pull_request, user):
        # deletion is limited to the PR owner or repository admins
        owner = user.user_id == pull_request.user_id
        _perms = ('repository.admin',)
        return self._check_perms(_perms, pull_request, user) or owner
139 260
140 261 def check_user_change_status(self, pull_request, user, api=False):
141 262 reviewer = user.user_id in [x.user_id for x in
142 263 pull_request.reviewers]
143 264 return self.check_user_update(pull_request, user, api) or reviewer
144 265
    def check_user_comment(self, pull_request, user):
        # commenting is allowed for the PR owner or anyone with read access
        owner = user.user_id == pull_request.user_id
        return self.check_user_read(pull_request, user) or owner
148 269
    def get(self, pull_request):
        """Fetch a PullRequest (or PullRequestVersion) by id or instance."""
        return self.__get_pull_request(pull_request)
151 272
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build (without executing) the PullRequest query shared by the
        listing/counting helpers.

        :param repo_name: target or source repo; empty means all repos
        :param search_q: free-text filter on id, author username, title,
            description (ILIKE substring match)
        :param source: when True, filter on source repo instead of target
        :param statuses: list of pull request statuses
        :param opened_by: list of author user ids
        :param order_by: one of name_raw/id/title/updated_on_raw/target_repo
        :param order_dir: 'asc' or 'desc'
        :param only_created: restrict to PRs still in "created" state
        :return: unexecuted SQLAlchemy query
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
203 324
204 325 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
205 326 opened_by=None):
206 327 """
207 328 Count the number of pull requests for a specific repository.
208 329
209 330 :param repo_name: target or source repo
210 331 :param search_q: filter by text
211 332 :param source: boolean flag to specify if repo_name refers to source
212 333 :param statuses: list of pull request statuses
213 334 :param opened_by: author user of the pull request
214 335 :returns: int number of pull requests
215 336 """
216 337 q = self._prepare_get_all_query(
217 338 repo_name, search_q=search_q, source=source, statuses=statuses,
218 339 opened_by=opened_by)
219 340
220 341 return q.count()
221 342
222 343 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
223 344 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
224 345 """
225 346 Get all pull requests for a specific repository.
226 347
227 348 :param repo_name: target or source repo
228 349 :param search_q: filter by text
229 350 :param source: boolean flag to specify if repo_name refers to source
230 351 :param statuses: list of pull request statuses
231 352 :param opened_by: author user of the pull request
232 353 :param offset: pagination offset
233 354 :param length: length of returned list
234 355 :param order_by: order of the returned list
235 356 :param order_dir: 'asc' or 'desc' ordering direction
236 357 :returns: list of pull requests
237 358 """
238 359 q = self._prepare_get_all_query(
239 360 repo_name, search_q=search_q, source=source, statuses=statuses,
240 361 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
241 362
242 363 if length:
243 364 pull_requests = q.limit(length).offset(offset).all()
244 365 else:
245 366 pull_requests = q.all()
246 367
247 368 return pull_requests
248 369
249 370 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
250 371 opened_by=None):
251 372 """
252 373 Count the number of pull requests for a specific repository that are
253 374 awaiting review.
254 375
255 376 :param repo_name: target or source repo
256 377 :param search_q: filter by text
257 378 :param source: boolean flag to specify if repo_name refers to source
258 379 :param statuses: list of pull request statuses
259 380 :param opened_by: author user of the pull request
260 381 :returns: int number of pull requests
261 382 """
262 383 pull_requests = self.get_awaiting_review(
263 384 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
264 385
265 386 return len(pull_requests)
266 387
267 388 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
268 389 opened_by=None, offset=0, length=None,
269 390 order_by=None, order_dir='desc'):
270 391 """
271 392 Get all pull requests for a specific repository that are awaiting
272 393 review.
273 394
274 395 :param repo_name: target or source repo
275 396 :param search_q: filter by text
276 397 :param source: boolean flag to specify if repo_name refers to source
277 398 :param statuses: list of pull request statuses
278 399 :param opened_by: author user of the pull request
279 400 :param offset: pagination offset
280 401 :param length: length of returned list
281 402 :param order_by: order of the returned list
282 403 :param order_dir: 'asc' or 'desc' ordering direction
283 404 :returns: list of pull requests
284 405 """
285 406 pull_requests = self.get_all(
286 407 repo_name, search_q=search_q, source=source, statuses=statuses,
287 408 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
288 409
289 410 _filtered_pull_requests = []
290 411 for pr in pull_requests:
291 412 status = pr.calculated_review_status()
292 413 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
293 414 ChangesetStatus.STATUS_UNDER_REVIEW]:
294 415 _filtered_pull_requests.append(pr)
295 416 if length:
296 417 return _filtered_pull_requests[offset:offset+length]
297 418 else:
298 419 return _filtered_pull_requests
299 420
300 421 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
301 422 opened_by=None, user_id=None):
302 423 """
303 424 Count the number of pull requests for a specific repository that are
304 425 awaiting review from a specific user.
305 426
306 427 :param repo_name: target or source repo
307 428 :param search_q: filter by text
308 429 :param source: boolean flag to specify if repo_name refers to source
309 430 :param statuses: list of pull request statuses
310 431 :param opened_by: author user of the pull request
311 432 :param user_id: reviewer user of the pull request
312 433 :returns: int number of pull requests
313 434 """
314 435 pull_requests = self.get_awaiting_my_review(
315 436 repo_name, search_q=search_q, source=source, statuses=statuses,
316 437 opened_by=opened_by, user_id=user_id)
317 438
318 439 return len(pull_requests)
319 440
320 441 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
321 442 opened_by=None, user_id=None, offset=0,
322 443 length=None, order_by=None, order_dir='desc'):
323 444 """
324 445 Get all pull requests for a specific repository that are awaiting
325 446 review from a specific user.
326 447
327 448 :param repo_name: target or source repo
328 449 :param search_q: filter by text
329 450 :param source: boolean flag to specify if repo_name refers to source
330 451 :param statuses: list of pull request statuses
331 452 :param opened_by: author user of the pull request
332 453 :param user_id: reviewer user of the pull request
333 454 :param offset: pagination offset
334 455 :param length: length of returned list
335 456 :param order_by: order of the returned list
336 457 :param order_dir: 'asc' or 'desc' ordering direction
337 458 :returns: list of pull requests
338 459 """
339 460 pull_requests = self.get_all(
340 461 repo_name, search_q=search_q, source=source, statuses=statuses,
341 462 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
342 463
343 464 _my = PullRequestModel().get_not_reviewed(user_id)
344 465 my_participation = []
345 466 for pr in pull_requests:
346 467 if pr in _my:
347 468 my_participation.append(pr)
348 469 _filtered_pull_requests = my_participation
349 470 if length:
350 471 return _filtered_pull_requests[offset:offset+length]
351 472 else:
352 473 return _filtered_pull_requests
353 474
354 475 def get_not_reviewed(self, user_id):
355 476 return [
356 477 x.pull_request for x in PullRequestReviewers.query().filter(
357 478 PullRequestReviewers.user_id == user_id).all()
358 479 ]
359 480
    def _prepare_participating_query(self, user_id=None, statuses=None, query='',
                                     order_by=None, order_dir='desc'):
        """
        Build (without executing) the query for pull requests the user
        authored or reviews.

        :param user_id: restrict to PRs authored by, or assigned for review
            to, this user; when falsy all PRs are considered
        :param statuses: restrict to these PR statuses
        :param query: free-text filter over id, author username, title and
            description (case-insensitive LIKE)
        :param order_by: one of the keys in the order map below
        :param order_dir: 'asc' or anything else for descending
        :returns: an unexecuted `Query` over `PullRequest`
        """
        q = PullRequest.query()
        if user_id:
            # PRs where the user is an assigned reviewer
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User)
            q = q.filter(or_(
                # pull_request_id is numeric; cast so ILIKE applies
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))
        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            # NOTE: unknown order_by keys raise KeyError here
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
399 520
400 521 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
401 522 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
402 523 return q.count()
403 524
404 525 def get_im_participating_in(
405 526 self, user_id=None, statuses=None, query='', offset=0,
406 527 length=None, order_by=None, order_dir='desc'):
407 528 """
408 529 Get all Pull requests that i'm participating in, or i have opened
409 530 """
410 531
411 532 q = self._prepare_participating_query(
412 533 user_id, statuses=statuses, query=query, order_by=order_by,
413 534 order_dir=order_dir)
414 535
415 536 if length:
416 537 pull_requests = q.limit(length).offset(offset).all()
417 538 else:
418 539 pull_requests = q.all()
419 540
420 541 return pull_requests
421 542
    def get_versions(self, pull_request):
        """
        Return the versions of *pull_request* sorted by version id
        ascending (oldest first).

        NOTE: the previous docstring claimed descending order, but the
        query explicitly orders by ``pull_request_version_id.asc()``.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
430 551
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request, optionally at a specific version.

        :param pull_request_id: id of the pull request
        :param version: None for the live PR, 'latest' for the live PR
            flagged as latest, or a `PullRequestVersion` id
        :returns: tuple of (original PR object, PR-or-version object,
            display wrapper, at_version marker — None, 'latest' or a
            version id)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' resolves to the live PR itself, only the marker differs
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete historical snapshot; 404 when it does not exist
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
453 574
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request with its reviewers, run an initial merge
        simulation and fire creation notifications/hooks.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (or repo name/id)
        :param source_ref: source ref string
        :param target_repo: target repository (or repo name/id)
        :param target_ref: target ref string
        :param revisions: list of commit ids included in the PR
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        :param title: PR title
        :param description: PR description
        :param common_ancestor_id: precomputed common ancestor commit id
        :param description_renderer: renderer used for the description
        :param reviewer_data: extra reviewer rule metadata stored on the PR
        :param translator: translation function; defaults to the request's
        :param auth_user: acting auth user; defaults to the creator
        :returns: the persisted `PullRequest`
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
        Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
564 688
565 689 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
566 690 pull_request = self.__get_pull_request(pull_request)
567 691 target_scm = pull_request.target_repo.scm_instance()
568 692 if action == 'create':
569 693 trigger_hook = hooks_utils.trigger_create_pull_request_hook
570 694 elif action == 'merge':
571 695 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
572 696 elif action == 'close':
573 697 trigger_hook = hooks_utils.trigger_close_pull_request_hook
574 698 elif action == 'review_status_change':
575 699 trigger_hook = hooks_utils.trigger_review_pull_request_hook
576 700 elif action == 'update':
577 701 trigger_hook = hooks_utils.trigger_update_pull_request_hook
578 702 elif action == 'comment':
579 703 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
580 704 else:
581 705 return
582 706
583 707 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
584 708 pull_request, action, trigger_hook)
585 709 trigger_hook(
586 710 username=user.username,
587 711 repo_name=pull_request.target_repo.repo_name,
588 712 repo_type=target_scm.alias,
589 713 pull_request=pull_request,
590 714 data=data)
591 715
592 716 def _get_commit_ids(self, pull_request):
593 717 """
594 718 Return the commit ids of the merged pull request.
595 719
596 720 This method is not dealing correctly yet with the lack of autoupdates
597 721 nor with the implicit target updates.
598 722 For example: if a commit in the source repo is already in the target it
599 723 will be reported anyways.
600 724 """
601 725 merge_rev = pull_request.merge_rev
602 726 if merge_rev is None:
603 727 raise ValueError('This pull request was not merged yet')
604 728
605 729 commit_ids = list(pull_request.revisions)
606 730 if merge_rev not in commit_ids:
607 731 commit_ids.append(merge_rev)
608 732
609 733 return commit_ids
610 734
611 735 def merge_repo(self, pull_request, user, extras):
612 736 log.debug("Merging pull request %s", pull_request.pull_request_id)
613 737 extras['user_agent'] = 'internal-merge'
614 738 merge_state = self._merge_pull_request(pull_request, user, extras)
615 739 if merge_state.executed:
616 740 log.debug("Merge was successful, updating the pull request comments.")
617 741 self._comment_and_close_pr(pull_request, user, merge_state)
618 742
619 743 self._log_audit_action(
620 744 'repo.pull_request.merge',
621 745 {'merge_state': merge_state.__dict__},
622 746 user, pull_request)
623 747
624 748 else:
625 749 log.warn("Merge failed, not updating the pull request.")
626 750 return merge_state
627 751
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS merge of the pull request into the target.

        :param pull_request: the `PullRequest` to merge
        :param user: user performing the merge (name/email go on the commit)
        :param extras: hook environment data passed to the callback daemon
        :param merge_msg: optional template overriding the configured
            merge-message template
        :returns: the merge state returned by the target VCS backend
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the (possibly overridden) template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the target ref points at the current commit
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
668 792
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge commit on the PR, add a
        closing comment, invalidate the target repo caches and fire the
        'merge' hook.

        :param merge_state: merge result holding the merge commit ref
        :param close_msg: optional text for the closing comment
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
690 814
691 815 def has_valid_update_type(self, pull_request):
692 816 source_ref_type = pull_request.source_ref_parts.type
693 817 return source_ref_type in self.REF_TYPES
694 818
    def get_flow_commits(self, pull_request):
        """
        Resolve the current tip commits of the source and target refs.

        Named refs (types in `REF_TYPES`) are resolved by name so the tip
        is always fresh; other refs are resolved by their stored commit id.

        :returns: tuple (source_commit, target_commit)
        :raises SourceRefMissing: when the source ref cannot be resolved
        :raises TargetRefMissing: when the target ref cannot be resolved
        """

        # source repo
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_id = pull_request.source_ref_parts.commit_id
        source_repo = pull_request.source_repo.scm_instance()

        try:
            if source_ref_type in self.REF_TYPES:
                source_commit = source_repo.get_commit(source_ref_name)
            else:
                source_commit = source_repo.get_commit(source_ref_id)
        except CommitDoesNotExistError:
            raise SourceRefMissing()

        # target repo
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_type = pull_request.target_ref_parts.type
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            if target_ref_type in self.REF_TYPES:
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            raise TargetRefMissing()

        return source_commit, target_commit
726 850
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update

        :returns: an `UpdateResponse`; `executed` is False with a failure
            reason when no update was performed
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # compare stored ref ids with the freshly resolved tips
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        # the common ancestor is computed from the target repo's side
        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # send update email to users
        try:
            self.notify_users(pull_request=pull_request, updating_user=updating_user,
                              ancestor_commit_id=ancestor_commit_id,
                              commit_changes=commit_changes,
                              file_changes=file_changes)
        except Exception:
            log.exception('Failed to send email notification to users')

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
892 1025
    def _create_version_from_snapshot(self, pull_request):
        """
        Persist a `PullRequestVersion` snapshot that copies the current
        state of *pull_request* (refs, revisions, merge state, reviewer
        data, common ancestor) so history is preserved across updates.

        :returns: the flushed `PullRequestVersion`
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # merge-state bookkeeping copied verbatim from the live PR
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.last_merge_metadata = pull_request.last_merge_metadata
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.common_ancestor_id = pull_request.common_ancestor_id
        version.pull_request = pull_request
        Session().add(version)
        Session().flush()

        return version
921 1055
def _generate_update_diffs(self, pull_request, pull_request_version):
    """
    Build prepared diff processors for the stored version and the current
    state of a pull request; used to summarize what an update changed.

    :return: (old_diff_data, new_diff_data) tuple of prepared
        `diffs.DiffProcessor` instances
    """
    diff_context = (
        self.DIFF_CONTEXT +
        CommentsModel.needed_extra_diff_context())
    hide_whitespace_changes = False

    def _prepared_diff(pr_or_version):
        # both PullRequest and PullRequestVersion expose the same
        # source/target ref attributes, so one helper serves both
        raw_diff = self._get_diff_from_pr_or_version(
            pr_or_version.source_repo,
            pr_or_version.source_ref_parts.commit_id,
            pr_or_version.target_ref_parts.commit_id,
            hide_whitespace_changes=hide_whitespace_changes,
            diff_context=diff_context)
        processor = diffs.DiffProcessor(raw_diff)
        processor.prepare()
        return processor

    old_diff_data = _prepared_diff(pull_request_version)
    new_diff_data = _prepared_diff(pull_request)
    return old_diff_data, new_diff_data
949 1083
def _link_comments_to_version(self, pull_request_version):
    """
    Link all unlinked comments of this pull request to the given version.

    :param pull_request_version: The `PullRequestVersion` to which
        the comments shall be linked.
    """
    pull_request = pull_request_version.pull_request
    unlinked_comments = ChangesetComment.query().filter(
        # TODO: johbo: Should we query for the repo at all here?
        # Pending decision on how comments of PRs are to be related
        # to either the source repo, the target repo or no repo at all.
        ChangesetComment.repo_id == pull_request.target_repo.repo_id,
        ChangesetComment.pull_request == pull_request,
        ChangesetComment.pull_request_version == None,
    ).order_by(ChangesetComment.comment_id.asc())

    # TODO: johbo: Find out why this breaks if it is done in a bulk
    # operation.
    new_version_id = pull_request_version.pull_request_version_id
    for comment in unlinked_comments:
        comment.pull_request_version_id = new_version_id
        Session().add(comment)
975 1109
def _calculate_commit_id_changes(self, old_ids, new_ids):
    """
    Compare two ordered lists of commit ids and classify the difference.

    The order of `new_ids` (resp. `old_ids` for removals) is preserved in
    the resulting lists so callers can render commits in repository order.

    :param old_ids: commit ids of the previous pull request version
    :param new_ids: commit ids after the update
    :return: ChangeTuple(added, common, removed, total)
    """
    # sets for the membership tests: O(n+m) instead of the previous
    # O(n*m) list scans; output order is unchanged
    old_lookup = set(old_ids)
    new_lookup = set(new_ids)
    added = [x for x in new_ids if x not in old_lookup]
    common = [x for x in new_ids if x in old_lookup]
    removed = [x for x in old_ids if x not in new_lookup]
    total = new_ids
    return ChangeTuple(added, common, removed, total)
982 1116
def _calculate_file_changes(self, old_diff_data, new_diff_data):
    """
    Compare two prepared DiffProcessor results and report which files
    were added, modified or removed between the old and the new diff.

    :return: FileChangeTuple(added, modified, removed)
    """
    # filename -> md5 of its raw diff, kept in old-diff order
    old_files = OrderedDict(
        (entry['filename'], md5_safe(entry['raw_diff']))
        for entry in old_diff_data.parsed_diff)

    added_files = []
    modified_files = []
    removed_files = []
    for entry in new_diff_data.parsed_diff:
        filename = entry['filename']
        new_hash = md5_safe(entry['raw_diff'])
        # pop, so files seen in the new diff never count as removed below
        old_hash = old_files.pop(filename, None)
        if not old_hash:
            # file is not present in old diff, we have to figure out from
            # the parsed diff operations whether it was ADDed or REMOVEd
            if diffs.DEL_FILENODE in entry['stats']['ops']:
                removed_files.append(filename)
            else:
                added_files.append(filename)
        elif new_hash != old_hash:
            modified_files.append(filename)

    # whatever is left in old_files was present before but vanished from
    # the new diff, hence it was removed
    removed_files.extend(old_files.keys())

    return FileChangeTuple(added_files, modified_files, removed_files)
1017 1151
def _render_update_message(self, ancestor_commit_id, changes, file_changes):
    """
    Render the automatic update comment with the RST renderer
    (DEFAULT_COMMENTS_RENDERER), so it always looks the same disregarding
    which default renderer the system is configured with.

    :param ancestor_commit_id: ancestor raw_id
    :param changes: changes named tuple
    :param file_changes: file changes named tuple
    """
    under_review_label = ChangesetStatus.get_status_lbl(
        ChangesetStatus.STATUS_UNDER_REVIEW)
    all_changed_files = (
        file_changes.added + file_changes.modified + file_changes.removed)

    return RstTemplateRenderer().render(
        'pull_request_update.mako',
        under_review_label=under_review_label,
        added_commits=changes.added,
        removed_commits=changes.removed,
        changed_files=all_changed_files,
        added_files=file_changes.added,
        modified_files=file_changes.modified,
        removed_files=file_changes.removed,
        ancestor_commit_id=ancestor_commit_id,
    )
1047 1181
def edit(self, pull_request, title, description, description_renderer, user):
    """
    Update title/description of a pull request and write an audit entry.

    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    old_data = pull_request.get_api_data(with_merge_state=False)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')
    if title:
        # empty titles are ignored, the old title stays
        pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    self._log_audit_action(
        'repo.pull_request.edit', {'old_data': old_data},
        user, pull_request)
1062 1196
def update_reviewers(self, pull_request, reviewer_data, user):
    """
    Update the reviewers in the pull request

    :param pull_request: the pr to update
    :param reviewer_data: list of tuples
        [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
    :param user: user performing the change, used for audit logs
    :return: (ids_to_add, ids_to_remove) sets of affected user ids
    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')

    reviewers = {}
    for user_id, reasons, mandatory, rules in reviewer_data:
        if isinstance(user_id, (int, compat.string_types)):
            user_id = self._get_user(user_id).user_id
        reviewers[user_id] = {
            'reasons': reasons, 'mandatory': mandatory}

    reviewers_ids = set(reviewers.keys())
    current_reviewers = PullRequestReviewers.query()\
        .filter(PullRequestReviewers.pull_request ==
                pull_request).all()
    current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

    ids_to_add = reviewers_ids.difference(current_reviewers_ids)
    ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

    log.debug("Adding %s reviewers", ids_to_add)
    log.debug("Removing %s reviewers", ids_to_remove)
    changed = False
    added_audit_reviewers = []
    removed_audit_reviewers = []

    for uid in ids_to_add:
        changed = True
        _usr = self._get_user(uid)
        reviewer = PullRequestReviewers()
        reviewer.user = _usr
        reviewer.pull_request = pull_request
        reviewer.reasons = reviewers[uid]['reasons']
        # NOTE(marcink): mandatory shouldn't be changed now
        # reviewer.mandatory = reviewers[uid]['reasons']
        Session().add(reviewer)
        added_audit_reviewers.append(reviewer.get_dict())

    for uid in ids_to_remove:
        changed = True
        # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
        # that prevents and fixes cases that we added the same reviewer twice.
        # this CAN happen due to the lack of DB checks
        # NOTE: renamed from `reviewers` to avoid shadowing the dict above
        stale_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.user_id == uid,
                    PullRequestReviewers.pull_request == pull_request)\
            .all()

        for obj in stale_reviewers:
            # BUGFIX: these entries were previously appended to
            # `added_audit_reviewers`, so removals were double-logged as
            # additions and the delete audit loop below never fired
            removed_audit_reviewers.append(obj.get_dict())
            Session().delete(obj)

    if changed:
        Session().expire_all()
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)

    # finally store audit logs
    for user_data in added_audit_reviewers:
        self._log_audit_action(
            'repo.pull_request.reviewer.add', {'data': user_data},
            user, pull_request)
    for user_data in removed_audit_reviewers:
        self._log_audit_action(
            'repo.pull_request.reviewer.delete', {'old_data': user_data},
            user, pull_request)

    self.notify_reviewers(pull_request, ids_to_add)
    return ids_to_add, ids_to_remove
1140 1274
def get_url(self, pull_request, request=None, permalink=False):
    """
    Return a URL for the pull request: a stable permalink when
    `permalink` is set, otherwise the regular show page.
    """
    request = request or get_current_request()

    if permalink:
        return request.route_url(
            'pull_requests_global',
            pull_request_id=pull_request.pull_request_id,)

    return request.route_url(
        'pullrequest_show',
        repo_name=safe_str(pull_request.target_repo.repo_name),
        pull_request_id=pull_request.pull_request_id,)
1153 1287
def get_shadow_clone_url(self, pull_request, request=None):
    """
    Returns qualified url pointing to the shadow repository. If this pull
    request is closed there is no shadow repository and ``None`` will be
    returned.
    """
    if pull_request.is_closed():
        return None

    pr_url = urllib.unquote(self.get_url(pull_request, request=request))
    return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1165 1299
def notify_reviewers(self, pull_request, reviewers_ids):
    """
    Create in-app notifications and emails informing the given reviewers
    that they were added to a pull request. No-op on an empty id list.
    """
    if not reviewers_ids:
        return

    log.debug('Notify following reviewers about pull-request %s', reviewers_ids)

    pull_request_obj = pull_request
    # get the current participants of this pull request
    recipients = reviewers_ids
    notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

    pr_source_repo = pull_request_obj.source_repo
    pr_target_repo = pull_request_obj.target_repo

    # set some variables for email notification
    pr_url = h.route_url(
        'pullrequest_show',
        repo_name=pr_target_repo.repo_name,
        pull_request_id=pull_request_obj.pull_request_id,)
    pr_target_repo_url = h.route_url(
        'repo_summary', repo_name=pr_target_repo.repo_name)
    pr_source_repo_url = h.route_url(
        'repo_summary', repo_name=pr_source_repo.repo_name)

    # (commit_id, message) pairs rendered in the notification body
    pull_request_commits = [
        (commit.raw_id, commit.message)
        for commit in map(pr_source_repo.get_commit, pull_request.revisions)]

    kwargs = {
        'user': pull_request.author,
        'pull_request': pull_request_obj,
        'pull_request_commits': pull_request_commits,

        'pull_request_target_repo': pr_target_repo,
        'pull_request_target_repo_url': pr_target_repo_url,

        'pull_request_source_repo': pr_source_repo,
        'pull_request_source_repo_url': pr_source_repo_url,

        'pull_request_url': pr_url,
    }

    # pre-generate the subject for notification itself
    (subject, _h, _e,  # we don't care about those
     body_plaintext) = EmailNotificationModel().render_email(
        notification_type, **kwargs)

    # create notification objects, and emails
    NotificationModel().create(
        created_by=pull_request.author,
        notification_subject=subject,
        notification_body=body_plaintext,
        notification_type=notification_type,
        recipients=recipients,
        email_kwargs=kwargs,
    )
1226 1360
def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                 commit_changes, file_changes):
    """
    Notify all reviewers — except the user who performed the update —
    that the pull request changed, via in-app notification and email.
    """
    updating_user_id = updating_user.user_id
    reviewer_ids = set([x.user.user_id for x in pull_request.reviewers])
    # NOTE(marcink): send notification to all other users except to
    # person who updated the PR
    recipients = reviewer_ids.difference(set([updating_user_id]))

    log.debug('Notify following recipients about pull-request update %s', recipients)

    pull_request_obj = pull_request

    # send email about the update
    changed_files = (
        file_changes.added + file_changes.modified + file_changes.removed)

    pr_source_repo = pull_request_obj.source_repo
    pr_target_repo = pull_request_obj.target_repo

    # set some variables for email notification
    pr_url = h.route_url(
        'pullrequest_show',
        repo_name=pr_target_repo.repo_name,
        pull_request_id=pull_request_obj.pull_request_id,)
    pr_target_repo_url = h.route_url(
        'repo_summary', repo_name=pr_target_repo.repo_name)
    pr_source_repo_url = h.route_url(
        'repo_summary', repo_name=pr_source_repo.repo_name)

    email_kwargs = {
        'date': datetime.datetime.now(),
        'updating_user': updating_user,

        'pull_request': pull_request_obj,

        'pull_request_target_repo': pr_target_repo,
        'pull_request_target_repo_url': pr_target_repo_url,

        'pull_request_source_repo': pr_source_repo,
        'pull_request_source_repo_url': pr_source_repo_url,

        'pull_request_url': pr_url,

        'ancestor_commit_id': ancestor_commit_id,
        'added_commits': commit_changes.added,
        'removed_commits': commit_changes.removed,
        'changed_files': changed_files,
        'added_files': file_changes.added,
        'modified_files': file_changes.modified,
        'removed_files': file_changes.removed,
    }

    (subject, _h, _e,  # we don't care about those
     body_plaintext) = EmailNotificationModel().render_email(
        EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)

    # create notification objects, and emails
    NotificationModel().create(
        created_by=updating_user,
        notification_subject=subject,
        notification_body=body_plaintext,
        notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
        recipients=recipients,
        email_kwargs=email_kwargs,
    )
1295 1429
def delete(self, pull_request, user):
    """
    Remove a pull request: clean up its shadow merge workspace, write an
    audit entry, then delete the DB record.
    """
    pull_request = self.__get_pull_request(pull_request)
    api_snapshot = pull_request.get_api_data(with_merge_state=False)
    self._cleanup_merge_workspace(pull_request)
    self._log_audit_action(
        'repo.pull_request.delete', {'old_data': api_snapshot},
        user, pull_request)
    Session().delete(pull_request)
1304 1438
def close_pull_request(self, pull_request, user):
    """
    Mark a pull request as closed: drop its merge workspace, flip the
    status, fire the 'close' hook and store an audit entry.
    """
    pull_request = self.__get_pull_request(pull_request)
    self._cleanup_merge_workspace(pull_request)
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

    pr_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1316 1450
def close_pull_request_with_comment(
        self, pull_request, user, repo, message=None, auth_user=None):
    """
    Close a pull request together with a status-changing comment.

    The recorded status is APPROVED when the calculated review status is
    approved (voting consent), REJECTED otherwise.

    :return: (comment, status) tuple
    """
    review_status = pull_request.calculated_review_status()

    if review_status == ChangesetStatus.STATUS_APPROVED:
        # approved only if we have voting consent
        status = ChangesetStatus.STATUS_APPROVED
    else:
        status = ChangesetStatus.STATUS_REJECTED
    status_lbl = ChangesetStatus.get_status_lbl(status)

    default_message = (
        'Closing with status change {transition_icon} {status}.'
    ).format(transition_icon='>', status=status_lbl)
    text = message or default_message

    # create a comment, and link it to new status
    comment = CommentsModel().create(
        text=text,
        repo=repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        status_change=status_lbl,
        status_change_type=status,
        closing_pr=True,
        auth_user=auth_user,
    )

    # remember the review status before our own vote may change it
    old_calculated_status = pull_request.calculated_review_status()
    ChangesetStatusModel().set_status(
        repo.repo_id,
        status,
        user.user_id,
        comment=comment,
        pull_request=pull_request.pull_request_id
    )

    Session().flush()

    self.trigger_pull_request_hook(
        pull_request, user, 'comment', data={'comment': comment})

    # we now calculate the status of pull request again, and based on that
    # calculation trigger status change. This might happen in cases
    # that non-reviewer admin closes a pr, which means his vote doesn't
    # change the status, while if he's a reviewer this might change it.
    calculated_status = pull_request.calculated_review_status()
    if old_calculated_status != calculated_status:
        self.trigger_pull_request_hook(
            pull_request, user, 'review_status_change',
            data={'status': calculated_status})

    # finally close the PR
    PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

    return comment, status
1374 1508
def merge_status(self, pull_request, translator=None,
                 force_shadow_repo_refresh=False):
    """
    Answer whether the pull request can be merged server-side.

    :return: (merge_response, possible, message) tuple; merge_response is
        None when merging cannot even be attempted
    """
    _ = translator or get_current_request().translate

    # guard clauses: merging disabled, PR closed, incompatible repos
    if not self._is_merge_enabled(pull_request):
        return None, False, _('Server-side pull request merging is disabled.')

    if pull_request.is_closed():
        return None, False, _('This pull request is closed.')

    merge_possible, msg = self._check_repo_requirements(
        target=pull_request.target_repo, source=pull_request.source_repo,
        translator=_)
    if not merge_possible:
        return None, merge_possible, msg

    try:
        resp = self._try_merge(
            pull_request,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        log.debug("Merge response: %s", resp)
        return resp, resp.possible, resp.merge_status_message
    except NotImplementedError:
        return None, False, _('Pull request merging is not supported.')
1397 1531
def _check_repo_requirements(self, target, source, translator):
    """
    Check if `target` and `source` have compatible requirements.

    Currently this is just checking for largefiles.

    :return: (merge_possible, message) tuple
    """
    _ = translator
    target_has_largefiles = self._has_largefiles(target)
    source_has_largefiles = self._has_largefiles(source)

    if target_has_largefiles == source_has_largefiles:
        # both enabled or both disabled - compatible
        return True, u''

    # exactly one side has largefiles enabled
    if source_has_largefiles:
        message = _('Target repository large files support is disabled.')
    else:
        message = _('Source repository large files support is disabled.')
    return False, message
1420 1554
def _has_largefiles(self, repo):
    """Truthy when the repo has the largefiles extension active."""
    ui_settings = VcsSettingsModel(repo=repo).get_ui_settings(
        'extensions', 'largefiles')
    return ui_settings and ui_settings[0].active
1425 1559
def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
    """
    Try to merge the pull request and return the merge status.

    Runs a dry-run merge in the shadow repository when the cached merge
    state is stale (or a refresh is forced); otherwise the answer comes
    from the values cached on the pull request.
    """
    log.debug(
        "Trying out if the pull request %s can be merged. Force_refresh=%s",
        pull_request.pull_request_id, force_shadow_repo_refresh)
    target_vcs = pull_request.target_repo.scm_instance()

    # Refresh the target reference.
    try:
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)
    except CommitDoesNotExistError:
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF,
            metadata={'target_ref': pull_request.target_ref_parts})

    target_locked = pull_request.target_repo.locked
    if target_locked and target_locked[0]:
        locked_by = 'user:{}'.format(target_locked[0])
        log.debug("The target repository is locked by %s.", locked_by)
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
            metadata={'locked_by': locked_by})

    if force_shadow_repo_refresh or self._needs_merge_state_refresh(
            pull_request, target_ref):
        log.debug("Refreshing the merge status of the repository.")
        return self._refresh_merge_state(
            pull_request, target_vcs, target_ref)

    # cached state is still valid - answer from it
    possible = pull_request.last_merge_status == MergeFailureReason.NONE
    metadata = {
        'unresolved_files': '',
        'target_ref': pull_request.target_ref_parts,
        'source_ref': pull_request.source_ref_parts,
    }
    if pull_request.last_merge_metadata:
        metadata.update(pull_request.last_merge_metadata)

    if not possible and target_ref.type == 'branch':
        # NOTE(marcink): case for mercurial multiple heads on branch
        heads = target_vcs._heads(target_ref.name)
        if len(heads) != 1:
            heads = '\n,'.join(target_vcs._heads(target_ref.name))
            metadata.update({
                'heads': heads
            })

    return MergeResponse(
        possible, False, None, pull_request.last_merge_status,
        metadata=metadata)
1479 1613
def _refresh_reference(self, reference, vcs_repository):
    """
    Re-resolve `reference` against the vcs repository and return a
    Reference carrying a fresh commit id (symbolic refs may have moved).
    """
    if reference.type in self.UPDATABLE_REF_TYPES:
        # branches/bookmarks: look up by name to catch new tips
        lookup = reference.name
    else:
        lookup = reference.commit_id

    fresh_commit = vcs_repository.get_commit(lookup)
    return Reference(reference.type, reference.name, fresh_commit.raw_id)
1490 1624
def _needs_merge_state_refresh(self, pull_request, target_reference):
    """
    True when the cached merge state no longer matches the current
    source tip commit and target reference.
    """
    cache_valid = (
        pull_request.revisions and
        pull_request.revisions[0] == pull_request._last_merge_source_rev and
        target_reference.commit_id == pull_request._last_merge_target_rev)
    return not cache_valid
1496 1630
def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
    """
    Run a dry-run merge in the shadow workspace and cache the outcome
    on the pull request (unless the merge failed for an unknown reason).
    """
    workspace_id = self._workspace_id(pull_request)
    source_vcs = pull_request.source_repo.scm_instance()
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)

    merge_state = target_vcs.merge(
        repo_id, workspace_id,
        target_reference, source_vcs, pull_request.source_ref_parts,
        dry_run=True, use_rebase=use_rebase,
        close_branch=close_branch)

    # Do not store the response if there was an unknown error.
    if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
        pull_request._last_merge_source_rev = \
            pull_request.source_ref_parts.commit_id
        pull_request._last_merge_target_rev = target_reference.commit_id
        pull_request.last_merge_status = merge_state.failure_reason
        pull_request.last_merge_metadata = merge_state.metadata

        pull_request.shadow_merge_ref = merge_state.merge_ref
        Session().add(pull_request)
        Session().commit()

    return merge_state
1522 1656
def _workspace_id(self, pull_request):
    """Return the shadow-merge workspace name for this pull request."""
    return 'pr-%s' % pull_request.pull_request_id
1526 1660
def generate_repo_data(self, repo, commit_id=None, branch=None,
                       bookmark=None, translator=None):
    """
    Build the repo description structure used by the pull request
    source/target selectors, including select2-ready ref groups.
    """
    from rhodecode.model.repo import RepoModel

    all_refs, selected_ref = self._get_repo_pullrequest_sources(
        repo.scm_instance(), commit_id=commit_id,
        branch=branch, bookmark=bookmark, translator=translator)

    # convert (refs, group_name) pairs into the select2 widget format
    refs_select2 = [
        {'text': group_name,
         'children': [{'id': ref_id, 'text': ref_text}
                      for ref_id, ref_text in group_refs]}
        for group_refs, group_name in all_refs]

    owner = repo.user
    return {
        'user': {
            'user_id': owner.user_id,
            'username': owner.username,
            'firstname': owner.first_name,
            'lastname': owner.last_name,
            'gravatar_link': h.gravatar_url(owner.email, 14),
        },
        'name': repo.repo_name,
        'link': RepoModel().get_url(repo),
        'description': h.chop_at_smart(repo.description_safe, '\n'),
        'refs': {
            'all_refs': all_refs,
            'selected_ref': selected_ref,
            'select2_refs': refs_select2
        }
    }
1558 1692
def generate_pullrequest_title(self, source, source_ref, target):
    """
    Return the default pull request title: ``<source>#<ref> to <target>``.
    """
    return u'{source}#{at_ref} to {target}'.format(
        source=source, at_ref=source_ref, target=target)
1565 1699
def _cleanup_merge_workspace(self, pull_request):
    """
    Best-effort removal of the shadow merge workspace; backends without
    workspace support (NotImplementedError) are silently skipped.
    """
    repo_id = pull_request.target_repo.repo_id
    target_scm = pull_request.target_repo.scm_instance()
    workspace_id = self._workspace_id(pull_request)

    try:
        target_scm.cleanup_merge_workspace(repo_id, workspace_id)
    except NotImplementedError:
        pass
1576 1710
def _get_repo_pullrequest_sources(
        self, repo, commit_id=None, branch=None, bookmark=None,
        translator=None):
    """
    Return a structure with repo's interesting commits, suitable for
    the selectors in pullrequest controller

    :param commit_id: a commit that must be in the list somehow
        and selected by default
    :param branch: a branch that must be in the list and selected
        by default - even if closed
    :param bookmark: a bookmark that must be in the list and selected
    """
    _ = translator or get_current_request().translate

    commit_id = safe_str(commit_id) if commit_id else None
    branch = safe_unicode(branch) if branch else None
    bookmark = safe_unicode(bookmark) if bookmark else None

    selected = None

    # order matters: first source that has commit_id in it will be selected
    sources = [
        ('book', repo.bookmarks.items(), _('Bookmarks'), bookmark),
        ('branch', repo.branches.items(), _('Branches'), branch),
    ]
    if commit_id:
        ref_commit = (h.short_id(commit_id), commit_id)
        sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
    sources.append(
        ('branch', repo.branches_closed.items(), _('Closed Branches'), branch))

    groups = []
    for group_key, ref_list, group_name, match in sources:
        group_refs = []
        for ref_name, ref_id in ref_list:
            ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
            group_refs.append((ref_key, ref_name))

            if not selected:
                # pick the first ref matching either the requested commit
                # id or the requested symbolic name
                if set([commit_id, match]) & set([ref_id, ref_name]):
                    selected = ref_key

        if group_refs:
            groups.append((group_refs, group_name))

    if not selected:
        ref = commit_id or branch or bookmark
        if ref:
            raise CommitDoesNotExistError(
                u'No commit refs could be found matching: {}'.format(ref))
        elif repo.DEFAULT_BRANCH_NAME in repo.branches:
            # nothing requested - fall back to the repo default branch
            selected = u'branch:{}:{}'.format(
                safe_unicode(repo.DEFAULT_BRANCH_NAME),
                safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
            )
        elif repo.commit_ids:
            # make the user select in this case
            selected = None
        else:
            raise EmptyRepositoryError()
    return groups, selected
1642 1776
def get_diff(self, source_repo, source_ref_id, target_ref_id,
             hide_whitespace_changes, diff_context):
    """Public wrapper around :meth:`_get_diff_from_pr_or_version`."""
    return self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id,
        hide_whitespace_changes=hide_whitespace_changes,
        diff_context=diff_context)
1649 1783
def _get_diff_from_pr_or_version(
        self, source_repo, source_ref_id, target_ref_id,
        hide_whitespace_changes, diff_context):
    """
    Produce the diff between `target_ref_id` and `source_ref_id`,
    computed inside the source repository.
    """
    target_commit = source_repo.get_commit(
        commit_id=safe_str(target_ref_id))
    source_commit = source_repo.get_commit(
        commit_id=safe_str(source_ref_id), maybe_unreachable=True)

    # accept either a DB Repository object or an scm instance directly
    if isinstance(source_repo, Repository):
        vcs_repo = source_repo.scm_instance()
    else:
        vcs_repo = source_repo

    # TODO: johbo: In the context of an update, we cannot reach
    # the old commit anymore with our normal mechanisms. It needs
    # some sort of special support in the vcs layer to avoid this
    # workaround.
    if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
            vcs_repo.alias == 'git'):
        source_commit.raw_id = safe_str(source_ref_id)

    log.debug('calculating diff between '
              'source_ref:%s and target_ref:%s for repo `%s`',
              target_ref_id, source_ref_id,
              safe_unicode(vcs_repo.path))

    return vcs_repo.get_diff(
        commit1=target_commit, commit2=source_commit,
        ignore_whitespace=hide_whitespace_changes, context=diff_context)
1680 1814
def _is_merge_enabled(self, pull_request):
    """Whether server-side merging is enabled for the target repo."""
    return self._get_general_setting(
        pull_request, 'rhodecode_pr_merge_enabled')
1684 1818
def _use_rebase_for_merging(self, pull_request):
    """Per-repo-type setting: merge via rebase instead of merge commit."""
    setting_by_repo_type = {
        'hg': 'rhodecode_hg_use_rebase_for_merging',
        'git': 'rhodecode_git_use_rebase_for_merging',
    }
    setting_name = setting_by_repo_type.get(
        pull_request.target_repo.repo_type)
    if setting_name is None:
        # unknown repo type - never rebase
        return False
    return self._get_general_setting(pull_request, setting_name)
1695 1829
def _user_name_for_merging(self, pull_request, user):
    """
    Resolve the user name recorded on merge commits. The user attribute
    used can be overridden via the RC_MERGE_USER_NAME_ATTR env variable,
    falling back to `short_contact`.
    """
    env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
    if env_user_name_attr and hasattr(user, env_user_name_attr):
        attr_name = env_user_name_attr
    else:
        attr_name = 'short_contact'
    return getattr(user, attr_name)
1705 1839
def _close_branch_before_merging(self, pull_request):
    """Per-repo-type setting: close the source branch when merging."""
    setting_by_repo_type = {
        'hg': 'rhodecode_hg_close_branch_before_merging',
        'git': 'rhodecode_git_close_branch_before_merging',
    }
    setting_name = setting_by_repo_type.get(
        pull_request.target_repo.repo_type)
    if setting_name is None:
        # unknown repo type - never close branches
        return False
    return self._get_general_setting(pull_request, setting_name)
1716 1850
1717 1851 def _get_general_setting(self, pull_request, settings_key, default=False):
1718 1852 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1719 1853 settings = settings_model.get_general_settings()
1720 1854 return settings.get(settings_key, default)
1721 1855
1722 1856 def _log_audit_action(self, action, action_data, user, pull_request):
1723 1857 audit_logger.store(
1724 1858 action=action,
1725 1859 action_data=action_data,
1726 1860 user=user,
1727 1861 repo=pull_request.target_repo)
1728 1862
1729 1863 def get_reviewer_functions(self):
1730 1864 """
1731 1865 Fetches functions for validation and fetching default reviewers.
1732 1866 If available we use the EE package, else we fallback to CE
1733 1867 package functions
1734 1868 """
1735 1869 try:
1736 1870 from rc_reviewers.utils import get_default_reviewers_data
1737 1871 from rc_reviewers.utils import validate_default_reviewers
1738 1872 except ImportError:
1739 1873 from rhodecode.apps.repository.utils import get_default_reviewers_data
1740 1874 from rhodecode.apps.repository.utils import validate_default_reviewers
1741 1875
1742 1876 return get_default_reviewers_data, validate_default_reviewers
1743 1877
1744 1878
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used in `error_details` to categorize individual failed checks
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the pull request, filled by validate()
        self.review_status = None
        # outcome of the simulated (shadow repo) merge
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        # stays None until the first failed check flips it to True
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # check-key -> dict(details=..., error_type=..., message=...)
        self.error_details = OrderedDict()
        # ref-change tracking for source/target, filled by validate()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """Record a single failed check and mark the whole check as failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for `pull_request` as `auth_user` and return a
        populated MergeCheck instance.

        Checks run in order: WIP title marker, merge permission, target
        branch permission, review status, unresolved TODOs, and finally the
        simulated merge itself. With `fail_early` the first failed check
        returns immediately; otherwise all checks run and accumulate.
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether the refs moved since the PR was created/updated
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # ref info is optional; a missing ref is already reflected in
            # the merge_status above
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return a dict of human-readable merge conditions (strategy and
        branch-closing behaviour) derived from the repo's VCS settings.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1927 2061
1928 2062
# commit-level change summary used when updating a pull request
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# file-level change summary used when updating a pull request
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,3060 +1,3060 b''
1 1 //Primary CSS
2 2
3 3 //--- IMPORTS ------------------//
4 4
5 5 @import 'helpers';
6 6 @import 'mixins';
7 7 @import 'rcicons';
8 8 @import 'variables';
9 9 @import 'bootstrap-variables';
10 10 @import 'form-bootstrap';
11 11 @import 'codemirror';
12 12 @import 'legacy_code_styles';
13 13 @import 'readme-box';
14 14 @import 'progress-bar';
15 15
16 16 @import 'type';
17 17 @import 'alerts';
18 18 @import 'buttons';
19 19 @import 'tags';
20 20 @import 'code-block';
21 21 @import 'examples';
22 22 @import 'login';
23 23 @import 'main-content';
24 24 @import 'select2';
25 25 @import 'comments';
26 26 @import 'panels-bootstrap';
27 27 @import 'panels';
28 28 @import 'deform';
29 29 @import 'tooltips';
30 30 @import 'sweetalert2';
31 31
32 32
33 33 //--- BASE ------------------//
34 34 .noscript-error {
35 35 top: 0;
36 36 left: 0;
37 37 width: 100%;
38 38 z-index: 101;
39 39 text-align: center;
40 40 font-size: 120%;
41 41 color: white;
42 42 background-color: @alert2;
43 43 padding: 5px 0 5px 0;
44 44 font-weight: @text-semibold-weight;
45 45 font-family: @text-semibold;
46 46 }
47 47
48 48 html {
49 49 display: table;
50 50 height: 100%;
51 51 width: 100%;
52 52 }
53 53
54 54 body {
55 55 display: table-cell;
56 56 width: 100%;
57 57 }
58 58
59 59 //--- LAYOUT ------------------//
60 60
61 61 .hidden{
62 62 display: none !important;
63 63 }
64 64
65 65 .box{
66 66 float: left;
67 67 width: 100%;
68 68 }
69 69
70 70 .browser-header {
71 71 clear: both;
72 72 }
73 73 .main {
74 74 clear: both;
75 75 padding:0 0 @pagepadding;
76 76 height: auto;
77 77
78 78 &:after { //clearfix
79 79 content:"";
80 80 clear:both;
81 81 width:100%;
82 82 display:block;
83 83 }
84 84 }
85 85
86 86 .action-link{
87 87 margin-left: @padding;
88 88 padding-left: @padding;
89 89 border-left: @border-thickness solid @border-default-color;
90 90 }
91 91
92 92 .cursor-pointer {
93 93 cursor: pointer;
94 94 }
95 95
96 96 input + .action-link, .action-link.first{
97 97 border-left: none;
98 98 }
99 99
100 100 .action-link.last{
101 101 margin-right: @padding;
102 102 padding-right: @padding;
103 103 }
104 104
105 105 .action-link.active,
106 106 .action-link.active a{
107 107 color: @grey4;
108 108 }
109 109
110 110 .action-link.disabled {
111 111 color: @grey4;
112 112 cursor: inherit;
113 113 }
114 114
115 115
116 116 .clipboard-action {
117 117 cursor: pointer;
118 118 margin-left: 5px;
119 119
120 120 &:not(.no-grey) {
121 121
122 122 &:hover {
123 123 color: @grey2;
124 124 }
125 125 color: @grey4;
126 126 }
127 127 }
128 128
129 129 ul.simple-list{
130 130 list-style: none;
131 131 margin: 0;
132 132 padding: 0;
133 133 }
134 134
135 135 .main-content {
136 136 padding-bottom: @pagepadding;
137 137 }
138 138
139 139 .wide-mode-wrapper {
140 140 max-width:4000px !important;
141 141 }
142 142
143 143 .wrapper {
144 144 position: relative;
145 145 max-width: @wrapper-maxwidth;
146 146 margin: 0 auto;
147 147 }
148 148
149 149 #content {
150 150 clear: both;
151 151 padding: 0 @contentpadding;
152 152 }
153 153
154 154 .advanced-settings-fields{
155 155 input{
156 156 margin-left: @textmargin;
157 157 margin-right: @padding/2;
158 158 }
159 159 }
160 160
161 161 .cs_files_title {
162 162 margin: @pagepadding 0 0;
163 163 }
164 164
165 165 input.inline[type="file"] {
166 166 display: inline;
167 167 }
168 168
169 169 .error_page {
170 170 margin: 10% auto;
171 171
172 172 h1 {
173 173 color: @grey2;
174 174 }
175 175
176 176 .alert {
177 177 margin: @padding 0;
178 178 }
179 179
180 180 .error-branding {
181 181 color: @grey4;
182 182 font-weight: @text-semibold-weight;
183 183 font-family: @text-semibold;
184 184 }
185 185
186 186 .error_message {
187 187 font-family: @text-regular;
188 188 }
189 189
190 190 .sidebar {
191 191 min-height: 275px;
192 192 margin: 0;
193 193 padding: 0 0 @sidebarpadding @sidebarpadding;
194 194 border: none;
195 195 }
196 196
197 197 .main-content {
198 198 position: relative;
199 199 margin: 0 @sidebarpadding @sidebarpadding;
200 200 padding: 0 0 0 @sidebarpadding;
201 201 border-left: @border-thickness solid @grey5;
202 202
203 203 @media (max-width:767px) {
204 204 clear: both;
205 205 width: 100%;
206 206 margin: 0;
207 207 border: none;
208 208 }
209 209 }
210 210
211 211 .inner-column {
212 212 float: left;
213 213 width: 29.75%;
214 214 min-height: 150px;
215 215 margin: @sidebarpadding 2% 0 0;
216 216 padding: 0 2% 0 0;
217 217 border-right: @border-thickness solid @grey5;
218 218
219 219 @media (max-width:767px) {
220 220 clear: both;
221 221 width: 100%;
222 222 border: none;
223 223 }
224 224
225 225 ul {
226 226 padding-left: 1.25em;
227 227 }
228 228
229 229 &:last-child {
230 230 margin: @sidebarpadding 0 0;
231 231 border: none;
232 232 }
233 233
234 234 h4 {
235 235 margin: 0 0 @padding;
236 236 font-weight: @text-semibold-weight;
237 237 font-family: @text-semibold;
238 238 }
239 239 }
240 240 }
241 241 .error-page-logo {
242 242 width: 130px;
243 243 height: 160px;
244 244 }
245 245
246 246 // HEADER
247 247 .header {
248 248
249 249 // TODO: johbo: Fix login pages, so that they work without a min-height
250 250 // for the header and then remove the min-height. I chose a smaller value
251 251 // intentionally here to avoid rendering issues in the main navigation.
252 252 min-height: 49px;
253 253 min-width: 1024px;
254 254
255 255 position: relative;
256 256 vertical-align: bottom;
257 257 padding: 0 @header-padding;
258 258 background-color: @grey1;
259 259 color: @grey5;
260 260
261 261 .title {
262 262 overflow: visible;
263 263 }
264 264
265 265 &:before,
266 266 &:after {
267 267 content: "";
268 268 clear: both;
269 269 width: 100%;
270 270 }
271 271
272 272 // TODO: johbo: Avoids breaking "Repositories" chooser
273 273 .select2-container .select2-choice .select2-arrow {
274 274 display: none;
275 275 }
276 276 }
277 277
278 278 #header-inner {
279 279 &.title {
280 280 margin: 0;
281 281 }
282 282 &:before,
283 283 &:after {
284 284 content: "";
285 285 clear: both;
286 286 }
287 287 }
288 288
289 289 // Gists
290 290 #files_data {
291 291 clear: both; //for firefox
292 292 padding-top: 10px;
293 293 }
294 294
295 295 #gistid {
296 296 margin-right: @padding;
297 297 }
298 298
299 299 // Global Settings Editor
300 300 .textarea.editor {
301 301 float: left;
302 302 position: relative;
303 303 max-width: @texteditor-width;
304 304
305 305 select {
306 306 position: absolute;
307 307 top:10px;
308 308 right:0;
309 309 }
310 310
311 311 .CodeMirror {
312 312 margin: 0;
313 313 }
314 314
315 315 .help-block {
316 316 margin: 0 0 @padding;
317 317 padding:.5em;
318 318 background-color: @grey6;
319 319 &.pre-formatting {
320 320 white-space: pre;
321 321 }
322 322 }
323 323 }
324 324
325 325 ul.auth_plugins {
326 326 margin: @padding 0 @padding @legend-width;
327 327 padding: 0;
328 328
329 329 li {
330 330 margin-bottom: @padding;
331 331 line-height: 1em;
332 332 list-style-type: none;
333 333
334 334 .auth_buttons .btn {
335 335 margin-right: @padding;
336 336 }
337 337
338 338 }
339 339 }
340 340
341 341
342 342 // My Account PR list
343 343
344 344 #show_closed {
345 345 margin: 0 1em 0 0;
346 346 }
347 347
348 348 #pull_request_list_table {
349 349 .closed {
350 350 background-color: @grey6;
351 351 }
352 352
353 353 .state-creating,
354 354 .state-updating,
355 355 .state-merging
356 356 {
357 357 background-color: @grey6;
358 358 }
359 359
360 360 .td-status {
361 361 padding-left: .5em;
362 362 }
363 363 .log-container .truncate {
364 364 height: 2.75em;
365 365 white-space: pre-line;
366 366 }
367 367 table.rctable .user {
368 368 padding-left: 0;
369 369 }
370 370 table.rctable {
371 371 td.td-description,
372 372 .rc-user {
373 373 min-width: auto;
374 374 }
375 375 }
376 376 }
377 377
378 378 // Pull Requests
379 379
380 380 .pullrequests_section_head {
381 381 display: block;
382 382 clear: both;
383 383 margin: @padding 0;
384 384 font-weight: @text-bold-weight;
385 385 font-family: @text-bold;
386 386 }
387 387
388 388 .pr-commit-flow {
389 389 position: relative;
390 390 font-weight: 600;
391 391
392 392 .tag {
393 393 display: inline-block;
394 394 margin: 0 1em .5em 0;
395 395 }
396 396
397 397 .clone-url {
398 398 display: inline-block;
399 399 margin: 0 0 .5em 0;
400 400 padding: 0;
401 401 line-height: 1.2em;
402 402 }
403 403 }
404 404
405 405 .pr-mergeinfo {
406 406 min-width: 95% !important;
407 407 padding: 0 !important;
408 408 border: 0;
409 409 }
410 410 .pr-mergeinfo-copy {
411 411 padding: 0 0;
412 412 }
413 413
414 414 .pr-pullinfo {
415 415 min-width: 95% !important;
416 416 padding: 0 !important;
417 417 border: 0;
418 418 }
419 419 .pr-pullinfo-copy {
420 420 padding: 0 0;
421 421 }
422 422
423 423 .pr-title-input {
424 424 width: 100%;
425 425 font-size: 18px;
426 426 margin: 0 0 4px 0;
427 427 padding: 0;
428 428 line-height: 1.7em;
429 429 color: @text-color;
430 430 letter-spacing: .02em;
431 431 font-weight: @text-bold-weight;
432 432 font-family: @text-bold;
433 433
434 434 &:hover {
435 435 box-shadow: none;
436 436 }
437 437 }
438 438
439 439 #pr-title {
440 440 input {
441 441 border: 1px transparent;
442 442 color: black;
443 443 opacity: 1;
444 444 background: #fff;
445 445 font-size: 18px;
446 446 }
447 447 }
448 448
449 449 .pr-title-closed-tag {
450 450 font-size: 16px;
451 451 }
452 452
453 453 #pr-desc {
454 454 padding: 10px 0;
455 455
456 456 .markdown-block {
457 457 padding: 0;
458 458 margin-bottom: -30px;
459 459 }
460 460 }
461 461
462 462 #pullrequest_title {
463 463 width: 100%;
464 464 box-sizing: border-box;
465 465 }
466 466
// open/closed pull-request status banner; `pre-line` keeps author-supplied
// line breaks in the message while still collapsing runs of spaces
#pr_open_message {
    border: @border-thickness solid #fff;
    border-radius: @border-radius;
    text-align: left;
    overflow: hidden;
    white-space: pre-line;
}
474 474
475 475 .pr-details-title {
476 476 height: 16px
477 477 }
478 478
479 479 .pr-details-title-author-pref {
480 480 padding-right: 10px
481 481 }
482 482
483 483 .label-pr-detail {
484 484 display: table-cell;
485 485 width: 120px;
486 486 padding-top: 7.5px;
487 487 padding-bottom: 7.5px;
488 488 padding-right: 7.5px;
489 489 }
490 490
491 491 .source-details ul {
492 492 padding: 10px 16px;
493 493 }
494 494
495 495 .source-details-action {
496 496 color: @grey4;
497 497 font-size: 11px
498 498 }
499 499
500 500 .pr-submit-button {
501 501 float: right;
502 502 margin: 0 0 0 5px;
503 503 }
504 504
505 505 .pr-spacing-container {
506 506 padding: 20px;
507 507 clear: both
508 508 }
509 509
510 510 #pr-description-input {
511 511 margin-bottom: 0;
512 512 }
513 513
514 514 .pr-description-label {
515 515 vertical-align: top;
516 516 }
517 517
518 518 #open_edit_pullrequest {
519 519 padding: 0;
520 520 }
521 521
522 522 #close_edit_pullrequest {
523 523
524 524 }
525 525
526 526 #delete_pullrequest {
527 527 clear: inherit;
528 528
529 529 form {
530 530 display: inline;
531 531 }
532 532
533 533 }
534 534
535 535 .perms_section_head {
536 536 min-width: 625px;
537 537
538 538 h2 {
539 539 margin-bottom: 0;
540 540 }
541 541
542 542 .label-checkbox {
543 543 float: left;
544 544 }
545 545
546 546 &.field {
547 547 margin: @space 0 @padding;
548 548 }
549 549
550 550 &:first-child.field {
551 551 margin-top: 0;
552 552
553 553 .label {
554 554 margin-top: 0;
555 555 padding-top: 0;
556 556 }
557 557
558 558 .radios {
559 559 padding-top: 0;
560 560 }
561 561 }
562 562
563 563 .radios {
564 564 position: relative;
565 565 width: 505px;
566 566 }
567 567 }
568 568
569 569 //--- MODULES ------------------//
570 570
571 571
572 572 // Server Announcement
573 573 #server-announcement {
574 574 width: 95%;
575 575 margin: @padding auto;
576 576 padding: @padding;
577 577 border-width: 2px;
578 578 border-style: solid;
579 579 .border-radius(2px);
580 580 font-weight: @text-bold-weight;
581 581 font-family: @text-bold;
582 582
583 583 &.info { border-color: @alert4; background-color: @alert4-inner; }
584 584 &.warning { border-color: @alert3; background-color: @alert3-inner; }
585 585 &.error { border-color: @alert2; background-color: @alert2-inner; }
586 586 &.success { border-color: @alert1; background-color: @alert1-inner; }
587 587 &.neutral { border-color: @grey3; background-color: @grey6; }
588 588 }
589 589
590 590 // Fixed Sidebar Column
591 591 .sidebar-col-wrapper {
592 592 padding-left: @sidebar-all-width;
593 593
594 594 .sidebar {
595 595 width: @sidebar-width;
596 596 margin-left: -@sidebar-all-width;
597 597 }
598 598 }
599 599
600 600 .sidebar-col-wrapper.scw-small {
601 601 padding-left: @sidebar-small-all-width;
602 602
603 603 .sidebar {
604 604 width: @sidebar-small-width;
605 605 margin-left: -@sidebar-small-all-width;
606 606 }
607 607 }
608 608
609 609
610 610 // FOOTER
611 611 #footer {
612 612 padding: 0;
613 613 text-align: center;
614 614 vertical-align: middle;
615 615 color: @grey2;
616 616 font-size: 11px;
617 617
618 618 p {
619 619 margin: 0;
620 620 padding: 1em;
621 621 line-height: 1em;
622 622 }
623 623
624 624 .server-instance { //server instance
625 625 display: none;
626 626 }
627 627
628 628 .title {
629 629 float: none;
630 630 margin: 0 auto;
631 631 }
632 632 }
633 633
634 634 button.close {
635 635 padding: 0;
636 636 cursor: pointer;
637 637 background: transparent;
638 638 border: 0;
639 639 .box-shadow(none);
640 640 -webkit-appearance: none;
641 641 }
642 642
643 643 .close {
644 644 float: right;
645 645 font-size: 21px;
646 646 font-family: @text-bootstrap;
647 647 line-height: 1em;
648 648 font-weight: bold;
649 649 color: @grey2;
650 650
651 651 &:hover,
652 652 &:focus {
653 653 color: @grey1;
654 654 text-decoration: none;
655 655 cursor: pointer;
656 656 }
657 657 }
658 658
659 659 // GRID
660 660 .sorting,
661 661 .sorting_desc,
662 662 .sorting_asc {
663 663 cursor: pointer;
664 664 }
665 665 .sorting_desc:after {
666 666 content: "\00A0\25B2";
667 667 font-size: .75em;
668 668 }
669 669 .sorting_asc:after {
670 670 content: "\00A0\25BC";
671 671 font-size: .68em;
672 672 }
673 673
674 674
675 675 .user_auth_tokens {
676 676
677 677 &.truncate {
678 678 white-space: nowrap;
679 679 overflow: hidden;
680 680 text-overflow: ellipsis;
681 681 }
682 682
683 683 .fields .field .input {
684 684 margin: 0;
685 685 }
686 686
687 687 input#description {
688 688 width: 100px;
689 689 margin: 0;
690 690 }
691 691
692 692 .drop-menu {
693 693 // TODO: johbo: Remove this, should work out of the box when
694 694 // having multiple inputs inline
695 695 margin: 0 0 0 5px;
696 696 }
697 697 }
698 698 #user_list_table {
699 699 .closed {
700 700 background-color: @grey6;
701 701 }
702 702 }
703 703
704 704
705 705 input, textarea {
706 706 &.disabled {
707 707 opacity: .5;
708 708 }
709 709
710 710 &:hover {
711 711 border-color: @grey3;
712 712 box-shadow: @button-shadow;
713 713 }
714 714
715 715 &:focus {
716 716 border-color: @rcblue;
717 717 box-shadow: @button-shadow;
718 718 }
719 719 }
720 720
721 721 // remove extra padding in firefox
722 722 input::-moz-focus-inner { border:0; padding:0 }
723 723
724 724 .adjacent input {
725 725 margin-bottom: @padding;
726 726 }
727 727
728 728 .permissions_boxes {
729 729 display: block;
730 730 }
731 731
732 732 //FORMS
733 733
734 734 .medium-inline,
735 735 input#description.medium-inline {
736 736 display: inline;
737 737 width: @medium-inline-input-width;
738 738 min-width: 100px;
739 739 }
740 740
741 741 select {
742 742 //reset
743 743 -webkit-appearance: none;
744 744 -moz-appearance: none;
745 745
746 746 display: inline-block;
747 747 height: 28px;
748 748 width: auto;
749 749 margin: 0 @padding @padding 0;
750 750 padding: 0 18px 0 8px;
751 751 line-height:1em;
752 752 font-size: @basefontsize;
753 753 border: @border-thickness solid @grey5;
754 754 border-radius: @border-radius;
755 755 background:white url("../images/dt-arrow-dn.png") no-repeat 100% 50%;
756 756 color: @grey4;
757 757 box-shadow: @button-shadow;
758 758
759 759 &:after {
760 760 content: "\00A0\25BE";
761 761 }
762 762
763 763 &:focus, &:hover {
764 764 outline: none;
765 765 border-color: @grey4;
766 766 color: @rcdarkblue;
767 767 }
768 768 }
769 769
770 770 option {
771 771 &:focus {
772 772 outline: none;
773 773 }
774 774 }
775 775
776 776 input,
777 777 textarea {
778 778 padding: @input-padding;
779 779 border: @input-border-thickness solid @border-highlight-color;
780 780 .border-radius (@border-radius);
781 781 font-family: @text-light;
782 782 font-size: @basefontsize;
783 783
784 784 &.input-sm {
785 785 padding: 5px;
786 786 }
787 787
788 788 &#description {
789 789 min-width: @input-description-minwidth;
790 790 min-height: 1em;
791 791 padding: 10px;
792 792 }
793 793 }
794 794
795 795 .field-sm {
796 796 input,
797 797 textarea {
798 798 padding: 5px;
799 799 }
800 800 }
801 801
802 802 textarea {
803 803 display: block;
804 804 clear: both;
805 805 width: 100%;
806 806 min-height: 100px;
807 807 margin-bottom: @padding;
808 808 .box-sizing(border-box);
809 809 overflow: auto;
810 810 }
811 811
812 812 label {
813 813 font-family: @text-light;
814 814 }
815 815
816 816 // GRAVATARS
817 817 // centers gravatar on username to the right
818 818
819 819 .gravatar {
820 820 display: inline;
821 821 min-width: 16px;
822 822 min-height: 16px;
823 823 margin: -5px 0;
824 824 padding: 0;
825 825 line-height: 1em;
826 826 box-sizing: content-box;
827 827 border-radius: 50%;
828 828
829 829 &.gravatar-large {
830 830 margin: -0.5em .25em -0.5em 0;
831 831 }
832 832
833 833 & + .user {
834 834 display: inline;
835 835 margin: 0;
836 836 padding: 0 0 0 .17em;
837 837 line-height: 1em;
838 838 }
839 839
840 840 & + .no-margin {
841 841 margin: 0
842 842 }
843 843
844 844 }
845 845
846 846 .user-inline-data {
847 847 display: inline-block;
848 848 float: left;
849 849 padding-left: .5em;
850 850 line-height: 1.3em;
851 851 }
852 852
853 853 .rc-user { // gravatar + user wrapper
854 854 float: left;
855 855 position: relative;
856 856 min-width: 100px;
857 857 max-width: 200px;
858 858 min-height: (@gravatar-size + @border-thickness * 2); // account for border
859 859 display: block;
860 860 padding: 0 0 0 (@gravatar-size + @basefontsize/4);
861 861
862 862
863 863 .gravatar {
864 864 display: block;
865 865 position: absolute;
866 866 top: 0;
867 867 left: 0;
868 868 min-width: @gravatar-size;
869 869 min-height: @gravatar-size;
870 870 margin: 0;
871 871 }
872 872
873 873 .user {
874 874 display: block;
875 875 max-width: 175px;
876 876 padding-top: 2px;
877 877 overflow: hidden;
878 878 text-overflow: ellipsis;
879 879 }
880 880 }
881 881
882 882 .gist-gravatar,
883 883 .journal_container {
884 884 .gravatar-large {
885 885 margin: 0 .5em -10px 0;
886 886 }
887 887 }
888 888
889 889 .gist-type-fields {
890 890 line-height: 30px;
891 891 height: 30px;
892 892
893 893 .gist-type-fields-wrapper {
894 894 vertical-align: middle;
895 895 display: inline-block;
896 896 line-height: 25px;
897 897 }
898 898 }
899 899
900 900 // ADMIN SETTINGS
901 901
902 902 // Tag Patterns
903 903 .tag_patterns {
904 904 .tag_input {
905 905 margin-bottom: @padding;
906 906 }
907 907 }
908 908
909 909 .locked_input {
910 910 position: relative;
911 911
912 912 input {
913 913 display: inline;
914 914 margin: 3px 5px 0px 0px;
915 915 }
916 916
917 917 br {
918 918 display: none;
919 919 }
920 920
921 921 .error-message {
922 922 float: left;
923 923 width: 100%;
924 924 }
925 925
926 926 .lock_input_button {
927 927 display: inline;
928 928 }
929 929
930 930 .help-block {
931 931 clear: both;
932 932 }
933 933 }
934 934
935 935 // Notifications
936 936
937 937 .notifications_buttons {
938 938 margin: 0 0 @space 0;
939 939 padding: 0;
940 940
941 941 .btn {
942 942 display: inline-block;
943 943 }
944 944 }
945 945
946 946 .notification-list {
947 947
948 948 div {
949 949 vertical-align: middle;
950 950 }
951 951
952 952 .container {
953 953 display: block;
954 954 margin: 0 0 @padding 0;
955 955 }
956 956
957 957 .delete-notifications {
958 958 margin-left: @padding;
959 959 text-align: right;
960 960 cursor: pointer;
961 961 }
962 962
963 963 .read-notifications {
964 964 margin-left: @padding/2;
965 965 text-align: right;
966 966 width: 35px;
967 967 cursor: pointer;
968 968 }
969 969
970 970 .icon-minus-sign {
971 971 color: @alert2;
972 972 }
973 973
974 974 .icon-ok-sign {
975 975 color: @alert1;
976 976 }
977 977 }
978 978
979 979 .user_settings {
980 980 float: left;
981 981 clear: both;
982 982 display: block;
983 983 width: 100%;
984 984
985 985 .gravatar_box {
986 986 margin-bottom: @padding;
987 987
988 988 &:after {
989 989 content: " ";
990 990 clear: both;
991 991 width: 100%;
992 992 }
993 993 }
994 994
995 995 .fields .field {
996 996 clear: both;
997 997 }
998 998 }
999 999
1000 1000 .advanced_settings {
1001 1001 margin-bottom: @space;
1002 1002
1003 1003 .help-block {
1004 1004 margin-left: 0;
1005 1005 }
1006 1006
1007 1007 button + .help-block {
1008 1008 margin-top: @padding;
1009 1009 }
1010 1010 }
1011 1011
1012 1012 // admin settings radio buttons and labels
1013 1013 .label-2 {
1014 1014 float: left;
1015 1015 width: @label2-width;
1016 1016
1017 1017 label {
1018 1018 color: @grey1;
1019 1019 }
1020 1020 }
1021 1021 .checkboxes {
1022 1022 float: left;
1023 1023 width: @checkboxes-width;
1024 1024 margin-bottom: @padding;
1025 1025
1026 1026 .checkbox {
1027 1027 width: 100%;
1028 1028
1029 1029 label {
1030 1030 margin: 0;
1031 1031 padding: 0;
1032 1032 }
1033 1033 }
1034 1034
1035 1035 .checkbox + .checkbox {
1036 1036 display: inline-block;
1037 1037 }
1038 1038
1039 1039 label {
1040 1040 margin-right: 1em;
1041 1041 }
1042 1042 }
1043 1043
1044 1044 // CHANGELOG
1045 1045 .container_header {
1046 1046 float: left;
1047 1047 display: block;
1048 1048 width: 100%;
1049 1049 margin: @padding 0 @padding;
1050 1050
1051 1051 #filter_changelog {
1052 1052 float: left;
1053 1053 margin-right: @padding;
1054 1054 }
1055 1055
1056 1056 .breadcrumbs_light {
1057 1057 display: inline-block;
1058 1058 }
1059 1059 }
1060 1060
1061 1061 .info_box {
1062 1062 float: right;
1063 1063 }
1064 1064
1065 1065
1066 1066
1067 1067 #graph_content{
1068 1068
1069 1069 // adjust for table headers so that graph renders properly
1070 1070 // #graph_nodes padding - table cell padding
1071 1071 padding-top: (@space - (@basefontsize * 2.4));
1072 1072
1073 1073 &.graph_full_width {
1074 1074 width: 100%;
1075 1075 max-width: 100%;
1076 1076 }
1077 1077 }
1078 1078
1079 1079 #graph {
1080 1080
1081 1081 .pagination-left {
1082 1082 float: left;
1083 1083 clear: both;
1084 1084 }
1085 1085
1086 1086 .log-container {
1087 1087 max-width: 345px;
1088 1088
1089 1089 .message{
1090 1090 max-width: 340px;
1091 1091 }
1092 1092 }
1093 1093
1094 1094 .graph-col-wrapper {
1095 1095
1096 1096 #graph_nodes {
1097 1097 width: 100px;
1098 1098 position: absolute;
1099 1099 left: 70px;
1100 1100 z-index: -1;
1101 1101 }
1102 1102 }
1103 1103
1104 1104 .load-more-commits {
1105 1105 text-align: center;
1106 1106 }
1107 1107 .load-more-commits:hover {
1108 1108 background-color: @grey7;
1109 1109 }
1110 1110 .load-more-commits {
1111 1111 a {
1112 1112 display: block;
1113 1113 }
1114 1114 }
1115 1115 }
1116 1116
1117 1117 .obsolete-toggle {
1118 1118 line-height: 30px;
1119 1119 margin-left: -15px;
1120 1120 }
1121 1121
1122 1122 #rev_range_container, #rev_range_clear, #rev_range_more {
1123 1123 margin-top: -5px;
1124 1124 margin-bottom: -5px;
1125 1125 }
1126 1126
1127 1127 #filter_changelog {
1128 1128 float: left;
1129 1129 }
1130 1130
1131 1131
1132 1132 //--- THEME ------------------//
1133 1133
1134 1134 #logo {
1135 1135 float: left;
1136 1136 margin: 9px 0 0 0;
1137 1137
1138 1138 .header {
1139 1139 background-color: transparent;
1140 1140 }
1141 1141
1142 1142 a {
1143 1143 display: inline-block;
1144 1144 }
1145 1145
1146 1146 img {
1147 1147 height:30px;
1148 1148 }
1149 1149 }
1150 1150
1151 1151 .logo-wrapper {
1152 1152 float:left;
1153 1153 }
1154 1154
1155 1155 .branding {
1156 1156 float: left;
1157 1157 padding: 9px 2px;
1158 1158 line-height: 1em;
1159 1159 font-size: @navigation-fontsize;
1160 1160
1161 1161 a {
1162 1162 color: @grey5
1163 1163 }
1164 1164 @media screen and (max-width: 1200px) {
1165 1165 display: none;
1166 1166 }
1167 1167 }
1168 1168
1169 1169 img {
1170 1170 border: none;
1171 1171 outline: none;
1172 1172 }
1173 1173 user-profile-header
1174 1174 label {
1175 1175
1176 1176 input[type="checkbox"] {
1177 1177 margin-right: 1em;
1178 1178 }
1179 1179 input[type="radio"] {
1180 1180 margin-right: 1em;
1181 1181 }
1182 1182 }
1183 1183
1184 1184 .review-status {
1185 1185 &.under_review {
1186 1186 color: @alert3;
1187 1187 }
1188 1188 &.approved {
1189 1189 color: @alert1;
1190 1190 }
1191 1191 &.rejected,
1192 1192 &.forced_closed{
1193 1193 color: @alert2;
1194 1194 }
1195 1195 &.not_reviewed {
1196 1196 color: @grey5;
1197 1197 }
1198 1198 }
1199 1199
1200 1200 .review-status-under_review {
1201 1201 color: @alert3;
1202 1202 }
1203 1203 .status-tag-under_review {
1204 1204 border-color: @alert3;
1205 1205 }
1206 1206
1207 1207 .review-status-approved {
1208 1208 color: @alert1;
1209 1209 }
1210 1210 .status-tag-approved {
1211 1211 border-color: @alert1;
1212 1212 }
1213 1213
1214 1214 .review-status-rejected,
1215 1215 .review-status-forced_closed {
1216 1216 color: @alert2;
1217 1217 }
1218 1218 .status-tag-rejected,
1219 1219 .status-tag-forced_closed {
1220 1220 border-color: @alert2;
1221 1221 }
1222 1222
1223 1223 .review-status-not_reviewed {
1224 1224 color: @grey5;
1225 1225 }
1226 1226 .status-tag-not_reviewed {
1227 1227 border-color: @grey5;
1228 1228 }
1229 1229
1230 1230 .test_pattern_preview {
1231 1231 margin: @space 0;
1232 1232
1233 1233 p {
1234 1234 margin-bottom: 0;
1235 1235 border-bottom: @border-thickness solid @border-default-color;
1236 1236 color: @grey3;
1237 1237 }
1238 1238
1239 1239 .btn {
1240 1240 margin-bottom: @padding;
1241 1241 }
1242 1242 }
1243 1243 #test_pattern_result {
1244 1244 display: none;
1245 1245 &:extend(pre);
1246 1246 padding: .9em;
1247 1247 color: @grey3;
1248 1248 background-color: @grey7;
1249 1249 border-right: @border-thickness solid @border-default-color;
1250 1250 border-bottom: @border-thickness solid @border-default-color;
1251 1251 border-left: @border-thickness solid @border-default-color;
1252 1252 }
1253 1253
1254 1254 #repo_vcs_settings {
1255 1255 #inherit_overlay_vcs_default {
1256 1256 display: none;
1257 1257 }
1258 1258 #inherit_overlay_vcs_custom {
1259 1259 display: custom;
1260 1260 }
1261 1261 &.inherited {
1262 1262 #inherit_overlay_vcs_default {
1263 1263 display: block;
1264 1264 }
1265 1265 #inherit_overlay_vcs_custom {
1266 1266 display: none;
1267 1267 }
1268 1268 }
1269 1269 }
1270 1270
1271 1271 .issue-tracker-link {
1272 1272 color: @rcblue;
1273 1273 }
1274 1274
1275 1275 // Issue Tracker Table Show/Hide
1276 1276 #repo_issue_tracker {
1277 1277 #inherit_overlay {
1278 1278 display: none;
1279 1279 }
1280 1280 #custom_overlay {
1281 1281 display: custom;
1282 1282 }
1283 1283 &.inherited {
1284 1284 #inherit_overlay {
1285 1285 display: block;
1286 1286 }
1287 1287 #custom_overlay {
1288 1288 display: none;
1289 1289 }
1290 1290 }
1291 1291 }
1292 1292 table.issuetracker {
1293 1293 &.readonly {
1294 1294 tr, td {
1295 1295 color: @grey3;
1296 1296 }
1297 1297 }
1298 1298 .edit {
1299 1299 display: none;
1300 1300 }
1301 1301 .editopen {
1302 1302 .edit {
1303 1303 display: inline;
1304 1304 }
1305 1305 .entry {
1306 1306 display: none;
1307 1307 }
1308 1308 }
1309 1309 tr td.td-action {
1310 1310 min-width: 117px;
1311 1311 }
1312 1312 td input {
1313 1313 max-width: none;
1314 1314 min-width: 30px;
1315 1315 width: 80%;
1316 1316 }
1317 1317 .issuetracker_pref input {
1318 1318 width: 40%;
1319 1319 }
1320 1320 input.edit_issuetracker_update {
1321 1321 margin-right: 0;
1322 1322 width: auto;
1323 1323 }
1324 1324 }
1325 1325
1326 1326 table.integrations {
1327 1327 .td-icon {
1328 1328 width: 20px;
1329 1329 .integration-icon {
1330 1330 height: 20px;
1331 1331 width: 20px;
1332 1332 }
1333 1333 }
1334 1334 }
1335 1335
1336 1336 .integrations {
1337 1337 a.integration-box {
1338 1338 color: @text-color;
1339 1339 &:hover {
1340 1340 .panel {
1341 1341 background: #fbfbfb;
1342 1342 }
1343 1343 }
1344 1344 .integration-icon {
1345 1345 width: 30px;
1346 1346 height: 30px;
1347 1347 margin-right: 20px;
1348 1348 float: left;
1349 1349 }
1350 1350
1351 1351 .panel-body {
1352 1352 padding: 10px;
1353 1353 }
1354 1354 .panel {
1355 1355 margin-bottom: 10px;
1356 1356 }
1357 1357 h2 {
1358 1358 display: inline-block;
1359 1359 margin: 0;
1360 1360 min-width: 140px;
1361 1361 }
1362 1362 }
1363 1363 a.integration-box.dummy-integration {
1364 1364 color: @grey4
1365 1365 }
1366 1366 }
1367 1367
1368 1368 //Permissions Settings
1369 1369 #add_perm {
1370 1370 margin: 0 0 @padding;
1371 1371 cursor: pointer;
1372 1372 }
1373 1373
1374 1374 .perm_ac {
1375 1375 input {
1376 1376 width: 95%;
1377 1377 }
1378 1378 }
1379 1379
1380 1380 .autocomplete-suggestions {
1381 1381 width: auto !important; // overrides autocomplete.js
1382 1382 min-width: 278px;
1383 1383 margin: 0;
1384 1384 border: @border-thickness solid @grey5;
1385 1385 border-radius: @border-radius;
1386 1386 color: @grey2;
1387 1387 background-color: white;
1388 1388 }
1389 1389
1390 1390 .autocomplete-qfilter-suggestions {
1391 1391 width: auto !important; // overrides autocomplete.js
1392 1392 max-height: 100% !important;
1393 1393 min-width: 376px;
1394 1394 margin: 0;
1395 1395 border: @border-thickness solid @grey5;
1396 1396 color: @grey2;
1397 1397 background-color: white;
1398 1398 }
1399 1399
1400 1400 .autocomplete-selected {
1401 1401 background: #F0F0F0;
1402 1402 }
1403 1403
1404 1404 .ac-container-wrap {
1405 1405 margin: 0;
1406 1406 padding: 8px;
1407 1407 border-bottom: @border-thickness solid @grey5;
1408 1408 list-style-type: none;
1409 1409 cursor: pointer;
1410 1410
1411 1411 &:hover {
1412 1412 background-color: @grey7;
1413 1413 }
1414 1414
1415 1415 img {
1416 1416 height: @gravatar-size;
1417 1417 width: @gravatar-size;
1418 1418 margin-right: 1em;
1419 1419 }
1420 1420
1421 1421 strong {
1422 1422 font-weight: normal;
1423 1423 }
1424 1424 }
1425 1425
1426 1426 // Settings Dropdown
1427 1427 .user-menu .container {
1428 1428 padding: 0 4px;
1429 1429 margin: 0;
1430 1430 }
1431 1431
1432 1432 .user-menu .gravatar {
1433 1433 cursor: pointer;
1434 1434 }
1435 1435
1436 1436 .codeblock {
1437 1437 margin-bottom: @padding;
1438 1438 clear: both;
1439 1439
1440 1440 .stats {
1441 1441 overflow: hidden;
1442 1442 }
1443 1443
1444 1444 .message{
1445 1445 textarea{
1446 1446 margin: 0;
1447 1447 }
1448 1448 }
1449 1449
1450 1450 .code-header {
1451 1451 .stats {
1452 1452 line-height: 2em;
1453 1453
1454 1454 .revision_id {
1455 1455 margin-left: 0;
1456 1456 }
1457 1457 .buttons {
1458 1458 padding-right: 0;
1459 1459 }
1460 1460 }
1461 1461
1462 1462 .item{
1463 1463 margin-right: 0.5em;
1464 1464 }
1465 1465 }
1466 1466
1467 1467 #editor_container {
1468 1468 position: relative;
1469 1469 margin: @padding 10px;
1470 1470 }
1471 1471 }
1472 1472
1473 1473 #file_history_container {
1474 1474 display: none;
1475 1475 }
1476 1476
1477 1477 .file-history-inner {
1478 1478 margin-bottom: 10px;
1479 1479 }
1480 1480
1481 1481 // Pull Requests
1482 1482 .summary-details {
1483 1483 width: 72%;
1484 1484 }
1485 1485 .pr-summary {
1486 1486 border-bottom: @border-thickness solid @grey5;
1487 1487 margin-bottom: @space;
1488 1488 }
1489 1489
1490 1490 .reviewers-title {
1491 1491 width: 25%;
1492 1492 min-width: 200px;
1493 1493
1494 1494 &.first-panel {
1495 1495 margin-top: 34px;
1496 1496 }
1497 1497 }
1498 1498
1499 1499 .reviewers {
1500 1500 width: 25%;
1501 1501 min-width: 200px;
1502 1502 }
1503 1503 .reviewers ul li {
1504 1504 position: relative;
1505 1505 width: 100%;
1506 1506 padding-bottom: 8px;
1507 1507 list-style-type: none;
1508 1508 }
1509 1509
1510 1510 .reviewer_entry {
1511 1511 min-height: 55px;
1512 1512 }
1513 1513
1514 1514 .reviewers_member {
1515 1515 width: 100%;
1516 1516 overflow: auto;
1517 1517 }
1518 1518 .reviewer_reason {
1519 1519 padding-left: 20px;
1520 1520 line-height: 1.5em;
1521 1521 }
1522 1522 .reviewer_status {
1523 1523 display: inline-block;
1524 1524 width: 25px;
1525 1525 min-width: 25px;
1526 1526 height: 1.2em;
1527 1527 line-height: 1em;
1528 1528 }
1529 1529
1530 1530 .reviewer_name {
1531 1531 display: inline-block;
1532 1532 max-width: 83%;
1533 1533 padding-right: 20px;
1534 1534 vertical-align: middle;
1535 1535 line-height: 1;
1536 1536
1537 1537 .rc-user {
1538 1538 min-width: 0;
1539 1539 margin: -2px 1em 0 0;
1540 1540 }
1541 1541
1542 1542 .reviewer {
1543 1543 float: left;
1544 1544 }
1545 1545 }
1546 1546
1547 1547 .reviewer_member_mandatory {
1548 1548 position: absolute;
1549 1549 left: 15px;
1550 1550 top: 8px;
1551 1551 width: 16px;
1552 1552 font-size: 11px;
1553 1553 margin: 0;
1554 1554 padding: 0;
1555 1555 color: black;
1556 1556 }
1557 1557
1558 1558 .reviewer_member_mandatory_remove,
1559 1559 .reviewer_member_remove {
1560 1560 position: absolute;
1561 1561 right: 0;
1562 1562 top: 0;
1563 1563 width: 16px;
1564 1564 margin-bottom: 10px;
1565 1565 padding: 0;
1566 1566 color: black;
1567 1567 }
1568 1568
1569 1569 .reviewer_member_mandatory_remove {
1570 1570 color: @grey4;
1571 1571 }
1572 1572
1573 1573 .reviewer_member_status {
1574 1574 margin-top: 5px;
1575 1575 }
1576 1576 .pr-summary #summary{
1577 1577 width: 100%;
1578 1578 }
1579 1579 .pr-summary .action_button:hover {
1580 1580 border: 0;
1581 1581 cursor: pointer;
1582 1582 }
1583 1583 .pr-details-title {
1584 1584 padding-bottom: 8px;
1585 1585 border-bottom: @border-thickness solid @grey5;
1586 1586
1587 1587 .action_button.disabled {
1588 1588 color: @grey4;
1589 1589 cursor: inherit;
1590 1590 }
1591 1591 .action_button {
1592 1592 color: @rcblue;
1593 1593 }
1594 1594 }
1595 1595 .pr-details-content {
1596 1596 margin-top: @textmargin - 5;
1597 1597 margin-bottom: @textmargin - 5;
1598 1598 }
1599 1599
1600 1600 .pr-reviewer-rules {
1601 1601 padding: 10px 0px 20px 0px;
1602 1602 }
1603 1603
1604 1604 .todo-resolved {
1605 1605 text-decoration: line-through;
1606 1606 }
1607 1607
1608 1608 .todo-table {
1609 1609 width: 100%;
1610 1610
1611 1611 td {
1612 1612 padding: 5px 0px;
1613 1613 }
1614 1614
1615 1615 .td-todo-number {
1616 1616 text-align: left;
1617 1617 white-space: nowrap;
1618 1618 width: 15%;
1619 1619 }
1620 1620
1621 1621 .td-todo-gravatar {
1622 1622 width: 5%;
1623 1623
1624 1624 img {
1625 1625 margin: -3px 0;
1626 1626 }
1627 1627 }
1628 1628
1629 1629 }
1630 1630
1631 1631 .todo-comment-text-wrapper {
1632 1632 display: inline-grid;
1633 1633 }
1634 1634
1635 1635 .todo-comment-text {
1636 1636 margin-left: 5px;
1637 1637 white-space: nowrap;
1638 1638 overflow: hidden;
1639 1639 text-overflow: ellipsis;
1640 1640 }
1641 1641
1642 1642 .group_members {
1643 1643 margin-top: 0;
1644 1644 padding: 0;
1645 1645 list-style: outside none none;
1646 1646
1647 1647 img {
1648 1648 height: @gravatar-size;
1649 1649 width: @gravatar-size;
1650 1650 margin-right: .5em;
1651 1651 margin-left: 3px;
1652 1652 }
1653 1653
1654 1654 .to-delete {
1655 1655 .user {
1656 1656 text-decoration: line-through;
1657 1657 }
1658 1658 }
1659 1659 }
1660 1660
1661 1661 .compare_view_commits_title {
1662 1662 .disabled {
1663 1663 cursor: inherit;
1664 1664 &:hover{
1665 1665 background-color: inherit;
1666 1666 color: inherit;
1667 1667 }
1668 1668 }
1669 1669 }
1670 1670
1671 1671 .subtitle-compare {
1672 1672 margin: -15px 0px 0px 0px;
1673 1673 }
1674 1674
1675 1675 // new entry in group_members
1676 1676 .td-author-new-entry {
1677 1677 background-color: rgba(red(@alert1), green(@alert1), blue(@alert1), 0.3);
1678 1678 }
1679 1679
1680 1680 .usergroup_member_remove {
1681 1681 width: 16px;
1682 1682 margin-bottom: 10px;
1683 1683 padding: 0;
1684 1684 color: black !important;
1685 1685 cursor: pointer;
1686 1686 }
1687 1687
1688 1688 .reviewer_ac .ac-input {
1689 1689 width: 92%;
1690 1690 margin-bottom: 1em;
1691 1691 }
1692 1692
1693 1693 .compare_view_commits tr{
1694 1694 height: 20px;
1695 1695 }
1696 1696 .compare_view_commits td {
1697 1697 vertical-align: top;
1698 1698 padding-top: 10px;
1699 1699 }
1700 1700 .compare_view_commits .author {
1701 1701 margin-left: 5px;
1702 1702 }
1703 1703
1704 1704 .compare_view_commits {
1705 1705 .color-a {
1706 1706 color: @alert1;
1707 1707 }
1708 1708
1709 1709 .color-c {
1710 1710 color: @color3;
1711 1711 }
1712 1712
1713 1713 .color-r {
1714 1714 color: @color5;
1715 1715 }
1716 1716
1717 1717 .color-a-bg {
1718 1718 background-color: @alert1;
1719 1719 }
1720 1720
1721 1721 .color-c-bg {
1722 1722 background-color: @alert3;
1723 1723 }
1724 1724
1725 1725 .color-r-bg {
1726 1726 background-color: @alert2;
1727 1727 }
1728 1728
1729 1729 .color-a-border {
1730 1730 border: 1px solid @alert1;
1731 1731 }
1732 1732
1733 1733 .color-c-border {
1734 1734 border: 1px solid @alert3;
1735 1735 }
1736 1736
1737 1737 .color-r-border {
1738 1738 border: 1px solid @alert2;
1739 1739 }
1740 1740
1741 1741 .commit-change-indicator {
1742 1742 width: 15px;
1743 1743 height: 15px;
1744 1744 position: relative;
1745 1745 left: 15px;
1746 1746 }
1747 1747
1748 1748 .commit-change-content {
1749 1749 text-align: center;
1750 1750 vertical-align: middle;
1751 1751 line-height: 15px;
1752 1752 }
1753 1753 }
1754 1754
1755 1755 .compare_view_filepath {
1756 1756 color: @grey1;
1757 1757 }
1758 1758
1759 1759 .show_more {
1760 1760 display: inline-block;
1761 1761 width: 0;
1762 1762 height: 0;
1763 1763 vertical-align: middle;
1764 1764 content: "";
1765 1765 border: 4px solid;
1766 1766 border-right-color: transparent;
1767 1767 border-bottom-color: transparent;
1768 1768 border-left-color: transparent;
1769 1769 font-size: 0;
1770 1770 }
1771 1771
1772 1772 .journal_more .show_more {
1773 1773 display: inline;
1774 1774
1775 1775 &:after {
1776 1776 content: none;
1777 1777 }
1778 1778 }
1779 1779
1780 1780 .compare_view_commits .collapse_commit:after {
1781 1781 cursor: pointer;
1782 1782 content: "\00A0\25B4";
1783 1783 margin-left: -3px;
1784 1784 font-size: 17px;
1785 1785 color: @grey4;
1786 1786 }
1787 1787
1788 1788 .diff_links {
1789 1789 margin-left: 8px;
1790 1790 }
1791 1791
1792 1792 #pull_request_overview {
1793 1793 div.ancestor {
1794 1794 margin: -33px 0;
1795 1795 }
1796 1796 }
1797 1797
1798 1798 div.ancestor {
1799 line-height: 33px;
1799
1800 1800 }
1801 1801
1802 1802 .cs_icon_td input[type="checkbox"] {
1803 1803 display: none;
1804 1804 }
1805 1805
1806 1806 .cs_icon_td .expand_file_icon:after {
1807 1807 cursor: pointer;
1808 1808 content: "\00A0\25B6";
1809 1809 font-size: 12px;
1810 1810 color: @grey4;
1811 1811 }
1812 1812
1813 1813 .cs_icon_td .collapse_file_icon:after {
1814 1814 cursor: pointer;
1815 1815 content: "\00A0\25BC";
1816 1816 font-size: 12px;
1817 1817 color: @grey4;
1818 1818 }
1819 1819
1820 1820 /*new binary
1821 1821 NEW_FILENODE = 1
1822 1822 DEL_FILENODE = 2
1823 1823 MOD_FILENODE = 3
1824 1824 RENAMED_FILENODE = 4
1825 1825 COPIED_FILENODE = 5
1826 1826 CHMOD_FILENODE = 6
1827 1827 BIN_FILENODE = 7
1828 1828 */
1829 1829 .cs_files_expand {
1830 1830 font-size: @basefontsize + 5px;
1831 1831 line-height: 1.8em;
1832 1832 float: right;
1833 1833 }
1834 1834
1835 1835 .cs_files_expand span{
1836 1836 color: @rcblue;
1837 1837 cursor: pointer;
1838 1838 }
1839 1839 .cs_files {
1840 1840 clear: both;
1841 1841 padding-bottom: @padding;
1842 1842
1843 1843 .cur_cs {
1844 1844 margin: 10px 2px;
1845 1845 font-weight: bold;
1846 1846 }
1847 1847
1848 1848 .node {
1849 1849 float: left;
1850 1850 }
1851 1851
1852 1852 .changes {
1853 1853 float: right;
1854 1854 color: white;
1855 1855 font-size: @basefontsize - 4px;
1856 1856 margin-top: 4px;
1857 1857 opacity: 0.6;
1858 1858 filter: Alpha(opacity=60); /* IE8 and earlier */
1859 1859
1860 1860 .added {
1861 1861 background-color: @alert1;
1862 1862 float: left;
1863 1863 text-align: center;
1864 1864 }
1865 1865
1866 1866 .deleted {
1867 1867 background-color: @alert2;
1868 1868 float: left;
1869 1869 text-align: center;
1870 1870 }
1871 1871
1872 1872 .bin {
1873 1873 background-color: @alert1;
1874 1874 text-align: center;
1875 1875 }
1876 1876
1877 1877 /*new binary*/
1878 1878 .bin.bin1 {
1879 1879 background-color: @alert1;
1880 1880 text-align: center;
1881 1881 }
1882 1882
1883 1883 /*deleted binary*/
1884 1884 .bin.bin2 {
1885 1885 background-color: @alert2;
1886 1886 text-align: center;
1887 1887 }
1888 1888
1889 1889 /*mod binary*/
1890 1890 .bin.bin3 {
1891 1891 background-color: @grey2;
1892 1892 text-align: center;
1893 1893 }
1894 1894
1895 1895 /*rename file*/
1896 1896 .bin.bin4 {
1897 1897 background-color: @alert4;
1898 1898 text-align: center;
1899 1899 }
1900 1900
1901 1901 /*copied file*/
1902 1902 .bin.bin5 {
1903 1903 background-color: @alert4;
1904 1904 text-align: center;
1905 1905 }
1906 1906
1907 1907 /*chmod file*/
1908 1908 .bin.bin6 {
1909 1909 background-color: @grey2;
1910 1910 text-align: center;
1911 1911 }
1912 1912 }
1913 1913 }
1914 1914
1915 1915 .cs_files .cs_added, .cs_files .cs_A,
1916 1916 .cs_files .cs_added, .cs_files .cs_M,
1917 1917 .cs_files .cs_added, .cs_files .cs_D {
1918 1918 height: 16px;
1919 1919 padding-right: 10px;
1920 1920 margin-top: 7px;
1921 1921 text-align: left;
1922 1922 }
1923 1923
1924 1924 .cs_icon_td {
1925 1925 min-width: 16px;
1926 1926 width: 16px;
1927 1927 }
1928 1928
1929 1929 .pull-request-merge {
1930 1930 border: 1px solid @grey5;
1931 1931 padding: 10px 0px 20px;
1932 1932 margin-top: 10px;
1933 1933 margin-bottom: 20px;
1934 1934 }
1935 1935
1936 1936 .pull-request-merge-refresh {
1937 1937 margin: 2px 7px;
1938 1938 a {
1939 1939 color: @grey3;
1940 1940 }
1941 1941 }
1942 1942
1943 1943 .pull-request-merge ul {
1944 1944 padding: 0px 0px;
1945 1945 }
1946 1946
1947 1947 .pull-request-merge li {
1948 1948 list-style-type: none;
1949 1949 }
1950 1950
1951 1951 .pull-request-merge .pull-request-wrap {
1952 1952 height: auto;
1953 1953 padding: 0px 0px;
1954 1954 text-align: right;
1955 1955 }
1956 1956
1957 1957 .pull-request-merge span {
1958 1958 margin-right: 5px;
1959 1959 }
1960 1960
1961 1961 .pull-request-merge-actions {
1962 1962 min-height: 30px;
1963 1963 padding: 0px 0px;
1964 1964 }
1965 1965
1966 1966 .pull-request-merge-info {
1967 1967 padding: 0px 5px 5px 0px;
1968 1968 }
1969 1969
1970 1970 .merge-status {
1971 1971 margin-right: 5px;
1972 1972 }
1973 1973
1974 1974 .merge-message {
1975 1975 font-size: 1.2em
1976 1976 }
1977 1977
1978 1978 .merge-message.success i,
1979 1979 .merge-icon.success i {
1980 1980 color:@alert1;
1981 1981 }
1982 1982
1983 1983 .merge-message.warning i,
1984 1984 .merge-icon.warning i {
1985 1985 color: @alert3;
1986 1986 }
1987 1987
1988 1988 .merge-message.error i,
1989 1989 .merge-icon.error i {
1990 1990 color:@alert2;
1991 1991 }
1992 1992
1993 1993 .pr-versions {
1994 1994 font-size: 1.1em;
1995 1995 padding: 7.5px;
1996 1996
1997 1997 table {
1998 1998
1999 1999 }
2000 2000
2001 2001 td {
2002 2002 line-height: 15px;
2003 2003 }
2004 2004
2005 2005 .compare-radio-button {
2006 2006 position: relative;
2007 2007 top: -3px;
2008 2008 }
2009 2009 }
2010 2010
2011 2011
2012 2012 #close_pull_request {
2013 2013 margin-right: 0px;
2014 2014 }
2015 2015
2016 2016 .empty_data {
2017 2017 color: @grey4;
2018 2018 }
2019 2019
2020 2020 #changeset_compare_view_content {
2021 2021 clear: both;
2022 2022 width: 100%;
2023 2023 box-sizing: border-box;
2024 2024 .border-radius(@border-radius);
2025 2025
2026 2026 .help-block {
2027 2027 margin: @padding 0;
2028 2028 color: @text-color;
2029 2029 &.pre-formatting {
2030 2030 white-space: pre;
2031 2031 }
2032 2032 }
2033 2033
2034 2034 .empty_data {
2035 2035 margin: @padding 0;
2036 2036 }
2037 2037
2038 2038 .alert {
2039 2039 margin-bottom: @space;
2040 2040 }
2041 2041 }
2042 2042
2043 2043 .table_disp {
2044 2044 .status {
2045 2045 width: auto;
2046 2046 }
2047 2047 }
2048 2048
2049 2049
2050 2050 .creation_in_progress {
2051 2051 color: @grey4
2052 2052 }
2053 2053
2054 2054 .status_box_menu {
2055 2055 margin: 0;
2056 2056 }
2057 2057
2058 2058 .notification-table{
2059 2059 margin-bottom: @space;
2060 2060 display: table;
2061 2061 width: 100%;
2062 2062
2063 2063 .container{
2064 2064 display: table-row;
2065 2065
2066 2066 .notification-header{
2067 2067 border-bottom: @border-thickness solid @border-default-color;
2068 2068 }
2069 2069
2070 2070 .notification-subject{
2071 2071 display: table-cell;
2072 2072 }
2073 2073 }
2074 2074 }
2075 2075
2076 2076 // Notifications
2077 2077 .notification-header{
2078 2078 display: table;
2079 2079 width: 100%;
2080 2080 padding: floor(@basefontsize/2) 0;
2081 2081 line-height: 1em;
2082 2082
2083 2083 .desc, .delete-notifications, .read-notifications{
2084 2084 display: table-cell;
2085 2085 text-align: left;
2086 2086 }
2087 2087
2088 2088 .delete-notifications, .read-notifications{
2089 2089 width: 35px;
2090 2090 min-width: 35px; //fixes when only one button is displayed
2091 2091 }
2092 2092 }
2093 2093
2094 2094 .notification-body {
2095 2095 .markdown-block,
2096 2096 .rst-block {
2097 2097 padding: @padding 0;
2098 2098 }
2099 2099
2100 2100 .notification-subject {
2101 2101 padding: @textmargin 0;
2102 2102 border-bottom: @border-thickness solid @border-default-color;
2103 2103 }
2104 2104 }
2105 2105
2106 2106 .notice-messages {
2107 2107 .markdown-block,
2108 2108 .rst-block {
2109 2109 padding: 0;
2110 2110 }
2111 2111 }
2112 2112
2113 2113 .notifications_buttons{
2114 2114 float: right;
2115 2115 }
2116 2116
2117 2117 #notification-status{
2118 2118 display: inline;
2119 2119 }
2120 2120
2121 2121 // Repositories
2122 2122
2123 2123 #summary.fields{
2124 2124 display: table;
2125 2125
2126 2126 .field{
2127 2127 display: table-row;
2128 2128
2129 2129 .label-summary{
2130 2130 display: table-cell;
2131 2131 min-width: @label-summary-minwidth;
2132 2132 padding-top: @padding/2;
2133 2133 padding-bottom: @padding/2;
2134 2134 padding-right: @padding/2;
2135 2135 }
2136 2136
2137 2137 .input{
2138 2138 display: table-cell;
2139 2139 padding: @padding/2;
2140 2140
2141 2141 input{
2142 2142 min-width: 29em;
2143 2143 padding: @padding/4;
2144 2144 }
2145 2145 }
2146 2146 .statistics, .downloads{
2147 2147 .disabled{
2148 2148 color: @grey4;
2149 2149 }
2150 2150 }
2151 2151 }
2152 2152 }
2153 2153
2154 2154 #summary{
2155 2155 width: 70%;
2156 2156 }
2157 2157
2158 2158
2159 2159 // Journal
2160 2160 .journal.title {
2161 2161 h5 {
2162 2162 float: left;
2163 2163 margin: 0;
2164 2164 width: 70%;
2165 2165 }
2166 2166
2167 2167 ul {
2168 2168 float: right;
2169 2169 display: inline-block;
2170 2170 margin: 0;
2171 2171 width: 30%;
2172 2172 text-align: right;
2173 2173
2174 2174 li {
2175 2175 display: inline;
2176 2176 font-size: @journal-fontsize;
2177 2177 line-height: 1em;
2178 2178
2179 2179 list-style-type: none;
2180 2180 }
2181 2181 }
2182 2182 }
2183 2183
2184 2184 .filterexample {
2185 2185 position: absolute;
2186 2186 top: 95px;
2187 2187 left: @contentpadding;
2188 2188 color: @rcblue;
2189 2189 font-size: 11px;
2190 2190 font-family: @text-regular;
2191 2191 cursor: help;
2192 2192
2193 2193 &:hover {
2194 2194 color: @rcdarkblue;
2195 2195 }
2196 2196
2197 2197 @media (max-width:768px) {
2198 2198 position: relative;
2199 2199 top: auto;
2200 2200 left: auto;
2201 2201 display: block;
2202 2202 }
2203 2203 }
2204 2204
2205 2205
2206 2206 #journal{
2207 2207 margin-bottom: @space;
2208 2208
2209 2209 .journal_day{
2210 2210 margin-bottom: @textmargin/2;
2211 2211 padding-bottom: @textmargin/2;
2212 2212 font-size: @journal-fontsize;
2213 2213 border-bottom: @border-thickness solid @border-default-color;
2214 2214 }
2215 2215
2216 2216 .journal_container{
2217 2217 margin-bottom: @space;
2218 2218
2219 2219 .journal_user{
2220 2220 display: inline-block;
2221 2221 }
2222 2222 .journal_action_container{
2223 2223 display: block;
2224 2224 margin-top: @textmargin;
2225 2225
2226 2226 div{
2227 2227 display: inline;
2228 2228 }
2229 2229
2230 2230 div.journal_action_params{
2231 2231 display: block;
2232 2232 }
2233 2233
2234 2234 div.journal_repo:after{
2235 2235 content: "\A";
2236 2236 white-space: pre;
2237 2237 }
2238 2238
2239 2239 div.date{
2240 2240 display: block;
2241 2241 margin-bottom: @textmargin;
2242 2242 }
2243 2243 }
2244 2244 }
2245 2245 }
2246 2246
2247 2247 // Files
2248 2248 .edit-file-title {
2249 2249 font-size: 16px;
2250 2250
2251 2251 .title-heading {
2252 2252 padding: 2px;
2253 2253 }
2254 2254 }
2255 2255
2256 2256 .edit-file-fieldset {
2257 2257 margin: @sidebarpadding 0;
2258 2258
2259 2259 .fieldset {
2260 2260 .left-label {
2261 2261 width: 13%;
2262 2262 }
2263 2263 .right-content {
2264 2264 width: 87%;
2265 2265 max-width: 100%;
2266 2266 }
2267 2267 .filename-label {
2268 2268 margin-top: 13px;
2269 2269 }
2270 2270 .commit-message-label {
2271 2271 margin-top: 4px;
2272 2272 }
2273 2273 .file-upload-input {
2274 2274 input {
2275 2275 display: none;
2276 2276 }
2277 2277 margin-top: 10px;
2278 2278 }
2279 2279 .file-upload-label {
2280 2280 margin-top: 10px;
2281 2281 }
2282 2282 p {
2283 2283 margin-top: 5px;
2284 2284 }
2285 2285
2286 2286 }
2287 2287 .custom-path-link {
2288 2288 margin-left: 5px;
2289 2289 }
2290 2290 #commit {
2291 2291 resize: vertical;
2292 2292 }
2293 2293 }
2294 2294
2295 2295 .delete-file-preview {
2296 2296 max-height: 250px;
2297 2297 }
2298 2298
2299 2299 .new-file,
2300 2300 #filter_activate,
2301 2301 #filter_deactivate {
2302 2302 float: right;
2303 2303 margin: 0 0 0 10px;
2304 2304 }
2305 2305
2306 2306 .file-upload-transaction-wrapper {
2307 2307 margin-top: 57px;
2308 2308 clear: both;
2309 2309 }
2310 2310
2311 2311 .file-upload-transaction-wrapper .error {
2312 2312 color: @color5;
2313 2313 }
2314 2314
2315 2315 .file-upload-transaction {
2316 2316 min-height: 200px;
2317 2317 padding: 54px;
2318 2318 border: 1px solid @grey5;
2319 2319 text-align: center;
2320 2320 clear: both;
2321 2321 }
2322 2322
2323 2323 .file-upload-transaction i {
2324 2324 font-size: 48px
2325 2325 }
2326 2326
2327 2327 h3.files_location{
2328 2328 line-height: 2.4em;
2329 2329 }
2330 2330
2331 2331 .browser-nav {
2332 2332 width: 100%;
2333 2333 display: table;
2334 2334 margin-bottom: 20px;
2335 2335
2336 2336 .info_box {
2337 2337 float: left;
2338 2338 display: inline-table;
2339 2339 height: 2.5em;
2340 2340
2341 2341 .browser-cur-rev, .info_box_elem {
2342 2342 display: table-cell;
2343 2343 vertical-align: middle;
2344 2344 }
2345 2345
2346 2346 .drop-menu {
2347 2347 margin: 0 10px;
2348 2348 }
2349 2349
2350 2350 .info_box_elem {
2351 2351 border-top: @border-thickness solid @grey5;
2352 2352 border-bottom: @border-thickness solid @grey5;
2353 2353 box-shadow: @button-shadow;
2354 2354
2355 2355 #at_rev, a {
2356 2356 padding: 0.6em 0.4em;
2357 2357 margin: 0;
2358 2358 .box-shadow(none);
2359 2359 border: 0;
2360 2360 height: 12px;
2361 2361 color: @grey2;
2362 2362 }
2363 2363
2364 2364 input#at_rev {
2365 2365 max-width: 50px;
2366 2366 text-align: center;
2367 2367 }
2368 2368
2369 2369 &.previous {
2370 2370 border: @border-thickness solid @grey5;
2371 2371 border-top-left-radius: @border-radius;
2372 2372 border-bottom-left-radius: @border-radius;
2373 2373
2374 2374 &:hover {
2375 2375 border-color: @grey4;
2376 2376 }
2377 2377
2378 2378 .disabled {
2379 2379 color: @grey5;
2380 2380 cursor: not-allowed;
2381 2381 opacity: 0.5;
2382 2382 }
2383 2383 }
2384 2384
2385 2385 &.next {
2386 2386 border: @border-thickness solid @grey5;
2387 2387 border-top-right-radius: @border-radius;
2388 2388 border-bottom-right-radius: @border-radius;
2389 2389
2390 2390 &:hover {
2391 2391 border-color: @grey4;
2392 2392 }
2393 2393
2394 2394 .disabled {
2395 2395 color: @grey5;
2396 2396 cursor: not-allowed;
2397 2397 opacity: 0.5;
2398 2398 }
2399 2399 }
2400 2400 }
2401 2401
2402 2402 .browser-cur-rev {
2403 2403
2404 2404 span{
2405 2405 margin: 0;
2406 2406 color: @rcblue;
2407 2407 height: 12px;
2408 2408 display: inline-block;
2409 2409 padding: 0.7em 1em ;
2410 2410 border: @border-thickness solid @rcblue;
2411 2411 margin-right: @padding;
2412 2412 }
2413 2413 }
2414 2414
2415 2415 }
2416 2416
2417 2417 .select-index-number {
2418 2418 margin: 0 0 0 20px;
2419 2419 color: @grey3;
2420 2420 }
2421 2421
2422 2422 .search_activate {
2423 2423 display: table-cell;
2424 2424 vertical-align: middle;
2425 2425
2426 2426 input, label{
2427 2427 margin: 0;
2428 2428 padding: 0;
2429 2429 }
2430 2430
2431 2431 input{
2432 2432 margin-left: @textmargin;
2433 2433 }
2434 2434
2435 2435 }
2436 2436 }
2437 2437
2438 2438 .browser-cur-rev{
2439 2439 margin-bottom: @textmargin;
2440 2440 }
2441 2441
2442 2442 #node_filter_box_loading{
2443 2443 .info_text;
2444 2444 }
2445 2445
2446 2446 .browser-search {
2447 2447 margin: -25px 0px 5px 0px;
2448 2448 }
2449 2449
2450 2450 .files-quick-filter {
2451 2451 float: right;
2452 2452 width: 180px;
2453 2453 position: relative;
2454 2454 }
2455 2455
2456 2456 .files-filter-box {
2457 2457 display: flex;
2458 2458 padding: 0px;
2459 2459 border-radius: 3px;
2460 2460 margin-bottom: 0;
2461 2461
2462 2462 a {
2463 2463 border: none !important;
2464 2464 }
2465 2465
2466 2466 li {
2467 2467 list-style-type: none
2468 2468 }
2469 2469 }
2470 2470
2471 2471 .files-filter-box-path {
2472 2472 line-height: 33px;
2473 2473 padding: 0;
2474 2474 width: 20px;
2475 2475 position: absolute;
2476 2476 z-index: 11;
2477 2477 left: 5px;
2478 2478 }
2479 2479
2480 2480 .files-filter-box-input {
2481 2481 margin-right: 0;
2482 2482
2483 2483 input {
2484 2484 border: 1px solid @white;
2485 2485 padding-left: 25px;
2486 2486 width: 145px;
2487 2487
2488 2488 &:hover {
2489 2489 border-color: @grey6;
2490 2490 }
2491 2491
2492 2492 &:focus {
2493 2493 border-color: @grey5;
2494 2494 }
2495 2495 }
2496 2496 }
2497 2497
2498 2498 .browser-result{
2499 2499 td a{
2500 2500 margin-left: 0.5em;
2501 2501 display: inline-block;
2502 2502
2503 2503 em {
2504 2504 font-weight: @text-bold-weight;
2505 2505 font-family: @text-bold;
2506 2506 }
2507 2507 }
2508 2508 }
2509 2509
2510 2510 .browser-highlight{
2511 2511 background-color: @grey5-alpha;
2512 2512 }
2513 2513
2514 2514
2515 2515 .edit-file-fieldset #location,
2516 2516 .edit-file-fieldset #filename {
2517 2517 display: flex;
2518 2518 width: -moz-available; /* WebKit-based browsers will ignore this. */
2519 2519 width: -webkit-fill-available; /* Mozilla-based browsers will ignore this. */
2520 2520 width: fill-available;
2521 2521 border: 0;
2522 2522 }
2523 2523
2524 2524 .path-items {
2525 2525 display: flex;
2526 2526 padding: 0;
2527 2527 border: 1px solid #eeeeee;
2528 2528 width: 100%;
2529 2529 float: left;
2530 2530
2531 2531 .breadcrumb-path {
2532 2532 line-height: 30px;
2533 2533 padding: 0 4px;
2534 2534 white-space: nowrap;
2535 2535 }
2536 2536
2537 2537 .upload-form {
2538 2538 margin-top: 46px;
2539 2539 }
2540 2540
2541 2541 .location-path {
2542 2542 width: -moz-available; /* WebKit-based browsers will ignore this. */
2543 2543 width: -webkit-fill-available; /* Mozilla-based browsers will ignore this. */
2544 2544 width: fill-available;
2545 2545
2546 2546 .file-name-input {
2547 2547 padding: 0.5em 0;
2548 2548 }
2549 2549
2550 2550 }
2551 2551
2552 2552 ul {
2553 2553 display: flex;
2554 2554 margin: 0;
2555 2555 padding: 0;
2556 2556 width: 100%;
2557 2557 }
2558 2558
2559 2559 li {
2560 2560 list-style-type: none;
2561 2561 }
2562 2562
2563 2563 }
2564 2564
2565 2565 .editor-items {
2566 2566 height: 40px;
2567 2567 margin: 10px 0 -17px 10px;
2568 2568
2569 2569 .editor-action {
2570 2570 cursor: pointer;
2571 2571 }
2572 2572
2573 2573 .editor-action.active {
2574 2574 border-bottom: 2px solid #5C5C5C;
2575 2575 }
2576 2576
2577 2577 li {
2578 2578 list-style-type: none;
2579 2579 }
2580 2580 }
2581 2581
2582 2582 .edit-file-fieldset .message textarea {
2583 2583 border: 1px solid #eeeeee;
2584 2584 }
2585 2585
2586 2586 #files_data .codeblock {
2587 2587 background-color: #F5F5F5;
2588 2588 }
2589 2589
2590 2590 #editor_preview {
2591 2591 background: white;
2592 2592 }
2593 2593
2594 2594 .show-editor {
2595 2595 padding: 10px;
2596 2596 background-color: white;
2597 2597
2598 2598 }
2599 2599
2600 2600 .show-preview {
2601 2601 padding: 10px;
2602 2602 background-color: white;
2603 2603 border-left: 1px solid #eeeeee;
2604 2604 }
2605 2605 // quick filter
2606 2606 .grid-quick-filter {
2607 2607 float: right;
2608 2608 position: relative;
2609 2609 }
2610 2610
2611 2611 .grid-filter-box {
2612 2612 display: flex;
2613 2613 padding: 0px;
2614 2614 border-radius: 3px;
2615 2615 margin-bottom: 0;
2616 2616
2617 2617 a {
2618 2618 border: none !important;
2619 2619 }
2620 2620
2621 2621 li {
2622 2622 list-style-type: none
2623 2623 }
2624 2624 }
2625 2625
2626 2626 .grid-filter-box-icon {
2627 2627 line-height: 33px;
2628 2628 padding: 0;
2629 2629 width: 20px;
2630 2630 position: absolute;
2631 2631 z-index: 11;
2632 2632 left: 5px;
2633 2633 }
2634 2634
2635 2635 .grid-filter-box-input {
2636 2636 margin-right: 0;
2637 2637
2638 2638 input {
2639 2639 border: 1px solid @white;
2640 2640 padding-left: 25px;
2641 2641 width: 145px;
2642 2642
2643 2643 &:hover {
2644 2644 border-color: @grey6;
2645 2645 }
2646 2646
2647 2647 &:focus {
2648 2648 border-color: @grey5;
2649 2649 }
2650 2650 }
2651 2651 }
2652 2652
2653 2653
2654 2654
2655 2655 // Search
2656 2656
2657 2657 .search-form{
2658 2658 #q {
2659 2659 width: @search-form-width;
2660 2660 }
2661 2661 .fields{
2662 2662 margin: 0 0 @space;
2663 2663 }
2664 2664
2665 2665 label{
2666 2666 display: inline-block;
2667 2667 margin-right: @textmargin;
2668 2668 padding-top: 0.25em;
2669 2669 }
2670 2670
2671 2671
2672 2672 .results{
2673 2673 clear: both;
2674 2674 margin: 0 0 @padding;
2675 2675 }
2676 2676
2677 2677 .search-tags {
2678 2678 padding: 5px 0;
2679 2679 }
2680 2680 }
2681 2681
2682 2682 div.search-feedback-items {
2683 2683 display: inline-block;
2684 2684 }
2685 2685
2686 2686 div.search-code-body {
2687 2687 background-color: #ffffff; padding: 5px 0 5px 10px;
2688 2688 pre {
2689 2689 .match { background-color: #faffa6;}
2690 2690 .break { display: block; width: 100%; background-color: #DDE7EF; color: #747474; }
2691 2691 }
2692 2692 }
2693 2693
2694 2694 .expand_commit.search {
2695 2695 .show_more.open {
2696 2696 height: auto;
2697 2697 max-height: none;
2698 2698 }
2699 2699 }
2700 2700
2701 2701 .search-results {
2702 2702
2703 2703 h2 {
2704 2704 margin-bottom: 0;
2705 2705 }
2706 2706 .codeblock {
2707 2707 border: none;
2708 2708 background: transparent;
2709 2709 }
2710 2710
2711 2711 .codeblock-header {
2712 2712 border: none;
2713 2713 background: transparent;
2714 2714 }
2715 2715
2716 2716 .code-body {
2717 2717 border: @border-thickness solid @grey6;
2718 2718 .border-radius(@border-radius);
2719 2719 }
2720 2720
2721 2721 .td-commit {
2722 2722 &:extend(pre);
2723 2723 border-bottom: @border-thickness solid @border-default-color;
2724 2724 }
2725 2725
2726 2726 .message {
2727 2727 height: auto;
2728 2728 max-width: 350px;
2729 2729 white-space: normal;
2730 2730 text-overflow: initial;
2731 2731 overflow: visible;
2732 2732
2733 2733 .match { background-color: #faffa6;}
2734 2734 .break { background-color: #DDE7EF; width: 100%; color: #747474; display: block; }
2735 2735 }
2736 2736
2737 2737 .path {
2738 2738 border-bottom: none !important;
2739 2739 border-left: 1px solid @grey6 !important;
2740 2740 border-right: 1px solid @grey6 !important;
2741 2741 }
2742 2742 }
2743 2743
2744 2744 table.rctable td.td-search-results div {
2745 2745 max-width: 100%;
2746 2746 }
2747 2747
2748 2748 #tip-box, .tip-box{
2749 2749 padding: @menupadding/2;
2750 2750 display: block;
2751 2751 border: @border-thickness solid @border-highlight-color;
2752 2752 .border-radius(@border-radius);
2753 2753 background-color: white;
2754 2754 z-index: 99;
2755 2755 white-space: pre-wrap;
2756 2756 }
2757 2757
2758 2758 #linktt {
2759 2759 width: 79px;
2760 2760 }
2761 2761
2762 2762 #help_kb .modal-content{
2763 2763 max-width: 750px;
2764 2764 margin: 10% auto;
2765 2765
2766 2766 table{
2767 2767 td,th{
2768 2768 border-bottom: none;
2769 2769 line-height: 2.5em;
2770 2770 }
2771 2771 th{
2772 2772 padding-bottom: @textmargin/2;
2773 2773 }
2774 2774 td.keys{
2775 2775 text-align: center;
2776 2776 }
2777 2777 }
2778 2778
2779 2779 .block-left{
2780 2780 width: 45%;
2781 2781 margin-right: 5%;
2782 2782 }
2783 2783 .modal-footer{
2784 2784 clear: both;
2785 2785 }
2786 2786 .key.tag{
2787 2787 padding: 0.5em;
2788 2788 background-color: @rcblue;
2789 2789 color: white;
2790 2790 border-color: @rcblue;
2791 2791 .box-shadow(none);
2792 2792 }
2793 2793 }
2794 2794
2795 2795
2796 2796
2797 2797 //--- IMPORTS FOR REFACTORED STYLES ------------------//
2798 2798
2799 2799 @import 'statistics-graph';
2800 2800 @import 'tables';
2801 2801 @import 'forms';
2802 2802 @import 'diff';
2803 2803 @import 'summary';
2804 2804 @import 'navigation';
2805 2805
2806 2806 //--- SHOW/HIDE SECTIONS --//
2807 2807
2808 2808 .btn-collapse {
2809 2809 float: right;
2810 2810 text-align: right;
2811 2811 font-family: @text-light;
2812 2812 font-size: @basefontsize;
2813 2813 cursor: pointer;
2814 2814 border: none;
2815 2815 color: @rcblue;
2816 2816 }
2817 2817
2818 2818 table.rctable,
2819 2819 table.dataTable {
2820 2820 .btn-collapse {
2821 2821 float: right;
2822 2822 text-align: right;
2823 2823 }
2824 2824 }
2825 2825
2826 2826 table.rctable {
2827 2827 &.permissions {
2828 2828
2829 2829 th.td-owner {
2830 2830 padding: 0;
2831 2831 }
2832 2832
2833 2833 th {
2834 2834 font-weight: normal;
2835 2835 padding: 0 5px;
2836 2836 }
2837 2837
2838 2838 }
2839 2839 }
2840 2840
2841 2841
2842 2842 // TODO: johbo: Fix for IE10, this avoids that we see a border
2843 2843 // and padding around checkboxes and radio boxes. Move to the right place,
2844 2844 // or better: Remove this once we did the form refactoring.
2845 2845 input[type=checkbox],
2846 2846 input[type=radio] {
2847 2847 padding: 0;
2848 2848 border: none;
2849 2849 }
2850 2850
2851 2851 .toggle-ajax-spinner{
2852 2852 height: 16px;
2853 2853 width: 16px;
2854 2854 }
2855 2855
2856 2856
2857 2857 .markup-form .clearfix {
2858 2858 .border-radius(@border-radius);
2859 2859 margin: 0px;
2860 2860 }
2861 2861
2862 2862 .markup-form-area {
2863 2863 padding: 8px 12px;
2864 2864 border: 1px solid @grey4;
2865 2865 .border-radius(@border-radius);
2866 2866 }
2867 2867
2868 2868 .markup-form-area-header .nav-links {
2869 2869 display: flex;
2870 2870 flex-flow: row wrap;
2871 2871 -webkit-flex-flow: row wrap;
2872 2872 width: 100%;
2873 2873 }
2874 2874
2875 2875 .markup-form-area-footer {
2876 2876 display: flex;
2877 2877 }
2878 2878
2879 2879 .markup-form-area-footer .toolbar {
2880 2880
2881 2881 }
2882 2882
2883 2883 // markup Form
2884 2884 div.markup-form {
2885 2885 margin-top: 20px;
2886 2886 }
2887 2887
2888 2888 .markup-form strong {
2889 2889 display: block;
2890 2890 margin-bottom: 15px;
2891 2891 }
2892 2892
2893 2893 .markup-form textarea {
2894 2894 width: 100%;
2895 2895 height: 100px;
2896 2896 font-family: @text-monospace;
2897 2897 }
2898 2898
2899 2899 form.markup-form {
2900 2900 margin-top: 10px;
2901 2901 margin-left: 10px;
2902 2902 }
2903 2903
2904 2904 .markup-form .comment-block-ta,
2905 2905 .markup-form .preview-box {
2906 2906 .border-radius(@border-radius);
2907 2907 .box-sizing(border-box);
2908 2908 background-color: white;
2909 2909 }
2910 2910
2911 2911 .markup-form .preview-box.unloaded {
2912 2912 height: 50px;
2913 2913 text-align: center;
2914 2914 padding: 20px;
2915 2915 background-color: white;
2916 2916 }
2917 2917
2918 2918
2919 2919 .dropzone-wrapper {
2920 2920 border: 1px solid @grey5;
2921 2921 padding: 20px;
2922 2922 }
2923 2923
2924 2924 .dropzone,
2925 2925 .dropzone-pure {
2926 2926 border: 2px dashed @grey5;
2927 2927 border-radius: 5px;
2928 2928 background: white;
2929 2929 min-height: 200px;
2930 2930 padding: 54px;
2931 2931
2932 2932 .dz-message {
2933 2933 font-weight: 700;
2934 2934 text-align: center;
2935 2935 margin: 2em 0;
2936 2936 }
2937 2937
2938 2938 }
2939 2939
2940 2940 .dz-preview {
2941 2941 margin: 10px 0 !important;
2942 2942 position: relative;
2943 2943 vertical-align: top;
2944 2944 padding: 10px;
2945 2945 border-bottom: 1px solid @grey5;
2946 2946 }
2947 2947
2948 2948 .dz-filename {
2949 2949 font-weight: 700;
2950 2950 float: left;
2951 2951 }
2952 2952
2953 2953 .dz-sending {
2954 2954 float: right;
2955 2955 }
2956 2956
2957 2957 .dz-response {
2958 2958 clear: both
2959 2959 }
2960 2960
2961 2961 .dz-filename-size {
2962 2962 float: right
2963 2963 }
2964 2964
2965 2965 .dz-error-message {
2966 2966 color: @alert2;
2967 2967 padding-top: 10px;
2968 2968 clear: both;
2969 2969 }
2970 2970
2971 2971
2972 2972 .user-hovercard {
2973 2973 padding: 5px;
2974 2974 }
2975 2975
2976 2976 .user-hovercard-icon {
2977 2977 display: inline;
2978 2978 padding: 0;
2979 2979 box-sizing: content-box;
2980 2980 border-radius: 50%;
2981 2981 float: left;
2982 2982 }
2983 2983
2984 2984 .user-hovercard-name {
2985 2985 float: right;
2986 2986 vertical-align: top;
2987 2987 padding-left: 10px;
2988 2988 min-width: 150px;
2989 2989 }
2990 2990
2991 2991 .user-hovercard-bio {
2992 2992 clear: both;
2993 2993 padding-top: 10px;
2994 2994 }
2995 2995
2996 2996 .user-hovercard-header {
2997 2997 clear: both;
2998 2998 min-height: 10px;
2999 2999 }
3000 3000
3001 3001 .user-hovercard-footer {
3002 3002 clear: both;
3003 3003 min-height: 10px;
3004 3004 }
3005 3005
3006 3006 .user-group-hovercard {
3007 3007 padding: 5px;
3008 3008 }
3009 3009
3010 3010 .user-group-hovercard-icon {
3011 3011 display: inline;
3012 3012 padding: 0;
3013 3013 box-sizing: content-box;
3014 3014 border-radius: 50%;
3015 3015 float: left;
3016 3016 }
3017 3017
3018 3018 .user-group-hovercard-name {
3019 3019 float: left;
3020 3020 vertical-align: top;
3021 3021 padding-left: 10px;
3022 3022 min-width: 150px;
3023 3023 }
3024 3024
3025 3025 .user-group-hovercard-icon i {
3026 3026 border: 1px solid @grey4;
3027 3027 border-radius: 4px;
3028 3028 }
3029 3029
3030 3030 .user-group-hovercard-bio {
3031 3031 clear: both;
3032 3032 padding-top: 10px;
3033 3033 line-height: 1.0em;
3034 3034 }
3035 3035
3036 3036 .user-group-hovercard-header {
3037 3037 clear: both;
3038 3038 min-height: 10px;
3039 3039 }
3040 3040
3041 3041 .user-group-hovercard-footer {
3042 3042 clear: both;
3043 3043 min-height: 10px;
3044 3044 }
3045 3045
3046 3046 .pr-hovercard-header {
3047 3047 clear: both;
3048 3048 display: block;
3049 3049 line-height: 20px;
3050 3050 }
3051 3051
3052 3052 .pr-hovercard-user {
3053 3053 display: flex;
3054 3054 align-items: center;
3055 3055 padding-left: 5px;
3056 3056 }
3057 3057
3058 3058 .pr-hovercard-title {
3059 3059 padding-top: 5px;
3060 3060 } No newline at end of file
@@ -1,234 +1,237 b''
1 1 // select2.less
2 2 // For use in RhodeCode application drop down select boxes;
3 3 // see style guide documentation for guidelines.
4 4
5 5
6 6 // SELECT2 DROPDOWN MENUS
7 7
8 8 //Select2 Dropdown
9 9 .select2-results{
10 10 .box-sizing(border-box);
11 11 overflow-y: scroll;
12 12 }
13 13
14 14 .select2-container{margin: 0; position: relative; display: inline-block; zoom: 1;}
15 15 .select2-container,
16 16 .select2-drop,
17 17 .select2-search,
18 18 .select2-search input {.box-sizing(border-box);}
19 19 .select2-container .select2-choice{display:block; line-height:1em; -webkit-touch-callout:none;-moz-user-select:none;-ms-user-select:none;user-select:none; }
20 20 .main .select2-container .select2-choice { background-color: white; box-shadow: @button-shadow;}
21 21 .select2-container .select2-choice abbr { display: none; width: 12px; height: 12px; position: absolute; right: 24px; top: 8px; font-size: 1px; text-decoration: none; border: 0; background: url('../images/select2.png') right top no-repeat; cursor: pointer; outline: 0; }
22 22 .select2-container.select2-allowclear .select2-choice abbr {display: inline-block;}
23 23 .select2-container .select2-choice abbr:hover { background-position: right -11px; cursor: pointer; }
24 24 .select2-drop-mask { border: 0; margin: 0; padding: 0; position: fixed; left: 0; top: 0; min-height: 100%; min-width: 100%; height: auto; width: auto; opacity: 0; z-index: 998; background-color: #fff; filter: alpha(opacity=0); }
25 25 .select2-drop { width: 100%; margin-top: -1px; position: absolute; z-index: 999; top: 100%; background: #fff; color: #000; border: @border-thickness solid @rcblue; border-top: 0; border-radius: 0 0 @border-radius @border-radius; }
26 26 .select2-drop.select2-drop-above { margin-top: 1px; border-top: @border-thickness solid @rclightblue; border-bottom: 0; border-radius: @border-radius @border-radius 0 0; }
27 27 .select2-drop-active { border: @border-thickness solid #5897fb; border-top: none; }
28 28 .select2-drop.select2-drop-above.select2-drop-active {border-top: @border-thickness solid #5897fb;}
29 29 .select2-drop-auto-width { border-top: @border-thickness solid #aaa; width: auto; }
30 30 .select2-drop-auto-width .select2-search {padding-top: 4px;}
31 31 html[dir="rtl"] .select2-container .select2-choice .select2-arrow { left: 0; right: auto; border-left: none; border-right: @border-thickness solid @grey5; border-radius: @border-radius 0 0 @border-radius; }
32 32 html[dir="rtl"] .select2-container .select2-choice .select2-arrow b {background-position: 2px 1px;}
33 33 .select2-search { display: inline-block; width: 100%; min-height: 26px; margin: 0; padding-left: 4px; padding-right: 4px; position: relative; z-index: 1000; white-space: nowrap; }
34 34 .select2-search input { width: 100%; height: auto !important; min-height: 26px; padding: 4px 20px 4px 5px; margin: 0; outline: 0; }
35 35 html[dir="rtl"] .select2-search input { padding: 4px 5px 4px 20px; background: #fff url('../images/select2.png') no-repeat -37px -22px; }
36 36 .select2-drop.select2-drop-above .select2-search input {margin-top: 4px;}
37 37 .select2-dropdown-open .select2-choice .select2-arrow { background: transparent; border-left: none; filter: none; }
38 38 html[dir="rtl"] .select2-dropdown-open .select2-choice .select2-arrow {border-right: none;}
39 39 .select2-hidden-accessible { border: 0; clip: rect(0 0 0 0); height: 1px; margin: -1px; overflow: hidden; padding: 0; position: absolute; width: 1px; }
40 40 /* results */
41 41 .select2-results { max-height: 200px; padding: 0 0 0 4px; margin: 4px 4px 4px 0; position: relative; overflow-x: hidden; overflow-y: auto; -webkit-tap-highlight-color: rgba(0, 0, 0, 0); }
42 42 html[dir="rtl"] .select2-results { padding: 0 4px 0 0; margin: 4px 0 4px 4px; }
43 43 .select2-results .select2-disabled{background:@grey6;display:list-item;cursor:default}
44 44 .select2-results .select2-selected{display:none}
45 45 .select2-more-results.select2-active{background:#f4f4f4 url('../images/select2-spinner.gif') no-repeat 100%}
46 46 .select2-container.select2-container-disabled .select2-choice abbr{display:none}
47 47 .select2-container.select2-container-disabled {background:@grey6;cursor:default}
48 48 .select2-container.select2-container-disabled .select2-choice {background:@grey6;cursor:default}
49 49 .select2-container-multi .select2-choices li{float:left;list-style:none}
50 50 .select2-container-multi .select2-choices .select2-search-field{margin:0;padding:0;white-space:nowrap}
51 51 .select2-container-multi .select2-choices .select2-search-choice .select2-chosen{cursor:default}
52 52 .select2-search-choice-close{display:block;width:12px;height:13px;position:absolute;right:3px;top:4px;font-size:1px;outline:none;background:url('../images/select2.png') right top no-repeat}
53 53 .select2-container-multi .select2-search-choice-close{left:3px}
54 54 .select2-container-multi .select2-choices .select2-search-choice .select2-search-choice-close:hover{background-position:right -11px}
55 55 .select2-container-multi .select2-choices .select2-search-choice-focus .select2-search-choice-close{background-position:right -11px}
56 56 .select2-container-multi.select2-container-disabled .select2-choices .select2-search-choice .select2-search-choice-close{display:none;background:none}
57 57 .select2-offscreen,.select2-offscreen:focus{clip:rect(0 0 0 0) !important;width:1px !important;height:1px !important;
58 58 border:0 !important;margin:0 !important;padding:0 !important;overflow:hidden !important;
59 59 position: absolute !important;outline:0 !important;left:0 !important;top:0 !important}
60 60 .select2-display-none,
61 61 .select2-search-hidden {display:none}
62 62 .select2-search input { border-color: @rclightblue; }
63 63
64 64 .select2-measure-scrollbar{position:absolute;top:-10000px;left:-10000px;width:100px;height:100px;overflow:scroll}
65 65 @media only screen and (-webkit-min-device-pixel-ratio:1.5),
66 66 only screen and (min-resolution:144dpi){
67 67 .select2-search input,
68 68 .select2-search-choice-close,
69 69 .select2-container .select2-choice abbr,
70 70 .select2-container .select2-choice .select2-arrow b{background-image:url('../images/select2x2.png');background-repeat:no-repeat;background-size:60px 40px;}
71 71 .select2-search input{background-position:100% -21px}
72 72 }
73 73 [class^="input-"] [class^="select2-choice"]>div{display:none}
74 74 [class^="input-"] .select2-offscreen{position:absolute}
75 75 select.select2{height:28px;visibility:hidden}
76 76 .autocomplete-suggestions{overflow:auto}
77 77 .autocomplete-suggestion{white-space:nowrap;overflow:hidden}
78 78
79 79 /* Retina-ize icons */
80 80 @media only screen and (-webkit-min-device-pixel-ratio:1.5),
81 81 only screen and (min-resolution:144dpi){
82 82 .select2-search input,
83 83 .select2-search-choice-close,
84 84 .select2-container .select2-choice abbr,
85 85 .select2-container .select2-choice .select2-arrow b{background-image:url('../images/select2x2.png');background-repeat:no-repeat;background-size:60px 40px;}
86 86 .select2-search input{background-position:100% -21px}
87 87 }
88 88
89 89 //Internal Select2 Dropdown Menus
90 90
91 91 .drop-menu-core {
92 92 min-width: 160px;
93 93 margin: 0 @padding 0 0;
94 94 padding: 0;
95 95 border: @border-thickness solid @grey5;
96 96 border-radius: @border-radius;
97 97 color: @grey2;
98 98 background-color: white;
99 99
100 100 a {
101 101 color: @grey2;
102 102
103 103 &:hover {
104 104 color: @rcdarkblue;
105 105 }
106 106 }
107 107 }
108 108
109 109 .drop-menu-dropdown {
110 110 .drop-menu-core;
111 111 }
112 112
113 113 .drop-menu-base {
114 114 .drop-menu-core;
115 115 position: relative;
116 116 display: inline-block;
117 117 line-height: 1em;
118 118 z-index: 2;
119 119 cursor: pointer;
120 120
121 121 a {
122 122 display:block;
123 123 padding: .7em;
124 124 padding-right: 2em;
125 125 position: relative;
126 126
127 127 &:after {
128 128 position: absolute;
129 129 content: "\00A0\25BE";
130 130 right: .1em;
131 131 line-height: 1em;
132 132 top: 0.2em;
133 133 width: 1em;
134 134 font-size: 20px;
135 135 }
136 136 }
137 137 }
138 138
139 139 .drop-menu {
140 140 .drop-menu-base;
141 141 width: auto !important;
142 142 }
143 143
144 144 .drop-menu-no-width {
145 145 .drop-menu-base;
146 146 width: auto;
147 147 min-width: 0;
148 148 margin: 0;
149 149 }
150 150
151 151 .field-sm .drop-menu {
152 152 padding: 1px 0 0 0;
153 153 a {
154 154 padding: 6px;
155 155 };
156 156 }
157 157
158 158 .select2-search input {
159 159 width: 100%;
160 160 margin: .5em 0;
161 161 padding: .5em;
162 162 border-color: @grey4;
163 163
164 164 &:focus, &:hover {
165 165 border-color: @rcblue;
166 166 box-shadow: @button-shadow;
167 167 }
168 168 }
169 169
170 170 .select2-no-results {
171 171 padding: .5em;
172 172 }
173 173
174 174 .drop-menu-dropdown ul {
175 175 width: auto;
176 176 margin: 0;
177 177 padding: 0;
178 178 z-index: 50;
179 179
180 180 li {
181 181 margin: 0;
182 182 line-height: 1em;
183 183 list-style-type: none;
184 184
185 185 &:hover,
186 186 &.select2-highlighted {
187 187 background-color: @grey7;
188 188
189 189 .select2-result-label {
190 190 &:hover {
191 191 color: @grey1!important;
192 192 }
193 193 }
194 194 }
195 195
196 196 &.select2-result-with-children {
197 197 &:hover {
198 198 background-color: white;
199 199 }
200 200 }
201 201
202 202 .select2-result-label {
203 203 display:block;
204 204 padding: 8px;
205 205 font-family: @text-regular;
206 206 color: @grey2;
207 207 cursor: pointer;
208 white-space: nowrap;
208 209 }
210
209 211 &.select2-result-with-children {
210 212
211 213 .select2-result-label {
212 214 color: @rcdarkblue;
213 215 cursor: default;
214 216 font-weight: @text-semibold-weight;
215 217 font-family: @text-semibold;
216 218 }
217 219
218 220 ul.select2-result-sub li .select2-result-label {
219 221 padding-left: 16px;
220 222 font-family: @text-regular;
221 223 color: @grey2;
222 224 cursor: pointer;
225 white-space: nowrap;
223 226 }
224 227 }
225 228 }
226 229 }
227 230
228 231 .side-by-side-selector {
229 232 .left-group,
230 233 .middle-group,
231 234 .right-group {
232 235 margin-bottom: @padding;
233 236 }
234 237 }
@@ -1,36 +1,38 b''
1 1 // # Copyright (C) 2016-2020 RhodeCode GmbH
2 2 // #
3 3 // # This program is free software: you can redistribute it and/or modify
4 4 // # it under the terms of the GNU Affero General Public License, version 3
5 5 // # (only), as published by the Free Software Foundation.
6 6 // #
7 7 // # This program is distributed in the hope that it will be useful,
8 8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 // # GNU General Public License for more details.
11 11 // #
12 12 // # You should have received a copy of the GNU Affero General Public License
13 13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 // #
15 15 // # This program is dual-licensed. If you wish to learn more about the
16 16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 i18nLog = Logger.get('i18n');
20 20
21 21 var _gettext = function (s) {
22 22 if (_TM.hasOwnProperty(s)) {
23 23 return _TM[s];
24 24 }
25 i18nLog.error(
25 i18nLog.warning(
26 26 'String `' + s + '` was requested but cannot be ' +
27 27 'found in translation table');
28 28 return s
29 29 };
30 30
31 31 var _ngettext = function (singular, plural, n) {
32 32 if (n === 1) {
33 33 return _gettext(singular)
34 34 }
35 35 return _gettext(plural)
36 36 };
37
38
@@ -1,624 +1,657 b''
1 1 // # Copyright (C) 2010-2020 RhodeCode GmbH
2 2 // #
3 3 // # This program is free software: you can redistribute it and/or modify
4 4 // # it under the terms of the GNU Affero General Public License, version 3
5 5 // # (only), as published by the Free Software Foundation.
6 6 // #
7 7 // # This program is distributed in the hope that it will be useful,
8 8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 // # GNU General Public License for more details.
11 11 // #
12 12 // # You should have received a copy of the GNU Affero General Public License
13 13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 // #
15 15 // # This program is dual-licensed. If you wish to learn more about the
16 16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
20 20 var prButtonLockChecks = {
21 21 'compare': false,
22 22 'reviewers': false
23 23 };
24 24
25 25 /**
26 26 * lock button until all checks and loads are made. E.g reviewer calculation
27 27 * should prevent from submitting a PR
28 28 * @param lockEnabled
29 29 * @param msg
30 30 * @param scope
31 31 */
32 32 var prButtonLock = function(lockEnabled, msg, scope) {
33 33 scope = scope || 'all';
34 34 if (scope == 'all'){
35 35 prButtonLockChecks['compare'] = !lockEnabled;
36 36 prButtonLockChecks['reviewers'] = !lockEnabled;
37 37 } else if (scope == 'compare') {
38 38 prButtonLockChecks['compare'] = !lockEnabled;
39 39 } else if (scope == 'reviewers'){
40 40 prButtonLockChecks['reviewers'] = !lockEnabled;
41 41 }
42 42 var checksMeet = prButtonLockChecks.compare && prButtonLockChecks.reviewers;
43 43 if (lockEnabled) {
44 44 $('#pr_submit').attr('disabled', 'disabled');
45 45 }
46 46 else if (checksMeet) {
47 47 $('#pr_submit').removeAttr('disabled');
48 48 }
49 49
50 50 if (msg) {
51 51 $('#pr_open_message').html(msg);
52 52 }
53 53 };
54 54
55 55
56 56 /**
57 57 Generate Title and Description for a PullRequest.
58 58 In case of 1 commits, the title and description is that one commit
59 59 in case of multiple commits, we iterate on them with max N number of commits,
60 60 and build description in a form
61 61 - commitN
62 62 - commitN+1
63 63 ...
64 64
65 65 Title is then constructed from branch names, or other references,
66 66 replacing '-' and '_' into spaces
67 67
68 68 * @param sourceRef
69 69 * @param elements
70 70 * @param limit
71 71 * @returns {*[]}
72 72 */
73 73 var getTitleAndDescription = function(sourceRef, elements, limit) {
74 74 var title = '';
75 75 var desc = '';
76 76
77 77 $.each($(elements).get().reverse().slice(0, limit), function(idx, value) {
78 var rawMessage = $(value).find('td.td-description .message').data('messageRaw').toString();
78 var rawMessage = value['message'];
79 79 desc += '- ' + rawMessage.split('\n')[0].replace(/\n+$/, "") + '\n';
80 80 });
81 81 // only 1 commit, use commit message as title
82 82 if (elements.length === 1) {
83 var rawMessage = $(elements[0]).find('td.td-description .message').data('messageRaw').toString();
83 var rawMessage = elements[0]['message'];
84 84 title = rawMessage.split('\n')[0];
85 85 }
86 86 else {
87 87 // use reference name
88 88 title = sourceRef.replace(/-/g, ' ').replace(/_/g, ' ').capitalizeFirstLetter();
89 89 }
90 90
91 91 return [title, desc]
92 92 };
93 93
94 94
95
96 95 ReviewersController = function () {
97 96 var self = this;
98 97 this.$reviewRulesContainer = $('#review_rules');
99 98 this.$rulesList = this.$reviewRulesContainer.find('.pr-reviewer-rules');
100 99 this.forbidReviewUsers = undefined;
101 100 this.$reviewMembers = $('#review_members');
102 101 this.currentRequest = null;
102 this.diffData = null;
103 //dummy handler, we might register our own later
104 this.diffDataHandler = function(data){};
103 105
104 this.defaultForbidReviewUsers = function() {
106 this.defaultForbidReviewUsers = function () {
105 107 return [
106 {'username': 'default',
107 'user_id': templateContext.default_user.user_id}
108 {
109 'username': 'default',
110 'user_id': templateContext.default_user.user_id
111 }
108 112 ];
109 113 };
110 114
111 this.hideReviewRules = function() {
115 this.hideReviewRules = function () {
112 116 self.$reviewRulesContainer.hide();
113 117 };
114 118
115 this.showReviewRules = function() {
119 this.showReviewRules = function () {
116 120 self.$reviewRulesContainer.show();
117 121 };
118 122
119 this.addRule = function(ruleText) {
123 this.addRule = function (ruleText) {
120 124 self.showReviewRules();
121 125 return '<div>- {0}</div>'.format(ruleText)
122 126 };
123 127
124 this.loadReviewRules = function(data) {
128 this.loadReviewRules = function (data) {
129 self.diffData = data;
130
125 131 // reset forbidden Users
126 132 this.forbidReviewUsers = self.defaultForbidReviewUsers();
127 133
128 134 // reset state of review rules
129 135 self.$rulesList.html('');
130 136
131 137 if (!data || data.rules === undefined || $.isEmptyObject(data.rules)) {
132 138 // default rule, case for older repo that don't have any rules stored
133 139 self.$rulesList.append(
134 140 self.addRule(
135 141 _gettext('All reviewers must vote.'))
136 142 );
137 143 return self.forbidReviewUsers
138 144 }
139 145
140 146 if (data.rules.voting !== undefined) {
141 147 if (data.rules.voting < 0) {
142 148 self.$rulesList.append(
143 149 self.addRule(
144 _gettext('All individual reviewers must vote.'))
150 _gettext('All individual reviewers must vote.'))
145 151 )
146 152 } else if (data.rules.voting === 1) {
147 153 self.$rulesList.append(
148 154 self.addRule(
149 155 _gettext('At least {0} reviewer must vote.').format(data.rules.voting))
150 156 )
151 157
152 158 } else {
153 159 self.$rulesList.append(
154 160 self.addRule(
155 161 _gettext('At least {0} reviewers must vote.').format(data.rules.voting))
156 162 )
157 163 }
158 164 }
159 165
160 166 if (data.rules.voting_groups !== undefined) {
161 $.each(data.rules.voting_groups, function(index, rule_data) {
167 $.each(data.rules.voting_groups, function (index, rule_data) {
162 168 self.$rulesList.append(
163 169 self.addRule(rule_data.text)
164 170 )
165 171 });
166 172 }
167 173
168 174 if (data.rules.use_code_authors_for_review) {
169 175 self.$rulesList.append(
170 176 self.addRule(
171 177 _gettext('Reviewers picked from source code changes.'))
172 178 )
173 179 }
174 180 if (data.rules.forbid_adding_reviewers) {
175 181 $('#add_reviewer_input').remove();
176 182 self.$rulesList.append(
177 183 self.addRule(
178 184 _gettext('Adding new reviewers is forbidden.'))
179 185 )
180 186 }
181 187 if (data.rules.forbid_author_to_review) {
182 188 self.forbidReviewUsers.push(data.rules_data.pr_author);
183 189 self.$rulesList.append(
184 190 self.addRule(
185 191 _gettext('Author is not allowed to be a reviewer.'))
186 192 )
187 193 }
188 194 if (data.rules.forbid_commit_author_to_review) {
189 195
190 196 if (data.rules_data.forbidden_users) {
191 $.each(data.rules_data.forbidden_users, function(index, member_data) {
197 $.each(data.rules_data.forbidden_users, function (index, member_data) {
192 198 self.forbidReviewUsers.push(member_data)
193 199 });
194 200
195 201 }
196 202
197 203 self.$rulesList.append(
198 204 self.addRule(
199 205 _gettext('Commit Authors are not allowed to be a reviewer.'))
200 206 )
201 207 }
202 208
203 209 return self.forbidReviewUsers
204 210 };
205 211
206 this.loadDefaultReviewers = function(sourceRepo, sourceRef, targetRepo, targetRef) {
212 this.loadDefaultReviewers = function (sourceRepo, sourceRef, targetRepo, targetRef) {
207 213
208 214 if (self.currentRequest) {
209 // make sure we cleanup old running requests before triggering this
210 // again
215 // make sure we cleanup old running requests before triggering this again
211 216 self.currentRequest.abort();
212 217 }
213 218
214 219 $('.calculate-reviewers').show();
215 220 // reset reviewer members
216 221 self.$reviewMembers.empty();
217 222
218 223 prButtonLock(true, null, 'reviewers');
219 224 $('#user').hide(); // hide user autocomplete before load
220 225
226 // lock PR button, so we cannot send PR before it's calculated
227 prButtonLock(true, _gettext('Loading diff ...'), 'compare');
228
221 229 if (sourceRef.length !== 3 || targetRef.length !== 3) {
222 230 // don't load defaults in case we're missing some refs...
223 231 $('.calculate-reviewers').hide();
224 232 return
225 233 }
226 234
227 235 var url = pyroutes.url('repo_default_reviewers_data',
228 {
229 'repo_name': templateContext.repo_name,
230 'source_repo': sourceRepo,
231 'source_ref': sourceRef[2],
232 'target_repo': targetRepo,
233 'target_ref': targetRef[2]
234 });
236 {
237 'repo_name': templateContext.repo_name,
238 'source_repo': sourceRepo,
239 'source_ref': sourceRef[2],
240 'target_repo': targetRepo,
241 'target_ref': targetRef[2]
242 });
235 243
236 self.currentRequest = $.get(url)
237 .done(function(data) {
244 self.currentRequest = $.ajax({
245 url: url,
246 headers: {'X-PARTIAL-XHR': true},
247 type: 'GET',
248 success: function (data) {
249
238 250 self.currentRequest = null;
239 251
240 252 // review rules
241 253 self.loadReviewRules(data);
254 self.handleDiffData(data["diff_info"]);
242 255
243 256 for (var i = 0; i < data.reviewers.length; i++) {
244 var reviewer = data.reviewers[i];
245 self.addReviewMember(
246 reviewer, reviewer.reasons, reviewer.mandatory);
257 var reviewer = data.reviewers[i];
258 self.addReviewMember(reviewer, reviewer.reasons, reviewer.mandatory);
247 259 }
248 260 $('.calculate-reviewers').hide();
249 261 prButtonLock(false, null, 'reviewers');
250 262 $('#user').show(); // show user autocomplete after load
251 });
263
264 var commitElements = data["diff_info"]['commits'];
265 if (commitElements.length === 0) {
266 prButtonLock(true, _gettext('no commits'), 'all');
267
268 } else {
269 // un-lock PR button, so we cannot send PR before it's calculated
270 prButtonLock(false, null, 'compare');
271 }
272
273 },
274 error: function (jqXHR, textStatus, errorThrown) {
275 var prefix = "Loading diff and reviewers failed\n"
276 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
277 ajaxErrorSwal(message);
278 }
279 });
280
252 281 };
253 282
254 283 // check those, refactor
255 this.removeReviewMember = function(reviewer_id, mark_delete) {
284 this.removeReviewMember = function (reviewer_id, mark_delete) {
256 285 var reviewer = $('#reviewer_{0}'.format(reviewer_id));
257 286
258 if(typeof(mark_delete) === undefined){
287 if (typeof (mark_delete) === undefined) {
259 288 mark_delete = false;
260 289 }
261 290
262 if(mark_delete === true){
263 if (reviewer){
291 if (mark_delete === true) {
292 if (reviewer) {
264 293 // now delete the input
265 294 $('#reviewer_{0} input'.format(reviewer_id)).remove();
266 295 // mark as to-delete
267 296 var obj = $('#reviewer_{0}_name'.format(reviewer_id));
268 297 obj.addClass('to-delete');
269 obj.css({"text-decoration":"line-through", "opacity": 0.5});
298 obj.css({"text-decoration": "line-through", "opacity": 0.5});
270 299 }
271 }
272 else{
300 } else {
273 301 $('#reviewer_{0}'.format(reviewer_id)).remove();
274 302 }
275 303 };
276 this.reviewMemberEntry = function() {
304
305 this.reviewMemberEntry = function () {
277 306
278 307 };
279 this.addReviewMember = function(reviewer_obj, reasons, mandatory) {
308
309 this.addReviewMember = function (reviewer_obj, reasons, mandatory) {
280 310 var members = self.$reviewMembers.get(0);
281 311 var id = reviewer_obj.user_id;
282 312 var username = reviewer_obj.username;
283 313
284 314 var reasons = reasons || [];
285 315 var mandatory = mandatory || false;
286 316
287 317 // register IDS to check if we don't have this ID already in
288 318 var currentIds = [];
289 319 var _els = self.$reviewMembers.find('li').toArray();
290 for (el in _els){
320 for (el in _els) {
291 321 currentIds.push(_els[el].id)
292 322 }
293 323
294 var userAllowedReview = function(userId) {
324 var userAllowedReview = function (userId) {
295 325 var allowed = true;
296 $.each(self.forbidReviewUsers, function(index, member_data) {
326 $.each(self.forbidReviewUsers, function (index, member_data) {
297 327 if (parseInt(userId) === member_data['user_id']) {
298 328 allowed = false;
299 329 return false // breaks the loop
300 330 }
301 331 });
302 332 return allowed
303 333 };
304 334
305 335 var userAllowed = userAllowedReview(id);
306 if (!userAllowed){
307 alert(_gettext('User `{0}` not allowed to be a reviewer').format(username));
336 if (!userAllowed) {
337 alert(_gettext('User `{0}` not allowed to be a reviewer').format(username));
308 338 } else {
309 339 // only add if it's not there
310 var alreadyReviewer = currentIds.indexOf('reviewer_'+id) != -1;
340 var alreadyReviewer = currentIds.indexOf('reviewer_' + id) != -1;
311 341
312 342 if (alreadyReviewer) {
313 343 alert(_gettext('User `{0}` already in reviewers').format(username));
314 344 } else {
315 345 members.innerHTML += renderTemplate('reviewMemberEntry', {
316 'member': reviewer_obj,
317 'mandatory': mandatory,
318 'allowed_to_update': true,
319 'review_status': 'not_reviewed',
320 'review_status_label': _gettext('Not Reviewed'),
321 'reasons': reasons,
322 'create': true
323 });
346 'member': reviewer_obj,
347 'mandatory': mandatory,
348 'allowed_to_update': true,
349 'review_status': 'not_reviewed',
350 'review_status_label': _gettext('Not Reviewed'),
351 'reasons': reasons,
352 'create': true
353 });
324 354 tooltipActivate();
325 355 }
326 356 }
327 357
328 358 };
329 359
330 this.updateReviewers = function(repo_name, pull_request_id){
360 this.updateReviewers = function (repo_name, pull_request_id) {
331 361 var postData = $('#reviewers input').serialize();
332 362 _updatePullRequest(repo_name, pull_request_id, postData);
333 363 };
334 364
365 this.handleDiffData = function (data) {
366 self.diffDataHandler(data)
367 }
335 368 };
336 369
337 370
var _updatePullRequest = function (repo_name, pull_request_id, postData) {
    // POST `postData` to the pullrequest_update endpoint, attaching the
    // CSRF token in whichever form the payload uses (query string or object).
    var url = pyroutes.url(
        'pullrequest_update',
        {"repo_name": repo_name, "pull_request_id": pull_request_id});

    if (typeof postData === 'string') {
        postData += '&csrf_token=' + CSRF_TOKEN;
    } else {
        postData.csrf_token = CSRF_TOKEN;
    }

    var onDone = function (response) {
        // follow the redirect the server asks for, otherwise re-render in place
        var redirectUrl = response['redirect_url'];
        var hasRedirect = (redirectUrl !== undefined
                           && redirectUrl !== null
                           && redirectUrl !== '');
        if (hasRedirect) {
            window.location = redirectUrl;
        } else {
            window.location.reload();
        }
    };

    ajaxPOST(url, postData, onDone);
};
359 392
/**
 * PULL REQUEST update commits
 */
var updateCommits = function (repo_name, pull_request_id, force) {
    // ask the backend to refresh the PR's commit list; force=true
    // additionally requests a forced refresh
    var payload = {'update_commits': true};
    if (force === true) {
        payload['force_refresh'] = true
    }
    _updatePullRequest(repo_name, pull_request_id, payload);
};
372 405
373 406
/**
 * PULL REQUEST edit info
 */
var editPullRequest = function (repo_name, pull_request_id, title, description, renderer) {
    // persist edited title/description (with its renderer) on the PR
    var url = pyroutes.url(
        'pullrequest_update',
        {"repo_name": repo_name, "pull_request_id": pull_request_id});

    var payload = {
        'title': title,
        'description': description,
        'description_renderer': renderer,
        'edit_pull_request': true,
        'csrf_token': CSRF_TOKEN
    };

    // a successful edit simply re-renders the page
    var reloadPage = function (resp) {
        window.location.reload();
    };

    ajaxPOST(url, payload, reloadPage);
};
394 427
395 428
/**
 * Reviewer autocomplete
 */
var ReviewerAutoComplete = function (inputId) {
    // handle a pick from the autocomplete dropdown: a user group expands to
    // all of its members, a single user is added directly
    var handleSelection = function (element, data) {
        var mandatory = false;
        var reasons = [_gettext('added manually by "{0}"').format(
            templateContext.rhodecode_user.username)];

        if (data.value_type == 'user_group') {
            // add whole user groups
            reasons.push(_gettext('member of "{0}"').format(data.value_display));

            $.each(data.members, function (index, member_data) {
                var reviewer = member_data;
                reviewer['user_id'] = member_data['id'];
                reviewer['gravatar_link'] = member_data['icon_link'];
                reviewer['user_link'] = member_data['profile_link'];
                reviewer['rules'] = [];
                reviewersController.addReviewMember(reviewer, reasons, mandatory);
            })
        } else {
            // add single user
            var reviewer = data;
            reviewer['user_id'] = data['id'];
            reviewer['gravatar_link'] = data['icon_link'];
            reviewer['user_link'] = data['profile_link'];
            reviewer['rules'] = [];
            reviewersController.addReviewMember(reviewer, reasons, mandatory);
        }

        $(inputId).val('');
    };

    $(inputId).autocomplete({
        serviceUrl: pyroutes.url('user_autocomplete_data'),
        minChars: 2,
        maxHeight: 400,
        deferRequestBy: 300, // milliseconds
        showNoSuggestionNotice: true,
        tabDisabled: true,
        autoSelectFirst: true,
        params: {
            user_id: templateContext.rhodecode_user.user_id,
            user_groups: true,
            user_groups_expand: true,
            skip_default_user: true
        },
        formatResult: autocompleteFormatResult,
        lookupFilter: autocompleteFilterResult,
        onSelect: handleSelection
    });
};
442 475
443 476
VersionController = function () {
    // Controls the PR-version radio selectors and the "show version diff"
    // button on the pull-request page.
    var self = this;
    this.$verSource = $('input[name=ver_source]');
    this.$verTarget = $('input[name=ver_target]');
    this.$showVersionDiff = $('#show-version-diff');

    this.adjustRadioSelectors = function (curNode) {
        // 'latest' must sort after every numeric version
        var toNumber = function (raw) {
            if (raw == 'latest') {
                return Number.MAX_SAFE_INTEGER
            } else {
                return parseInt(raw)
            }
        };

        var selectedVal = toNumber($(curNode).val());
        var activeWasCleared = false;

        // grey-out and disable every source version newer than the target
        $.each(self.$verSource, function (idx, el) {
            var elVal = toNumber($(el).val());

            if (elVal > selectedVal) {
                if ($(el).is(':checked')) {
                    activeWasCleared = true;
                }
                $(el).attr('disabled', 'disabled');
                $(el).removeAttr('checked');
                $(el).css({'opacity': 0.1});
            } else {
                $(el).css({'opacity': 1});
                $(el).removeAttr('disabled');
            }
        });

        if (activeWasCleared) {
            // if we unchecked an active, set the next one to same loc.
            $(this.$verSource).filter('[value={0}]'.format(
                selectedVal)).attr('checked', 'checked');
        }

        self.setLockAction(false,
            $(curNode).data('verPos'),
            $(this.$verSource).filter(':checked').data('verPos')
        );
    };

    this.attachVersionListener = function () {
        self.$verTarget.change(function (e) {
            self.adjustRadioSelectors(this)
        });
        self.$verSource.change(function (e) {
            self.adjustRadioSelectors(self.$verTarget.filter(':checked'))
        });
    };

    this.init = function () {
        var checkedTarget = self.$verTarget.filter(':checked');
        self.adjustRadioSelectors(checkedTarget);
        self.setLockAction(true);
        self.attachVersionListener();
    };

    this.setLockAction = function (state, selectedVersion, otherVersion) {
        // toggle the "show version diff" button between locked and active,
        // swapping its label between show/diff mode as appropriate
        var $showVersionDiff = this.$showVersionDiff;

        if (state) {
            $showVersionDiff.attr('disabled', 'disabled');
            $showVersionDiff.addClass('disabled');
            $showVersionDiff.html($showVersionDiff.data('labelTextLocked'));
        } else {
            $showVersionDiff.removeAttr('disabled');
            $showVersionDiff.removeClass('disabled');

            if (selectedVersion == otherVersion) {
                $showVersionDiff.html($showVersionDiff.data('labelTextShow'));
            } else {
                $showVersionDiff.html($showVersionDiff.data('labelTextDiff'));
            }
        }
    };

    this.showVersionDiff = function () {
        var target = self.$verTarget.filter(':checked');
        var source = self.$verSource.filter(':checked');

        if (target.val() && source.val()) {
            window.location = pyroutes.url('pullrequest_show', {
                'pull_request_id': templateContext.pull_request_data.pull_request_id,
                'repo_name': templateContext.repo_name,
                'version': target.val(),
                'from_version': source.val()
            });
        }

        return false;
    };

    this.toggleVersionView = function (elem) {
        if (this.$showVersionDiff.is(':visible')) {
            $('.version-pr').hide();
            this.$showVersionDiff.hide();
            $(elem).html($(elem).data('toggleOn'))
        } else {
            $('.version-pr').show();
            this.$showVersionDiff.show();
            $(elem).html($(elem).data('toggleOff'))
        }

        return false
    };

    this.toggleElement = function (elem, target) {
        // generic show/hide toggle that also swaps the trigger's label
        var $elem = $(elem);
        var $target = $(target);

        if ($target.is(':visible')) {
            $target.hide();
            $elem.html($elem.data('toggleOn'))
        } else {
            $target.show();
            $elem.html($elem.data('toggleOff'))
        }

        return false
    }

};
580 613
581 614
UpdatePrController = function () {
    // Handles the "update commits" buttons on the pull-request page.
    var self = this;
    this.$updateCommits = $('#update_commits');
    this.$updateCommitsSwitcher = $('#update_commits_switcher');

    // disable both update controls and show `label` on the main button
    this.lockUpdateButton = function (label) {
        $.each([self.$updateCommits, self.$updateCommitsSwitcher], function (idx, $btn) {
            $btn.attr('disabled', 'disabled');
            $btn.addClass('disabled');
            $btn.removeClass('btn-primary');
        });
        self.$updateCommits.text(_gettext(label));
    };

    this.isUpdateLocked = function () {
        // the buttons carry a 'disabled' attribute while an update runs
        return self.$updateCommits.attr('disabled') !== undefined;
    };

    this.updateCommits = function (curNode) {
        if (self.isUpdateLocked()) {
            return
        }
        self.lockUpdateButton(_gettext('Updating...'));
        updateCommits(
            templateContext.repo_name,
            templateContext.pull_request_data.pull_request_id);
    };

    this.forceUpdateCommits = function () {
        if (self.isUpdateLocked()) {
            return
        }
        self.lockUpdateButton(_gettext('Force updating...'));
        updateCommits(
            templateContext.repo_name,
            templateContext.pull_request_data.pull_request_id, true);
    };
};
624 657 }; No newline at end of file
@@ -1,704 +1,705 b''
1 1 /*
2 2 Version: 3.5.2 Timestamp: Sat Nov 1 14:43:36 EDT 2014
3 3 */
4 4 .select2-container {
5 5 margin: 0;
6 6 position: relative;
7 7 display: inline-block;
8 8 /* inline-block for ie7 */
9 9 zoom: 1;
10 10 *display: inline;
11 11 vertical-align: middle;
12 12 }
13 13
14 14 .select2-container,
15 15 .select2-drop,
16 16 .select2-search,
17 17 .select2-search input {
18 18 /*
19 19 Force border-box so that % widths fit the parent
20 20 container without overlap because of margin/padding.
21 21 More Info : http://www.quirksmode.org/css/box.html
22 22 */
23 23 -webkit-box-sizing: border-box; /* webkit */
24 24 -moz-box-sizing: border-box; /* firefox */
25 25 box-sizing: border-box; /* css3 */
26 26 }
27 27
28 28 .select2-container .select2-choice {
29 29 display: block;
30 30 height: 26px;
31 31 padding: 0 0 0 8px;
32 32 overflow: hidden;
33 33 position: relative;
34 34
35 35 border: 1px solid #aaa;
36 36 white-space: nowrap;
37 37 line-height: 26px;
38 38 color: #444;
39 39 text-decoration: none;
40 40
41 41 border-radius: 4px;
42 42
43 43 background-clip: padding-box;
44 44
45 45 -webkit-touch-callout: none;
46 46 -webkit-user-select: none;
47 47 -moz-user-select: none;
48 48 -ms-user-select: none;
49 49 user-select: none;
50 50
51 51 background-color: #fff;
52 52 background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #eee), color-stop(0.5, #fff));
53 53 background-image: -webkit-linear-gradient(center bottom, #eee 0%, #fff 50%);
54 54 background-image: -moz-linear-gradient(center bottom, #eee 0%, #fff 50%);
55 55 filter: progid:DXImageTransform.Microsoft.gradient(startColorstr = '#ffffff', endColorstr = '#eeeeee', GradientType = 0);
56 56 background-image: linear-gradient(to top, #eee 0%, #fff 50%);
57 57 }
58 58
59 59 html[dir="rtl"] .select2-container .select2-choice {
60 60 padding: 0 8px 0 0;
61 61 }
62 62
63 63 .select2-container.select2-drop-above .select2-choice {
64 64 border-bottom-color: #aaa;
65 65
66 66 border-radius: 0 0 4px 4px;
67 67
68 68 background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #eee), color-stop(0.9, #fff));
69 69 background-image: -webkit-linear-gradient(center bottom, #eee 0%, #fff 90%);
70 70 background-image: -moz-linear-gradient(center bottom, #eee 0%, #fff 90%);
71 71 filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#eeeeee', GradientType=0);
72 72 background-image: linear-gradient(to bottom, #eee 0%, #fff 90%);
73 73 }
74 74
75 75 .select2-container.select2-allowclear .select2-choice .select2-chosen {
76 76 margin-right: 42px;
77 77 }
78 78
79 79 .select2-container .select2-choice > .select2-chosen {
80 80 margin-right: 26px;
81 81 display: block;
82 82 overflow: hidden;
83 83
84 84 white-space: nowrap;
85 85
86 86 text-overflow: ellipsis;
87 87 float: none;
88 88 width: auto;
89 89 }
90 90
91 91 html[dir="rtl"] .select2-container .select2-choice > .select2-chosen {
92 92 margin-left: 26px;
93 93 margin-right: 0;
94 94 }
95 95
96 96 .select2-container .select2-choice abbr {
97 97 display: none;
98 98 width: 12px;
99 99 height: 12px;
100 100 position: absolute;
101 101 right: 24px;
102 102 top: 8px;
103 103
104 104 font-size: 1px;
105 105 text-decoration: none;
106 106
107 107 border: 0;
108 108 background: url('../../images/select2.png') right top no-repeat;
109 109 cursor: pointer;
110 110 outline: 0;
111 111 }
112 112
113 113 .select2-container.select2-allowclear .select2-choice abbr {
114 114 display: inline-block;
115 115 }
116 116
117 117 .select2-container .select2-choice abbr:hover {
118 118 background-position: right -11px;
119 119 cursor: pointer;
120 120 }
121 121
122 122 .select2-drop-mask {
123 123 border: 0;
124 124 margin: 0;
125 125 padding: 0;
126 126 position: fixed;
127 127 left: 0;
128 128 top: 0;
129 129 min-height: 100%;
130 130 min-width: 100%;
131 131 height: auto;
132 132 width: auto;
133 133 opacity: 0;
134 134 z-index: 9998;
135 135 /* styles required for IE to work */
136 136 background-color: #fff;
137 137 filter: alpha(opacity=0);
138 138 }
139 139
140 140 .select2-drop {
141 141 width: 100%;
142 142 margin-top: -1px;
143 143 position: absolute;
144 144 z-index: 9999;
145 145 top: 100%;
146 146
147 147 background: #fff;
148 148 color: #000;
149 149 border: 1px solid #aaa;
150 150 border-top: 0;
151 151
152 152 border-radius: 0 0 4px 4px;
153 153
154 154 -webkit-box-shadow: 0 4px 5px rgba(0, 0, 0, .15);
155 155 box-shadow: 0 4px 5px rgba(0, 0, 0, .15);
156 156 }
157 157
158 158 .select2-drop.select2-drop-above {
159 159 margin-top: 1px;
160 160 border-top: 1px solid #aaa;
161 161 border-bottom: 0;
162 162
163 163 border-radius: 4px 4px 0 0;
164 164
165 165 -webkit-box-shadow: 0 -4px 5px rgba(0, 0, 0, .15);
166 166 box-shadow: 0 -4px 5px rgba(0, 0, 0, .15);
167 167 }
168 168
169 169 .select2-drop-active {
170 170 border: 1px solid #5897fb;
171 171 border-top: none;
172 172 }
173 173
174 174 .select2-drop.select2-drop-above.select2-drop-active {
175 175 border-top: 1px solid #5897fb;
176 176 }
177 177
178 178 .select2-drop-auto-width {
179 179 border-top: 1px solid #aaa;
180 180 width: auto;
181 181 }
182 182
183 183 .select2-drop-auto-width .select2-search {
184 184 padding-top: 4px;
185 185 }
186 186
187 187 .select2-container .select2-choice .select2-arrow {
188 188 display: inline-block;
189 189 width: 18px;
190 190 height: 100%;
191 191 position: absolute;
192 192 right: 0;
193 193 top: 0;
194 194
195 195 border-left: 1px solid #aaa;
196 196 border-radius: 0 4px 4px 0;
197 197
198 198 background-clip: padding-box;
199 199
200 200 background: #ccc;
201 201 background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #ccc), color-stop(0.6, #eee));
202 202 background-image: -webkit-linear-gradient(center bottom, #ccc 0%, #eee 60%);
203 203 background-image: -moz-linear-gradient(center bottom, #ccc 0%, #eee 60%);
204 204 filter: progid:DXImageTransform.Microsoft.gradient(startColorstr = '#eeeeee', endColorstr = '#cccccc', GradientType = 0);
205 205 background-image: linear-gradient(to top, #ccc 0%, #eee 60%);
206 206 }
207 207
208 208 html[dir="rtl"] .select2-container .select2-choice .select2-arrow {
209 209 left: 0;
210 210 right: auto;
211 211
212 212 border-left: none;
213 213 border-right: 1px solid #aaa;
214 214 border-radius: 4px 0 0 4px;
215 215 }
216 216
217 217 .select2-container .select2-choice .select2-arrow b {
218 218 display: block;
219 219 width: 100%;
220 220 height: 100%;
221 221 background: url('../../images/select2.png') no-repeat 0 1px;
222 222 }
223 223
224 224 html[dir="rtl"] .select2-container .select2-choice .select2-arrow b {
225 225 background-position: 2px 1px;
226 226 }
227 227
228 228 .select2-search {
229 229 display: inline-block;
230 230 width: 100%;
231 231 min-height: 26px;
232 232 margin: 0;
233 233 padding-left: 4px;
234 234 padding-right: 4px;
235 235
236 236 position: relative;
237 237 z-index: 10000;
238 238
239 239 white-space: nowrap;
240 240 }
241 241
242 242 .select2-search input {
243 243 width: 100%;
244 244 height: auto !important;
245 245 min-height: 26px;
246 246 padding: 4px 20px 4px 5px;
247 247 margin: 0;
248 248
249 249 outline: 0;
250 250 font-family: sans-serif;
251 251 font-size: 1em;
252 252
253 253 border: 1px solid #aaa;
254 254 border-radius: 0;
255 255
256 256 -webkit-box-shadow: none;
257 257 box-shadow: none;
258 258
259 259 background: #fff url('../../images/select2.png') no-repeat 100% -22px;
260 260 background: url('../../images/select2.png') no-repeat 100% -22px, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, #fff), color-stop(0.99, #eee));
261 261 background: url('../../images/select2.png') no-repeat 100% -22px, -webkit-linear-gradient(center bottom, #fff 85%, #eee 99%);
262 262 background: url('../../images/select2.png') no-repeat 100% -22px, -moz-linear-gradient(center bottom, #fff 85%, #eee 99%);
263 263 background: url('../../images/select2.png') no-repeat 100% -22px, linear-gradient(to bottom, #fff 85%, #eee 99%) 0 0;
264 264 }
265 265
266 266 html[dir="rtl"] .select2-search input {
267 267 padding: 4px 5px 4px 20px;
268 268
269 269 background: #fff url('../../images/select2.png') no-repeat -37px -22px;
270 270 background: url('../../images/select2.png') no-repeat -37px -22px, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, #fff), color-stop(0.99, #eee));
271 271 background: url('../../images/select2.png') no-repeat -37px -22px, -webkit-linear-gradient(center bottom, #fff 85%, #eee 99%);
272 272 background: url('../../images/select2.png') no-repeat -37px -22px, -moz-linear-gradient(center bottom, #fff 85%, #eee 99%);
273 273 background: url('../../images/select2.png') no-repeat -37px -22px, linear-gradient(to bottom, #fff 85%, #eee 99%) 0 0;
274 274 }
275 275
276 276 .select2-drop.select2-drop-above .select2-search input {
277 277 margin-top: 4px;
278 278 }
279 279
280 280 .select2-search input.select2-active {
281 281 background: #fff url('../../images/select2-spinner.gif') no-repeat 100%;
282 282 background: url('../../images/select2-spinner.gif') no-repeat 100%, -webkit-gradient(linear, left bottom, left top, color-stop(0.85, #fff), color-stop(0.99, #eee));
283 283 background: url('../../images/select2-spinner.gif') no-repeat 100%, -webkit-linear-gradient(center bottom, #fff 85%, #eee 99%);
284 284 background: url('../../images/select2-spinner.gif') no-repeat 100%, -moz-linear-gradient(center bottom, #fff 85%, #eee 99%);
285 285 background: url('../../images/select2-spinner.gif') no-repeat 100%, linear-gradient(to bottom, #fff 85%, #eee 99%) 0 0;
286 286 }
287 287
288 288 .select2-container-active .select2-choice,
289 289 .select2-container-active .select2-choices {
290 290 border: 1px solid #5897fb;
291 291 outline: none;
292 292
293 293 -webkit-box-shadow: 0 0 5px rgba(0, 0, 0, .3);
294 294 box-shadow: 0 0 5px rgba(0, 0, 0, .3);
295 295 }
296 296
297 297 .select2-dropdown-open .select2-choice {
298 298 border-bottom-color: transparent;
299 299 -webkit-box-shadow: 0 1px 0 #fff inset;
300 300 box-shadow: 0 1px 0 #fff inset;
301 301
302 302 border-bottom-left-radius: 0;
303 303 border-bottom-right-radius: 0;
304 304
305 305 background-color: #eee;
306 306 background-image: -webkit-gradient(linear, left bottom, left top, color-stop(0, #fff), color-stop(0.5, #eee));
307 307 background-image: -webkit-linear-gradient(center bottom, #fff 0%, #eee 50%);
308 308 background-image: -moz-linear-gradient(center bottom, #fff 0%, #eee 50%);
309 309 filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#ffffff', GradientType=0);
310 310 background-image: linear-gradient(to top, #fff 0%, #eee 50%);
311 311 }
312 312
313 313 .select2-dropdown-open.select2-drop-above .select2-choice,
314 314 .select2-dropdown-open.select2-drop-above .select2-choices {
315 315 border: 1px solid #5897fb;
316 316 border-top-color: transparent;
317 317
318 318 background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0, #fff), color-stop(0.5, #eee));
319 319 background-image: -webkit-linear-gradient(center top, #fff 0%, #eee 50%);
320 320 background-image: -moz-linear-gradient(center top, #fff 0%, #eee 50%);
321 321 filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#ffffff', GradientType=0);
322 322 background-image: linear-gradient(to bottom, #fff 0%, #eee 50%);
323 323 }
324 324
325 325 .select2-dropdown-open .select2-choice .select2-arrow {
326 326 background: transparent;
327 327 border-left: none;
328 328 filter: none;
329 329 }
330 330 html[dir="rtl"] .select2-dropdown-open .select2-choice .select2-arrow {
331 331 border-right: none;
332 332 }
333 333
334 334 .select2-dropdown-open .select2-choice .select2-arrow b {
335 335 background-position: -18px 1px;
336 336 }
337 337
338 338 html[dir="rtl"] .select2-dropdown-open .select2-choice .select2-arrow b {
339 339 background-position: -16px 1px;
340 340 }
341 341
342 342 .select2-hidden-accessible {
343 343 border: 0;
344 344 clip: rect(0 0 0 0);
345 345 height: 1px;
346 346 margin: -1px;
347 347 overflow: hidden;
348 348 padding: 0;
349 349 position: absolute;
350 350 width: 1px;
351 351 }
352 352
353 353 /* results */
354 354 .select2-results {
355 355 max-height: 200px;
356 356 padding: 0 0 0 4px;
357 357 margin: 4px 4px 4px 0;
358 358 position: relative;
359 359 overflow-x: hidden;
360 360 overflow-y: auto;
361 361 -webkit-tap-highlight-color: rgba(0, 0, 0, 0);
362 362 }
363 363
364 364 html[dir="rtl"] .select2-results {
365 365 padding: 0 4px 0 0;
366 366 margin: 4px 0 4px 4px;
367 367 }
368 368
369 369 .select2-results ul.select2-result-sub {
370 370 margin: 0;
371 371 padding-left: 0;
372 372 }
373 373
374 374 .select2-results li {
375 375 list-style: none;
376 376 display: list-item;
377 377 background-image: none;
378 378 }
379 379
380 380 .select2-results li.select2-result-with-children > .select2-result-label {
381 381 font-weight: bold;
382 382 }
383 383
384 384 .select2-results .select2-result-label {
385 385 padding: 3px 7px 4px;
386 386 margin: 0;
387 387 cursor: pointer;
388 388
389 389 min-height: 1em;
390 390
391 391 -webkit-touch-callout: none;
392 392 -webkit-user-select: none;
393 393 -moz-user-select: none;
394 394 -ms-user-select: none;
395 395 user-select: none;
396 white-space: nowrap;
396 397 }
397 398
398 399 .select2-results-dept-1 .select2-result-label { padding-left: 20px }
399 400 .select2-results-dept-2 .select2-result-label { padding-left: 40px }
400 401 .select2-results-dept-3 .select2-result-label { padding-left: 60px }
401 402 .select2-results-dept-4 .select2-result-label { padding-left: 80px }
402 403 .select2-results-dept-5 .select2-result-label { padding-left: 100px }
403 404 .select2-results-dept-6 .select2-result-label { padding-left: 110px }
404 405 .select2-results-dept-7 .select2-result-label { padding-left: 120px }
405 406
406 407 .select2-results .select2-highlighted {
407 408 background: #3875d7;
408 409 color: #fff;
409 410 }
410 411
411 412 .select2-results li em {
412 413 background: #feffde;
413 414 font-style: normal;
414 415 }
415 416
416 417 .select2-results .select2-highlighted em {
417 418 background: transparent;
418 419 }
419 420
420 421 .select2-results .select2-highlighted ul {
421 422 background: #fff;
422 423 color: #000;
423 424 }
424 425
425 426 .select2-results .select2-no-results,
426 427 .select2-results .select2-searching,
427 428 .select2-results .select2-ajax-error,
428 429 .select2-results .select2-selection-limit {
429 430 background: #f4f4f4;
430 431 display: list-item;
431 432 padding-left: 5px;
432 433 }
433 434
434 435 /*
435 436 disabled look for disabled choices in the results dropdown
436 437 */
437 438 .select2-results .select2-disabled.select2-highlighted {
438 439 color: #666;
439 440 background: #f4f4f4;
440 441 display: list-item;
441 442 cursor: default;
442 443 }
443 444 .select2-results .select2-disabled {
444 445 background: #f4f4f4;
445 446 display: list-item;
446 447 cursor: default;
447 448 }
448 449
449 450 .select2-results .select2-selected {
450 451 display: none;
451 452 }
452 453
453 454 .select2-more-results.select2-active {
454 455 background: #f4f4f4 url('../../images/select2-spinner.gif') no-repeat 100%;
455 456 }
456 457
457 458 .select2-results .select2-ajax-error {
458 459 background: rgba(255, 50, 50, .2);
459 460 }
460 461
461 462 .select2-more-results {
462 463 background: #f4f4f4;
463 464 display: list-item;
464 465 }
465 466
466 467 /* disabled styles */
467 468
468 469 .select2-container.select2-container-disabled .select2-choice {
469 470 background-color: #f4f4f4;
470 471 background-image: none;
471 472 border: 1px solid #ddd;
472 473 cursor: default;
473 474 }
474 475
475 476 .select2-container.select2-container-disabled .select2-choice .select2-arrow {
476 477 background-color: #f4f4f4;
477 478 background-image: none;
478 479 border-left: 0;
479 480 }
480 481
481 482 .select2-container.select2-container-disabled .select2-choice abbr {
482 483 display: none;
483 484 }
484 485
485 486
486 487 /* multiselect */
487 488
488 489 .select2-container-multi .select2-choices {
489 490 height: auto !important;
490 491 height: 1%;
491 492 margin: 0;
492 493 padding: 0 5px 0 0;
493 494 position: relative;
494 495
495 496 border: 1px solid #aaa;
496 497 cursor: text;
497 498 overflow: hidden;
498 499
499 500 background-color: #fff;
500 501 background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, color-stop(1%, #eee), color-stop(15%, #fff));
501 502 background-image: -webkit-linear-gradient(top, #eee 1%, #fff 15%);
502 503 background-image: -moz-linear-gradient(top, #eee 1%, #fff 15%);
503 504 background-image: linear-gradient(to bottom, #eee 1%, #fff 15%);
504 505 }
505 506
506 507 html[dir="rtl"] .select2-container-multi .select2-choices {
507 508 padding: 0 0 0 5px;
508 509 }
509 510
510 511 .select2-locked {
511 512 padding: 3px 5px 3px 5px !important;
512 513 }
513 514
514 515 .select2-container-multi .select2-choices {
515 516 min-height: 26px;
516 517 }
517 518
518 519 .select2-container-multi.select2-container-active .select2-choices {
519 520 border: 1px solid #5897fb;
520 521 outline: none;
521 522
522 523 -webkit-box-shadow: 0 0 5px rgba(0, 0, 0, .3);
523 524 box-shadow: 0 0 5px rgba(0, 0, 0, .3);
524 525 }
525 526 .select2-container-multi .select2-choices li {
526 527 float: left;
527 528 list-style: none;
528 529 }
529 530 html[dir="rtl"] .select2-container-multi .select2-choices li
530 531 {
531 532 float: right;
532 533 }
533 534 .select2-container-multi .select2-choices .select2-search-field {
534 535 margin: 0;
535 536 padding: 0;
536 537 white-space: nowrap;
537 538 }
538 539
539 540 .select2-container-multi .select2-choices .select2-search-field input {
540 541 padding: 5px;
541 542 margin: 1px 0;
542 543
543 544 font-family: sans-serif;
544 545 font-size: 100%;
545 546 color: #666;
546 547 outline: 0;
547 548 border: 0;
548 549 -webkit-box-shadow: none;
549 550 box-shadow: none;
550 551 background: transparent !important;
551 552 }
552 553
553 554 .select2-container-multi .select2-choices .select2-search-field input.select2-active {
554 555 background: #fff url('../../images/select2-spinner.gif') no-repeat 100% !important;
555 556 }
556 557
557 558 .select2-default {
558 559 color: #999 !important;
559 560 }
560 561
561 562 .select2-container-multi .select2-choices .select2-search-choice {
562 563 padding: 3px 5px 3px 18px;
563 564 margin: 3px 0 3px 5px;
564 565 position: relative;
565 566
566 567 line-height: 13px;
567 568 color: #333;
568 569 cursor: default;
569 570 border: 1px solid #aaaaaa;
570 571
571 572 border-radius: 3px;
572 573
573 574 -webkit-box-shadow: 0 0 2px #fff inset, 0 1px 0 rgba(0, 0, 0, 0.05);
574 575 box-shadow: 0 0 2px #fff inset, 0 1px 0 rgba(0, 0, 0, 0.05);
575 576
576 577 background-clip: padding-box;
577 578
578 579 -webkit-touch-callout: none;
579 580 -webkit-user-select: none;
580 581 -moz-user-select: none;
581 582 -ms-user-select: none;
582 583 user-select: none;
583 584
584 585 background-color: #e4e4e4;
585 586 filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#f4f4f4', GradientType=0);
586 587 background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, color-stop(20%, #f4f4f4), color-stop(50%, #f0f0f0), color-stop(52%, #e8e8e8), color-stop(100%, #eee));
587 588 background-image: -webkit-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eee 100%);
588 589 background-image: -moz-linear-gradient(top, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eee 100%);
589 590 background-image: linear-gradient(to bottom, #f4f4f4 20%, #f0f0f0 50%, #e8e8e8 52%, #eee 100%);
590 591 }
591 592 html[dir="rtl"] .select2-container-multi .select2-choices .select2-search-choice
592 593 {
593 594 margin: 3px 5px 3px 0;
594 595 padding: 3px 18px 3px 5px;
595 596 }
596 597 .select2-container-multi .select2-choices .select2-search-choice .select2-chosen {
597 598 cursor: default;
598 599 }
599 600 .select2-container-multi .select2-choices .select2-search-choice-focus {
600 601 background: #d4d4d4;
601 602 }
602 603
603 604 .select2-search-choice-close {
604 605 display: block;
605 606 width: 12px;
606 607 height: 13px;
607 608 position: absolute;
608 609 right: 3px;
609 610 top: 4px;
610 611
611 612 font-size: 1px;
612 613 outline: none;
613 614 background: url('../../images/select2.png') right top no-repeat;
614 615 }
615 616 html[dir="rtl"] .select2-search-choice-close {
616 617 right: auto;
617 618 left: 3px;
618 619 }
619 620
620 621 .select2-container-multi .select2-search-choice-close {
621 622 left: 3px;
622 623 }
623 624
624 625 html[dir="rtl"] .select2-container-multi .select2-search-choice-close {
625 626 left: auto;
626 627 right: 2px;
627 628 }
628 629
629 630 .select2-container-multi .select2-choices .select2-search-choice .select2-search-choice-close:hover {
630 631 background-position: right -11px;
631 632 }
632 633 .select2-container-multi .select2-choices .select2-search-choice-focus .select2-search-choice-close {
633 634 background-position: right -11px;
634 635 }
635 636
636 637 /* disabled styles */
637 638 .select2-container-multi.select2-container-disabled .select2-choices {
638 639 background-color: #f4f4f4;
639 640 background-image: none;
640 641 border: 1px solid #ddd;
641 642 cursor: default;
642 643 }
643 644
644 645 .select2-container-multi.select2-container-disabled .select2-choices .select2-search-choice {
645 646 padding: 3px 5px 3px 5px;
646 647 border: 1px solid #ddd;
647 648 background-image: none;
648 649 background-color: #f4f4f4;
649 650 }
650 651
651 652 .select2-container-multi.select2-container-disabled .select2-choices .select2-search-choice .select2-search-choice-close { display: none;
652 653 background: none;
653 654 }
654 655 /* end multiselect */
655 656
656 657
657 658 .select2-result-selectable .select2-match,
658 659 .select2-result-unselectable .select2-match {
659 660 text-decoration: underline;
660 661 }
661 662
662 663 .select2-offscreen, .select2-offscreen:focus {
663 664 clip: rect(0 0 0 0) !important;
664 665 width: 1px !important;
665 666 height: 1px !important;
666 667 border: 0 !important;
667 668 margin: 0 !important;
668 669 padding: 0 !important;
669 670 overflow: hidden !important;
670 671 position: absolute !important;
671 672 outline: 0 !important;
672 673 left: 0px !important;
673 674 top: 0px !important;
674 675 }
675 676
676 677 .select2-display-none {
677 678 display: none;
678 679 }
679 680
680 681 .select2-measure-scrollbar {
681 682 position: absolute;
682 683 top: -10000px;
683 684 left: -10000px;
684 685 width: 100px;
685 686 height: 100px;
686 687 overflow: scroll;
687 688 }
688 689
689 690 /* Retina-ize icons */
690 691
691 692 @media only screen and (-webkit-min-device-pixel-ratio: 1.5), only screen and (min-resolution: 2dppx) {
692 693 .select2-search input,
693 694 .select2-search-choice-close,
694 695 .select2-container .select2-choice abbr,
695 696 .select2-container .select2-choice .select2-arrow b {
696 697 background-image: url('../../images/select2x2.png') !important;
697 698 background-repeat: no-repeat !important;
698 699 background-size: 60px 40px !important;
699 700 }
700 701
701 702 .select2-search input {
702 703 background-position: 100% -21px !important;
703 704 }
704 705 }
@@ -1,81 +1,79 b''
1 1 ## Changesets table !
2 2 <%namespace name="base" file="/base/base.mako"/>
3 3
4 4 %if c.ancestor:
5 <div class="ancestor">${_('Common Ancestor Commit')}:
6 <a href="${h.route_path('repo_commit', repo_name=c.repo_name, commit_id=c.ancestor)}">
7 ${h.short_id(c.ancestor)}
8 </a>. ${_('Compare was calculated based on this shared commit.')}
5 <div class="ancestor">${_('Compare was calculated based on this common ancestor commit')}:
6 <a href="${h.route_path('repo_commit', repo_name=c.repo_name, commit_id=c.ancestor)}">${h.short_id(c.ancestor)}</a>
9 7 <input id="common_ancestor" type="hidden" name="common_ancestor" value="${c.ancestor}">
10 8 </div>
11 9 %endif
12 10
13 11 <div class="container">
14 12 <input type="hidden" name="__start__" value="revisions:sequence">
15 13 <table class="rctable compare_view_commits">
16 14 <tr>
17 15 <th>${_('Time')}</th>
18 16 <th>${_('Author')}</th>
19 17 <th>${_('Commit')}</th>
20 18 <th></th>
21 19 <th>${_('Description')}</th>
22 20 </tr>
23 21 ## to speed up lookups cache some functions before the loop
24 22 <%
25 23 active_patterns = h.get_active_pattern_entries(c.repo_name)
26 24 urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns)
27 25 %>
28 26 %for commit in c.commit_ranges:
29 27 <tr id="row-${commit.raw_id}"
30 28 commit_id="${commit.raw_id}"
31 29 class="compare_select"
32 30 style="${'display: none' if c.collapse_all_commits else ''}"
33 31 >
34 32 <td class="td-time">
35 33 ${h.age_component(commit.date)}
36 34 </td>
37 35 <td class="td-user">
38 36 ${base.gravatar_with_user(commit.author, 16, tooltip=True)}
39 37 </td>
40 38 <td class="td-hash">
41 39 <code>
42 40 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
43 41 r${commit.idx}:${h.short_id(commit.raw_id)}
44 42 </a>
45 43 ${h.hidden('revisions',commit.raw_id)}
46 44 </code>
47 45 </td>
48 46 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}" onclick="commitsController.expandCommit(this); return false">
49 47 <i class="icon-expand-linked"></i>
50 48 </td>
51 49 <td class="mid td-description">
52 50 <div class="log-container truncate-wrap">
53 51 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${urlify_commit_message(commit.message, c.repo_name)}</div>
54 52 </div>
55 53 </td>
56 54 </tr>
57 55 %endfor
58 56 <tr class="compare_select_hidden" style="${('' if c.collapse_all_commits else 'display: none')}">
59 57 <td colspan="5">
60 58 ${_ungettext('{} commit hidden, click expand to show them.', '{} commits hidden, click expand to show them.', len(c.commit_ranges)).format(len(c.commit_ranges))}
61 59 </td>
62 60 </tr>
63 61 % if not c.commit_ranges:
64 62 <tr class="compare_select">
65 63 <td colspan="5">
66 64 ${_('No commits in this compare')}
67 65 </td>
68 66 </tr>
69 67 % endif
70 68 </table>
71 69 <input type="hidden" name="__end__" value="revisions:sequence">
72 70
73 71 </div>
74 72
75 73 <script>
76 74 commitsController = new CommitsController();
77 75 $('.compare_select').on('click',function(e){
78 76 var cid = $(this).attr('commit_id');
79 77 $('#row-'+cid).toggleClass('hl', !$('#row-'+cid).hasClass('hl'));
80 78 });
81 79 </script>
@@ -1,547 +1,537 b''
1 1 <%inherit file="/base/base.mako"/>
2 2 <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${c.repo_name} ${_('New pull request')}
6 6 </%def>
7 7
8 8 <%def name="breadcrumbs_links()"></%def>
9 9
10 10 <%def name="menu_bar_nav()">
11 11 ${self.menu_items(active='repositories')}
12 12 </%def>
13 13
14 14 <%def name="menu_bar_subnav()">
15 15 ${self.repo_menu(active='showpullrequest')}
16 16 </%def>
17 17
18 18 <%def name="main()">
19 19 <div class="box">
20 20 ${h.secure_form(h.route_path('pullrequest_create', repo_name=c.repo_name, _query=request.GET.mixed()), id='pull_request_form', request=request)}
21 21
22 22 <div class="box pr-summary">
23 23
24 24 <div class="summary-details block-left">
25 25
26 26
27 27 <div class="pr-details-title">
28 28 ${_('New pull request')}
29 29 </div>
30 30
31 31 <div class="form" style="padding-top: 10px">
32 32 <!-- fields -->
33 33
34 34 <div class="fields" >
35 35
36 36 <div class="field">
37 37 <div class="label">
38 38 <label for="pullrequest_title">${_('Title')}:</label>
39 39 </div>
40 40 <div class="input">
41 41 ${h.text('pullrequest_title', c.default_title, class_="medium autogenerated-title")}
42 42 </div>
43 43 <p class="help-block">
44 44 Start the title with WIP: to prevent accidental merge of Work In Progress pull request before it's ready.
45 45 </p>
46 46 </div>
47 47
48 48 <div class="field">
49 49 <div class="label label-textarea">
50 50 <label for="pullrequest_desc">${_('Description')}:</label>
51 51 </div>
52 52 <div class="textarea text-area">
53 53 <input id="pr-renderer-input" type="hidden" name="description_renderer" value="${c.visual.default_renderer}">
54 54 ${dt.markup_form('pullrequest_desc')}
55 55 </div>
56 56 </div>
57 57
58 58 <div class="field">
59 59 <div class="label label-textarea">
60 60 <label for="commit_flow">${_('Commit flow')}:</label>
61 61 </div>
62 62
63 63 ## TODO: johbo: Abusing the "content" class here to get the
64 64 ## desired effect. Should be replaced by a proper solution.
65 65
66 66 ##ORG
67 67 <div class="content">
68 68 <strong>${_('Source repository')}:</strong>
69 69 ${c.rhodecode_db_repo.description}
70 70 </div>
71 71 <div class="content">
72 72 ${h.hidden('source_repo')}
73 73 ${h.hidden('source_ref')}
74 74 </div>
75 75
76 76 ##OTHER, most Probably the PARENT OF THIS FORK
77 77 <div class="content">
78 78 ## filled with JS
79 79 <div id="target_repo_desc"></div>
80 80 </div>
81 81
82 82 <div class="content">
83 83 ${h.hidden('target_repo')}
84 84 ${h.hidden('target_ref')}
85 85 <span id="target_ref_loading" style="display: none">
86 86 ${_('Loading refs...')}
87 87 </span>
88 88 </div>
89 89 </div>
90 90
91 91 <div class="field">
92 92 <div class="label label-textarea">
93 93 <label for="pullrequest_submit"></label>
94 94 </div>
95 95 <div class="input">
96 96 <div class="pr-submit-button">
97 97 <input id="pr_submit" class="btn" name="save" type="submit" value="${_('Submit Pull Request')}">
98 98 </div>
99 99 <div id="pr_open_message"></div>
100 100 </div>
101 101 </div>
102 102
103 103 <div class="pr-spacing-container"></div>
104 104 </div>
105 105 </div>
106 106 </div>
107 107 <div>
108 108 ## AUTHOR
109 109 <div class="reviewers-title block-right">
110 110 <div class="pr-details-title">
111 111 ${_('Author of this pull request')}
112 112 </div>
113 113 </div>
114 114 <div class="block-right pr-details-content reviewers">
115 115 <ul class="group_members">
116 116 <li>
117 117 ${self.gravatar_with_user(c.rhodecode_user.email, 16, tooltip=True)}
118 118 </li>
119 119 </ul>
120 120 </div>
121 121
122 122 ## REVIEW RULES
123 123 <div id="review_rules" style="display: none" class="reviewers-title block-right">
124 124 <div class="pr-details-title">
125 125 ${_('Reviewer rules')}
126 126 </div>
127 127 <div class="pr-reviewer-rules">
128 128 ## review rules will be appended here, by default reviewers logic
129 129 </div>
130 130 </div>
131 131
132 132 ## REVIEWERS
133 133 <div class="reviewers-title block-right">
134 134 <div class="pr-details-title">
135 135 ${_('Pull request reviewers')}
136 136 <span class="calculate-reviewers"> - ${_('loading...')}</span>
137 137 </div>
138 138 </div>
139 139 <div id="reviewers" class="block-right pr-details-content reviewers">
140 140 ## members goes here, filled via JS based on initial selection !
141 141 <input type="hidden" name="__start__" value="review_members:sequence">
142 142 <ul id="review_members" class="group_members"></ul>
143 143 <input type="hidden" name="__end__" value="review_members:sequence">
144 144 <div id="add_reviewer_input" class='ac'>
145 145 <div class="reviewer_ac">
146 146 ${h.text('user', class_='ac-input', placeholder=_('Add reviewer or reviewer group'))}
147 147 <div id="reviewers_container"></div>
148 148 </div>
149 149 </div>
150 150 </div>
151 151 </div>
152 152 </div>
153 153 <div class="box">
154 154 <div>
155 155 ## overview pulled by ajax
156 156 <div id="pull_request_overview"></div>
157 157 </div>
158 158 </div>
159 159 ${h.end_form()}
160 160 </div>
161 161
162 162 <script type="text/javascript">
163 163 $(function(){
164 164 var defaultSourceRepo = '${c.default_repo_data['source_repo_name']}';
165 165 var defaultSourceRepoData = ${c.default_repo_data['source_refs_json']|n};
166 166 var defaultTargetRepo = '${c.default_repo_data['target_repo_name']}';
167 167 var defaultTargetRepoData = ${c.default_repo_data['target_refs_json']|n};
168 168
169 169 var $pullRequestForm = $('#pull_request_form');
170 170 var $pullRequestSubmit = $('#pr_submit', $pullRequestForm);
171 171 var $sourceRepo = $('#source_repo', $pullRequestForm);
172 172 var $targetRepo = $('#target_repo', $pullRequestForm);
173 173 var $sourceRef = $('#source_ref', $pullRequestForm);
174 174 var $targetRef = $('#target_ref', $pullRequestForm);
175 175
176 176 var sourceRepo = function() { return $sourceRepo.eq(0).val() };
177 177 var sourceRef = function() { return $sourceRef.eq(0).val().split(':') };
178 178
179 179 var targetRepo = function() { return $targetRepo.eq(0).val() };
180 180 var targetRef = function() { return $targetRef.eq(0).val().split(':') };
181 181
182 182 var calculateContainerWidth = function() {
183 183 var maxWidth = 0;
184 184 var repoSelect2Containers = ['#source_repo', '#target_repo'];
185 185 $.each(repoSelect2Containers, function(idx, value) {
186 186 $(value).select2('container').width('auto');
187 187 var curWidth = $(value).select2('container').width();
188 188 if (maxWidth <= curWidth) {
189 189 maxWidth = curWidth;
190 190 }
191 191 $.each(repoSelect2Containers, function(idx, value) {
192 192 $(value).select2('container').width(maxWidth + 10);
193 193 });
194 194 });
195 195 };
196 196
197 197 var initRefSelection = function(selectedRef) {
198 198 return function(element, callback) {
199 199 // translate our select2 id into a text, it's a mapping to show
200 200 // simple label when selecting by internal ID.
201 201 var id, refData;
202 202 if (selectedRef === undefined || selectedRef === null) {
203 203 id = element.val();
204 204 refData = element.val().split(':');
205 205
206 206 if (refData.length !== 3){
207 207 refData = ["", "", ""]
208 208 }
209 209 } else {
210 210 id = selectedRef;
211 211 refData = selectedRef.split(':');
212 212 }
213 213
214 214 var text = refData[1];
215 215 if (refData[0] === 'rev') {
216 216 text = text.substring(0, 12);
217 217 }
218 218
219 219 var data = {id: id, text: text};
220 220 callback(data);
221 221 };
222 222 };
223 223
224 224 var formatRefSelection = function(data, container, escapeMarkup) {
225 225 var prefix = '';
226 226 var refData = data.id.split(':');
227 227 if (refData[0] === 'branch') {
228 228 prefix = '<i class="icon-branch"></i>';
229 229 }
230 230 else if (refData[0] === 'book') {
231 231 prefix = '<i class="icon-bookmark"></i>';
232 232 }
233 233 else if (refData[0] === 'tag') {
234 234 prefix = '<i class="icon-tag"></i>';
235 235 }
236 236
237 237 var originalOption = data.element;
238 238 return prefix + escapeMarkup(data.text);
239 239 };formatSelection:
240 240
241 241 // custom code mirror
242 242 var codeMirrorInstance = $('#pullrequest_desc').get(0).MarkupForm.cm;
243 243
244 var diffDataHandler = function(data) {
245
246 $('#pull_request_overview').html(data);
247
248 var commitElements = data['commits'];
249 var files = data['files'];
250 var added = data['stats'][0]
251 var deleted = data['stats'][1]
252 var commonAncestorId = data['ancestor'];
253
254 var prTitleAndDesc = getTitleAndDescription(
255 sourceRef()[1], commitElements, 5);
256
257 var title = prTitleAndDesc[0];
258 var proposedDescription = prTitleAndDesc[1];
259
260 var useGeneratedTitle = (
261 $('#pullrequest_title').hasClass('autogenerated-title') ||
262 $('#pullrequest_title').val() === "");
263
264 if (title && useGeneratedTitle) {
265 // use generated title if we haven't specified our own
266 $('#pullrequest_title').val(title);
267 $('#pullrequest_title').addClass('autogenerated-title');
268
269 }
270
271 var useGeneratedDescription = (
272 !codeMirrorInstance._userDefinedValue ||
273 codeMirrorInstance.getValue() === "");
274
275 if (proposedDescription && useGeneratedDescription) {
276 // set proposed content, if we haven't defined our own,
277 // or we don't have description written
278 codeMirrorInstance._userDefinedValue = false; // reset state
279 codeMirrorInstance.setValue(proposedDescription);
280 }
281
282 // refresh our codeMirror so events kicks in and it's change aware
283 codeMirrorInstance.refresh();
284
285 var url_data = {
286 'repo_name': targetRepo(),
287 'target_repo': sourceRepo(),
288 'source_ref': targetRef()[2],
289 'source_ref_type': 'rev',
290 'target_ref': sourceRef()[2],
291 'target_ref_type': 'rev',
292 'merge': true,
293 '_': Date.now() // bypass browser caching
294 }; // gather the source/target ref and repo here
295 var url = pyroutes.url('repo_compare', url_data);
296
297 var msg = '<input id="common_ancestor" type="hidden" name="common_ancestor" value="{0}">'.format(commonAncestorId);
298 msg += '<input type="hidden" name="__start__" value="revisions:sequence">'
299
300 $.each(commitElements, function(idx, value) {
301 msg += '<input type="hidden" name="revisions" value="{0}">'.format(value["raw_id"]);
302 });
303
304 msg += '<input type="hidden" name="__end__" value="revisions:sequence">'
305 msg += _ngettext(
306 'This pull requests will consist of <strong>{0} commit</strong>.',
307 'This pull requests will consist of <strong>{0} commits</strong>.',
308 commitElements.length).format(commitElements.length)
309
310 msg += '\n';
311 msg += _ngettext(
312 '<strong>{0} file</strong> changed, ',
313 '<strong>{0} files</strong> changed, ',
314 files.length).format(files.length)
315 msg += '<span class="op-added">{0} lines inserted</span>, <span class="op-deleted">{1} lines deleted</span>.'.format(added, deleted)
316
317 msg += '\n\n <a class="" id="pull_request_overview_url" href="{0}" target="_blank">${_('Show detailed compare.')}</a>'.format(url);
318
319 if (commitElements.length) {
320 var commitsLink = '<a href="#pull_request_overview"><strong>{0}</strong></a>'.format(commitElements.length);
321 prButtonLock(false, msg.replace('__COMMITS__', commitsLink), 'compare');
322 }
323 else {
324 prButtonLock(true, "${_('There are no commits to merge.')}", 'compare');
325 }
326
327 };
328
244 329 reviewersController = new ReviewersController();
330 reviewersController.diffDataHandler = diffDataHandler;
245 331
246 332 var queryTargetRepo = function(self, query) {
247 333 // cache ALL results if query is empty
248 334 var cacheKey = query.term || '__';
249 335 var cachedData = self.cachedDataSource[cacheKey];
250 336
251 337 if (cachedData) {
252 338 query.callback({results: cachedData.results});
253 339 } else {
254 340 $.ajax({
255 341 url: pyroutes.url('pullrequest_repo_targets', {'repo_name': templateContext.repo_name}),
256 342 data: {query: query.term},
257 343 dataType: 'json',
258 344 type: 'GET',
259 345 success: function(data) {
260 346 self.cachedDataSource[cacheKey] = data;
261 347 query.callback({results: data.results});
262 348 },
263 349 error: function(jqXHR, textStatus, errorThrown) {
264 350 var prefix = "Error while fetching entries.\n"
265 351 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
266 352 ajaxErrorSwal(message);
267 353 }
268 354 });
269 355 }
270 356 };
271 357
272 358 var queryTargetRefs = function(initialData, query) {
273 359 var data = {results: []};
274 360 // filter initialData
275 361 $.each(initialData, function() {
276 362 var section = this.text;
277 363 var children = [];
278 364 $.each(this.children, function() {
279 365 if (query.term.length === 0 ||
280 366 this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0 ) {
281 367 children.push({'id': this.id, 'text': this.text})
282 368 }
283 369 });
284 370 data.results.push({'text': section, 'children': children})
285 371 });
286 372 query.callback({results: data.results});
287 373 };
288 374
289 var loadRepoRefDiffPreview = function() {
290
291 var url_data = {
292 'repo_name': targetRepo(),
293 'target_repo': sourceRepo(),
294 'source_ref': targetRef()[2],
295 'source_ref_type': 'rev',
296 'target_ref': sourceRef()[2],
297 'target_ref_type': 'rev',
298 'merge': true,
299 '_': Date.now() // bypass browser caching
300 }; // gather the source/target ref and repo here
301
302 if (sourceRef().length !== 3 || targetRef().length !== 3) {
303 prButtonLock(true, "${_('Please select source and target')}");
304 return;
305 }
306 var url = pyroutes.url('repo_compare', url_data);
307
308 // lock PR button, so we cannot send PR before it's calculated
309 prButtonLock(true, "${_('Loading compare ...')}", 'compare');
310
311 if (loadRepoRefDiffPreview._currentRequest) {
312 loadRepoRefDiffPreview._currentRequest.abort();
313 }
314
315 loadRepoRefDiffPreview._currentRequest = $.get(url)
316 .error(function(jqXHR, textStatus, errorThrown) {
317 if (textStatus !== 'abort') {
318 var prefix = "Error while processing request.\n"
319 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
320 ajaxErrorSwal(message);
321 }
322
323 })
324 .done(function(data) {
325 loadRepoRefDiffPreview._currentRequest = null;
326 $('#pull_request_overview').html(data);
327
328 var commitElements = $(data).find('tr[commit_id]');
329
330 var prTitleAndDesc = getTitleAndDescription(
331 sourceRef()[1], commitElements, 5);
332
333 var title = prTitleAndDesc[0];
334 var proposedDescription = prTitleAndDesc[1];
335
336 var useGeneratedTitle = (
337 $('#pullrequest_title').hasClass('autogenerated-title') ||
338 $('#pullrequest_title').val() === "");
339
340 if (title && useGeneratedTitle) {
341 // use generated title if we haven't specified our own
342 $('#pullrequest_title').val(title);
343 $('#pullrequest_title').addClass('autogenerated-title');
344
345 }
346
347 var useGeneratedDescription = (
348 !codeMirrorInstance._userDefinedValue ||
349 codeMirrorInstance.getValue() === "");
350
351 if (proposedDescription && useGeneratedDescription) {
352 // set proposed content, if we haven't defined our own,
353 // or we don't have description written
354 codeMirrorInstance._userDefinedValue = false; // reset state
355 codeMirrorInstance.setValue(proposedDescription);
356 }
357
358 // refresh our codeMirror so events kicks in and it's change aware
359 codeMirrorInstance.refresh();
360
361 var msg = '';
362 if (commitElements.length === 1) {
363 msg = "${_ungettext('This pull request will consist of __COMMITS__ commit.', 'This pull request will consist of __COMMITS__ commits.', 1)}";
364 } else {
365 msg = "${_ungettext('This pull request will consist of __COMMITS__ commit.', 'This pull request will consist of __COMMITS__ commits.', 2)}";
366 }
367
368 msg += ' <a id="pull_request_overview_url" href="{0}" target="_blank">${_('Show detailed compare.')}</a>'.format(url);
369
370 if (commitElements.length) {
371 var commitsLink = '<a href="#pull_request_overview"><strong>{0}</strong></a>'.format(commitElements.length);
372 prButtonLock(false, msg.replace('__COMMITS__', commitsLink), 'compare');
373 }
374 else {
375 prButtonLock(true, "${_('There are no commits to merge.')}", 'compare');
376 }
377
378
379 });
380 };
381
382 375 var Select2Box = function(element, overrides) {
383 376 var globalDefaults = {
384 377 dropdownAutoWidth: true,
385 378 containerCssClass: "drop-menu",
386 379 dropdownCssClass: "drop-menu-dropdown"
387 380 };
388 381
389 382 var initSelect2 = function(defaultOptions) {
390 383 var options = jQuery.extend(globalDefaults, defaultOptions, overrides);
391 384 element.select2(options);
392 385 };
393 386
394 387 return {
395 388 initRef: function() {
396 389 var defaultOptions = {
397 390 minimumResultsForSearch: 5,
398 391 formatSelection: formatRefSelection
399 392 };
400 393
401 394 initSelect2(defaultOptions);
402 395 },
403 396
404 397 initRepo: function(defaultValue, readOnly) {
405 398 var defaultOptions = {
406 399 initSelection : function (element, callback) {
407 400 var data = {id: defaultValue, text: defaultValue};
408 401 callback(data);
409 402 }
410 403 };
411 404
412 405 initSelect2(defaultOptions);
413 406
414 407 element.select2('val', defaultSourceRepo);
415 408 if (readOnly === true) {
416 409 element.select2('readonly', true);
417 410 }
418 411 }
419 412 };
420 413 };
421 414
422 415 var initTargetRefs = function(refsData, selectedRef) {
423 416
424 417 Select2Box($targetRef, {
425 418 placeholder: "${_('Select commit reference')}",
426 419 query: function(query) {
427 420 queryTargetRefs(refsData, query);
428 421 },
429 422 initSelection : initRefSelection(selectedRef)
430 423 }).initRef();
431 424
432 425 if (!(selectedRef === undefined)) {
433 426 $targetRef.select2('val', selectedRef);
434 427 }
435 428 };
436 429
437 430 var targetRepoChanged = function(repoData) {
438 431 // generate new DESC of target repo displayed next to select
439 432 var prLink = pyroutes.url('pullrequest_new', {'repo_name': repoData['name']});
440 433 $('#target_repo_desc').html(
441 434 "<strong>${_('Target repository')}</strong>: {0}. <a href=\"{1}\">Switch base, and use as source.</a>".format(repoData['description'], prLink)
442 435 );
443 436
444 437 // generate dynamic select2 for refs.
445 438 initTargetRefs(repoData['refs']['select2_refs'],
446 439 repoData['refs']['selected_ref']);
447 440
448 441 };
449 442
450 443 var sourceRefSelect2 = Select2Box($sourceRef, {
451 444 placeholder: "${_('Select commit reference')}",
452 445 query: function(query) {
453 446 var initialData = defaultSourceRepoData['refs']['select2_refs'];
454 447 queryTargetRefs(initialData, query)
455 448 },
456 449 initSelection: initRefSelection()
457 450 }
458 451 );
459 452
460 453 var sourceRepoSelect2 = Select2Box($sourceRepo, {
461 454 query: function(query) {}
462 455 });
463 456
464 457 var targetRepoSelect2 = Select2Box($targetRepo, {
465 458 cachedDataSource: {},
466 459 query: $.debounce(250, function(query) {
467 460 queryTargetRepo(this, query);
468 461 }),
469 462 formatResult: formatRepoResult
470 463 });
471 464
472 465 sourceRefSelect2.initRef();
473 466
474 467 sourceRepoSelect2.initRepo(defaultSourceRepo, true);
475 468
476 469 targetRepoSelect2.initRepo(defaultTargetRepo, false);
477 470
478 471 $sourceRef.on('change', function(e){
479 loadRepoRefDiffPreview();
480 472 reviewersController.loadDefaultReviewers(
481 473 sourceRepo(), sourceRef(), targetRepo(), targetRef());
482 474 });
483 475
484 476 $targetRef.on('change', function(e){
485 loadRepoRefDiffPreview();
486 477 reviewersController.loadDefaultReviewers(
487 478 sourceRepo(), sourceRef(), targetRepo(), targetRef());
488 479 });
489 480
490 481 $targetRepo.on('change', function(e){
491 482 var repoName = $(this).val();
492 483 calculateContainerWidth();
493 484 $targetRef.select2('destroy');
494 485 $('#target_ref_loading').show();
495 486
496 487 $.ajax({
497 488 url: pyroutes.url('pullrequest_repo_refs',
498 489 {'repo_name': templateContext.repo_name, 'target_repo_name':repoName}),
499 490 data: {},
500 491 dataType: 'json',
501 492 type: 'GET',
502 493 success: function(data) {
503 494 $('#target_ref_loading').hide();
504 495 targetRepoChanged(data);
505 loadRepoRefDiffPreview();
506 496 },
507 497 error: function(jqXHR, textStatus, errorThrown) {
508 498 var prefix = "Error while fetching entries.\n"
509 499 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
510 500 ajaxErrorSwal(message);
511 501 }
512 502 })
513 503
514 504 });
515 505
516 506 $pullRequestForm.on('submit', function(e){
517 507 // Flush changes into textarea
518 508 codeMirrorInstance.save();
519 509 prButtonLock(true, null, 'all');
520 510 $pullRequestSubmit.val(_gettext('Please wait creating pull request...'));
521 511 });
522 512
523 513 prButtonLock(true, "${_('Please select source and target')}", 'all');
524 514
525 515 // auto-load on init, the target refs select2
526 516 calculateContainerWidth();
527 517 targetRepoChanged(defaultTargetRepoData);
528 518
529 519 $('#pullrequest_title').on('keyup', function(e){
530 520 $(this).removeClass('autogenerated-title');
531 521 });
532 522
533 523 % if c.default_source_ref:
534 524 // in case we have a pre-selected value, use it now
535 525 $sourceRef.select2('val', '${c.default_source_ref}');
536 // diff preview load
537 loadRepoRefDiffPreview();
526
527
538 528 // default reviewers
539 529 reviewersController.loadDefaultReviewers(
540 530 sourceRepo(), sourceRef(), targetRepo(), targetRef());
541 531 % endif
542 532
543 533 ReviewerAutoComplete('#user');
544 534 });
545 535 </script>
546 536
547 537 </%def>
General Comments 0
You need to be logged in to leave comments. Login now