pr-versioning: implemented versioning for pull requests....
marcink
r1368:9a887d01 default
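
This change adds "version" and "from_version" query parameters to the pull request show view: each saved PullRequestVersion can be viewed on its own, and any two versions can be compared against each other. The sketch below is an editor's illustration of how those parameters select the displayed version and decide whether COMPARE mode is active; it is not the controller code from the diff, and all names in it are hypothetical.

    def resolve_display_mode(version, from_version):
        # mirrors the logic in show(): from_version falls back to version,
        # and any value other than 'latest'/empty is treated as the id of a
        # stored pull request version (simplified here to an int cast)
        def _resolve(value):
            if not value or value == 'latest':
                return 'latest'
            return int(value)  # stands in for a PullRequestVersion lookup

        at_version = _resolve(version)
        from_at_version = _resolve(from_version or version)
        # COMPARE mode is active only when the two resolved versions differ;
        # otherwise the page renders a plain "view at version"
        return at_version, from_at_version, at_version != from_at_version

    # e.g. ?version=2&from_version=1 -> (2, 1, True): render the diff between
    # version 1 and version 2 of the pull request
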
@@ -1,1009 +1,1046 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
 22  22 pull requests controller for rhodecode, for initializing pull requests
23 23 """
24 24 import types
25 25
26 26 import peppercorn
27 27 import formencode
28 28 import logging
29 29 import collections
30 30
31 31 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
32 32 from pylons import request, tmpl_context as c, url
33 33 from pylons.controllers.util import redirect
34 34 from pylons.i18n.translation import _
35 35 from pyramid.threadlocal import get_current_registry
36 36 from sqlalchemy.sql import func
37 37 from sqlalchemy.sql.expression import or_
38 38
39 39 from rhodecode import events
40 40 from rhodecode.lib import auth, diffs, helpers as h, codeblocks
41 41 from rhodecode.lib.ext_json import json
42 42 from rhodecode.lib.base import (
43 43 BaseRepoController, render, vcs_operation_context)
44 44 from rhodecode.lib.auth import (
45 45 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
46 46 HasAcceptedRepoType, XHRRequired)
47 47 from rhodecode.lib.channelstream import channelstream_request
48 48 from rhodecode.lib.utils import jsonify
49 49 from rhodecode.lib.utils2 import (
50 50 safe_int, safe_str, str2bool, safe_unicode)
51 51 from rhodecode.lib.vcs.backends.base import (
52 52 EmptyCommit, UpdateFailureReason, EmptyRepository)
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError,
55 55 NodeDoesNotExistError)
56 56
57 57 from rhodecode.model.changeset_status import ChangesetStatusModel
58 58 from rhodecode.model.comment import CommentsModel
59 59 from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment,
60 60 Repository, PullRequestVersion)
61 61 from rhodecode.model.forms import PullRequestForm
62 62 from rhodecode.model.meta import Session
63 63 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 class PullrequestsController(BaseRepoController):
69
69 70 def __before__(self):
70 71 super(PullrequestsController, self).__before__()
71 72
72 def _load_compare_data(self, pull_request, inline_comments):
73 """
74 Load context data needed for generating compare diff
75
76 :param pull_request: object related to the request
 77         :param inline_comments: inline comments to include, mapped per file and line
78 """
79 source_repo = pull_request.source_repo
80 source_ref_id = pull_request.source_ref_parts.commit_id
81
82 target_repo = pull_request.target_repo
83 target_ref_id = pull_request.target_ref_parts.commit_id
84
85 # despite opening commits for bookmarks/branches/tags, we always
86 # convert this to rev to prevent changes after bookmark or branch change
87 c.source_ref_type = 'rev'
88 c.source_ref = source_ref_id
89
90 c.target_ref_type = 'rev'
91 c.target_ref = target_ref_id
92
93 c.source_repo = source_repo
94 c.target_repo = target_repo
95
96 c.fulldiff = bool(request.GET.get('fulldiff'))
97
98 # diff_limit is the old behavior, will cut off the whole diff
99 # if the limit is applied otherwise will just hide the
100 # big files from the front-end
101 diff_limit = self.cut_off_limit_diff
102 file_limit = self.cut_off_limit_file
103
104 pre_load = ["author", "branch", "date", "message"]
105
106 c.commit_ranges = []
107 source_commit = EmptyCommit()
108 target_commit = EmptyCommit()
109 c.missing_requirements = False
110 try:
111 c.commit_ranges = [
112 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
113 for rev in pull_request.revisions]
114
115 c.statuses = source_repo.statuses(
116 [x.raw_id for x in c.commit_ranges])
117
118 target_commit = source_repo.get_commit(
119 commit_id=safe_str(target_ref_id))
120 source_commit = source_repo.get_commit(
121 commit_id=safe_str(source_ref_id))
122 except RepositoryRequirementError:
123 c.missing_requirements = True
124
125 # auto collapse if we have more than limit
126 collapse_limit = diffs.DiffProcessor._collapse_commits_over
127 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
128
129 c.changes = {}
130 c.missing_commits = False
131 if (c.missing_requirements or
132 isinstance(source_commit, EmptyCommit) or
133 source_commit == target_commit):
134 _parsed = []
135 c.missing_commits = True
136 else:
137 vcs_diff = PullRequestModel().get_diff(pull_request)
138 diff_processor = diffs.DiffProcessor(
139 vcs_diff, format='newdiff', diff_limit=diff_limit,
140 file_limit=file_limit, show_full_diff=c.fulldiff)
141
142 _parsed = diff_processor.prepare()
143 c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)
144
145 included_files = {}
146 for f in _parsed:
147 included_files[f['filename']] = f['stats']
148
149 c.deleted_files = [fname for fname in inline_comments if
150 fname not in included_files]
151
152 c.deleted_files_comments = collections.defaultdict(dict)
153 for fname, per_line_comments in inline_comments.items():
154 if fname in c.deleted_files:
155 c.deleted_files_comments[fname]['stats'] = 0
156 c.deleted_files_comments[fname]['comments'] = list()
157 for lno, comments in per_line_comments.items():
158 c.deleted_files_comments[fname]['comments'].extend(comments)
159
160 def _node_getter(commit):
161 def get_node(fname):
162 try:
163 return commit.get_node(fname)
164 except NodeDoesNotExistError:
165 return None
166 return get_node
167
168 c.diffset = codeblocks.DiffSet(
169 repo_name=c.repo_name,
170 source_repo_name=c.source_repo.repo_name,
171 source_node_getter=_node_getter(target_commit),
172 target_node_getter=_node_getter(source_commit),
173 comments=inline_comments
174 ).render_patchset(_parsed, target_commit.raw_id, source_commit.raw_id)
175
176 73 def _extract_ordering(self, request):
177 74 column_index = safe_int(request.GET.get('order[0][column]'))
178 75 order_dir = request.GET.get('order[0][dir]', 'desc')
179 76 order_by = request.GET.get(
180 77 'columns[%s][data][sort]' % column_index, 'name_raw')
181 78 return order_by, order_dir
182 79
183 80 @LoginRequired()
184 81 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
185 82 'repository.admin')
186 83 @HasAcceptedRepoType('git', 'hg')
187 84 def show_all(self, repo_name):
188 85 # filter types
189 86 c.active = 'open'
190 87 c.source = str2bool(request.GET.get('source'))
191 88 c.closed = str2bool(request.GET.get('closed'))
192 89 c.my = str2bool(request.GET.get('my'))
193 90 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
194 91 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
195 92 c.repo_name = repo_name
196 93
197 94 opened_by = None
198 95 if c.my:
199 96 c.active = 'my'
200 97 opened_by = [c.rhodecode_user.user_id]
201 98
202 99 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
203 100 if c.closed:
204 101 c.active = 'closed'
205 102 statuses = [PullRequest.STATUS_CLOSED]
206 103
207 104 if c.awaiting_review and not c.source:
208 105 c.active = 'awaiting'
209 106 if c.source and not c.awaiting_review:
210 107 c.active = 'source'
211 108 if c.awaiting_my_review:
212 109 c.active = 'awaiting_my'
213 110
214 111 data = self._get_pull_requests_list(
215 112 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
216 113 if not request.is_xhr:
217 114 c.data = json.dumps(data['data'])
218 115 c.records_total = data['recordsTotal']
219 116 return render('/pullrequests/pullrequests.mako')
220 117 else:
221 118 return json.dumps(data)
222 119
223 120 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
224 121 # pagination
225 122 start = safe_int(request.GET.get('start'), 0)
226 123 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
227 124 order_by, order_dir = self._extract_ordering(request)
228 125
229 126 if c.awaiting_review:
230 127 pull_requests = PullRequestModel().get_awaiting_review(
231 128 repo_name, source=c.source, opened_by=opened_by,
232 129 statuses=statuses, offset=start, length=length,
233 130 order_by=order_by, order_dir=order_dir)
234 131 pull_requests_total_count = PullRequestModel(
235 132 ).count_awaiting_review(
236 133 repo_name, source=c.source, statuses=statuses,
237 134 opened_by=opened_by)
238 135 elif c.awaiting_my_review:
239 136 pull_requests = PullRequestModel().get_awaiting_my_review(
240 137 repo_name, source=c.source, opened_by=opened_by,
241 138 user_id=c.rhodecode_user.user_id, statuses=statuses,
242 139 offset=start, length=length, order_by=order_by,
243 140 order_dir=order_dir)
244 141 pull_requests_total_count = PullRequestModel(
245 142 ).count_awaiting_my_review(
246 143 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
247 144 statuses=statuses, opened_by=opened_by)
248 145 else:
249 146 pull_requests = PullRequestModel().get_all(
250 147 repo_name, source=c.source, opened_by=opened_by,
251 148 statuses=statuses, offset=start, length=length,
252 149 order_by=order_by, order_dir=order_dir)
253 150 pull_requests_total_count = PullRequestModel().count_all(
254 151 repo_name, source=c.source, statuses=statuses,
255 152 opened_by=opened_by)
256 153
257 154 from rhodecode.lib.utils import PartialRenderer
258 155 _render = PartialRenderer('data_table/_dt_elements.mako')
259 156 data = []
260 157 for pr in pull_requests:
261 158 comments = CommentsModel().get_all_comments(
262 159 c.rhodecode_db_repo.repo_id, pull_request=pr)
263 160
264 161 data.append({
265 162 'name': _render('pullrequest_name',
266 163 pr.pull_request_id, pr.target_repo.repo_name),
267 164 'name_raw': pr.pull_request_id,
268 165 'status': _render('pullrequest_status',
269 166 pr.calculated_review_status()),
270 167 'title': _render(
271 168 'pullrequest_title', pr.title, pr.description),
272 169 'description': h.escape(pr.description),
273 170 'updated_on': _render('pullrequest_updated_on',
274 171 h.datetime_to_time(pr.updated_on)),
275 172 'updated_on_raw': h.datetime_to_time(pr.updated_on),
276 173 'created_on': _render('pullrequest_updated_on',
277 174 h.datetime_to_time(pr.created_on)),
278 175 'created_on_raw': h.datetime_to_time(pr.created_on),
279 176 'author': _render('pullrequest_author',
280 177 pr.author.full_contact, ),
281 178 'author_raw': pr.author.full_name,
282 179 'comments': _render('pullrequest_comments', len(comments)),
283 180 'comments_raw': len(comments),
284 181 'closed': pr.is_closed(),
285 182 })
286 183 # json used to render the grid
287 184 data = ({
288 185 'data': data,
289 186 'recordsTotal': pull_requests_total_count,
290 187 'recordsFiltered': pull_requests_total_count,
291 188 })
292 189 return data
293 190
294 191 @LoginRequired()
295 192 @NotAnonymous()
296 193 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
297 194 'repository.admin')
298 195 @HasAcceptedRepoType('git', 'hg')
299 196 def index(self):
300 197 source_repo = c.rhodecode_db_repo
301 198
302 199 try:
303 200 source_repo.scm_instance().get_commit()
304 201 except EmptyRepositoryError:
305 202 h.flash(h.literal(_('There are no commits yet')),
306 203 category='warning')
307 204 redirect(url('summary_home', repo_name=source_repo.repo_name))
308 205
309 206 commit_id = request.GET.get('commit')
310 207 branch_ref = request.GET.get('branch')
311 208 bookmark_ref = request.GET.get('bookmark')
312 209
313 210 try:
314 211 source_repo_data = PullRequestModel().generate_repo_data(
315 212 source_repo, commit_id=commit_id,
316 213 branch=branch_ref, bookmark=bookmark_ref)
317 214 except CommitDoesNotExistError as e:
318 215 log.exception(e)
319 216 h.flash(_('Commit does not exist'), 'error')
320 217 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
321 218
322 219 default_target_repo = source_repo
323 220
324 221 if source_repo.parent:
325 222 parent_vcs_obj = source_repo.parent.scm_instance()
326 223 if parent_vcs_obj and not parent_vcs_obj.is_empty():
327 224 # change default if we have a parent repo
328 225 default_target_repo = source_repo.parent
329 226
330 227 target_repo_data = PullRequestModel().generate_repo_data(
331 228 default_target_repo)
332 229
333 230 selected_source_ref = source_repo_data['refs']['selected_ref']
334 231
335 232 title_source_ref = selected_source_ref.split(':', 2)[1]
336 233 c.default_title = PullRequestModel().generate_pullrequest_title(
337 234 source=source_repo.repo_name,
338 235 source_ref=title_source_ref,
339 236 target=default_target_repo.repo_name
340 237 )
341 238
342 239 c.default_repo_data = {
343 240 'source_repo_name': source_repo.repo_name,
344 241 'source_refs_json': json.dumps(source_repo_data),
345 242 'target_repo_name': default_target_repo.repo_name,
346 243 'target_refs_json': json.dumps(target_repo_data),
347 244 }
348 245 c.default_source_ref = selected_source_ref
349 246
350 247 return render('/pullrequests/pullrequest.mako')
351 248
352 249 @LoginRequired()
353 250 @NotAnonymous()
354 251 @XHRRequired()
355 252 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
356 253 'repository.admin')
357 254 @jsonify
358 255 def get_repo_refs(self, repo_name, target_repo_name):
359 256 repo = Repository.get_by_repo_name(target_repo_name)
360 257 if not repo:
361 258 raise HTTPNotFound
362 259 return PullRequestModel().generate_repo_data(repo)
363 260
364 261 @LoginRequired()
365 262 @NotAnonymous()
366 263 @XHRRequired()
367 264 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
368 265 'repository.admin')
369 266 @jsonify
370 267 def get_repo_destinations(self, repo_name):
371 268 repo = Repository.get_by_repo_name(repo_name)
372 269 if not repo:
373 270 raise HTTPNotFound
374 271 filter_query = request.GET.get('query')
375 272
376 273 query = Repository.query() \
377 274 .order_by(func.length(Repository.repo_name)) \
378 275 .filter(or_(
379 276 Repository.repo_name == repo.repo_name,
380 277 Repository.fork_id == repo.repo_id))
381 278
382 279 if filter_query:
383 280 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
384 281 query = query.filter(
385 282 Repository.repo_name.ilike(ilike_expression))
386 283
387 284 add_parent = False
388 285 if repo.parent:
389 286 if filter_query in repo.parent.repo_name:
390 287 parent_vcs_obj = repo.parent.scm_instance()
391 288 if parent_vcs_obj and not parent_vcs_obj.is_empty():
392 289 add_parent = True
393 290
394 291 limit = 20 - 1 if add_parent else 20
395 292 all_repos = query.limit(limit).all()
396 293 if add_parent:
397 294 all_repos += [repo.parent]
398 295
399 296 repos = []
400 297 for obj in self.scm_model.get_repos(all_repos):
401 298 repos.append({
402 299 'id': obj['name'],
403 300 'text': obj['name'],
404 301 'type': 'repo',
405 302 'obj': obj['dbrepo']
406 303 })
407 304
408 305 data = {
409 306 'more': False,
410 307 'results': [{
411 308 'text': _('Repositories'),
412 309 'children': repos
413 310 }] if repos else []
414 311 }
415 312 return data
416 313
417 314 @LoginRequired()
418 315 @NotAnonymous()
419 316 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
420 317 'repository.admin')
421 318 @HasAcceptedRepoType('git', 'hg')
422 319 @auth.CSRFRequired()
423 320 def create(self, repo_name):
424 321 repo = Repository.get_by_repo_name(repo_name)
425 322 if not repo:
426 323 raise HTTPNotFound
427 324
428 325 controls = peppercorn.parse(request.POST.items())
429 326
430 327 try:
431 328 _form = PullRequestForm(repo.repo_id)().to_python(controls)
432 329 except formencode.Invalid as errors:
433 330 if errors.error_dict.get('revisions'):
434 331 msg = 'Revisions: %s' % errors.error_dict['revisions']
435 332 elif errors.error_dict.get('pullrequest_title'):
436 333 msg = _('Pull request requires a title with min. 3 chars')
437 334 else:
438 335 msg = _('Error creating pull request: {}').format(errors)
439 336 log.exception(msg)
440 337 h.flash(msg, 'error')
441 338
442 339 # would rather just go back to form ...
443 340 return redirect(url('pullrequest_home', repo_name=repo_name))
444 341
445 342 source_repo = _form['source_repo']
446 343 source_ref = _form['source_ref']
447 344 target_repo = _form['target_repo']
448 345 target_ref = _form['target_ref']
449 346 commit_ids = _form['revisions'][::-1]
450 347 reviewers = [
451 348 (r['user_id'], r['reasons']) for r in _form['review_members']]
452 349
453 350 # find the ancestor for this pr
454 351 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
455 352 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
456 353
457 354 source_scm = source_db_repo.scm_instance()
458 355 target_scm = target_db_repo.scm_instance()
459 356
460 357 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
461 358 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
462 359
463 360 ancestor = source_scm.get_common_ancestor(
464 361 source_commit.raw_id, target_commit.raw_id, target_scm)
465 362
466 363 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
467 364 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
468 365
469 366 pullrequest_title = _form['pullrequest_title']
470 367 title_source_ref = source_ref.split(':', 2)[1]
471 368 if not pullrequest_title:
472 369 pullrequest_title = PullRequestModel().generate_pullrequest_title(
473 370 source=source_repo,
474 371 source_ref=title_source_ref,
475 372 target=target_repo
476 373 )
477 374
478 375 description = _form['pullrequest_desc']
479 376 try:
480 377 pull_request = PullRequestModel().create(
481 378 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
482 379 target_ref, commit_ids, reviewers, pullrequest_title,
483 380 description
484 381 )
485 382 Session().commit()
486 383 h.flash(_('Successfully opened new pull request'),
487 384 category='success')
488 385 except Exception as e:
489 386 msg = _('Error occurred during sending pull request')
490 387 log.exception(msg)
491 388 h.flash(msg, category='error')
492 389 return redirect(url('pullrequest_home', repo_name=repo_name))
493 390
494 391 return redirect(url('pullrequest_show', repo_name=target_repo,
495 392 pull_request_id=pull_request.pull_request_id))
496 393
497 394 @LoginRequired()
498 395 @NotAnonymous()
499 396 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
500 397 'repository.admin')
501 398 @auth.CSRFRequired()
502 399 @jsonify
503 400 def update(self, repo_name, pull_request_id):
504 401 pull_request_id = safe_int(pull_request_id)
505 402 pull_request = PullRequest.get_or_404(pull_request_id)
506 403 # only owner or admin can update it
507 404 allowed_to_update = PullRequestModel().check_user_update(
508 405 pull_request, c.rhodecode_user)
509 406 if allowed_to_update:
510 407 controls = peppercorn.parse(request.POST.items())
511 408
512 409 if 'review_members' in controls:
513 410 self._update_reviewers(
514 411 pull_request_id, controls['review_members'])
515 412 elif str2bool(request.POST.get('update_commits', 'false')):
516 413 self._update_commits(pull_request)
517 414 elif str2bool(request.POST.get('close_pull_request', 'false')):
518 415 self._reject_close(pull_request)
519 416 elif str2bool(request.POST.get('edit_pull_request', 'false')):
520 417 self._edit_pull_request(pull_request)
521 418 else:
522 419 raise HTTPBadRequest()
523 420 return True
524 421 raise HTTPForbidden()
525 422
526 423 def _edit_pull_request(self, pull_request):
527 424 try:
528 425 PullRequestModel().edit(
529 426 pull_request, request.POST.get('title'),
530 427 request.POST.get('description'))
531 428 except ValueError:
532 429 msg = _(u'Cannot update closed pull requests.')
533 430 h.flash(msg, category='error')
534 431 return
535 432 else:
536 433 Session().commit()
537 434
538 435 msg = _(u'Pull request title & description updated.')
539 436 h.flash(msg, category='success')
540 437 return
541 438
542 439 def _update_commits(self, pull_request):
543 440 resp = PullRequestModel().update_commits(pull_request)
544 441
545 442 if resp.executed:
546 443 msg = _(
547 444 u'Pull request updated to "{source_commit_id}" with '
548 445 u'{count_added} added, {count_removed} removed commits.')
549 446 msg = msg.format(
550 447 source_commit_id=pull_request.source_ref_parts.commit_id,
551 448 count_added=len(resp.changes.added),
552 449 count_removed=len(resp.changes.removed))
553 450 h.flash(msg, category='success')
554 451
555 452 registry = get_current_registry()
556 453 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
557 454 channelstream_config = rhodecode_plugins.get('channelstream', {})
558 455 if channelstream_config.get('enabled'):
559 456 message = msg + (
560 457 ' - <a onclick="window.location.reload()">'
561 458 '<strong>{}</strong></a>'.format(_('Reload page')))
562 459 channel = '/repo${}$/pr/{}'.format(
563 460 pull_request.target_repo.repo_name,
564 461 pull_request.pull_request_id
565 462 )
566 463 payload = {
567 464 'type': 'message',
568 465 'user': 'system',
569 466 'exclude_users': [request.user.username],
570 467 'channel': channel,
571 468 'message': {
572 469 'message': message,
573 470 'level': 'success',
574 471 'topic': '/notifications'
575 472 }
576 473 }
577 474 channelstream_request(
578 475 channelstream_config, [payload], '/message',
579 476 raise_exc=False)
580 477 else:
581 478 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
582 479 warning_reasons = [
583 480 UpdateFailureReason.NO_CHANGE,
584 481 UpdateFailureReason.WRONG_REF_TPYE,
585 482 ]
586 483 category = 'warning' if resp.reason in warning_reasons else 'error'
587 484 h.flash(msg, category=category)
588 485
589 486 @auth.CSRFRequired()
590 487 @LoginRequired()
591 488 @NotAnonymous()
592 489 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
593 490 'repository.admin')
594 491 def merge(self, repo_name, pull_request_id):
595 492 """
596 493 POST /{repo_name}/pull-request/{pull_request_id}
597 494
598 495 Merge will perform a server-side merge of the specified
599 496 pull request, if the pull request is approved and mergeable.
600 497 After successful merging, the pull request is automatically
601 498 closed, with a relevant comment.
602 499 """
603 500 pull_request_id = safe_int(pull_request_id)
604 501 pull_request = PullRequest.get_or_404(pull_request_id)
605 502 user = c.rhodecode_user
606 503
607 504 check = MergeCheck.validate(pull_request, user)
608 505 merge_possible = not check.failed
609 506
610 507 for err_type, error_msg in check.errors:
611 508 h.flash(error_msg, category=err_type)
612 509
613 510 if merge_possible:
614 511 log.debug("Pre-conditions checked, trying to merge.")
615 512 extras = vcs_operation_context(
616 513 request.environ, repo_name=pull_request.target_repo.repo_name,
617 514 username=user.username, action='push',
618 515 scm=pull_request.target_repo.repo_type)
619 516 self._merge_pull_request(pull_request, user, extras)
620 517
621 518 return redirect(url(
622 519 'pullrequest_show',
623 520 repo_name=pull_request.target_repo.repo_name,
624 521 pull_request_id=pull_request.pull_request_id))
625 522
626 523 def _merge_pull_request(self, pull_request, user, extras):
627 524 merge_resp = PullRequestModel().merge(
628 525 pull_request, user, extras=extras)
629 526
630 527 if merge_resp.executed:
631 528 log.debug("The merge was successful, closing the pull request.")
632 529 PullRequestModel().close_pull_request(
633 530 pull_request.pull_request_id, user)
634 531 Session().commit()
635 532 msg = _('Pull request was successfully merged and closed.')
636 533 h.flash(msg, category='success')
637 534 else:
638 535 log.debug(
639 536 "The merge was not successful. Merge response: %s",
640 537 merge_resp)
641 538 msg = PullRequestModel().merge_status_message(
642 539 merge_resp.failure_reason)
643 540 h.flash(msg, category='error')
644 541
645 542 def _update_reviewers(self, pull_request_id, review_members):
646 543 reviewers = [
647 544 (int(r['user_id']), r['reasons']) for r in review_members]
648 545 PullRequestModel().update_reviewers(pull_request_id, reviewers)
649 546 Session().commit()
650 547
651 548 def _reject_close(self, pull_request):
652 549 if pull_request.is_closed():
653 550 raise HTTPForbidden()
654 551
655 552 PullRequestModel().close_pull_request_with_comment(
656 553 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
657 554 Session().commit()
658 555
659 556 @LoginRequired()
660 557 @NotAnonymous()
661 558 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
662 559 'repository.admin')
663 560 @auth.CSRFRequired()
664 561 @jsonify
665 562 def delete(self, repo_name, pull_request_id):
666 563 pull_request_id = safe_int(pull_request_id)
667 564 pull_request = PullRequest.get_or_404(pull_request_id)
668 565 # only owner can delete it !
669 566 if pull_request.author.user_id == c.rhodecode_user.user_id:
670 567 PullRequestModel().delete(pull_request)
671 568 Session().commit()
672 569 h.flash(_('Successfully deleted pull request'),
673 570 category='success')
674 571 return redirect(url('my_account_pullrequests'))
675 572 raise HTTPForbidden()
676 573
677 574 def _get_pr_version(self, pull_request_id, version=None):
678 575 pull_request_id = safe_int(pull_request_id)
679 576 at_version = None
680 577
681 578 if version and version == 'latest':
682 579 pull_request_ver = PullRequest.get(pull_request_id)
683 580 pull_request_obj = pull_request_ver
684 581 _org_pull_request_obj = pull_request_obj
685 582 at_version = 'latest'
686 583 elif version:
687 584 pull_request_ver = PullRequestVersion.get_or_404(version)
688 585 pull_request_obj = pull_request_ver
689 586 _org_pull_request_obj = pull_request_ver.pull_request
690 587 at_version = pull_request_ver.pull_request_version_id
691 588 else:
692 589 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(pull_request_id)
693 590
694 591 pull_request_display_obj = PullRequest.get_pr_display_object(
695 592 pull_request_obj, _org_pull_request_obj)
593
696 594 return _org_pull_request_obj, pull_request_obj, \
697 595 pull_request_display_obj, at_version
698 596
699 def _get_pr_version_changes(self, version, pull_request_latest):
700 """
701 Generate changes commits, and diff data based on the current pr version
702 """
703
 704         # TODO(marcink): save those changes as JSON metadata for caching later.
705
706 # fake the version to add the "initial" state object
707 pull_request_initial = PullRequest.get_pr_display_object(
708 pull_request_latest, pull_request_latest,
709 internal_methods=['get_commit', 'versions'])
710 pull_request_initial.revisions = []
711 pull_request_initial.source_repo.get_commit = types.MethodType(
712 lambda *a, **k: EmptyCommit(), pull_request_initial)
713 pull_request_initial.source_repo.scm_instance = types.MethodType(
714 lambda *a, **k: EmptyRepository(), pull_request_initial)
715
716 _changes_versions = [pull_request_latest] + \
717 list(reversed(c.versions)) + \
718 [pull_request_initial]
719
720 if version == 'latest':
721 index = 0
722 else:
723 for pos, prver in enumerate(_changes_versions):
724 ver = getattr(prver, 'pull_request_version_id', -1)
725 if ver == safe_int(version):
726 index = pos
727 break
728 else:
729 index = 0
730
731 cur_obj = _changes_versions[index]
732 prev_obj = _changes_versions[index + 1]
733
734 old_commit_ids = set(prev_obj.revisions)
735 new_commit_ids = set(cur_obj.revisions)
736
737 changes = PullRequestModel()._calculate_commit_id_changes(
738 old_commit_ids, new_commit_ids)
739
740 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
741 cur_obj, prev_obj)
742 file_changes = PullRequestModel()._calculate_file_changes(
743 old_diff_data, new_diff_data)
744 return changes, file_changes
745
746 597 @LoginRequired()
747 598 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
748 599 'repository.admin')
749 600 def show(self, repo_name, pull_request_id):
750 601 pull_request_id = safe_int(pull_request_id)
751 602 version = request.GET.get('version')
603 from_version = request.GET.get('from_version') or version
752 604 merge_checks = request.GET.get('merge_checks')
605 c.fulldiff = str2bool(request.GET.get('fulldiff'))
606
607 # register for JS templates
608 c.template_context['pull_request_data']['pull_request_id'] = \
609 pull_request_id
753 610
754 611 (pull_request_latest,
755 612 pull_request_at_ver,
756 613 pull_request_display_obj,
757 at_version) = self._get_pr_version(pull_request_id, version=version)
614 at_version) = self._get_pr_version(
615 pull_request_id, version=version)
616 versions = pull_request_display_obj.versions()
617
618 c.at_version = at_version
619 c.at_version_num = (at_version
620 if at_version and at_version != 'latest'
621 else None)
622 c.at_version_pos = ChangesetComment.get_index_from_version(
623 c.at_version_num, versions)
758 624
759 c.template_context['pull_request_data']['pull_request_id'] = \
760 pull_request_id
625 (prev_pull_request_latest,
626 prev_pull_request_at_ver,
627 prev_pull_request_display_obj,
628 prev_at_version) = self._get_pr_version(
629 pull_request_id, version=from_version)
630
631 c.from_version = prev_at_version
632 c.from_version_num = (prev_at_version
633 if prev_at_version and prev_at_version != 'latest'
634 else None)
635 c.from_version_pos = ChangesetComment.get_index_from_version(
636 c.from_version_num, versions)
637
638 # define if we're in COMPARE mode or VIEW at version mode
639 compare = at_version != prev_at_version
761 640
762 641 # pull_requests repo_name we opened it against
763 642 # ie. target_repo must match
764 643 if repo_name != pull_request_at_ver.target_repo.repo_name:
765 644 raise HTTPNotFound
766 645
767 646 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
768 647 pull_request_at_ver)
769 648
770 c.ancestor = None # TODO: add ancestor here
649 c.ancestor = None # empty ancestor hidden in display
771 650 c.pull_request = pull_request_display_obj
772 651 c.pull_request_latest = pull_request_latest
773 652
774 653 pr_closed = pull_request_latest.is_closed()
775 if at_version and not at_version == 'latest':
654 if compare or (at_version and not at_version == 'latest'):
776 655 c.allowed_to_change_status = False
777 656 c.allowed_to_update = False
778 657 c.allowed_to_merge = False
779 658 c.allowed_to_delete = False
780 659 c.allowed_to_comment = False
781 660 else:
782 661 c.allowed_to_change_status = PullRequestModel(). \
783 662 check_user_change_status(pull_request_at_ver, c.rhodecode_user)
784 663 c.allowed_to_update = PullRequestModel().check_user_update(
785 664 pull_request_latest, c.rhodecode_user) and not pr_closed
786 665 c.allowed_to_merge = PullRequestModel().check_user_merge(
787 666 pull_request_latest, c.rhodecode_user) and not pr_closed
788 667 c.allowed_to_delete = PullRequestModel().check_user_delete(
789 668 pull_request_latest, c.rhodecode_user) and not pr_closed
790 669 c.allowed_to_comment = not pr_closed
791 670
792 cc_model = CommentsModel()
671 # check merge capabilities
672 _merge_check = MergeCheck.validate(
673 pull_request_latest, user=c.rhodecode_user)
674 c.pr_merge_errors = _merge_check.error_details
675 c.pr_merge_possible = not _merge_check.failed
676 c.pr_merge_message = _merge_check.merge_msg
793 677
678 if merge_checks:
679 return render('/pullrequests/pullrequest_merge_checks.mako')
680
681 comments_model = CommentsModel()
682
683 # reviewers and statuses
794 684 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
685 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
795 686 c.pull_request_review_status = pull_request_at_ver.calculated_review_status()
796 687
797 c.versions = pull_request_display_obj.versions()
798 c.at_version = at_version
799 c.at_version_num = at_version if at_version and at_version != 'latest' else None
800 c.at_version_pos = ChangesetComment.get_index_from_version(
801 c.at_version_num, c.versions)
802
803 688 # GENERAL COMMENTS with versions #
804 q = cc_model._all_general_comments_of_pull_request(pull_request_latest)
689 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
690 q = q.order_by(ChangesetComment.comment_id.asc())
805 691 general_comments = q.order_by(ChangesetComment.pull_request_version_id.asc())
806 692
807 693 # pick comments we want to render at current version
808 c.comment_versions = cc_model.aggregate_comments(
809 general_comments, c.versions, c.at_version_num)
694 c.comment_versions = comments_model.aggregate_comments(
695 general_comments, versions, c.at_version_num)
810 696 c.comments = c.comment_versions[c.at_version_num]['until']
811 697
812 698 # INLINE COMMENTS with versions #
813 q = cc_model._all_inline_comments_of_pull_request(pull_request_latest)
699 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
700 q = q.order_by(ChangesetComment.comment_id.asc())
814 701 inline_comments = q.order_by(ChangesetComment.pull_request_version_id.asc())
815 c.inline_versions = cc_model.aggregate_comments(
816 inline_comments, c.versions, c.at_version_num, inline=True)
702 c.inline_versions = comments_model.aggregate_comments(
703 inline_comments, versions, c.at_version_num, inline=True)
704
705 # inject latest version
706 latest_ver = PullRequest.get_pr_display_object(
707 pull_request_latest, pull_request_latest)
708
709 c.versions = versions + [latest_ver]
817 710
818 711 # if we use version, then do not show later comments
819 712 # than current version
820 display_inline_comments = collections.defaultdict(lambda: collections.defaultdict(list))
713 display_inline_comments = collections.defaultdict(
714 lambda: collections.defaultdict(list))
821 715 for co in inline_comments:
822 716 if c.at_version_num:
823 717 # pick comments that are at least UPTO given version, so we
824 718 # don't render comments for higher version
825 719 should_render = co.pull_request_version_id and \
826 720 co.pull_request_version_id <= c.at_version_num
827 721 else:
828 722 # showing all, for 'latest'
829 723 should_render = True
830 724
831 725 if should_render:
832 726 display_inline_comments[co.f_path][co.line_no].append(co)
833 727
834 _merge_check = MergeCheck.validate(
835 pull_request_latest, user=c.rhodecode_user)
836 c.pr_merge_errors = _merge_check.error_details
837 c.pr_merge_possible = not _merge_check.failed
838 c.pr_merge_message = _merge_check.merge_msg
728 # load diff data into template context, if we use compare mode then
729 # diff is calculated based on changes between versions of PR
730
731 source_repo = pull_request_at_ver.source_repo
732 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
733
734 target_repo = pull_request_at_ver.target_repo
735 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
736
737 if compare:
738 # in compare switch the diff base to latest commit from prev version
739 target_ref_id = prev_pull_request_display_obj.revisions[0]
740
741 # despite opening commits for bookmarks/branches/tags, we always
742 # convert this to rev to prevent changes after bookmark or branch change
743 c.source_ref_type = 'rev'
744 c.source_ref = source_ref_id
745
746 c.target_ref_type = 'rev'
747 c.target_ref = target_ref_id
748
749 c.source_repo = source_repo
750 c.target_repo = target_repo
751
752 # diff_limit is the old behavior, will cut off the whole diff
753 # if the limit is applied otherwise will just hide the
754 # big files from the front-end
755 diff_limit = self.cut_off_limit_diff
756 file_limit = self.cut_off_limit_file
757
758 c.commit_ranges = []
759 source_commit = EmptyCommit()
760 target_commit = EmptyCommit()
761 c.missing_requirements = False
762
763 # try first shadow repo, fallback to regular repo
764 try:
765 commits_source_repo = pull_request_latest.get_shadow_repo()
766 except Exception:
767 log.debug('Failed to get shadow repo', exc_info=True)
768 commits_source_repo = source_repo.scm_instance()
769
770 c.commits_source_repo = commits_source_repo
771 commit_cache = {}
772 try:
773 pre_load = ["author", "branch", "date", "message"]
774 show_revs = pull_request_at_ver.revisions
775 for rev in show_revs:
776 comm = commits_source_repo.get_commit(
777 commit_id=rev, pre_load=pre_load)
778 c.commit_ranges.append(comm)
779 commit_cache[comm.raw_id] = comm
839 780
840 if merge_checks:
841 return render('/pullrequests/pullrequest_merge_checks.mako')
781 target_commit = commits_source_repo.get_commit(
782 commit_id=safe_str(target_ref_id))
783 source_commit = commits_source_repo.get_commit(
784 commit_id=safe_str(source_ref_id))
785 except CommitDoesNotExistError:
786 pass
787 except RepositoryRequirementError:
788 log.warning(
789 'Failed to get all required data from repo', exc_info=True)
790 c.missing_requirements = True
791
792 c.statuses = source_repo.statuses(
793 [x.raw_id for x in c.commit_ranges])
794
795 # auto collapse if we have more than limit
796 collapse_limit = diffs.DiffProcessor._collapse_commits_over
797 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
798 c.compare_mode = compare
799
800 c.missing_commits = False
801 if (c.missing_requirements or isinstance(source_commit, EmptyCommit)
802 or source_commit == target_commit):
803
804 c.missing_commits = True
805 else:
806 vcs_diff = PullRequestModel().get_diff(
807 commits_source_repo, source_ref_id, target_ref_id)
808
809 diff_processor = diffs.DiffProcessor(
810 vcs_diff, format='newdiff', diff_limit=diff_limit,
811 file_limit=file_limit, show_full_diff=c.fulldiff)
842 812
843 # load compare data into template context
844 self._load_compare_data(pull_request_at_ver, display_inline_comments)
813 _parsed = diff_processor.prepare()
814 c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)
815
816 def _node_getter(commit):
817 def get_node(fname):
818 try:
819 return commit.get_node(fname)
820 except NodeDoesNotExistError:
821 return None
822
823 return get_node
824
825 diffset = codeblocks.DiffSet(
826 repo_name=c.repo_name,
827 source_repo_name=c.source_repo.repo_name,
828 source_node_getter=_node_getter(target_commit),
829 target_node_getter=_node_getter(source_commit),
830 comments=display_inline_comments
831 )
832 c.diffset = diffset.render_patchset(
833 _parsed, target_commit.raw_id, source_commit.raw_id)
834
835 # calculate removed files that are bound to comments
836 comment_deleted_files = [
837 fname for fname in display_inline_comments
838 if fname not in c.diffset.file_stats]
839
840 c.deleted_files_comments = collections.defaultdict(dict)
841 for fname, per_line_comments in display_inline_comments.items():
842 if fname in comment_deleted_files:
843 c.deleted_files_comments[fname]['stats'] = 0
844 c.deleted_files_comments[fname]['comments'] = list()
845 for lno, comments in per_line_comments.items():
846 c.deleted_files_comments[fname]['comments'].extend(
847 comments)
845 848
846 849 # this is a hack to properly display links, when creating PR, the
847 850 # compare view and others uses different notation, and
848 851 # compare_commits.mako renders links based on the target_repo.
849 852 # We need to swap that here to generate it properly on the html side
850 853 c.target_repo = c.source_repo
851 854
852 855 if c.allowed_to_update:
853 856 force_close = ('forced_closed', _('Close Pull Request'))
854 857 statuses = ChangesetStatus.STATUSES + [force_close]
855 858 else:
856 859 statuses = ChangesetStatus.STATUSES
857 860 c.commit_statuses = statuses
858 861
859 c.changes = None
860 c.file_changes = None
862 c.show_version_changes = not pr_closed
863 if c.show_version_changes:
864 cur_obj = pull_request_at_ver
865 prev_obj = prev_pull_request_at_ver
866
867 old_commit_ids = prev_obj.revisions
868 new_commit_ids = cur_obj.revisions
869 commit_changes = PullRequestModel()._calculate_commit_id_changes(
870 old_commit_ids, new_commit_ids)
871 c.commit_changes_summary = commit_changes
872
873 # calculate the diff for commits between versions
874 c.commit_changes = []
875 mark = lambda cs, fw: list(
876 h.itertools.izip_longest([], cs, fillvalue=fw))
877 for c_type, raw_id in mark(commit_changes.added, 'a') \
878 + mark(commit_changes.removed, 'r') \
879 + mark(commit_changes.common, 'c'):
861 880
862 c.show_version_changes = 1 # control flag, not used yet
881 if raw_id in commit_cache:
882 commit = commit_cache[raw_id]
883 else:
884 try:
885 commit = commits_source_repo.get_commit(raw_id)
886 except CommitDoesNotExistError:
887 # in case we fail extracting still use "dummy" commit
888 # for display in commit diff
889 commit = h.AttributeDict(
890 {'raw_id': raw_id,
891 'message': 'EMPTY or MISSING COMMIT'})
892 c.commit_changes.append([c_type, commit])
863 893
864 if at_version and c.show_version_changes:
865 c.changes, c.file_changes = self._get_pr_version_changes(
866 version, pull_request_latest)
894 # current user review statuses for each version
895 c.review_versions = {}
896 if c.rhodecode_user.user_id in allowed_reviewers:
897 for co in general_comments:
898 if co.author.user_id == c.rhodecode_user.user_id:
899 # each comment has a status change
900 status = co.status_change
901 if status:
902 _ver_pr = status[0].comment.pull_request_version_id
903 c.review_versions[_ver_pr] = status[0]
867 904
868 905 return render('/pullrequests/pullrequest_show.mako')
869 906
870 907 @LoginRequired()
871 908 @NotAnonymous()
872 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
873 'repository.admin')
909 @HasRepoPermissionAnyDecorator(
910 'repository.read', 'repository.write', 'repository.admin')
874 911 @auth.CSRFRequired()
875 912 @jsonify
876 913 def comment(self, repo_name, pull_request_id):
877 914 pull_request_id = safe_int(pull_request_id)
878 915 pull_request = PullRequest.get_or_404(pull_request_id)
879 916 if pull_request.is_closed():
880 917 raise HTTPForbidden()
881 918
882 919 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
883 920 # as a changeset status, still we want to send it in one value.
884 921 status = request.POST.get('changeset_status', None)
885 922 text = request.POST.get('text')
886 923 comment_type = request.POST.get('comment_type')
887 924 resolves_comment_id = request.POST.get('resolves_comment_id', None)
888 925
889 926 if status and '_closed' in status:
890 927 close_pr = True
891 928 status = status.replace('_closed', '')
892 929 else:
893 930 close_pr = False
894 931
895 932 forced = (status == 'forced')
896 933 if forced:
897 934 status = 'rejected'
898 935
899 936 allowed_to_change_status = PullRequestModel().check_user_change_status(
900 937 pull_request, c.rhodecode_user)
901 938
902 939 if status and allowed_to_change_status:
903 940 message = (_('Status change %(transition_icon)s %(status)s')
904 941 % {'transition_icon': '>',
905 942 'status': ChangesetStatus.get_status_lbl(status)})
906 943 if close_pr:
907 944 message = _('Closing with') + ' ' + message
908 945 text = text or message
909 946 comm = CommentsModel().create(
910 947 text=text,
911 948 repo=c.rhodecode_db_repo.repo_id,
912 949 user=c.rhodecode_user.user_id,
913 950 pull_request=pull_request_id,
914 951 f_path=request.POST.get('f_path'),
915 952 line_no=request.POST.get('line'),
916 953 status_change=(ChangesetStatus.get_status_lbl(status)
917 954 if status and allowed_to_change_status else None),
918 955 status_change_type=(status
919 956 if status and allowed_to_change_status else None),
920 957 closing_pr=close_pr,
921 958 comment_type=comment_type,
922 959 resolves_comment_id=resolves_comment_id
923 960 )
924 961
925 962 if allowed_to_change_status:
926 963 old_calculated_status = pull_request.calculated_review_status()
927 964 # get status if set !
928 965 if status:
929 966 ChangesetStatusModel().set_status(
930 967 c.rhodecode_db_repo.repo_id,
931 968 status,
932 969 c.rhodecode_user.user_id,
933 970 comm,
934 971 pull_request=pull_request_id
935 972 )
936 973
937 974 Session().flush()
938 975 events.trigger(events.PullRequestCommentEvent(pull_request, comm))
939 976 # we now calculate the status of pull request, and based on that
940 977 # calculation we set the commits status
941 978 calculated_status = pull_request.calculated_review_status()
942 979 if old_calculated_status != calculated_status:
943 980 PullRequestModel()._trigger_pull_request_hook(
944 981 pull_request, c.rhodecode_user, 'review_status_change')
945 982
946 983 calculated_status_lbl = ChangesetStatus.get_status_lbl(
947 984 calculated_status)
948 985
949 986 if close_pr:
950 987 status_completed = (
951 988 calculated_status in [ChangesetStatus.STATUS_APPROVED,
952 989 ChangesetStatus.STATUS_REJECTED])
953 990 if forced or status_completed:
954 991 PullRequestModel().close_pull_request(
955 992 pull_request_id, c.rhodecode_user)
956 993 else:
957 994 h.flash(_('Closing pull request on other statuses than '
958 995 'rejected or approved is forbidden. '
959 996 'Calculated status from all reviewers '
960 997 'is currently: %s') % calculated_status_lbl,
961 998 category='warning')
962 999
963 1000 Session().commit()
964 1001
965 1002 if not request.is_xhr:
966 1003 return redirect(h.url('pullrequest_show', repo_name=repo_name,
967 1004 pull_request_id=pull_request_id))
968 1005
969 1006 data = {
970 1007 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
971 1008 }
972 1009 if comm:
973 1010 c.co = comm
974 1011 c.inline_comment = True if comm.line_no else False
975 1012 data.update(comm.get_dict())
976 1013 data.update({'rendered_text':
977 1014 render('changeset/changeset_comment_block.mako')})
978 1015
979 1016 return data
980 1017
981 1018 @LoginRequired()
982 1019 @NotAnonymous()
983 1020 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
984 1021 'repository.admin')
985 1022 @auth.CSRFRequired()
986 1023 @jsonify
987 1024 def delete_comment(self, repo_name, comment_id):
988 1025 return self._delete_comment(comment_id)
989 1026
990 1027 def _delete_comment(self, comment_id):
991 1028 comment_id = safe_int(comment_id)
992 1029 co = ChangesetComment.get_or_404(comment_id)
993 1030 if co.pull_request.is_closed():
994 1031 # don't allow deleting comments on closed pull request
995 1032 raise HTTPForbidden()
996 1033
997 1034 is_owner = co.author.user_id == c.rhodecode_user.user_id
998 1035 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
999 1036 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
1000 1037 old_calculated_status = co.pull_request.calculated_review_status()
1001 1038 CommentsModel().delete(comment=co)
1002 1039 Session().commit()
1003 1040 calculated_status = co.pull_request.calculated_review_status()
1004 1041 if old_calculated_status != calculated_status:
1005 1042 PullRequestModel()._trigger_pull_request_hook(
1006 1043 co.pull_request, c.rhodecode_user, 'review_status_change')
1007 1044 return True
1008 1045 else:
1009 1046 raise HTTPForbidden()
@@ -1,701 +1,703 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import difflib
23 23 from itertools import groupby
24 24
25 25 from pygments import lex
26 26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 27 from rhodecode.lib.helpers import (
28 28 get_lexer_for_filenode, html_escape)
29 29 from rhodecode.lib.utils2 import AttributeDict
30 30 from rhodecode.lib.vcs.nodes import FileNode
31 31 from rhodecode.lib.diff_match_patch import diff_match_patch
32 32 from rhodecode.lib.diffs import LimitedDiffContainer
33 33 from pygments.lexers import get_lexer_by_name
34 34
35 35 plain_text_lexer = get_lexer_by_name(
36 36 'text', stripall=False, stripnl=False, ensurenl=False)
37 37
38 38
39 39 log = logging.getLogger()
40 40
41 41
42 42 def filenode_as_lines_tokens(filenode, lexer=None):
43 43 org_lexer = lexer
44 44 lexer = lexer or get_lexer_for_filenode(filenode)
45 45 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
46 46 lexer, filenode, org_lexer)
47 47 tokens = tokenize_string(filenode.content, lexer)
48 48 lines = split_token_stream(tokens, split_string='\n')
49 49 rv = list(lines)
50 50 return rv
51 51
52 52
53 53 def tokenize_string(content, lexer):
54 54 """
55 55 Use pygments to tokenize some content based on a lexer
56 56 ensuring all original new lines and whitespace is preserved
57 57 """
58 58
59 59 lexer.stripall = False
60 60 lexer.stripnl = False
61 61 lexer.ensurenl = False
62 62 for token_type, token_text in lex(content, lexer):
63 63 yield pygment_token_class(token_type), token_text
64 64
65 65
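# --- editor's illustration, not part of the original module ----------------
# tokenize_string() keeps the original text intact; with the plain text
# lexer defined at the top of this file:
#
#     tokens = tokenize_string(u'x = 1\n', plain_text_lexer)
#     list(tokens)  # -> pairs like ('', u'x = 1\n'); the first item is the
#                   #    pygments CSS class ('' for plain text), and all
#                   #    whitespace/newlines are preserved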
66 66 def split_token_stream(tokens, split_string=u'\n'):
67 67 """
68 68 Take a list of (TokenType, text) tuples and split them by a string
69 69
70 70 >>> split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
71 71 [(TEXT, 'some'), (TEXT, 'text'),
72 72 (TEXT, 'more'), (TEXT, 'text')]
73 73 """
74 74
75 75 buffer = []
76 76 for token_class, token_text in tokens:
77 77 parts = token_text.split(split_string)
78 78 for part in parts[:-1]:
79 79 buffer.append((token_class, part))
80 80 yield buffer
81 81 buffer = []
82 82
83 83 buffer.append((token_class, parts[-1]))
84 84
85 85 if buffer:
86 86 yield buffer
87 87
88 88
89 89 def filenode_as_annotated_lines_tokens(filenode):
90 90 """
91 91 Take a file node and return a list of annotations => lines, if no annotation
92 92 is found, it will be None.
93 93
94 94 eg:
95 95
96 96 [
97 97 (annotation1, [
98 98 (1, line1_tokens_list),
99 99 (2, line2_tokens_list),
100 100 ]),
101 101 (annotation2, [
102 102 (3, line1_tokens_list),
103 103 ]),
104 104 (None, [
105 105 (4, line1_tokens_list),
106 106 ]),
107 107 (annotation1, [
108 108 (5, line1_tokens_list),
109 109 (6, line2_tokens_list),
110 110 ])
111 111 ]
112 112 """
113 113
114 114 commit_cache = {} # cache commit_getter lookups
115 115
116 116 def _get_annotation(commit_id, commit_getter):
117 117 if commit_id not in commit_cache:
118 118 commit_cache[commit_id] = commit_getter()
119 119 return commit_cache[commit_id]
120 120
121 121 annotation_lookup = {
122 122 line_no: _get_annotation(commit_id, commit_getter)
123 123 for line_no, commit_id, commit_getter, line_content
124 124 in filenode.annotate
125 125 }
126 126
127 127 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
128 128 for line_no, tokens
129 129 in enumerate(filenode_as_lines_tokens(filenode), 1))
130 130
131 131 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
132 132
133 133 for annotation, group in grouped_annotations_lines:
134 134 yield (
135 135 annotation, [(line_no, tokens)
136 136 for (_, line_no, tokens) in group]
137 137 )
138 138
139 139
140 140 def render_tokenstream(tokenstream):
141 141 result = []
142 142 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
143 143
144 144 if token_class:
145 145 result.append(u'<span class="%s">' % token_class)
146 146 else:
147 147 result.append(u'<span>')
148 148
149 149 for op_tag, token_text in token_ops_texts:
150 150
151 151 if op_tag:
152 152 result.append(u'<%s>' % op_tag)
153 153
154 154 escaped_text = html_escape(token_text)
155 155
156 156 # TODO: dan: investigate showing hidden characters like space/nl/tab
157 157 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
158 158 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
159 159 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
160 160
161 161 result.append(escaped_text)
162 162
163 163 if op_tag:
164 164 result.append(u'</%s>' % op_tag)
165 165
166 166 result.append(u'</span>')
167 167
168 168 html = ''.join(result)
169 169 return html
170 170
171 171
172 172 def rollup_tokenstream(tokenstream):
173 173 """
174 174 Group a token stream of the format:
175 175
176 176 ('class', 'op', 'text')
177 177 or
178 178 ('class', 'text')
179 179
180 180 into
181 181
182 182 [('class1',
183 183 [('op1', 'text'),
184 184 ('op2', 'text')]),
185 185 ('class2',
186 186 [('op3', 'text')])]
187 187
188 188 This is used to get the minimal tags necessary when
189 189 rendering to html eg for a token stream ie.
190 190
191 191 <span class="A"><ins>he</ins>llo</span>
192 192 vs
193 193 <span class="A"><ins>he</ins></span><span class="A">llo</span>
194 194
195 195 If a 2 tuple is passed in, the output op will be an empty string.
196 196
197 197 eg:
198 198
199 199 >>> rollup_tokenstream([('classA', '', 'h'),
200 200 ('classA', 'del', 'ell'),
201 201 ('classA', '', 'o'),
202 202 ('classB', '', ' '),
203 203 ('classA', '', 'the'),
204 204 ('classA', '', 're'),
205 205 ])
206 206
207 207 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
208 208 ('classB', [('', ' ')],
209 209 ('classA', [('', 'there')]]
210 210
211 211 """
212 212 if tokenstream and len(tokenstream[0]) == 2:
213 213 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
214 214
215 215 result = []
216 216 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
217 217 ops = []
218 218 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
219 219 text_buffer = []
220 220 for t_class, t_op, t_text in token_text_list:
221 221 text_buffer.append(t_text)
222 222 ops.append((token_op, ''.join(text_buffer)))
223 223 result.append((token_class, ops))
224 224 return result
225 225
226 226
227 227 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
228 228 """
229 229 Converts a list of (token_class, token_text) tuples to a list of
230 230 (token_class, token_op, token_text) tuples where token_op is one of
231 231 ('ins', 'del', '')
232 232
233 233 :param old_tokens: list of (token_class, token_text) tuples of old line
234 234 :param new_tokens: list of (token_class, token_text) tuples of new line
235 235 :param use_diff_match_patch: boolean, will use google's diff match patch
236 236 library which has options to 'smooth' out the character by character
237 237 differences making nicer ins/del blocks
238 238 """
239 239
240 240 old_tokens_result = []
241 241 new_tokens_result = []
242 242
243 243 similarity = difflib.SequenceMatcher(None,
244 244 ''.join(token_text for token_class, token_text in old_tokens),
245 245 ''.join(token_text for token_class, token_text in new_tokens)
246 246 ).ratio()
247 247
248 248 if similarity < 0.6: # return, the blocks are too different
249 249 for token_class, token_text in old_tokens:
250 250 old_tokens_result.append((token_class, '', token_text))
251 251 for token_class, token_text in new_tokens:
252 252 new_tokens_result.append((token_class, '', token_text))
253 253 return old_tokens_result, new_tokens_result, similarity
254 254
255 255 token_sequence_matcher = difflib.SequenceMatcher(None,
256 256 [x[1] for x in old_tokens],
257 257 [x[1] for x in new_tokens])
258 258
259 259 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
260 260 # check the differences by token block types first to give a more
261 261 # nicer "block" level replacement vs character diffs
262 262
263 263 if tag == 'equal':
264 264 for token_class, token_text in old_tokens[o1:o2]:
265 265 old_tokens_result.append((token_class, '', token_text))
266 266 for token_class, token_text in new_tokens[n1:n2]:
267 267 new_tokens_result.append((token_class, '', token_text))
268 268 elif tag == 'delete':
269 269 for token_class, token_text in old_tokens[o1:o2]:
270 270 old_tokens_result.append((token_class, 'del', token_text))
271 271 elif tag == 'insert':
272 272 for token_class, token_text in new_tokens[n1:n2]:
273 273 new_tokens_result.append((token_class, 'ins', token_text))
274 274 elif tag == 'replace':
275 275 # if same type token blocks must be replaced, do a diff on the
276 276 # characters in the token blocks to show individual changes
277 277
278 278 old_char_tokens = []
279 279 new_char_tokens = []
280 280 for token_class, token_text in old_tokens[o1:o2]:
281 281 for char in token_text:
282 282 old_char_tokens.append((token_class, char))
283 283
284 284 for token_class, token_text in new_tokens[n1:n2]:
285 285 for char in token_text:
286 286 new_char_tokens.append((token_class, char))
287 287
288 288 old_string = ''.join([token_text for
289 289 token_class, token_text in old_char_tokens])
290 290 new_string = ''.join([token_text for
291 291 token_class, token_text in new_char_tokens])
292 292
293 293 char_sequence = difflib.SequenceMatcher(
294 294 None, old_string, new_string)
295 295 copcodes = char_sequence.get_opcodes()
296 296 obuffer, nbuffer = [], []
297 297
298 298 if use_diff_match_patch:
299 299 dmp = diff_match_patch()
300 300 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
301 301 reps = dmp.diff_main(old_string, new_string)
302 302 dmp.diff_cleanupEfficiency(reps)
303 303
304 304 a, b = 0, 0
305 305 for op, rep in reps:
306 306 l = len(rep)
307 307 if op == 0:
308 308 for i, c in enumerate(rep):
309 309 obuffer.append((old_char_tokens[a+i][0], '', c))
310 310 nbuffer.append((new_char_tokens[b+i][0], '', c))
311 311 a += l
312 312 b += l
313 313 elif op == -1:
314 314 for i, c in enumerate(rep):
315 315 obuffer.append((old_char_tokens[a+i][0], 'del', c))
316 316 a += l
317 317 elif op == 1:
318 318 for i, c in enumerate(rep):
319 319 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
320 320 b += l
321 321 else:
322 322 for ctag, co1, co2, cn1, cn2 in copcodes:
323 323 if ctag == 'equal':
324 324 for token_class, token_text in old_char_tokens[co1:co2]:
325 325 obuffer.append((token_class, '', token_text))
326 326 for token_class, token_text in new_char_tokens[cn1:cn2]:
327 327 nbuffer.append((token_class, '', token_text))
328 328 elif ctag == 'delete':
329 329 for token_class, token_text in old_char_tokens[co1:co2]:
330 330 obuffer.append((token_class, 'del', token_text))
331 331 elif ctag == 'insert':
332 332 for token_class, token_text in new_char_tokens[cn1:cn2]:
333 333 nbuffer.append((token_class, 'ins', token_text))
334 334 elif ctag == 'replace':
335 335 for token_class, token_text in old_char_tokens[co1:co2]:
336 336 obuffer.append((token_class, 'del', token_text))
337 337 for token_class, token_text in new_char_tokens[cn1:cn2]:
338 338 nbuffer.append((token_class, 'ins', token_text))
339 339
340 340 old_tokens_result.extend(obuffer)
341 341 new_tokens_result.extend(nbuffer)
342 342
343 343 return old_tokens_result, new_tokens_result, similarity
344 344
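A hedged usage sketch (not part of this commit): diffing two similar token lines via the pure-difflib path; use_diff_match_patch=False avoids the optional diff_match_patch dependency:

    old = [('k', 'def'), ('n', ' foo')]
    new = [('k', 'def'), ('n', ' food')]
    old_res, new_res, similarity = tokens_diff(old, new, use_diff_match_patch=False)
    # the trailing 'd' comes back in new_res tagged with the 'ins' op,
    # everything else carries the '' (unchanged) op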
345 345
346 346 class DiffSet(object):
347 347 """
348 348 An object for parsing the diff result from diffs.DiffProcessor and
349 349 adding highlighting, side by side/unified renderings and line diffs
350 350 """
351 351
352 352 HL_REAL = 'REAL' # highlights using original file, slow
353 353 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
354 354 # in the case of multiline code
355 355 HL_NONE = 'NONE' # no highlighting, fastest
356 356
357 357 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
358 358 source_repo_name=None,
359 359 source_node_getter=lambda filename: None,
360 360 target_node_getter=lambda filename: None,
361 361 source_nodes=None, target_nodes=None,
362 362 max_file_size_limit=150 * 1024, # files over this size will
363 363 # use fast highlighting
364 364 comments=None,
365 365 ):
366 366
367 367 self.highlight_mode = highlight_mode
368 368 self.highlighted_filenodes = {}
369 369 self.source_node_getter = source_node_getter
370 370 self.target_node_getter = target_node_getter
371 371 self.source_nodes = source_nodes or {}
372 372 self.target_nodes = target_nodes or {}
373 373 self.repo_name = repo_name
374 374 self.source_repo_name = source_repo_name or repo_name
375 375 self.comments = comments or {}
376 376 self.comments_store = self.comments.copy()
377 377 self.max_file_size_limit = max_file_size_limit
378 378
379 379 def render_patchset(self, patchset, source_ref=None, target_ref=None):
380 380 diffset = AttributeDict(dict(
381 381 lines_added=0,
382 382 lines_deleted=0,
383 383 changed_files=0,
384 384 files=[],
385 file_stats={},
385 386 limited_diff=isinstance(patchset, LimitedDiffContainer),
386 387 repo_name=self.repo_name,
387 388 source_repo_name=self.source_repo_name,
388 389 source_ref=source_ref,
389 390 target_ref=target_ref,
390 391 ))
391 392 for patch in patchset:
393 diffset.file_stats[patch['filename']] = patch['stats']
392 394 filediff = self.render_patch(patch)
393 395 filediff.diffset = diffset
394 396 diffset.files.append(filediff)
395 397 diffset.changed_files += 1
396 398 if not patch['stats']['binary']:
397 399 diffset.lines_added += patch['stats']['added']
398 400 diffset.lines_deleted += patch['stats']['deleted']
399 401
400 402 return diffset
401 403
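A rough wiring sketch under stated assumptions (not part of this commit): source_commit, target_commit and patchset are hypothetical objects, expected to come from the repo's scm_instance and from diffs.DiffProcessor respectively:

    diffset = DiffSet(
        highlight_mode=DiffSet.HL_FAST,
        repo_name='myrepo',
        source_node_getter=lambda path: source_commit.get_node(path),
        target_node_getter=lambda path: target_commit.get_node(path),
    )
    rendered = diffset.render_patchset(
        patchset, source_ref=source_commit.raw_id, target_ref=target_commit.raw_id)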
402 404 _lexer_cache = {}
403 405 def _get_lexer_for_filename(self, filename, filenode=None):
404 406 # cached because we might need to call it twice for source/target
405 407 if filename not in self._lexer_cache:
406 408 if filenode:
407 409 lexer = filenode.lexer
408 410 else:
409 411 lexer = FileNode.get_lexer(filename=filename)
410 412 self._lexer_cache[filename] = lexer
411 413 return self._lexer_cache[filename]
412 414
413 415 def render_patch(self, patch):
414 416 log.debug('rendering diff for %r' % patch['filename'])
415 417
416 418 source_filename = patch['original_filename']
417 419 target_filename = patch['filename']
418 420
419 421 source_lexer = plain_text_lexer
420 422 target_lexer = plain_text_lexer
421 423
422 424 if not patch['stats']['binary']:
423 425 if self.highlight_mode == self.HL_REAL:
424 426 if (source_filename and patch['operation'] in ('D', 'M')
425 427 and source_filename not in self.source_nodes):
426 428 self.source_nodes[source_filename] = (
427 429 self.source_node_getter(source_filename))
428 430
429 431 if (target_filename and patch['operation'] in ('A', 'M')
430 432 and target_filename not in self.target_nodes):
431 433 self.target_nodes[target_filename] = (
432 434 self.target_node_getter(target_filename))
433 435
434 436 elif self.highlight_mode == self.HL_FAST:
435 437 source_lexer = self._get_lexer_for_filename(source_filename)
436 438 target_lexer = self._get_lexer_for_filename(target_filename)
437 439
438 440 source_file = self.source_nodes.get(source_filename, source_filename)
439 441 target_file = self.target_nodes.get(target_filename, target_filename)
440 442
441 443 source_filenode, target_filenode = None, None
442 444
443 445 # TODO: dan: FileNode.lexer works on the content of the file - which
444 446 # can be slow - issue #4289 explains a lexer clean up - which once
445 447 # done can allow caching a lexer for a filenode to avoid the file lookup
446 448 if isinstance(source_file, FileNode):
447 449 source_filenode = source_file
448 450 #source_lexer = source_file.lexer
449 451 source_lexer = self._get_lexer_for_filename(source_filename)
450 452 source_file.lexer = source_lexer
451 453
452 454 if isinstance(target_file, FileNode):
453 455 target_filenode = target_file
454 456 #target_lexer = target_file.lexer
455 457 target_lexer = self._get_lexer_for_filename(target_filename)
456 458 target_file.lexer = target_lexer
457 459
458 460 source_file_path, target_file_path = None, None
459 461
460 462 if source_filename != '/dev/null':
461 463 source_file_path = source_filename
462 464 if target_filename != '/dev/null':
463 465 target_file_path = target_filename
464 466
465 467 source_file_type = source_lexer.name
466 468 target_file_type = target_lexer.name
467 469
468 470 op_hunks = patch['chunks'][0]
469 471 hunks = patch['chunks'][1:]
470 472
471 473 filediff = AttributeDict({
472 474 'source_file_path': source_file_path,
473 475 'target_file_path': target_file_path,
474 476 'source_filenode': source_filenode,
475 477 'target_filenode': target_filenode,
476 478 'hunks': [],
477 479 'source_file_type': target_file_type,
478 480 'target_file_type': source_file_type,
479 481 'patch': patch,
480 482 'source_mode': patch['stats']['old_mode'],
481 483 'target_mode': patch['stats']['new_mode'],
482 484 'limited_diff': isinstance(patch, LimitedDiffContainer),
483 485 'diffset': self,
484 486 })
485 487
486 488 for hunk in hunks:
487 489 hunkbit = self.parse_hunk(hunk, source_file, target_file)
488 490 hunkbit.filediff = filediff
489 491 filediff.hunks.append(hunkbit)
490 492
491 493 left_comments = {}
492 494
493 495 if source_file_path in self.comments_store:
494 496 for lineno, comments in self.comments_store[source_file_path].items():
495 497 left_comments[lineno] = comments
496 498
497 499 if target_file_path in self.comments_store:
498 500 for lineno, comments in self.comments_store[target_file_path].items():
499 501 left_comments[lineno] = comments
500 502
501 503 filediff.left_comments = left_comments
502 504 return filediff
503 505
504 506 def parse_hunk(self, hunk, source_file, target_file):
505 507 result = AttributeDict(dict(
506 508 source_start=hunk['source_start'],
507 509 source_length=hunk['source_length'],
508 510 target_start=hunk['target_start'],
509 511 target_length=hunk['target_length'],
510 512 section_header=hunk['section_header'],
511 513 lines=[],
512 514 ))
513 515 before, after = [], []
514 516
515 517 for line in hunk['lines']:
516 518 if line['action'] == 'unmod':
517 519 result.lines.extend(
518 520 self.parse_lines(before, after, source_file, target_file))
519 521 after.append(line)
520 522 before.append(line)
521 523 elif line['action'] == 'add':
522 524 after.append(line)
523 525 elif line['action'] == 'del':
524 526 before.append(line)
525 527 elif line['action'] == 'old-no-nl':
526 528 before.append(line)
527 529 elif line['action'] == 'new-no-nl':
528 530 after.append(line)
529 531
530 532 result.lines.extend(
531 533 self.parse_lines(before, after, source_file, target_file))
532 534 result.unified = self.as_unified(result.lines)
533 535 result.sideside = result.lines
534 536
535 537 return result
536 538
537 539 def parse_lines(self, before_lines, after_lines, source_file, target_file):
538 540 # TODO: dan: investigate doing the diff comparison and fast highlighting
539 541 # on the entire before and after buffered block lines rather than by
540 542 # line, this means we can get better 'fast' highlighting if the context
541 543 # allows it - eg.
542 544 # line 4: """
543 545 # line 5: this gets highlighted as a string
544 546 # line 6: """
545 547
546 548 lines = []
547 549 while before_lines or after_lines:
548 550 before, after = None, None
549 551 before_tokens, after_tokens = None, None
550 552
551 553 if before_lines:
552 554 before = before_lines.pop(0)
553 555 if after_lines:
554 556 after = after_lines.pop(0)
555 557
556 558 original = AttributeDict()
557 559 modified = AttributeDict()
558 560
559 561 if before:
560 562 if before['action'] == 'old-no-nl':
561 563 before_tokens = [('nonl', before['line'])]
562 564 else:
563 565 before_tokens = self.get_line_tokens(
564 566 line_text=before['line'], line_number=before['old_lineno'],
565 567 file=source_file)
566 568 original.lineno = before['old_lineno']
567 569 original.content = before['line']
568 570 original.action = self.action_to_op(before['action'])
569 571 original.comments = self.get_comments_for('old',
570 572 source_file, before['old_lineno'])
571 573
572 574 if after:
573 575 if after['action'] == 'new-no-nl':
574 576 after_tokens = [('nonl', after['line'])]
575 577 else:
576 578 after_tokens = self.get_line_tokens(
577 579 line_text=after['line'], line_number=after['new_lineno'],
578 580 file=target_file)
579 581 modified.lineno = after['new_lineno']
580 582 modified.content = after['line']
581 583 modified.action = self.action_to_op(after['action'])
582 584 modified.comments = self.get_comments_for('new',
583 585 target_file, after['new_lineno'])
584 586
585 587 # diff the lines
586 588 if before_tokens and after_tokens:
587 589 o_tokens, m_tokens, similarity = tokens_diff(
588 590 before_tokens, after_tokens)
589 591 original.content = render_tokenstream(o_tokens)
590 592 modified.content = render_tokenstream(m_tokens)
591 593 elif before_tokens:
592 594 original.content = render_tokenstream(
593 595 [(x[0], '', x[1]) for x in before_tokens])
594 596 elif after_tokens:
595 597 modified.content = render_tokenstream(
596 598 [(x[0], '', x[1]) for x in after_tokens])
597 599
598 600 lines.append(AttributeDict({
599 601 'original': original,
600 602 'modified': modified,
601 603 }))
602 604
603 605 return lines
604 606
605 607 def get_comments_for(self, version, file, line_number):
606 608 if hasattr(file, 'unicode_path'):
607 609 file = file.unicode_path
608 610
609 611 if not isinstance(file, basestring):
610 612 return None
611 613
612 614 line_key = {
613 615 'old': 'o',
614 616 'new': 'n',
615 617 }[version] + str(line_number)
616 618
617 619 if file in self.comments_store:
618 620 file_comments = self.comments_store[file]
619 621 if line_key in file_comments:
620 622 return file_comments.pop(line_key)
621 623
622 624 def get_line_tokens(self, line_text, line_number, file=None):
623 625 filenode = None
624 626 filename = None
625 627
626 628 if isinstance(file, basestring):
627 629 filename = file
628 630 elif isinstance(file, FileNode):
629 631 filenode = file
630 632 filename = file.unicode_path
631 633
632 634 if self.highlight_mode == self.HL_REAL and filenode:
633 635 lexer = self._get_lexer_for_filename(filename)
634 636 file_size_allowed = file.size < self.max_file_size_limit
635 637 if line_number and file_size_allowed:
636 638 return self.get_tokenized_filenode_line(
637 639 file, line_number, lexer)
638 640
639 641 if self.highlight_mode in (self.HL_REAL, self.HL_FAST) and filename:
640 642 lexer = self._get_lexer_for_filename(filename)
641 643 return list(tokenize_string(line_text, lexer))
642 644
643 645 return list(tokenize_string(line_text, plain_text_lexer))
644 646
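To summarize the dispatch above (a descriptive note, assuming `node` is a FileNode of the file under diff):

    # HL_REAL -> get_tokenized_filenode_line(node, lineno, lexer)  # whole-file lexing, cached per filenode
    # HL_FAST -> tokenize_string(line_text, filename_lexer)        # lexes the single line only
    # HL_NONE -> tokenize_string(line_text, plain_text_lexer)      # no real highlighting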
645 647 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
646 648
647 649 if filenode not in self.highlighted_filenodes:
648 650 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
649 651 self.highlighted_filenodes[filenode] = tokenized_lines
650 652 return self.highlighted_filenodes[filenode][line_number - 1]
651 653
652 654 def action_to_op(self, action):
653 655 return {
654 656 'add': '+',
655 657 'del': '-',
656 658 'unmod': ' ',
657 659 'old-no-nl': ' ',
658 660 'new-no-nl': ' ',
659 661 }.get(action, action)
660 662
661 663 def as_unified(self, lines):
662 664 """
663 665 Return a generator that yields the lines of a diff in unified order
664 666 """
665 667 def generator():
666 668 buf = []
667 669 for line in lines:
668 670
669 671 if buf and not line.original or line.original.action == ' ':
670 672 for b in buf:
671 673 yield b
672 674 buf = []
673 675
674 676 if line.original:
675 677 if line.original.action == ' ':
676 678 yield (line.original.lineno, line.modified.lineno,
677 679 line.original.action, line.original.content,
678 680 line.original.comments)
679 681 continue
680 682
681 683 if line.original.action == '-':
682 684 yield (line.original.lineno, None,
683 685 line.original.action, line.original.content,
684 686 line.original.comments)
685 687
686 688 if line.modified.action == '+':
687 689 buf.append((
688 690 None, line.modified.lineno,
689 691 line.modified.action, line.modified.content,
690 692 line.modified.comments))
691 693 continue
692 694
693 695 if line.modified:
694 696 yield (None, line.modified.lineno,
695 697 line.modified.action, line.modified.content,
696 698 line.modified.comments)
697 699
698 700 for b in buf:
699 701 yield b
700 702
701 703 return generator()
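For clarity (a descriptive note, not part of this commit): each row yielded by as_unified() is a 5-tuple, and removals are yielded immediately while additions are buffered and flushed afterwards, so one replaced line yields roughly:

    # (old_lineno, new_lineno, op, rendered_content, comments)
    (12, None, '-', '<span>old</span>', None)
    (None, 12, '+', '<span>new</span>', None)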
@@ -1,2003 +1,2004 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions to typically be used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 import pygments
40 import itertools
40 41
41 42 from datetime import datetime
42 43 from functools import partial
43 44 from pygments.formatters.html import HtmlFormatter
44 45 from pygments import highlight as code_highlight
45 46 from pygments.lexers import (
46 47 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
47 48 from pylons import url as pylons_url
48 49 from pylons.i18n.translation import _, ungettext
49 50 from pyramid.threadlocal import get_current_request
50 51
51 52 from webhelpers.html import literal, HTML, escape
52 53 from webhelpers.html.tools import *
53 54 from webhelpers.html.builder import make_tag
54 55 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
55 56 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
56 57 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
57 58 submit, text, password, textarea, title, ul, xml_declaration, radio
58 59 from webhelpers.html.tools import auto_link, button_to, highlight, \
59 60 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
60 61 from webhelpers.pylonslib import Flash as _Flash
61 62 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
62 63 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
63 64 replace_whitespace, urlify, truncate, wrap_paragraphs
64 65 from webhelpers.date import time_ago_in_words
65 66 from webhelpers.paginate import Page as _Page
66 67 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
67 68 convert_boolean_attrs, NotGiven, _make_safe_id_component
68 69 from webhelpers2.number import format_byte_size
69 70
70 71 from rhodecode.lib.action_parser import action_parser
71 72 from rhodecode.lib.ext_json import json
72 73 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
73 74 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
74 75 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
75 76 AttributeDict, safe_int, md5, md5_safe
76 77 from rhodecode.lib.markup_renderer import MarkupRenderer
77 78 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
78 79 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
79 80 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
80 81 from rhodecode.model.changeset_status import ChangesetStatusModel
81 82 from rhodecode.model.db import Permission, User, Repository
82 83 from rhodecode.model.repo_group import RepoGroupModel
83 84 from rhodecode.model.settings import IssueTrackerSettingsModel
84 85
85 86 log = logging.getLogger(__name__)
86 87
87 88
88 89 DEFAULT_USER = User.DEFAULT_USER
89 90 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
90 91
91 92
92 93 def url(*args, **kw):
93 94 return pylons_url(*args, **kw)
94 95
95 96
96 97 def pylons_url_current(*args, **kw):
97 98 """
98 99 This function overrides pylons.url.current() which returns the current
99 100 path so that it will also work from a pyramid only context. This
100 101 should be removed once port to pyramid is complete.
101 102 """
102 103 if not args and not kw:
103 104 request = get_current_request()
104 105 return request.path
105 106 return pylons_url.current(*args, **kw)
106 107
107 108 url.current = pylons_url_current
108 109
109 110
110 111 def url_replace(**qargs):
111 112 """ Returns the current request url while replacing query string args """
112 113
113 114 request = get_current_request()
114 115 new_args = request.GET.mixed()
115 116 new_args.update(qargs)
116 117 return url('', **new_args)
117 118
118 119
119 120 def asset(path, ver=None):
120 121 """
121 122 Helper to generate a static asset file path for rhodecode assets
122 123
123 124 eg. h.asset('images/image.png', ver='3923')
124 125
125 126 :param path: path of asset
126 127 :param ver: optional version query param to append as ?ver=
127 128 """
128 129 request = get_current_request()
129 130 query = {}
130 131 if ver:
131 132 query = {'ver': ver}
132 133 return request.static_path(
133 134 'rhodecode:public/{}'.format(path), _query=query)
134 135
135 136
136 137 default_html_escape_table = {
137 138 ord('&'): u'&amp;',
138 139 ord('<'): u'&lt;',
139 140 ord('>'): u'&gt;',
140 141 ord('"'): u'&quot;',
141 142 ord("'"): u'&#39;',
142 143 }
143 144
144 145
145 146 def html_escape(text, html_escape_table=default_html_escape_table):
146 147 """Produce entities within text."""
147 148 return text.translate(html_escape_table)
148 149
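A quick sanity check (not part of this commit); the table uses ordinal keys, so this relies on unicode.translate:

    html_escape(u'<b>"AT&T"</b>')
    # -> u'&lt;b&gt;&quot;AT&amp;T&quot;&lt;/b&gt;'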
149 150
150 151 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
151 152 """
152 153 Truncate string ``s`` at the first occurrence of ``sub``.
153 154
154 155 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
155 156 """
156 157 suffix_if_chopped = suffix_if_chopped or ''
157 158 pos = s.find(sub)
158 159 if pos == -1:
159 160 return s
160 161
161 162 if inclusive:
162 163 pos += len(sub)
163 164
164 165 chopped = s[:pos]
165 166 left = s[pos:].strip()
166 167
167 168 if left and suffix_if_chopped:
168 169 chopped += suffix_if_chopped
169 170
170 171 return chopped
171 172
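A small usage sketch (not part of this commit), chopping a message at the first newline:

    chop_at_smart('commit message\nmore details', '\n', suffix_if_chopped='...')
    # -> 'commit message...'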
172 173
173 174 def shorter(text, size=20):
174 175 postfix = '...'
175 176 if len(text) > size:
176 177 return text[:size - len(postfix)] + postfix
177 178 return text
178 179
179 180
180 181 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
181 182 """
182 183 Reset button
183 184 """
184 185 _set_input_attrs(attrs, type, name, value)
185 186 _set_id_attr(attrs, id, name)
186 187 convert_boolean_attrs(attrs, ["disabled"])
187 188 return HTML.input(**attrs)
188 189
189 190 reset = _reset
190 191 safeid = _make_safe_id_component
191 192
192 193
193 194 def branding(name, length=40):
194 195 return truncate(name, length, indicator="")
195 196
196 197
197 198 def FID(raw_id, path):
198 199 """
199 200     Creates a unique ID for a filenode based on the commit id and a hash of its path;
200 201     it's safe to use in urls
201 202
202 203 :param raw_id:
203 204 :param path:
204 205 """
205 206
206 207 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
207 208
208 209
209 210 class _GetError(object):
210 211 """Get error from form_errors, and represent it as span wrapped error
211 212 message
212 213
213 214 :param field_name: field to fetch errors for
214 215 :param form_errors: form errors dict
215 216 """
216 217
217 218 def __call__(self, field_name, form_errors):
218 219 tmpl = """<span class="error_msg">%s</span>"""
219 220 if form_errors and field_name in form_errors:
220 221 return literal(tmpl % form_errors.get(field_name))
221 222
222 223 get_error = _GetError()
223 224
224 225
225 226 class _ToolTip(object):
226 227
227 228 def __call__(self, tooltip_title, trim_at=50):
228 229 """
229 230 Special function just to wrap our text into nice formatted
230 231 autowrapped text
231 232
232 233 :param tooltip_title:
233 234 """
234 235 tooltip_title = escape(tooltip_title)
235 236 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
236 237 return tooltip_title
237 238 tooltip = _ToolTip()
238 239
239 240
240 241 def files_breadcrumbs(repo_name, commit_id, file_path):
241 242 if isinstance(file_path, str):
242 243 file_path = safe_unicode(file_path)
243 244
244 245 # TODO: johbo: Is this always a url like path, or is this operating
245 246 # system dependent?
246 247 path_segments = file_path.split('/')
247 248
248 249 repo_name_html = escape(repo_name)
249 250 if len(path_segments) == 1 and path_segments[0] == '':
250 251 url_segments = [repo_name_html]
251 252 else:
252 253 url_segments = [
253 254 link_to(
254 255 repo_name_html,
255 256 url('files_home',
256 257 repo_name=repo_name,
257 258 revision=commit_id,
258 259 f_path=''),
259 260 class_='pjax-link')]
260 261
261 262 last_cnt = len(path_segments) - 1
262 263 for cnt, segment in enumerate(path_segments):
263 264 if not segment:
264 265 continue
265 266 segment_html = escape(segment)
266 267
267 268 if cnt != last_cnt:
268 269 url_segments.append(
269 270 link_to(
270 271 segment_html,
271 272 url('files_home',
272 273 repo_name=repo_name,
273 274 revision=commit_id,
274 275 f_path='/'.join(path_segments[:cnt + 1])),
275 276 class_='pjax-link'))
276 277 else:
277 278 url_segments.append(segment_html)
278 279
279 280 return literal('/'.join(url_segments))
280 281
281 282
282 283 class CodeHtmlFormatter(HtmlFormatter):
283 284 """
284 285 My code Html Formatter for source codes
285 286 """
286 287
287 288 def wrap(self, source, outfile):
288 289 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
289 290
290 291 def _wrap_code(self, source):
291 292 for cnt, it in enumerate(source):
292 293 i, t = it
293 294 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
294 295 yield i, t
295 296
296 297 def _wrap_tablelinenos(self, inner):
297 298 dummyoutfile = StringIO.StringIO()
298 299 lncount = 0
299 300 for t, line in inner:
300 301 if t:
301 302 lncount += 1
302 303 dummyoutfile.write(line)
303 304
304 305 fl = self.linenostart
305 306 mw = len(str(lncount + fl - 1))
306 307 sp = self.linenospecial
307 308 st = self.linenostep
308 309 la = self.lineanchors
309 310 aln = self.anchorlinenos
310 311 nocls = self.noclasses
311 312 if sp:
312 313 lines = []
313 314
314 315 for i in range(fl, fl + lncount):
315 316 if i % st == 0:
316 317 if i % sp == 0:
317 318 if aln:
318 319 lines.append('<a href="#%s%d" class="special">%*d</a>' %
319 320 (la, i, mw, i))
320 321 else:
321 322 lines.append('<span class="special">%*d</span>' % (mw, i))
322 323 else:
323 324 if aln:
324 325 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
325 326 else:
326 327 lines.append('%*d' % (mw, i))
327 328 else:
328 329 lines.append('')
329 330 ls = '\n'.join(lines)
330 331 else:
331 332 lines = []
332 333 for i in range(fl, fl + lncount):
333 334 if i % st == 0:
334 335 if aln:
335 336 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
336 337 else:
337 338 lines.append('%*d' % (mw, i))
338 339 else:
339 340 lines.append('')
340 341 ls = '\n'.join(lines)
341 342
342 343 # in case you wonder about the seemingly redundant <div> here: since the
343 344 # content in the other cell also is wrapped in a div, some browsers in
344 345 # some configurations seem to mess up the formatting...
345 346 if nocls:
346 347 yield 0, ('<table class="%stable">' % self.cssclass +
347 348 '<tr><td><div class="linenodiv" '
348 349 'style="background-color: #f0f0f0; padding-right: 10px">'
349 350 '<pre style="line-height: 125%">' +
350 351 ls + '</pre></div></td><td id="hlcode" class="code">')
351 352 else:
352 353 yield 0, ('<table class="%stable">' % self.cssclass +
353 354 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
354 355 ls + '</pre></div></td><td id="hlcode" class="code">')
355 356 yield 0, dummyoutfile.getvalue()
356 357 yield 0, '</td></tr></table>'
357 358
358 359
359 360 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
360 361 def __init__(self, **kw):
361 362 # only show these line numbers if set
362 363 self.only_lines = kw.pop('only_line_numbers', [])
363 364 self.query_terms = kw.pop('query_terms', [])
364 365 self.max_lines = kw.pop('max_lines', 5)
365 366 self.line_context = kw.pop('line_context', 3)
366 367 self.url = kw.pop('url', None)
367 368
368 369 super(CodeHtmlFormatter, self).__init__(**kw)
369 370
370 371 def _wrap_code(self, source):
371 372 for cnt, it in enumerate(source):
372 373 i, t = it
373 374 t = '<pre>%s</pre>' % t
374 375 yield i, t
375 376
376 377 def _wrap_tablelinenos(self, inner):
377 378 yield 0, '<table class="code-highlight %stable">' % self.cssclass
378 379
379 380 last_shown_line_number = 0
380 381 current_line_number = 1
381 382
382 383 for t, line in inner:
383 384 if not t:
384 385 yield t, line
385 386 continue
386 387
387 388 if current_line_number in self.only_lines:
388 389 if last_shown_line_number + 1 != current_line_number:
389 390 yield 0, '<tr>'
390 391 yield 0, '<td class="line">...</td>'
391 392 yield 0, '<td id="hlcode" class="code"></td>'
392 393 yield 0, '</tr>'
393 394
394 395 yield 0, '<tr>'
395 396 if self.url:
396 397 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
397 398 self.url, current_line_number, current_line_number)
398 399 else:
399 400 yield 0, '<td class="line"><a href="">%i</a></td>' % (
400 401 current_line_number)
401 402 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
402 403 yield 0, '</tr>'
403 404
404 405 last_shown_line_number = current_line_number
405 406
406 407 current_line_number += 1
407 408
408 409
409 410 yield 0, '</table>'
410 411
411 412
412 413 def extract_phrases(text_query):
413 414 """
414 415     Extracts phrases from a search term string, making sure phrases
415 416     contained in double quotes are kept together, and discarding empty
416 417     or whitespace-only values, eg.
417 418
418 419 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
419 420
420 421 """
421 422
422 423 in_phrase = False
423 424 buf = ''
424 425 phrases = []
425 426 for char in text_query:
426 427 if in_phrase:
427 428 if char == '"': # end phrase
428 429 phrases.append(buf)
429 430 buf = ''
430 431 in_phrase = False
431 432 continue
432 433 else:
433 434 buf += char
434 435 continue
435 436 else:
436 437 if char == '"': # start phrase
437 438 in_phrase = True
438 439 phrases.append(buf)
439 440 buf = ''
440 441 continue
441 442 elif char == ' ':
442 443 phrases.append(buf)
443 444 buf = ''
444 445 continue
445 446 else:
446 447 buf += char
447 448
448 449 phrases.append(buf)
449 450 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
450 451 return phrases
451 452
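Another small example (not part of this commit) showing that empty and whitespace-only fragments are discarded:

    extract_phrases('  "hello world"   rhodecode ')
    # -> ['hello world', 'rhodecode']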
452 453
453 454 def get_matching_offsets(text, phrases):
454 455 """
455 456     Returns a list of string offsets in `text` where the given `phrases` match
456 457
457 458 >>> get_matching_offsets('some text here', ['some', 'here'])
458 459 [(0, 4), (10, 14)]
459 460
460 461 """
461 462 offsets = []
462 463 for phrase in phrases:
463 464 for match in re.finditer(phrase, text):
464 465 offsets.append((match.start(), match.end()))
465 466
466 467 return offsets
467 468
468 469
469 470 def normalize_text_for_matching(x):
470 471 """
471 472     Replaces all non-alphanumeric characters with spaces and lower-cases the
472 473     string; useful for comparing two text strings without punctuation
473 474 """
474 475 return re.sub(r'[^\w]', ' ', x.lower())
475 476
476 477
477 478 def get_matching_line_offsets(lines, terms):
478 479     """ Return a dict of `lines` indices (starting from 1) matching a
479 480 text search query, along with `context` lines above/below matching lines
480 481
481 482 :param lines: list of strings representing lines
482 483 :param terms: search term string to match in lines eg. 'some text'
483 484 :param context: number of lines above/below a matching line to add to result
484 485 :param max_lines: cut off for lines of interest
485 486 eg.
486 487
487 488 text = '''
488 489 words words words
489 490 words words words
490 491 some text some
491 492 words words words
492 493 words words words
493 494 text here what
494 495 '''
495 496 get_matching_line_offsets(text, 'text', context=1)
496 497     {3: [(5, 9)], 6: [(0, 4)]}
497 498
498 499 """
499 500 matching_lines = {}
500 501 phrases = [normalize_text_for_matching(phrase)
501 502 for phrase in extract_phrases(terms)]
502 503
503 504 for line_index, line in enumerate(lines, start=1):
504 505 match_offsets = get_matching_offsets(
505 506 normalize_text_for_matching(line), phrases)
506 507 if match_offsets:
507 508 matching_lines[line_index] = match_offsets
508 509
509 510 return matching_lines
510 511
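A minimal, self-contained example (not part of this commit) tying the three helpers above together:

    lines = ['words words', 'some text here', 'more words']
    get_matching_line_offsets(lines, 'text')
    # -> {2: [(5, 9)]}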
511 512
512 513 def hsv_to_rgb(h, s, v):
513 514 """ Convert hsv color values to rgb """
514 515
515 516 if s == 0.0:
516 517 return v, v, v
517 518 i = int(h * 6.0) # XXX assume int() truncates!
518 519 f = (h * 6.0) - i
519 520 p = v * (1.0 - s)
520 521 q = v * (1.0 - s * f)
521 522 t = v * (1.0 - s * (1.0 - f))
522 523 i = i % 6
523 524 if i == 0:
524 525 return v, t, p
525 526 if i == 1:
526 527 return q, v, p
527 528 if i == 2:
528 529 return p, v, t
529 530 if i == 3:
530 531 return p, q, v
531 532 if i == 4:
532 533 return t, p, v
533 534 if i == 5:
534 535 return v, p, q
535 536
536 537
537 538 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
538 539 """
539 540 Generator for getting n of evenly distributed colors using
540 541     hsv color and golden ratio. It always returns the same order of colors
541 542
542 543 :param n: number of colors to generate
543 544 :param saturation: saturation of returned colors
544 545 :param lightness: lightness of returned colors
545 546 :returns: RGB tuple
546 547 """
547 548
548 549 golden_ratio = 0.618033988749895
549 550 h = 0.22717784590367374
550 551
551 552 for _ in xrange(n):
552 553 h += golden_ratio
553 554 h %= 1
554 555 HSV_tuple = [h, saturation, lightness]
555 556 RGB_tuple = hsv_to_rgb(*HSV_tuple)
556 557 yield map(lambda x: str(int(x * 256)), RGB_tuple)
557 558
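A quick sanity check (not part of this commit; Python 2 idioms as in the rest of this module); the exact rgb values depend on the golden-ratio hue stepping:

    gen = unique_color_generator(saturation=0.5, lightness=0.9)
    swatches = ['rgb(%s)' % ', '.join(gen.next()) for _ in xrange(3)]
    # three distinct, deterministic 'rgb(r, g, b)' strings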
558 559
559 560 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
560 561 """
561 562 Returns a function which when called with an argument returns a unique
562 563 color for that argument, eg.
563 564
564 565 :param n: number of colors to generate
565 566 :param saturation: saturation of returned colors
566 567 :param lightness: lightness of returned colors
567 568 :returns: css RGB string
568 569
569 570 >>> color_hash = color_hasher()
570 571 >>> color_hash('hello')
571 572 'rgb(34, 12, 59)'
572 573 >>> color_hash('hello')
573 574 'rgb(34, 12, 59)'
574 575 >>> color_hash('other')
575 576 'rgb(90, 224, 159)'
576 577 """
577 578
578 579 color_dict = {}
579 580 cgenerator = unique_color_generator(
580 581 saturation=saturation, lightness=lightness)
581 582
582 583 def get_color_string(thing):
583 584 if thing in color_dict:
584 585 col = color_dict[thing]
585 586 else:
586 587 col = color_dict[thing] = cgenerator.next()
587 588 return "rgb(%s)" % (', '.join(col))
588 589
589 590 return get_color_string
590 591
591 592
592 593 def get_lexer_safe(mimetype=None, filepath=None):
593 594 """
594 595 Tries to return a relevant pygments lexer using mimetype/filepath name,
595 596 defaulting to plain text if none could be found
596 597 """
597 598 lexer = None
598 599 try:
599 600 if mimetype:
600 601 lexer = get_lexer_for_mimetype(mimetype)
601 602 if not lexer:
602 603 lexer = get_lexer_for_filename(filepath)
603 604 except pygments.util.ClassNotFound:
604 605 pass
605 606
606 607 if not lexer:
607 608 lexer = get_lexer_by_name('text')
608 609
609 610 return lexer
610 611
611 612
612 613 def get_lexer_for_filenode(filenode):
613 614 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
614 615 return lexer
615 616
616 617
617 618 def pygmentize(filenode, **kwargs):
618 619 """
619 620 pygmentize function using pygments
620 621
621 622 :param filenode:
622 623 """
623 624 lexer = get_lexer_for_filenode(filenode)
624 625 return literal(code_highlight(filenode.content, lexer,
625 626 CodeHtmlFormatter(**kwargs)))
626 627
627 628
628 629 def is_following_repo(repo_name, user_id):
629 630 from rhodecode.model.scm import ScmModel
630 631 return ScmModel().is_following_repo(repo_name, user_id)
631 632
632 633
633 634 class _Message(object):
634 635 """A message returned by ``Flash.pop_messages()``.
635 636
636 637 Converting the message to a string returns the message text. Instances
637 638 also have the following attributes:
638 639
639 640 * ``message``: the message text.
640 641 * ``category``: the category specified when the message was created.
641 642 """
642 643
643 644 def __init__(self, category, message):
644 645 self.category = category
645 646 self.message = message
646 647
647 648 def __str__(self):
648 649 return self.message
649 650
650 651 __unicode__ = __str__
651 652
652 653 def __html__(self):
653 654 return escape(safe_unicode(self.message))
654 655
655 656
656 657 class Flash(_Flash):
657 658
658 659 def pop_messages(self):
659 660 """Return all accumulated messages and delete them from the session.
660 661
661 662 The return value is a list of ``Message`` objects.
662 663 """
663 664 from pylons import session
664 665
665 666 messages = []
666 667
667 668 # Pop the 'old' pylons flash messages. They are tuples of the form
668 669 # (category, message)
669 670 for cat, msg in session.pop(self.session_key, []):
670 671 messages.append(_Message(cat, msg))
671 672
672 673 # Pop the 'new' pyramid flash messages for each category as list
673 674 # of strings.
674 675 for cat in self.categories:
675 676 for msg in session.pop_flash(queue=cat):
676 677 messages.append(_Message(cat, msg))
677 678 # Map messages from the default queue to the 'notice' category.
678 679 for msg in session.pop_flash():
679 680 messages.append(_Message('notice', msg))
680 681
681 682 session.save()
682 683 return messages
683 684
684 685 def json_alerts(self):
685 686 payloads = []
686 687 messages = flash.pop_messages()
687 688 if messages:
688 689 for message in messages:
689 690 subdata = {}
690 691 if hasattr(message.message, 'rsplit'):
691 692 flash_data = message.message.rsplit('|DELIM|', 1)
692 693 org_message = flash_data[0]
693 694 if len(flash_data) > 1:
694 695 subdata = json.loads(flash_data[1])
695 696 else:
696 697 org_message = message.message
697 698 payloads.append({
698 699 'message': {
699 700 'message': u'{}'.format(org_message),
700 701 'level': message.category,
701 702 'force': True,
702 703 'subdata': subdata
703 704 }
704 705 })
705 706 return json.dumps(payloads)
706 707
707 708 flash = Flash()
708 709
709 710 #==============================================================================
710 711 # SCM FILTERS available via h.
711 712 #==============================================================================
712 713 from rhodecode.lib.vcs.utils import author_name, author_email
713 714 from rhodecode.lib.utils2 import credentials_filter, age as _age
714 715 from rhodecode.model.db import User, ChangesetStatus
715 716
716 717 age = _age
717 718 capitalize = lambda x: x.capitalize()
718 719 email = author_email
719 720 short_id = lambda x: x[:12]
720 721 hide_credentials = lambda x: ''.join(credentials_filter(x))
721 722
722 723
723 724 def age_component(datetime_iso, value=None, time_is_local=False):
724 725 title = value or format_date(datetime_iso)
725 726 tzinfo = '+00:00'
726 727
727 728 # detect if we have a timezone info, otherwise, add it
728 729 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
729 730 if time_is_local:
730 731 tzinfo = time.strftime("+%H:%M",
731 732 time.gmtime(
732 733 (datetime.now() - datetime.utcnow()).seconds + 1
733 734 )
734 735 )
735 736
736 737 return literal(
737 738 '<time class="timeago tooltip" '
738 739 'title="{1}" datetime="{0}{2}">{1}</time>'.format(
739 740 datetime_iso, title, tzinfo))
740 741
741 742
742 743 def _shorten_commit_id(commit_id):
743 744 from rhodecode import CONFIG
744 745 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
745 746 return commit_id[:def_len]
746 747
747 748
748 749 def show_id(commit):
749 750 """
750 751     Configurable function that shows the commit ID;
751 752     by default it's r123:fffeeefffeee
752 753
753 754 :param commit: commit instance
754 755 """
755 756 from rhodecode import CONFIG
756 757 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
757 758
758 759 raw_id = _shorten_commit_id(commit.raw_id)
759 760 if show_idx:
760 761 return 'r%s:%s' % (commit.idx, raw_id)
761 762 else:
762 763 return '%s' % (raw_id, )
763 764
764 765
765 766 def format_date(date):
766 767 """
767 768 use a standardized formatting for dates used in RhodeCode
768 769
769 770 :param date: date/datetime object
770 771 :return: formatted date
771 772 """
772 773
773 774 if date:
774 775 _fmt = "%a, %d %b %Y %H:%M:%S"
775 776 return safe_unicode(date.strftime(_fmt))
776 777
777 778 return u""
778 779
779 780
780 781 class _RepoChecker(object):
781 782
782 783 def __init__(self, backend_alias):
783 784 self._backend_alias = backend_alias
784 785
785 786 def __call__(self, repository):
786 787 if hasattr(repository, 'alias'):
787 788 _type = repository.alias
788 789 elif hasattr(repository, 'repo_type'):
789 790 _type = repository.repo_type
790 791 else:
791 792 _type = repository
792 793 return _type == self._backend_alias
793 794
794 795 is_git = _RepoChecker('git')
795 796 is_hg = _RepoChecker('hg')
796 797 is_svn = _RepoChecker('svn')
797 798
798 799
799 800 def get_repo_type_by_name(repo_name):
800 801 repo = Repository.get_by_repo_name(repo_name)
801 802 return repo.repo_type
802 803
803 804
804 805 def is_svn_without_proxy(repository):
805 806 if is_svn(repository):
806 807 from rhodecode.model.settings import VcsSettingsModel
807 808 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
808 809 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
809 810 return False
810 811
811 812
812 813 def discover_user(author):
813 814 """
814 815     Tries to discover a RhodeCode User based on the author string. The author
815 816     string is typically `FirstName LastName <email@address.com>`
816 817 """
817 818
818 819 # if author is already an instance use it for extraction
819 820 if isinstance(author, User):
820 821 return author
821 822
822 823 # Valid email in the attribute passed, see if they're in the system
823 824 _email = author_email(author)
824 825 if _email != '':
825 826 user = User.get_by_email(_email, case_insensitive=True, cache=True)
826 827 if user is not None:
827 828 return user
828 829
829 830 # Maybe it's a username, we try to extract it and fetch by username ?
830 831 _author = author_name(author)
831 832 user = User.get_by_username(_author, case_insensitive=True, cache=True)
832 833 if user is not None:
833 834 return user
834 835
835 836 return None
836 837
837 838
838 839 def email_or_none(author):
839 840 # extract email from the commit string
840 841 _email = author_email(author)
841 842
842 843 # If we have an email, use it, otherwise
843 844 # see if it contains a username we can get an email from
844 845 if _email != '':
845 846 return _email
846 847 else:
847 848 user = User.get_by_username(
848 849 author_name(author), case_insensitive=True, cache=True)
849 850
850 851 if user is not None:
851 852 return user.email
852 853
853 854 # No valid email, not a valid user in the system, none!
854 855 return None
855 856
856 857
857 858 def link_to_user(author, length=0, **kwargs):
858 859 user = discover_user(author)
859 860 # user can be None, but if we have it already it means we can re-use it
860 861     # in the person() function, so we save one intensive query
861 862 if user:
862 863 author = user
863 864
864 865 display_person = person(author, 'username_or_name_or_email')
865 866 if length:
866 867 display_person = shorter(display_person, length)
867 868
868 869 if user:
869 870 return link_to(
870 871 escape(display_person),
871 872 url('user_profile', username=user.username),
872 873 **kwargs)
873 874 else:
874 875 return escape(display_person)
875 876
876 877
877 878 def person(author, show_attr="username_and_name"):
878 879 user = discover_user(author)
879 880 if user:
880 881 return getattr(user, show_attr)
881 882 else:
882 883 _author = author_name(author)
883 884 _email = email(author)
884 885 return _author or _email
885 886
886 887
887 888 def author_string(email):
888 889 if email:
889 890 user = User.get_by_email(email, case_insensitive=True, cache=True)
890 891 if user:
891 892 if user.firstname or user.lastname:
892 893 return '%s %s &lt;%s&gt;' % (user.firstname, user.lastname, email)
893 894 else:
894 895 return email
895 896 else:
896 897 return email
897 898 else:
898 899 return None
899 900
900 901
901 902 def person_by_id(id_, show_attr="username_and_name"):
902 903 # attr to return from fetched user
903 904 person_getter = lambda usr: getattr(usr, show_attr)
904 905
905 906 #maybe it's an ID ?
906 907 if str(id_).isdigit() or isinstance(id_, int):
907 908 id_ = int(id_)
908 909 user = User.get(id_)
909 910 if user is not None:
910 911 return person_getter(user)
911 912 return id_
912 913
913 914
914 915 def gravatar_with_user(author, show_disabled=False):
915 916 from rhodecode.lib.utils import PartialRenderer
916 917 _render = PartialRenderer('base/base.mako')
917 918 return _render('gravatar_with_user', author, show_disabled=show_disabled)
918 919
919 920
920 921 def desc_stylize(value):
921 922 """
922 923 converts tags from value into html equivalent
923 924
924 925 :param value:
925 926 """
926 927 if not value:
927 928 return ''
928 929
929 930 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
930 931 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
931 932 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
932 933 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
933 934 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
934 935 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
935 936 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
936 937 '<div class="metatag" tag="lang">\\2</div>', value)
937 938 value = re.sub(r'\[([a-z]+)\]',
938 939 '<div class="metatag" tag="\\1">\\1</div>', value)
939 940
940 941 return value
941 942
942 943
943 944 def escaped_stylize(value):
944 945 """
945 946 converts tags from value into html equivalent, but escaping its value first
946 947 """
947 948 if not value:
948 949 return ''
949 950
950 951 # Using default webhelper escape method, but has to force it as a
951 952 # plain unicode instead of a markup tag to be used in regex expressions
952 953 value = unicode(escape(safe_unicode(value)))
953 954
954 955 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
955 956 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
956 957 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
957 958 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
958 959 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
959 960 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
960 961 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
961 962 '<div class="metatag" tag="lang">\\2</div>', value)
962 963 value = re.sub(r'\[([a-z]+)\]',
963 964 '<div class="metatag" tag="\\1">\\1</div>', value)
964 965
965 966 return value
966 967
967 968
968 969 def bool2icon(value):
969 970 """
970 971 Returns boolean value of a given value, represented as html element with
971 972 classes that will represent icons
972 973
973 974 :param value: given value to convert to html node
974 975 """
975 976
976 977 if value: # does bool conversion
977 978 return HTML.tag('i', class_="icon-true")
978 979 else: # not true as bool
979 980 return HTML.tag('i', class_="icon-false")
980 981
981 982
982 983 #==============================================================================
983 984 # PERMS
984 985 #==============================================================================
985 986 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
986 987 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
987 988 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
988 989 csrf_token_key
989 990
990 991
991 992 #==============================================================================
992 993 # GRAVATAR URL
993 994 #==============================================================================
994 995 class InitialsGravatar(object):
995 996 def __init__(self, email_address, first_name, last_name, size=30,
996 997 background=None, text_color='#fff'):
997 998 self.size = size
998 999 self.first_name = first_name
999 1000 self.last_name = last_name
1000 1001 self.email_address = email_address
1001 1002 self.background = background or self.str2color(email_address)
1002 1003 self.text_color = text_color
1003 1004
1004 1005 def get_color_bank(self):
1005 1006 """
1006 1007 returns a predefined list of colors that gravatars can use.
1007 1008 Those are randomized distinct colors that guarantee readability and
1008 1009 uniqueness.
1009 1010
1010 1011 generated with: http://phrogz.net/css/distinct-colors.html
1011 1012 """
1012 1013 return [
1013 1014 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1014 1015 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1015 1016 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1016 1017 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1017 1018 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1018 1019 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1019 1020 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1020 1021 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1021 1022 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1022 1023 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1023 1024 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1024 1025 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1025 1026 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1026 1027 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1027 1028 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1028 1029 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1029 1030 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1030 1031 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1031 1032 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1032 1033 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1033 1034 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1034 1035 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1035 1036 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1036 1037 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1037 1038 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1038 1039 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1039 1040 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1040 1041 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1041 1042 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1042 1043 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1043 1044 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1044 1045 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1045 1046 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1046 1047 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1047 1048 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1048 1049 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1049 1050 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1050 1051 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1051 1052 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1052 1053 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1053 1054 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1054 1055 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1055 1056 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1056 1057 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1057 1058 '#4f8c46', '#368dd9', '#5c0073'
1058 1059 ]
1059 1060
1060 1061 def rgb_to_hex_color(self, rgb_tuple):
1061 1062 """
1062 1063         Converts a passed rgb_tuple to a hex color.
1063 1064
1064 1065         :param rgb_tuple: tuple with 3 ints representing rgb color space
1065 1066 """
1066 1067 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1067 1068
1068 1069 def email_to_int_list(self, email_str):
1069 1070 """
1070 1071         Get every byte of the hex digest value of the email and turn it into an integer.
1071 1072         It's always going to be between 0-255
1072 1073 """
1073 1074 digest = md5_safe(email_str.lower())
1074 1075 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1075 1076
1076 1077 def pick_color_bank_index(self, email_str, color_bank):
1077 1078 return self.email_to_int_list(email_str)[0] % len(color_bank)
1078 1079
1079 1080 def str2color(self, email_str):
1080 1081 """
1081 1082 Tries to map in a stable algorithm an email to color
1082 1083
1083 1084 :param email_str:
1084 1085 """
1085 1086 color_bank = self.get_color_bank()
1086 1087         # pick position (modulo its length so we always find it in the
1087 1088         # bank even if it's smaller than 256 values)
1088 1089 pos = self.pick_color_bank_index(email_str, color_bank)
1089 1090 return color_bank[pos]
1090 1091
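A small illustration (not part of this commit) that the mapping is stable, so the same address always gets the same background:

    ig = InitialsGravatar('someone@example.com', 'Some', 'One')
    ig.str2color('someone@example.com') == ig.background
    # -> True, __init__ derived the background from the same email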
1091 1092 def normalize_email(self, email_address):
1092 1093 import unicodedata
1093 1094 # default host used to fill in the fake/missing email
1094 1095 default_host = u'localhost'
1095 1096
1096 1097 if not email_address:
1097 1098 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1098 1099
1099 1100 email_address = safe_unicode(email_address)
1100 1101
1101 1102 if u'@' not in email_address:
1102 1103 email_address = u'%s@%s' % (email_address, default_host)
1103 1104
1104 1105 if email_address.endswith(u'@'):
1105 1106 email_address = u'%s%s' % (email_address, default_host)
1106 1107
1107 1108 email_address = unicodedata.normalize('NFKD', email_address)\
1108 1109 .encode('ascii', 'ignore')
1109 1110 return email_address
1110 1111
1111 1112 def get_initials(self):
1112 1113 """
1113 1114 Returns 2 letter initials calculated based on the input.
1114 1115         The algorithm picks the first given email address, takes the first letter
1115 1116         of the part before @, and then the first letter of the server name. In case
1116 1117         the part before @ is in the format `somestring.somestring2`, it replaces
1117 1118         the server letter with the first letter of somestring2.
1118 1119
1119 1120         In case the function was initialized with both first and last name, this
1120 1121         overrides the extraction from email with the first letters of the first and
1121 1122         last name. We add special logic to that functionality: in case the full name
1122 1123         is compound, like Guido Von Rossum, we use the last part of the last name
1123 1124         (Von Rossum), picking `R`.
1124 1125
1125 1126         The function also normalizes non-ascii characters to their ascii
1126 1127         representation, eg Ą => A
1127 1128 """
1128 1129 import unicodedata
1129 1130 # replace non-ascii to ascii
1130 1131 first_name = unicodedata.normalize(
1131 1132 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1132 1133 last_name = unicodedata.normalize(
1133 1134 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1134 1135
1135 1136 # do NFKD encoding, and also make sure email has proper format
1136 1137 email_address = self.normalize_email(self.email_address)
1137 1138
1138 1139 # first push the email initials
1139 1140 prefix, server = email_address.split('@', 1)
1140 1141
1141 1142 # check if prefix is maybe a 'firstname.lastname' syntax
1142 1143 _dot_split = prefix.rsplit('.', 1)
1143 1144 if len(_dot_split) == 2:
1144 1145 initials = [_dot_split[0][0], _dot_split[1][0]]
1145 1146 else:
1146 1147 initials = [prefix[0], server[0]]
1147 1148
1148 1149 # then try to replace with either firstname or lastname
1149 1150 fn_letter = (first_name or " ")[0].strip()
1150 1151 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1151 1152
1152 1153 if fn_letter:
1153 1154 initials[0] = fn_letter
1154 1155
1155 1156 if ln_letter:
1156 1157 initials[1] = ln_letter
1157 1158
1158 1159 return ''.join(initials).upper()
1159 1160
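A standalone sketch of the email-derived rules described in the docstring above (names and addresses are hypothetical); the first/last-name override would then replace these letters, e.g. 'Guido' / 'Von Rossum' yields 'GR':

    def initials_from_email(email):
        prefix, server = email.split('@', 1)
        parts = prefix.rsplit('.', 1)
        if len(parts) == 2:                       # 'first.last@host' -> 'FL'
            return (parts[0][0] + parts[1][0]).upper()
        return (prefix[0] + server[0]).upper()    # 'name@host' -> name letter + host letter

    assert initials_from_email('john.doe@example.com') == 'JD'
    assert initials_from_email('marcin@example.com') == 'ME'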
1160 1161 def get_img_data_by_type(self, font_family, img_type):
1161 1162 default_user = """
1162 1163 <svg xmlns="http://www.w3.org/2000/svg"
1163 1164 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1164 1165 viewBox="-15 -10 439.165 429.164"
1165 1166
1166 1167 xml:space="preserve"
1167 1168 style="background:{background};" >
1168 1169
1169 1170 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1170 1171 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1171 1172 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1172 1173 168.596,153.916,216.671,
1173 1174 204.583,216.671z" fill="{text_color}"/>
1174 1175 <path d="M407.164,374.717L360.88,
1175 1176 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1176 1177 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1177 1178 15.366-44.203,23.488-69.076,23.488c-24.877,
1178 1179 0-48.762-8.122-69.078-23.488
1179 1180 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1180 1181 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1181 1182 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1182 1183 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1183 1184 19.402-10.527 C409.699,390.129,
1184 1185 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1185 1186 </svg>""".format(
1186 1187 size=self.size,
1187 1188 background='#979797', # @grey4
1188 1189 text_color=self.text_color,
1189 1190 font_family=font_family)
1190 1191
1191 1192 return {
1192 1193 "default_user": default_user
1193 1194 }[img_type]
1194 1195
1195 1196 def get_img_data(self, svg_type=None):
1196 1197 """
1197 1198 generates the svg markup for the avatar image
1198 1199 """
1199 1200
1200 1201 font_family = ','.join([
1201 1202 'proximanovaregular',
1202 1203 'Proxima Nova Regular',
1203 1204 'Proxima Nova',
1204 1205 'Arial',
1205 1206 'Lucida Grande',
1206 1207 'sans-serif'
1207 1208 ])
1208 1209 if svg_type:
1209 1210 return self.get_img_data_by_type(font_family, svg_type)
1210 1211
1211 1212 initials = self.get_initials()
1212 1213 img_data = """
1213 1214 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1214 1215 width="{size}" height="{size}"
1215 1216 style="width: 100%; height: 100%; background-color: {background}"
1216 1217 viewBox="0 0 {size} {size}">
1217 1218 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1218 1219 pointer-events="auto" fill="{text_color}"
1219 1220 font-family="{font_family}"
1220 1221 style="font-weight: 400; font-size: {f_size}px;">{text}
1221 1222 </text>
1222 1223 </svg>""".format(
1223 1224 size=self.size,
1224 1225 f_size=self.size/1.85, # scale the text inside the box nicely
1225 1226 background=self.background,
1226 1227 text_color=self.text_color,
1227 1228 text=initials.upper(),
1228 1229 font_family=font_family)
1229 1230
1230 1231 return img_data
1231 1232
1232 1233 def generate_svg(self, svg_type=None):
1233 1234 img_data = self.get_img_data(svg_type)
1234 1235 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1235 1236
1236 1237
1237 1238 def initials_gravatar(email_address, first_name, last_name, size=30):
1238 1239 svg_type = None
1239 1240 if email_address == User.DEFAULT_USER_EMAIL:
1240 1241 svg_type = 'default_user'
1241 1242 klass = InitialsGravatar(email_address, first_name, last_name, size)
1242 1243 return klass.generate_svg(svg_type=svg_type)
1243 1244
1244 1245
1245 1246 def gravatar_url(email_address, size=30):
1246 1247 # doh, we need to re-import those to mock it later
1247 1248 from pylons import tmpl_context as c
1248 1249
1249 1250 _use_gravatar = c.visual.use_gravatar
1250 1251 _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
1251 1252
1252 1253 email_address = email_address or User.DEFAULT_USER_EMAIL
1253 1254 if isinstance(email_address, unicode):
1254 1255 # hashlib crashes on unicode items
1255 1256 email_address = safe_str(email_address)
1256 1257
1257 1258 # empty email or default user
1258 1259 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1259 1260 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1260 1261
1261 1262 if _use_gravatar:
1262 1263 # TODO: Disuse pyramid thread locals. Think about another solution to
1263 1264 # get the host and schema here.
1264 1265 request = get_current_request()
1265 1266 tmpl = safe_str(_gravatar_url)
1266 1267 tmpl = tmpl.replace('{email}', email_address)\
1267 1268 .replace('{md5email}', md5_safe(email_address.lower())) \
1268 1269 .replace('{netloc}', request.host)\
1269 1270 .replace('{scheme}', request.scheme)\
1270 1271 .replace('{size}', safe_str(size))
1271 1272 return tmpl
1272 1273 else:
1273 1274 return initials_gravatar(email_address, '', '', size=size)
1274 1275
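The gravatar template is expanded by plain string replacement; with the default gravatar template (the `DEFAULT_GRAVATAR_URL` defined on the User model, using {md5email} and {size}), an address expands roughly as in this sketch (hypothetical email, Python 2 style md5 call):

    import hashlib

    tmpl = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    email = 'dev@example.com'
    url = (tmpl.replace('{md5email}', hashlib.md5(email.lower()).hexdigest())
               .replace('{size}', '30'))
    # -> 'https://secure.gravatar.com/avatar/<32 hex chars>?d=identicon&s=30'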
1275 1276
1276 1277 class Page(_Page):
1277 1278 """
1278 1279 Custom pager to match rendering style with paginator
1279 1280 """
1280 1281
1281 1282 def _get_pos(self, cur_page, max_page, items):
1282 1283 edge = (items / 2) + 1
1283 1284 if (cur_page <= edge):
1284 1285 radius = max(items / 2, items - cur_page)
1285 1286 elif (max_page - cur_page) < edge:
1286 1287 radius = (items - 1) - (max_page - cur_page)
1287 1288 else:
1288 1289 radius = items / 2
1289 1290
1290 1291 left = max(1, (cur_page - (radius)))
1291 1292 right = min(max_page, cur_page + (radius))
1292 1293 return left, cur_page, right
1293 1294
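`_get_pos` keeps a fixed-width window of page links centred on the current page and clamps it at both ends. A standalone sketch of the same arithmetic (using `//` so the integer division matches the Python 2 behaviour above):

    def window(cur_page, max_page, items):
        edge = (items // 2) + 1
        if cur_page <= edge:
            radius = max(items // 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items // 2
        return max(1, cur_page - radius), cur_page, min(max_page, cur_page + radius)

    assert window(7, 12, 5) == (5, 7, 9)      # '1 .. 5 6 [7] 8 9 .. 12'
    assert window(1, 12, 5) == (1, 1, 5)      # window pushed right at the start
    assert window(12, 12, 5) == (8, 12, 12)   # window pushed left at the end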
1294 1295 def _range(self, regexp_match):
1295 1296 """
1296 1297 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1297 1298
1298 1299 Arguments:
1299 1300
1300 1301 regexp_match
1301 1302 A "re" (regular expressions) match object containing the
1302 1303 radius of linked pages around the current page in
1303 1304 regexp_match.group(1) as a string
1304 1305
1305 1306 This function is supposed to be called as a callable in
1306 1307 re.sub.
1307 1308
1308 1309 """
1309 1310 radius = int(regexp_match.group(1))
1310 1311
1311 1312 # Compute the first and last page number within the radius
1312 1313 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1313 1314 # -> leftmost_page = 5
1314 1315 # -> rightmost_page = 9
1315 1316 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1316 1317 self.last_page,
1317 1318 (radius * 2) + 1)
1318 1319 nav_items = []
1319 1320
1320 1321 # Create a link to the first page (unless we are on the first page
1321 1322 # or there would be no need to insert '..' spacers)
1322 1323 if self.page != self.first_page and self.first_page < leftmost_page:
1323 1324 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1324 1325
1325 1326 # Insert dots if there are pages between the first page
1326 1327 # and the currently displayed page range
1327 1328 if leftmost_page - self.first_page > 1:
1328 1329 # Wrap in a SPAN tag if nolink_attr is set
1329 1330 text = '..'
1330 1331 if self.dotdot_attr:
1331 1332 text = HTML.span(c=text, **self.dotdot_attr)
1332 1333 nav_items.append(text)
1333 1334
1334 1335 for thispage in xrange(leftmost_page, rightmost_page + 1):
1335 1336 # Highlight the current page number and do not use a link
1336 1337 if thispage == self.page:
1337 1338 text = '%s' % (thispage,)
1338 1339 # Wrap in a SPAN tag if nolink_attr is set
1339 1340 if self.curpage_attr:
1340 1341 text = HTML.span(c=text, **self.curpage_attr)
1341 1342 nav_items.append(text)
1342 1343 # Otherwise create just a link to that page
1343 1344 else:
1344 1345 text = '%s' % (thispage,)
1345 1346 nav_items.append(self._pagerlink(thispage, text))
1346 1347
1347 1348 # Insert dots if there are pages between the displayed
1348 1349 # page numbers and the end of the page range
1349 1350 if self.last_page - rightmost_page > 1:
1350 1351 text = '..'
1351 1352 # Wrap in a SPAN tag if nolink_attr is set
1352 1353 if self.dotdot_attr:
1353 1354 text = HTML.span(c=text, **self.dotdot_attr)
1354 1355 nav_items.append(text)
1355 1356
1356 1357 # Create a link to the very last page (unless we are on the last
1357 1358 # page or there would be no need to insert '..' spacers)
1358 1359 if self.page != self.last_page and rightmost_page < self.last_page:
1359 1360 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1360 1361
1361 1362 ## prerender links
1362 1363 #_page_link = url.current()
1363 1364 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1364 1365 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1365 1366 return self.separator.join(nav_items)
1366 1367
1367 1368 def pager(self, format='~2~', page_param='page', partial_param='partial',
1368 1369 show_if_single_page=False, separator=' ', onclick=None,
1369 1370 symbol_first='<<', symbol_last='>>',
1370 1371 symbol_previous='<', symbol_next='>',
1371 1372 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1372 1373 curpage_attr={'class': 'pager_curpage'},
1373 1374 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1374 1375
1375 1376 self.curpage_attr = curpage_attr
1376 1377 self.separator = separator
1377 1378 self.pager_kwargs = kwargs
1378 1379 self.page_param = page_param
1379 1380 self.partial_param = partial_param
1380 1381 self.onclick = onclick
1381 1382 self.link_attr = link_attr
1382 1383 self.dotdot_attr = dotdot_attr
1383 1384
1384 1385 # Don't show navigator if there is no more than one page
1385 1386 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1386 1387 return ''
1387 1388
1388 1389 from string import Template
1389 1390 # Replace ~...~ in token format by range of pages
1390 1391 result = re.sub(r'~(\d+)~', self._range, format)
1391 1392
1392 1393 # Interpolate '%' variables
1393 1394 result = Template(result).safe_substitute({
1394 1395 'first_page': self.first_page,
1395 1396 'last_page': self.last_page,
1396 1397 'page': self.page,
1397 1398 'page_count': self.page_count,
1398 1399 'items_per_page': self.items_per_page,
1399 1400 'first_item': self.first_item,
1400 1401 'last_item': self.last_item,
1401 1402 'item_count': self.item_count,
1402 1403 'link_first': self.page > self.first_page and \
1403 1404 self._pagerlink(self.first_page, symbol_first) or '',
1404 1405 'link_last': self.page < self.last_page and \
1405 1406 self._pagerlink(self.last_page, symbol_last) or '',
1406 1407 'link_previous': self.previous_page and \
1407 1408 self._pagerlink(self.previous_page, symbol_previous) \
1408 1409 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1409 1410 'link_next': self.next_page and \
1410 1411 self._pagerlink(self.next_page, symbol_next) \
1411 1412 or HTML.span(symbol_next, class_="pg-next disabled")
1412 1413 })
1413 1414
1414 1415 return literal(result)
1415 1416
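`pager()` runs two substitution passes over the format string: `~N~` becomes the rendered page-link range with radius N, and `$`-variables such as `$link_previous`, `$page` or `$page_count` are filled through `string.Template.safe_substitute`. A simplified sketch of the second pass (the rendered range is faked here, values are illustrative):

    from string import Template

    fmt = '$link_previous ~2~ $link_next ($page of $page_count)'
    rendered_range = '1 .. 5 6 [7] 8 9 .. 12'   # what the '~2~' pass would produce
    result = Template(fmt.replace('~2~', rendered_range)).safe_substitute(
        link_previous='<', link_next='>', page=7, page_count=12)
    # -> '< 1 .. 5 6 [7] 8 9 .. 12 > (7 of 12)'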
1416 1417
1417 1418 #==============================================================================
1418 1419 # REPO PAGER, PAGER FOR REPOSITORY
1419 1420 #==============================================================================
1420 1421 class RepoPage(Page):
1421 1422
1422 1423 def __init__(self, collection, page=1, items_per_page=20,
1423 1424 item_count=None, url=None, **kwargs):
1424 1425
1425 1426 """Create a "RepoPage" instance. special pager for paging
1426 1427 repository
1427 1428 """
1428 1429 self._url_generator = url
1429 1430
1430 1431 # Save the kwargs class-wide so they can be used in the pager() method
1431 1432 self.kwargs = kwargs
1432 1433
1433 1434 # Save a reference to the collection
1434 1435 self.original_collection = collection
1435 1436
1436 1437 self.collection = collection
1437 1438
1438 1439 # The self.page is the number of the current page.
1439 1440 # The first page has the number 1!
1440 1441 try:
1441 1442 self.page = int(page) # make it int() if we get it as a string
1442 1443 except (ValueError, TypeError):
1443 1444 self.page = 1
1444 1445
1445 1446 self.items_per_page = items_per_page
1446 1447
1447 1448 # Unless the user tells us how many items the collection has
1448 1449 # we calculate that ourselves.
1449 1450 if item_count is not None:
1450 1451 self.item_count = item_count
1451 1452 else:
1452 1453 self.item_count = len(self.collection)
1453 1454
1454 1455 # Compute the number of the first and last available page
1455 1456 if self.item_count > 0:
1456 1457 self.first_page = 1
1457 1458 self.page_count = int(math.ceil(float(self.item_count) /
1458 1459 self.items_per_page))
1459 1460 self.last_page = self.first_page + self.page_count - 1
1460 1461
1461 1462 # Make sure that the requested page number is in the range of
1462 1463 # valid pages
1463 1464 if self.page > self.last_page:
1464 1465 self.page = self.last_page
1465 1466 elif self.page < self.first_page:
1466 1467 self.page = self.first_page
1467 1468
1468 1469 # Note: the number of items on this page can be less than
1469 1470 # items_per_page if the last page is not full
1470 1471 self.first_item = max(0, (self.item_count) - (self.page *
1471 1472 items_per_page))
1472 1473 self.last_item = ((self.item_count - 1) - items_per_page *
1473 1474 (self.page - 1))
1474 1475
1475 1476 self.items = list(self.collection[self.first_item:self.last_item + 1])
1476 1477
1477 1478 # Links to previous and next page
1478 1479 if self.page > self.first_page:
1479 1480 self.previous_page = self.page - 1
1480 1481 else:
1481 1482 self.previous_page = None
1482 1483
1483 1484 if self.page < self.last_page:
1484 1485 self.next_page = self.page + 1
1485 1486 else:
1486 1487 self.next_page = None
1487 1488
1488 1489 # No items available
1489 1490 else:
1490 1491 self.first_page = None
1491 1492 self.page_count = 0
1492 1493 self.last_page = None
1493 1494 self.first_item = None
1494 1495 self.last_item = None
1495 1496 self.previous_page = None
1496 1497 self.next_page = None
1497 1498 self.items = []
1498 1499
1499 1500 # This is a subclass of the 'list' type. Initialise the list now.
1500 1501 list.__init__(self, reversed(self.items))
1501 1502
1502 1503
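The slice arithmetic above pages the collection from its tail and then reverses the slice, so the newest entries come first. A worked example of the same formulas (hypothetical sizes):

    item_count, items_per_page, page = 100, 20, 1
    first_item = max(0, item_count - page * items_per_page)       # 80
    last_item = (item_count - 1) - items_per_page * (page - 1)    # 99
    # page 1 therefore shows collection[80:100], reversed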
1503 1504 def changed_tooltip(nodes):
1504 1505 """
1505 1506 Generates an html string for changed nodes on the commit page.
1506 1507 It limits the output to 30 entries
1507 1508
1508 1509 :param nodes: LazyNodesGenerator
1509 1510 """
1510 1511 if nodes:
1511 1512 pref = ': <br/> '
1512 1513 suf = ''
1513 1514 if len(nodes) > 30:
1514 1515 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1515 1516 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1516 1517 for x in nodes[:30]]) + suf)
1517 1518 else:
1518 1519 return ': ' + _('No Files')
1519 1520
1520 1521
1521 1522 def breadcrumb_repo_link(repo):
1522 1523 """
1523 1524 Makes a breadcrumbs path link to repo
1524 1525
1525 1526 ex::
1526 1527 group >> subgroup >> repo
1527 1528
1528 1529 :param repo: a Repository instance
1529 1530 """
1530 1531
1531 1532 path = [
1532 1533 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1533 1534 for group in repo.groups_with_parents
1534 1535 ] + [
1535 1536 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1536 1537 ]
1537 1538
1538 1539 return literal(' &raquo; '.join(path))
1539 1540
1540 1541
1541 1542 def format_byte_size_binary(file_size):
1542 1543 """
1543 1544 Formats file/folder sizes to a human-readable standard using binary units.
1544 1545 """
1545 1546 formatted_size = format_byte_size(file_size, binary=True)
1546 1547 return formatted_size
1547 1548
1548 1549
1549 1550 def fancy_file_stats(stats):
1550 1551 """
1551 1552 Displays a fancy two-colored bar for the number of added/deleted
1552 1553 lines of code in a file
1553 1554
1554 1555 :param stats: two element list of added/deleted lines of code
1555 1556 """
1556 1557 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1557 1558 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1558 1559
1559 1560 def cgen(l_type, a_v, d_v):
1560 1561 mapping = {'tr': 'top-right-rounded-corner-mid',
1561 1562 'tl': 'top-left-rounded-corner-mid',
1562 1563 'br': 'bottom-right-rounded-corner-mid',
1563 1564 'bl': 'bottom-left-rounded-corner-mid'}
1564 1565 map_getter = lambda x: mapping[x]
1565 1566
1566 1567 if l_type == 'a' and d_v:
1567 1568 #case when added and deleted are present
1568 1569 return ' '.join(map(map_getter, ['tl', 'bl']))
1569 1570
1570 1571 if l_type == 'a' and not d_v:
1571 1572 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1572 1573
1573 1574 if l_type == 'd' and a_v:
1574 1575 return ' '.join(map(map_getter, ['tr', 'br']))
1575 1576
1576 1577 if l_type == 'd' and not a_v:
1577 1578 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1578 1579
1579 1580 a, d = stats['added'], stats['deleted']
1580 1581 width = 100
1581 1582
1582 1583 if stats['binary']: # binary operations like chmod/rename etc
1583 1584 lbl = []
1584 1585 bin_op = 0 # undefined
1585 1586
1586 1587 # prefix with bin for binary files
1587 1588 if BIN_FILENODE in stats['ops']:
1588 1589 lbl += ['bin']
1589 1590
1590 1591 if NEW_FILENODE in stats['ops']:
1591 1592 lbl += [_('new file')]
1592 1593 bin_op = NEW_FILENODE
1593 1594 elif MOD_FILENODE in stats['ops']:
1594 1595 lbl += [_('mod')]
1595 1596 bin_op = MOD_FILENODE
1596 1597 elif DEL_FILENODE in stats['ops']:
1597 1598 lbl += [_('del')]
1598 1599 bin_op = DEL_FILENODE
1599 1600 elif RENAMED_FILENODE in stats['ops']:
1600 1601 lbl += [_('rename')]
1601 1602 bin_op = RENAMED_FILENODE
1602 1603
1603 1604 # chmod can go with other operations, so we add a + to lbl if needed
1604 1605 if CHMOD_FILENODE in stats['ops']:
1605 1606 lbl += [_('chmod')]
1606 1607 if bin_op == 0:
1607 1608 bin_op = CHMOD_FILENODE
1608 1609
1609 1610 lbl = '+'.join(lbl)
1610 1611 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1611 1612 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1612 1613 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1613 1614 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1614 1615
1615 1616 t = stats['added'] + stats['deleted']
1616 1617 unit = float(width) / (t or 1)
1617 1618
1618 1619 # needs > 9% of width to be visible or 0 to be hidden
1619 1620 a_p = max(9, unit * a) if a > 0 else 0
1620 1621 d_p = max(9, unit * d) if d > 0 else 0
1621 1622 p_sum = a_p + d_p
1622 1623
1623 1624 if p_sum > width:
1624 1625 # adjust the percentages to sum to 100% since we clamped up to the 9% minimum
1625 1626 if a_p > d_p:
1626 1627 a_p = a_p - (p_sum - width)
1627 1628 else:
1628 1629 d_p = d_p - (p_sum - width)
1629 1630
1630 1631 a_v = a if a > 0 else ''
1631 1632 d_v = d if d > 0 else ''
1632 1633
1633 1634 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1634 1635 cgen('a', a_v, d_v), a_p, a_v
1635 1636 )
1636 1637 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1637 1638 cgen('d', a_v, d_v), d_p, d_v
1638 1639 )
1639 1640 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1640 1641
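The bar widths are scaled onto a 100px bar, clamped up to a 9% minimum so tiny counts stay visible, and the larger side is then trimmed so both sides still sum to 100%. A worked example of that arithmetic (3 lines added, 97 deleted, values illustrative):

    width, a, d = 100, 3, 97
    unit = float(width) / (a + d)               # 1.0 px per line
    a_p = max(9, unit * a) if a else 0          # 9  (clamped up from 3)
    d_p = max(9, unit * d) if d else 0          # 97.0
    if a_p + d_p > width:
        d_p -= (a_p + d_p) - width              # d is the larger side here -> 91.0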
1641 1642
1642 1643 def urlify_text(text_, safe=True):
1643 1644 """
1644 1645 Extract urls from text and make html links out of them
1645 1646
1646 1647 :param text_:
1647 1648 """
1648 1649
1649 1650 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1650 1651 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1651 1652
1652 1653 def url_func(match_obj):
1653 1654 url_full = match_obj.groups()[0]
1654 1655 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1655 1656 _newtext = url_pat.sub(url_func, text_)
1656 1657 if safe:
1657 1658 return literal(_newtext)
1658 1659 return _newtext
1659 1660
1660 1661
1661 1662 def urlify_commits(text_, repository):
1662 1663 """
1663 1664 Extract commit ids from text and make links from them
1664 1665
1665 1666 :param text_:
1666 1667 :param repository: repo name to build the URL with
1667 1668 """
1668 1669 from pylons import url # doh, we need to re-import url to mock it later
1669 1670 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1670 1671
1671 1672 def url_func(match_obj):
1672 1673 commit_id = match_obj.groups()[1]
1673 1674 pref = match_obj.groups()[0]
1674 1675 suf = match_obj.groups()[2]
1675 1676
1676 1677 tmpl = (
1677 1678 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1678 1679 '%(commit_id)s</a>%(suf)s'
1679 1680 )
1680 1681 return tmpl % {
1681 1682 'pref': pref,
1682 1683 'cls': 'revision-link',
1683 1684 'url': url('changeset_home', repo_name=repository,
1684 1685 revision=commit_id, qualified=True),
1685 1686 'commit_id': commit_id,
1686 1687 'suf': suf
1687 1688 }
1688 1689
1689 1690 newtext = URL_PAT.sub(url_func, text_)
1690 1691
1691 1692 return newtext
1692 1693
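The pattern above links any whitespace-delimited run of 12-40 hex characters, which is enough to catch abbreviated and full commit hashes in free text. A small sketch (the message text is made up):

    import re

    URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
    m = URL_PAT.search('fixes regression introduced in deadbeefcafe1234 yesterday')
    # m.group(2) -> 'deadbeefcafe1234'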
1693 1694
1694 1695 def _process_url_func(match_obj, repo_name, uid, entry,
1695 1696 return_raw_data=False):
1696 1697 pref = ''
1697 1698 if match_obj.group().startswith(' '):
1698 1699 pref = ' '
1699 1700
1700 1701 issue_id = ''.join(match_obj.groups())
1701 1702 tmpl = (
1702 1703 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1703 1704 '%(issue-prefix)s%(id-repr)s'
1704 1705 '</a>')
1705 1706
1706 1707 (repo_name_cleaned,
1707 1708 parent_group_name) = RepoGroupModel().\
1708 1709 _get_group_name_and_parent(repo_name)
1709 1710
1710 1711 # variables replacement
1711 1712 named_vars = {
1712 1713 'id': issue_id,
1713 1714 'repo': repo_name,
1714 1715 'repo_name': repo_name_cleaned,
1715 1716 'group_name': parent_group_name
1716 1717 }
1717 1718 # named regex variables
1718 1719 named_vars.update(match_obj.groupdict())
1719 1720 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1720 1721
1721 1722 data = {
1722 1723 'pref': pref,
1723 1724 'cls': 'issue-tracker-link',
1724 1725 'url': _url,
1725 1726 'id-repr': issue_id,
1726 1727 'issue-prefix': entry['pref'],
1727 1728 'serv': entry['url'],
1728 1729 }
1729 1730 if return_raw_data:
1730 1731 return {
1731 1732 'id': issue_id,
1732 1733 'url': _url
1733 1734 }
1734 1735 return tmpl % data
1735 1736
1736 1737
1737 1738 def process_patterns(text_string, repo_name, config=None):
1738 1739 repo = None
1739 1740 if repo_name:
1740 1741 # Retrieve the repo so an invalid repo_name does not explode in
1741 1742 # IssueTrackerSettingsModel, while still passing the invalid name further down
1742 1743 repo = Repository.get_by_repo_name(repo_name, cache=True)
1743 1744
1744 1745 settings_model = IssueTrackerSettingsModel(repo=repo)
1745 1746 active_entries = settings_model.get_settings(cache=True)
1746 1747
1747 1748 issues_data = []
1748 1749 newtext = text_string
1749 1750 for uid, entry in active_entries.items():
1750 1751 log.debug('found issue tracker entry with uid %s' % (uid,))
1751 1752
1752 1753 if not (entry['pat'] and entry['url']):
1753 1754 log.debug('skipping due to missing data')
1754 1755 continue
1755 1756
1756 1757 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1757 1758 % (uid, entry['pat'], entry['url'], entry['pref']))
1758 1759
1759 1760 try:
1760 1761 pattern = re.compile(r'%s' % entry['pat'])
1761 1762 except re.error:
1762 1763 log.exception(
1763 1764 'issue tracker pattern: `%s` failed to compile',
1764 1765 entry['pat'])
1765 1766 continue
1766 1767
1767 1768 data_func = partial(
1768 1769 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1769 1770 return_raw_data=True)
1770 1771
1771 1772 for match_obj in pattern.finditer(text_string):
1772 1773 issues_data.append(data_func(match_obj))
1773 1774
1774 1775 url_func = partial(
1775 1776 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1776 1777
1777 1778 newtext = pattern.sub(url_func, newtext)
1778 1779 log.debug('processed prefix:uid `%s`' % (uid,))
1779 1780
1780 1781 return newtext, issues_data
1781 1782
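Each active issue-tracker entry contributes a regex (`pat`) and a URL template expanded via `string.Template`, where `${id}`, `${repo}`, `${repo_name}`, `${group_name}` and any named groups from the regex are available. A sketch with a hypothetical entry (pattern, template and repo name are not taken from the diff):

    import re
    import string

    entry_pat = r'#(?P<issue_id>\d+)'
    entry_url = 'https://issues.example.com/${repo}/issue/${issue_id}'

    match = re.search(entry_pat, 'fixes #42')
    named_vars = {'id': ''.join(match.groups()), 'repo': 'my-repo'}
    named_vars.update(match.groupdict())
    url = string.Template(entry_url).safe_substitute(**named_vars)
    # -> 'https://issues.example.com/my-repo/issue/42'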
1782 1783
1783 1784 def urlify_commit_message(commit_text, repository=None):
1784 1785 """
1785 1786 Parses the given text message and makes proper links.
1786 1787 Issues are linked to the given issue-server, and the rest become commit links
1787 1788
1788 1789 :param commit_text:
1789 1790 :param repository:
1790 1791 """
1791 1792 from pylons import url # doh, we need to re-import url to mock it later
1792 1793
1793 1794 def escaper(string):
1794 1795 return string.replace('<', '&lt;').replace('>', '&gt;')
1795 1796
1796 1797 newtext = escaper(commit_text)
1797 1798
1798 1799 # extract http/https links and make them real urls
1799 1800 newtext = urlify_text(newtext, safe=False)
1800 1801
1801 1802 # urlify commits - extract commit ids and make links out of them, if we have
1802 1803 # the scope of repository present.
1803 1804 if repository:
1804 1805 newtext = urlify_commits(newtext, repository)
1805 1806
1806 1807 # process issue tracker patterns
1807 1808 newtext, issues = process_patterns(newtext, repository or '')
1808 1809
1809 1810 return literal(newtext)
1810 1811
1811 1812
1812 1813 def rst(source, mentions=False):
1813 1814 return literal('<div class="rst-block">%s</div>' %
1814 1815 MarkupRenderer.rst(source, mentions=mentions))
1815 1816
1816 1817
1817 1818 def markdown(source, mentions=False):
1818 1819 return literal('<div class="markdown-block">%s</div>' %
1819 1820 MarkupRenderer.markdown(source, flavored=True,
1820 1821 mentions=mentions))
1821 1822
1822 1823 def renderer_from_filename(filename, exclude=None):
1823 1824 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1824 1825
1825 1826
1826 1827 def render(source, renderer='rst', mentions=False):
1827 1828 if renderer == 'rst':
1828 1829 return rst(source, mentions=mentions)
1829 1830 if renderer == 'markdown':
1830 1831 return markdown(source, mentions=mentions)
1831 1832
1832 1833
1833 1834 def commit_status(repo, commit_id):
1834 1835 return ChangesetStatusModel().get_status(repo, commit_id)
1835 1836
1836 1837
1837 1838 def commit_status_lbl(commit_status):
1838 1839 return dict(ChangesetStatus.STATUSES).get(commit_status)
1839 1840
1840 1841
1841 1842 def commit_time(repo_name, commit_id):
1842 1843 repo = Repository.get_by_repo_name(repo_name)
1843 1844 commit = repo.get_commit(commit_id=commit_id)
1844 1845 return commit.date
1845 1846
1846 1847
1847 1848 def get_permission_name(key):
1848 1849 return dict(Permission.PERMS).get(key)
1849 1850
1850 1851
1851 1852 def journal_filter_help():
1852 1853 return _(
1853 1854 'Example filter terms:\n' +
1854 1855 ' repository:vcs\n' +
1855 1856 ' username:marcin\n' +
1856 1857 ' action:*push*\n' +
1857 1858 ' ip:127.0.0.1\n' +
1858 1859 ' date:20120101\n' +
1859 1860 ' date:[20120101100000 TO 20120102]\n' +
1860 1861 '\n' +
1861 1862 'Generate wildcards using \'*\' character:\n' +
1862 1863 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1863 1864 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1864 1865 '\n' +
1865 1866 'Optional AND / OR operators in queries\n' +
1866 1867 ' "repository:vcs OR repository:test"\n' +
1867 1868 ' "username:test AND repository:test*"\n'
1868 1869 )
1869 1870
1870 1871
1871 1872 def not_mapped_error(repo_name):
1872 1873 flash(_('%s repository is not mapped to db; perhaps'
1873 1874 ' it was created or renamed from the filesystem.'
1874 1875 ' Please run the application again'
1875 1876 ' in order to rescan repositories') % repo_name, category='error')
1876 1877
1877 1878
1878 1879 def ip_range(ip_addr):
1879 1880 from rhodecode.model.db import UserIpMap
1880 1881 s, e = UserIpMap._get_ip_range(ip_addr)
1881 1882 return '%s - %s' % (s, e)
1882 1883
1883 1884
1884 1885 def form(url, method='post', needs_csrf_token=True, **attrs):
1885 1886 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1886 1887 if method.lower() != 'get' and needs_csrf_token:
1887 1888 raise Exception(
1888 1889 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1889 1890 'CSRF token. If the endpoint does not require such token you can ' +
1890 1891 'explicitly set the parameter needs_csrf_token to false.')
1891 1892
1892 1893 return wh_form(url, method=method, **attrs)
1893 1894
1894 1895
1895 1896 def secure_form(url, method="POST", multipart=False, **attrs):
1896 1897 """Start a form tag that points the action to an url. This
1897 1898 form tag will also include the hidden field containing
1898 1899 the auth token.
1899 1900
1900 1901 The url options should be given either as a string, or as a
1901 1902 ``url()`` function. The method for the form defaults to POST.
1902 1903
1903 1904 Options:
1904 1905
1905 1906 ``multipart``
1906 1907 If set to True, the enctype is set to "multipart/form-data".
1907 1908 ``method``
1908 1909 The method to use when submitting the form, usually either
1909 1910 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1910 1911 hidden input with name _method is added to simulate the verb
1911 1912 over POST.
1912 1913
1913 1914 """
1914 1915 from webhelpers.pylonslib.secure_form import insecure_form
1915 1916 form = insecure_form(url, method, multipart, **attrs)
1916 1917 token = csrf_input()
1917 1918 return literal("%s\n%s" % (form, token))
1918 1919
1919 1920 def csrf_input():
1920 1921 return literal(
1921 1922 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1922 1923 csrf_token_key, csrf_token_key, get_csrf_token()))
1923 1924
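`secure_form` simply concatenates the webhelpers form opening tag with the hidden CSRF input from `csrf_input()`, so the rendered markup looks roughly like the sketch below (the field name comes from `csrf_token_key` and the value from the session; both are assumed/illustrative here):

    # secure_form(url('repo_settings', repo_name='foo')) renders approximately:
    #   <form action="/foo/settings" method="POST">
    #   <input type="hidden" id="csrf_token" name="csrf_token" value="...session token...">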
1924 1925 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1925 1926 select_html = select(name, selected, options, **attrs)
1926 1927 select2 = """
1927 1928 <script>
1928 1929 $(document).ready(function() {
1929 1930 $('#%s').select2({
1930 1931 containerCssClass: 'drop-menu',
1931 1932 dropdownCssClass: 'drop-menu-dropdown',
1932 1933 dropdownAutoWidth: true%s
1933 1934 });
1934 1935 });
1935 1936 </script>
1936 1937 """
1937 1938 filter_option = """,
1938 1939 minimumResultsForSearch: -1
1939 1940 """
1940 1941 input_id = attrs.get('id') or name
1941 1942 filter_enabled = "" if enable_filter else filter_option
1942 1943 select_script = literal(select2 % (input_id, filter_enabled))
1943 1944
1944 1945 return literal(select_html+select_script)
1945 1946
1946 1947
1947 1948 def get_visual_attr(tmpl_context_var, attr_name):
1948 1949 """
1949 1950 A safe way to get an attribute from the `visual` variable of the template context
1950 1951
1951 1952 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1952 1953 :param attr_name: name of the attribute we fetch from the c.visual
1953 1954 """
1954 1955 visual = getattr(tmpl_context_var, 'visual', None)
1955 1956 if not visual:
1956 1957 return
1957 1958 else:
1958 1959 return getattr(visual, attr_name, None)
1959 1960
1960 1961
1961 1962 def get_last_path_part(file_node):
1962 1963 if not file_node.path:
1963 1964 return u''
1964 1965
1965 1966 path = safe_unicode(file_node.path.split('/')[-1])
1966 1967 return u'../' + path
1967 1968
1968 1969
1969 1970 def route_path(*args, **kwds):
1970 1971 """
1971 1972 Wrapper around pyramid's `route_path` function. It is used to generate
1972 1973 URLs from within pylons views or templates. This will be removed when
1973 1974 the pyramid migration is finished.
1974 1975 """
1975 1976 req = get_current_request()
1976 1977 return req.route_path(*args, **kwds)
1977 1978
1978 1979
1979 1980 def route_path_or_none(*args, **kwargs):
1980 1981 try:
1981 1982 return route_path(*args, **kwargs)
1982 1983 except KeyError:
1983 1984 return None
1984 1985
1985 1986
1986 1987 def static_url(*args, **kwds):
1987 1988 """
1988 1989 Wrapper around pyramid's `static_url` function. It is used to generate
1989 1990 URLs from within pylons views or templates. This will be removed when
1990 1991 the pyramid migration is finished.
1991 1992 """
1992 1993 req = get_current_request()
1993 1994 return req.static_url(*args, **kwds)
1994 1995
1995 1996
1996 1997 def resource_path(*args, **kwds):
1997 1998 """
1998 1999 Wrapper around pyramid's `resource_path` function. It is used to generate
1999 2000 URLs from within pylons views or templates. This will be removed when
2000 2001 the pyramid migration is finished.
2001 2002 """
2002 2003 req = get_current_request()
2003 2004 return req.resource_path(*args, **kwds)
@@ -1,3858 +1,3861 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.ext.declarative import declared_attr
40 40 from sqlalchemy.ext.hybrid import hybrid_property
41 41 from sqlalchemy.orm import (
42 42 relationship, joinedload, class_mapper, validates, aliased)
43 43 from sqlalchemy.sql.expression import true
44 44 from beaker.cache import cache_region
45 45 from webob.exc import HTTPNotFound
46 46 from zope.cachedescriptors.property import Lazy as LazyProperty
47 47
48 48 from pylons import url
49 49 from pylons.i18n.translation import lazy_ugettext as _
50 50
51 51 from rhodecode.lib.vcs import get_vcs_instance
52 52 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
53 53 from rhodecode.lib.utils2 import (
54 54 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
55 55 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
56 56 glob2re, StrictAttributeDict)
57 57 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
58 58 from rhodecode.lib.ext_json import json
59 59 from rhodecode.lib.caching_query import FromCache
60 60 from rhodecode.lib.encrypt import AESCipher
61 61
62 62 from rhodecode.model.meta import Base, Session
63 63
64 64 URL_SEP = '/'
65 65 log = logging.getLogger(__name__)
66 66
67 67 # =============================================================================
68 68 # BASE CLASSES
69 69 # =============================================================================
70 70
71 71 # this is propagated from .ini file rhodecode.encrypted_values.secret or
72 72 # beaker.session.secret if first is not set.
73 73 # and initialized at environment.py
74 74 ENCRYPTION_KEY = None
75 75
76 76 # used to sort permissions by types, '#' used here is not allowed to be in
77 77 # usernames, and it's very early in sorted string.printable table.
78 78 PERMISSION_TYPE_SORT = {
79 79 'admin': '####',
80 80 'write': '###',
81 81 'read': '##',
82 82 'none': '#',
83 83 }
84 84
85 85
86 86 def display_sort(obj):
87 87 """
88 88 Sort function used to sort permissions in .permissions() function of
89 89 Repository, RepoGroup, UserGroup. Also it puts the default user in front
90 90 of all other resources
91 91 """
92 92
93 93 if obj.username == User.DEFAULT_USER:
94 94 return '#####'
95 95 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
96 96 return prefix + obj.username
97 97
98 98
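Because '#' sorts before any character allowed in usernames, stacking more '#'s onto the sort key floats higher permission levels (and the default user, which gets '#####') to the top. A small illustration with hypothetical entries:

    PERMISSION_TYPE_SORT = {'admin': '####', 'write': '###', 'read': '##', 'none': '#'}
    entries = [('read', 'zoe'), ('admin', 'bob'), ('write', 'alice')]
    keys = sorted(PERMISSION_TYPE_SORT[p] + name for p, name in entries)
    # -> ['####bob', '###alice', '##zoe']  (admin, then write, then read)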
99 99 def _hash_key(k):
100 100 return md5_safe(k)
101 101
102 102
103 103 class EncryptedTextValue(TypeDecorator):
104 104 """
105 105 Special column for encrypted long text data, use like::
106 106
107 107 value = Column("encrypted_value", EncryptedValue(), nullable=False)
108 108
109 109 This column is intelligent: if the value is in unencrypted form it returns
110 110 the unencrypted form, but on save it always encrypts
111 111 """
112 112 impl = Text
113 113
114 114 def process_bind_param(self, value, dialect):
115 115 if not value:
116 116 return value
117 117 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
118 118 # protect against double encryption if someone manually starts
119 119 # doing it
120 120 raise ValueError('value needs to be in unencrypted format, ie. '
121 121 'not starting with enc$aes')
122 122 return 'enc$aes_hmac$%s' % AESCipher(
123 123 ENCRYPTION_KEY, hmac=True).encrypt(value)
124 124
125 125 def process_result_value(self, value, dialect):
126 126 import rhodecode
127 127
128 128 if not value:
129 129 return value
130 130
131 131 parts = value.split('$', 3)
132 132 if not len(parts) == 3:
133 133 # probably not encrypted values
134 134 return value
135 135 else:
136 136 if parts[0] != 'enc':
137 137 # parts ok but without our header ?
138 138 return value
139 139 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
140 140 'rhodecode.encrypted_values.strict') or True)
141 141 # at that stage we know it's our encryption
142 142 if parts[1] == 'aes':
143 143 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
144 144 elif parts[1] == 'aes_hmac':
145 145 decrypted_data = AESCipher(
146 146 ENCRYPTION_KEY, hmac=True,
147 147 strict_verification=enc_strict_mode).decrypt(parts[2])
148 148 else:
149 149 raise ValueError(
150 150 'Encryption type part is wrong, must be `aes` '
151 151 'or `aes_hmac`, got `%s` instead' % (parts[1]))
152 152 return decrypted_data
153 153
154 154
155 155 class BaseModel(object):
156 156 """
157 157 Base Model for all classes
158 158 """
159 159
160 160 @classmethod
161 161 def _get_keys(cls):
162 162 """return column names for this model """
163 163 return class_mapper(cls).c.keys()
164 164
165 165 def get_dict(self):
166 166 """
167 167 return dict with keys and values corresponding
168 168 to this model data """
169 169
170 170 d = {}
171 171 for k in self._get_keys():
172 172 d[k] = getattr(self, k)
173 173
174 174 # also use __json__() if present to get additional fields
175 175 _json_attr = getattr(self, '__json__', None)
176 176 if _json_attr:
177 177 # update with attributes from __json__
178 178 if callable(_json_attr):
179 179 _json_attr = _json_attr()
180 180 for k, val in _json_attr.iteritems():
181 181 d[k] = val
182 182 return d
183 183
184 184 def get_appstruct(self):
185 185 """return list with keys and values tuples corresponding
186 186 to this model data """
187 187
188 188 l = []
189 189 for k in self._get_keys():
190 190 l.append((k, getattr(self, k),))
191 191 return l
192 192
193 193 def populate_obj(self, populate_dict):
194 194 """populate model with data from given populate_dict"""
195 195
196 196 for k in self._get_keys():
197 197 if k in populate_dict:
198 198 setattr(self, k, populate_dict[k])
199 199
200 200 @classmethod
201 201 def query(cls):
202 202 return Session().query(cls)
203 203
204 204 @classmethod
205 205 def get(cls, id_):
206 206 if id_:
207 207 return cls.query().get(id_)
208 208
209 209 @classmethod
210 210 def get_or_404(cls, id_):
211 211 try:
212 212 id_ = int(id_)
213 213 except (TypeError, ValueError):
214 214 raise HTTPNotFound
215 215
216 216 res = cls.query().get(id_)
217 217 if not res:
218 218 raise HTTPNotFound
219 219 return res
220 220
221 221 @classmethod
222 222 def getAll(cls):
223 223 # deprecated and left for backward compatibility
224 224 return cls.get_all()
225 225
226 226 @classmethod
227 227 def get_all(cls):
228 228 return cls.query().all()
229 229
230 230 @classmethod
231 231 def delete(cls, id_):
232 232 obj = cls.query().get(id_)
233 233 Session().delete(obj)
234 234
235 235 @classmethod
236 236 def identity_cache(cls, session, attr_name, value):
237 237 exist_in_session = []
238 238 for (item_cls, pkey), instance in session.identity_map.items():
239 239 if cls == item_cls and getattr(instance, attr_name) == value:
240 240 exist_in_session.append(instance)
241 241 if exist_in_session:
242 242 if len(exist_in_session) == 1:
243 243 return exist_in_session[0]
244 244 log.exception(
245 245 'multiple objects with attr %s and '
246 246 'value %s found with same name: %r',
247 247 attr_name, value, exist_in_session)
248 248
249 249 def __repr__(self):
250 250 if hasattr(self, '__unicode__'):
251 251 # python repr needs to return str
252 252 try:
253 253 return safe_str(self.__unicode__())
254 254 except UnicodeDecodeError:
255 255 pass
256 256 return '<DB:%s>' % (self.__class__.__name__)
257 257
258 258
259 259 class RhodeCodeSetting(Base, BaseModel):
260 260 __tablename__ = 'rhodecode_settings'
261 261 __table_args__ = (
262 262 UniqueConstraint('app_settings_name'),
263 263 {'extend_existing': True, 'mysql_engine': 'InnoDB',
264 264 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
265 265 )
266 266
267 267 SETTINGS_TYPES = {
268 268 'str': safe_str,
269 269 'int': safe_int,
270 270 'unicode': safe_unicode,
271 271 'bool': str2bool,
272 272 'list': functools.partial(aslist, sep=',')
273 273 }
274 274 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
275 275 GLOBAL_CONF_KEY = 'app_settings'
276 276
277 277 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
278 278 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
279 279 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
280 280 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
281 281
282 282 def __init__(self, key='', val='', type='unicode'):
283 283 self.app_settings_name = key
284 284 self.app_settings_type = type
285 285 self.app_settings_value = val
286 286
287 287 @validates('_app_settings_value')
288 288 def validate_settings_value(self, key, val):
289 289 assert type(val) == unicode
290 290 return val
291 291
292 292 @hybrid_property
293 293 def app_settings_value(self):
294 294 v = self._app_settings_value
295 295 _type = self.app_settings_type
296 296 if _type:
297 297 _type = self.app_settings_type.split('.')[0]
298 298 # decode the encrypted value
299 299 if 'encrypted' in self.app_settings_type:
300 300 cipher = EncryptedTextValue()
301 301 v = safe_unicode(cipher.process_result_value(v, None))
302 302
303 303 converter = self.SETTINGS_TYPES.get(_type) or \
304 304 self.SETTINGS_TYPES['unicode']
305 305 return converter(v)
306 306
307 307 @app_settings_value.setter
308 308 def app_settings_value(self, val):
309 309 """
310 310 Setter that will always make sure we use unicode in app_settings_value
311 311
312 312 :param val:
313 313 """
314 314 val = safe_unicode(val)
315 315 # encode the encrypted value
316 316 if 'encrypted' in self.app_settings_type:
317 317 cipher = EncryptedTextValue()
318 318 val = safe_unicode(cipher.process_bind_param(val, None))
319 319 self._app_settings_value = val
320 320
321 321 @hybrid_property
322 322 def app_settings_type(self):
323 323 return self._app_settings_type
324 324
325 325 @app_settings_type.setter
326 326 def app_settings_type(self, val):
327 327 if val.split('.')[0] not in self.SETTINGS_TYPES:
328 328 raise Exception('type must be one of %s got %s'
329 329 % (self.SETTINGS_TYPES.keys(), val))
330 330 self._app_settings_type = val
331 331
332 332 def __unicode__(self):
333 333 return u"<%s('%s:%s[%s]')>" % (
334 334 self.__class__.__name__,
335 335 self.app_settings_name, self.app_settings_value,
336 336 self.app_settings_type
337 337 )
338 338
339 339
340 340 class RhodeCodeUi(Base, BaseModel):
341 341 __tablename__ = 'rhodecode_ui'
342 342 __table_args__ = (
343 343 UniqueConstraint('ui_key'),
344 344 {'extend_existing': True, 'mysql_engine': 'InnoDB',
345 345 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
346 346 )
347 347
348 348 HOOK_REPO_SIZE = 'changegroup.repo_size'
349 349 # HG
350 350 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
351 351 HOOK_PULL = 'outgoing.pull_logger'
352 352 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
353 353 HOOK_PUSH = 'changegroup.push_logger'
354 354
355 355 # TODO: johbo: Unify way how hooks are configured for git and hg,
356 356 # git part is currently hardcoded.
357 357
358 358 # SVN PATTERNS
359 359 SVN_BRANCH_ID = 'vcs_svn_branch'
360 360 SVN_TAG_ID = 'vcs_svn_tag'
361 361
362 362 ui_id = Column(
363 363 "ui_id", Integer(), nullable=False, unique=True, default=None,
364 364 primary_key=True)
365 365 ui_section = Column(
366 366 "ui_section", String(255), nullable=True, unique=None, default=None)
367 367 ui_key = Column(
368 368 "ui_key", String(255), nullable=True, unique=None, default=None)
369 369 ui_value = Column(
370 370 "ui_value", String(255), nullable=True, unique=None, default=None)
371 371 ui_active = Column(
372 372 "ui_active", Boolean(), nullable=True, unique=None, default=True)
373 373
374 374 def __repr__(self):
375 375 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
376 376 self.ui_key, self.ui_value)
377 377
378 378
379 379 class RepoRhodeCodeSetting(Base, BaseModel):
380 380 __tablename__ = 'repo_rhodecode_settings'
381 381 __table_args__ = (
382 382 UniqueConstraint(
383 383 'app_settings_name', 'repository_id',
384 384 name='uq_repo_rhodecode_setting_name_repo_id'),
385 385 {'extend_existing': True, 'mysql_engine': 'InnoDB',
386 386 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
387 387 )
388 388
389 389 repository_id = Column(
390 390 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
391 391 nullable=False)
392 392 app_settings_id = Column(
393 393 "app_settings_id", Integer(), nullable=False, unique=True,
394 394 default=None, primary_key=True)
395 395 app_settings_name = Column(
396 396 "app_settings_name", String(255), nullable=True, unique=None,
397 397 default=None)
398 398 _app_settings_value = Column(
399 399 "app_settings_value", String(4096), nullable=True, unique=None,
400 400 default=None)
401 401 _app_settings_type = Column(
402 402 "app_settings_type", String(255), nullable=True, unique=None,
403 403 default=None)
404 404
405 405 repository = relationship('Repository')
406 406
407 407 def __init__(self, repository_id, key='', val='', type='unicode'):
408 408 self.repository_id = repository_id
409 409 self.app_settings_name = key
410 410 self.app_settings_type = type
411 411 self.app_settings_value = val
412 412
413 413 @validates('_app_settings_value')
414 414 def validate_settings_value(self, key, val):
415 415 assert type(val) == unicode
416 416 return val
417 417
418 418 @hybrid_property
419 419 def app_settings_value(self):
420 420 v = self._app_settings_value
421 421 type_ = self.app_settings_type
422 422 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
423 423 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
424 424 return converter(v)
425 425
426 426 @app_settings_value.setter
427 427 def app_settings_value(self, val):
428 428 """
429 429 Setter that will always make sure we use unicode in app_settings_value
430 430
431 431 :param val:
432 432 """
433 433 self._app_settings_value = safe_unicode(val)
434 434
435 435 @hybrid_property
436 436 def app_settings_type(self):
437 437 return self._app_settings_type
438 438
439 439 @app_settings_type.setter
440 440 def app_settings_type(self, val):
441 441 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
442 442 if val not in SETTINGS_TYPES:
443 443 raise Exception('type must be one of %s got %s'
444 444 % (SETTINGS_TYPES.keys(), val))
445 445 self._app_settings_type = val
446 446
447 447 def __unicode__(self):
448 448 return u"<%s('%s:%s:%s[%s]')>" % (
449 449 self.__class__.__name__, self.repository.repo_name,
450 450 self.app_settings_name, self.app_settings_value,
451 451 self.app_settings_type
452 452 )
453 453
454 454
455 455 class RepoRhodeCodeUi(Base, BaseModel):
456 456 __tablename__ = 'repo_rhodecode_ui'
457 457 __table_args__ = (
458 458 UniqueConstraint(
459 459 'repository_id', 'ui_section', 'ui_key',
460 460 name='uq_repo_rhodecode_ui_repository_id_section_key'),
461 461 {'extend_existing': True, 'mysql_engine': 'InnoDB',
462 462 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
463 463 )
464 464
465 465 repository_id = Column(
466 466 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
467 467 nullable=False)
468 468 ui_id = Column(
469 469 "ui_id", Integer(), nullable=False, unique=True, default=None,
470 470 primary_key=True)
471 471 ui_section = Column(
472 472 "ui_section", String(255), nullable=True, unique=None, default=None)
473 473 ui_key = Column(
474 474 "ui_key", String(255), nullable=True, unique=None, default=None)
475 475 ui_value = Column(
476 476 "ui_value", String(255), nullable=True, unique=None, default=None)
477 477 ui_active = Column(
478 478 "ui_active", Boolean(), nullable=True, unique=None, default=True)
479 479
480 480 repository = relationship('Repository')
481 481
482 482 def __repr__(self):
483 483 return '<%s[%s:%s]%s=>%s]>' % (
484 484 self.__class__.__name__, self.repository.repo_name,
485 485 self.ui_section, self.ui_key, self.ui_value)
486 486
487 487
488 488 class User(Base, BaseModel):
489 489 __tablename__ = 'users'
490 490 __table_args__ = (
491 491 UniqueConstraint('username'), UniqueConstraint('email'),
492 492 Index('u_username_idx', 'username'),
493 493 Index('u_email_idx', 'email'),
494 494 {'extend_existing': True, 'mysql_engine': 'InnoDB',
495 495 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
496 496 )
497 497 DEFAULT_USER = 'default'
498 498 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
499 499 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
500 500
501 501 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
502 502 username = Column("username", String(255), nullable=True, unique=None, default=None)
503 503 password = Column("password", String(255), nullable=True, unique=None, default=None)
504 504 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
505 505 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
506 506 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
507 507 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
508 508 _email = Column("email", String(255), nullable=True, unique=None, default=None)
509 509 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
510 510 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
511 511 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
512 512 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
513 513 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
514 514 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
515 515 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
516 516
517 517 user_log = relationship('UserLog')
518 518 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
519 519
520 520 repositories = relationship('Repository')
521 521 repository_groups = relationship('RepoGroup')
522 522 user_groups = relationship('UserGroup')
523 523
524 524 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
525 525 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
526 526
527 527 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
528 528 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
529 529 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
530 530
531 531 group_member = relationship('UserGroupMember', cascade='all')
532 532
533 533 notifications = relationship('UserNotification', cascade='all')
534 534 # notifications assigned to this user
535 535 user_created_notifications = relationship('Notification', cascade='all')
536 536 # comments created by this user
537 537 user_comments = relationship('ChangesetComment', cascade='all')
538 538 # user profile extra info
539 539 user_emails = relationship('UserEmailMap', cascade='all')
540 540 user_ip_map = relationship('UserIpMap', cascade='all')
541 541 user_auth_tokens = relationship('UserApiKeys', cascade='all')
542 542 # gists
543 543 user_gists = relationship('Gist', cascade='all')
544 544 # user pull requests
545 545 user_pull_requests = relationship('PullRequest', cascade='all')
546 546 # external identities
547 547 extenal_identities = relationship(
548 548 'ExternalIdentity',
549 549 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
550 550 cascade='all')
551 551
552 552 def __unicode__(self):
553 553 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
554 554 self.user_id, self.username)
555 555
556 556 @hybrid_property
557 557 def email(self):
558 558 return self._email
559 559
560 560 @email.setter
561 561 def email(self, val):
562 562 self._email = val.lower() if val else None
563 563
564 564 @property
565 565 def firstname(self):
566 566 # alias for future
567 567 return self.name
568 568
569 569 @property
570 570 def emails(self):
571 571 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
572 572 return [self.email] + [x.email for x in other]
573 573
574 574 @property
575 575 def auth_tokens(self):
576 576 return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]
577 577
578 578 @property
579 579 def extra_auth_tokens(self):
580 580 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
581 581
582 582 @property
583 583 def feed_token(self):
584 584 feed_tokens = UserApiKeys.query()\
585 585 .filter(UserApiKeys.user == self)\
586 586 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
587 587 .all()
588 588 if feed_tokens:
589 589 return feed_tokens[0].api_key
590 590 else:
591 591 # use the main token so we don't end up with nothing...
592 592 return self.api_key
593 593
594 594 @classmethod
595 595 def extra_valid_auth_tokens(cls, user, role=None):
596 596 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
597 597 .filter(or_(UserApiKeys.expires == -1,
598 598 UserApiKeys.expires >= time.time()))
599 599 if role:
600 600 tokens = tokens.filter(or_(UserApiKeys.role == role,
601 601 UserApiKeys.role == UserApiKeys.ROLE_ALL))
602 602 return tokens.all()
603 603
604 604 @property
605 605 def builtin_token_roles(self):
606 606 return map(UserApiKeys._get_role_name, [
607 607 UserApiKeys.ROLE_API, UserApiKeys.ROLE_FEED, UserApiKeys.ROLE_HTTP
608 608 ])
609 609
610 610 @property
611 611 def ip_addresses(self):
612 612 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
613 613 return [x.ip_addr for x in ret]
614 614
615 615 @property
616 616 def username_and_name(self):
617 617 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
618 618
619 619 @property
620 620 def username_or_name_or_email(self):
621 621 full_name = self.full_name if self.full_name != ' ' else None
622 622 return self.username or full_name or self.email
623 623
624 624 @property
625 625 def full_name(self):
626 626 return '%s %s' % (self.firstname, self.lastname)
627 627
628 628 @property
629 629 def full_name_or_username(self):
630 630 return ('%s %s' % (self.firstname, self.lastname)
631 631 if (self.firstname and self.lastname) else self.username)
632 632
633 633 @property
634 634 def full_contact(self):
635 635 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
636 636
637 637 @property
638 638 def short_contact(self):
639 639 return '%s %s' % (self.firstname, self.lastname)
640 640
641 641 @property
642 642 def is_admin(self):
643 643 return self.admin
644 644
645 645 @property
646 646 def AuthUser(self):
647 647 """
648 648 Returns instance of AuthUser for this user
649 649 """
650 650 from rhodecode.lib.auth import AuthUser
651 651 return AuthUser(user_id=self.user_id, api_key=self.api_key,
652 652 username=self.username)
653 653
654 654 @hybrid_property
655 655 def user_data(self):
656 656 if not self._user_data:
657 657 return {}
658 658
659 659 try:
660 660 return json.loads(self._user_data)
661 661 except TypeError:
662 662 return {}
663 663
664 664 @user_data.setter
665 665 def user_data(self, val):
666 666 if not isinstance(val, dict):
667 667 raise Exception('user_data must be dict, got %s' % type(val))
668 668 try:
669 669 self._user_data = json.dumps(val)
670 670 except Exception:
671 671 log.error(traceback.format_exc())
672 672
673 673 @classmethod
674 674 def get_by_username(cls, username, case_insensitive=False,
675 675 cache=False, identity_cache=False):
676 676 session = Session()
677 677
678 678 if case_insensitive:
679 679 q = cls.query().filter(
680 680 func.lower(cls.username) == func.lower(username))
681 681 else:
682 682 q = cls.query().filter(cls.username == username)
683 683
684 684 if cache:
685 685 if identity_cache:
686 686 val = cls.identity_cache(session, 'username', username)
687 687 if val:
688 688 return val
689 689 else:
690 690 q = q.options(
691 691 FromCache("sql_cache_short",
692 692 "get_user_by_name_%s" % _hash_key(username)))
693 693
694 694 return q.scalar()
695 695
696 696 @classmethod
697 697 def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
698 698 q = cls.query().filter(cls.api_key == auth_token)
699 699
700 700 if cache:
701 701 q = q.options(FromCache("sql_cache_short",
702 702 "get_auth_token_%s" % auth_token))
703 703 res = q.scalar()
704 704
705 705 if fallback and not res:
706 706 # fallback to additional keys
707 707 _res = UserApiKeys.query()\
708 708 .filter(UserApiKeys.api_key == auth_token)\
709 709 .filter(or_(UserApiKeys.expires == -1,
710 710 UserApiKeys.expires >= time.time()))\
711 711 .first()
712 712 if _res:
713 713 res = _res.user
714 714 return res
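# Example usage (illustrative sketch; the token value is hypothetical). With the
# default fallback=True the token is matched against the built-in users.api_key
# column first and then against any non-expired UserApiKeys row:
#
#   User.get_by_auth_token('6c1521a1a9bdac4b...')                  # full lookup
#   User.get_by_auth_token('6c1521a1a9bdac4b...', fallback=False)  # built-in key only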
715 715
716 716 @classmethod
717 717 def get_by_email(cls, email, case_insensitive=False, cache=False):
718 718
719 719 if case_insensitive:
720 720 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
721 721
722 722 else:
723 723 q = cls.query().filter(cls.email == email)
724 724
725 725 if cache:
726 726 q = q.options(FromCache("sql_cache_short",
727 727 "get_email_key_%s" % _hash_key(email)))
728 728
729 729 ret = q.scalar()
730 730 if ret is None:
731 731 q = UserEmailMap.query()
732 732 # try fetching in alternate email map
733 733 if case_insensitive:
734 734 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
735 735 else:
736 736 q = q.filter(UserEmailMap.email == email)
737 737 q = q.options(joinedload(UserEmailMap.user))
738 738 if cache:
739 739 q = q.options(FromCache("sql_cache_short",
740 740 "get_email_map_key_%s" % email))
741 741 ret = getattr(q.scalar(), 'user', None)
742 742
743 743 return ret
744 744
745 745 @classmethod
746 746 def get_from_cs_author(cls, author):
747 747 """
748 748 Tries to get a User object out of a commit author string
749 749
750 750 :param author:
751 751 """
752 752 from rhodecode.lib.helpers import email, author_name
753 753 # Valid email in the attribute passed, see if they're in the system
754 754 _email = email(author)
755 755 if _email:
756 756 user = cls.get_by_email(_email, case_insensitive=True)
757 757 if user:
758 758 return user
759 759 # Maybe we can match by username?
760 760 _author = author_name(author)
761 761 user = cls.get_by_username(_author, case_insensitive=True)
762 762 if user:
763 763 return user
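# Example usage (illustrative sketch; the author string is hypothetical). The
# email is tried first, then the author name, both case-insensitively:
#
#   User.get_from_cs_author('Joe Doe <joe.doe@example.com>')
#   # -> the matching User, or None when neither email nor username is known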
764 764
765 765 def update_userdata(self, **kwargs):
766 766 usr = self
767 767 old = usr.user_data
768 768 old.update(**kwargs)
769 769 usr.user_data = old
770 770 Session().add(usr)
771 771 log.debug('updated userdata with %s', kwargs)
772 772
773 773 def update_lastlogin(self):
774 774 """Update user lastlogin"""
775 775 self.last_login = datetime.datetime.now()
776 776 Session().add(self)
777 777 log.debug('updated user %s lastlogin', self.username)
778 778
779 779 def update_lastactivity(self):
780 780 """Update user lastactivity"""
781 781 usr = self
782 782 old = usr.user_data
783 783 old.update({'last_activity': time.time()})
784 784 usr.user_data = old
785 785 Session().add(usr)
786 786 log.debug('updated user %s lastactivity', usr.username)
787 787
788 788 def update_password(self, new_password, change_api_key=False):
789 789 from rhodecode.lib.auth import get_crypt_password, generate_auth_token
790 790
791 791 self.password = get_crypt_password(new_password)
792 792 if change_api_key:
793 793 self.api_key = generate_auth_token(self.username)
794 794 Session().add(self)
795 795
796 796 @classmethod
797 797 def get_first_super_admin(cls):
798 798 user = User.query().filter(User.admin == true()).first()
799 799 if user is None:
800 800 raise Exception('FATAL: Missing administrative account!')
801 801 return user
802 802
803 803 @classmethod
804 804 def get_all_super_admins(cls):
805 805 """
806 806 Returns all admin accounts sorted by username
807 807 """
808 808 return User.query().filter(User.admin == true())\
809 809 .order_by(User.username.asc()).all()
810 810
811 811 @classmethod
812 812 def get_default_user(cls, cache=False):
813 813 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
814 814 if user is None:
815 815 raise Exception('FATAL: Missing default account!')
816 816 return user
817 817
818 818 def _get_default_perms(self, user, suffix=''):
819 819 from rhodecode.model.permission import PermissionModel
820 820 return PermissionModel().get_default_perms(user.user_perms, suffix)
821 821
822 822 def get_default_perms(self, suffix=''):
823 823 return self._get_default_perms(self, suffix)
824 824
825 825 def get_api_data(self, include_secrets=False, details='full'):
826 826 """
827 827 Common function for generating user related data for API
828 828
829 829 :param include_secrets: By default secrets in the API data will be replaced
830 830 by a placeholder value to prevent exposing this data by accident. In case
831 831 this data shall be exposed, set this flag to ``True``.
832 832
833 833 :param details: can be 'basic' or 'full'. 'basic' gives only a subset of
834 834 the available user information that includes user_id, name and emails.
835 835 """
836 836 user = self
837 837 user_data = self.user_data
838 838 data = {
839 839 'user_id': user.user_id,
840 840 'username': user.username,
841 841 'firstname': user.name,
842 842 'lastname': user.lastname,
843 843 'email': user.email,
844 844 'emails': user.emails,
845 845 }
846 846 if details == 'basic':
847 847 return data
848 848
849 849 api_key_length = 40
850 850 api_key_replacement = '*' * api_key_length
851 851
852 852 extras = {
853 853 'api_key': api_key_replacement,
854 854 'api_keys': [api_key_replacement],
855 855 'active': user.active,
856 856 'admin': user.admin,
857 857 'extern_type': user.extern_type,
858 858 'extern_name': user.extern_name,
859 859 'last_login': user.last_login,
860 860 'ip_addresses': user.ip_addresses,
861 861 'language': user_data.get('language')
862 862 }
863 863 data.update(extras)
864 864
865 865 if include_secrets:
866 866 data['api_key'] = user.api_key
867 867 data['api_keys'] = user.auth_tokens
868 868 return data
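# Example usage (illustrative sketch). details='basic' returns only the
# id/name/email subset, include_secrets=True replaces the masked api_key
# placeholders with the real token values:
#
#   user.get_api_data(details='basic')
#   user.get_api_data(include_secrets=True)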
869 869
870 870 def __json__(self):
871 871 data = {
872 872 'full_name': self.full_name,
873 873 'full_name_or_username': self.full_name_or_username,
874 874 'short_contact': self.short_contact,
875 875 'full_contact': self.full_contact,
876 876 }
877 877 data.update(self.get_api_data())
878 878 return data
879 879
880 880
881 881 class UserApiKeys(Base, BaseModel):
882 882 __tablename__ = 'user_api_keys'
883 883 __table_args__ = (
884 884 Index('uak_api_key_idx', 'api_key'),
885 885 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
886 886 UniqueConstraint('api_key'),
887 887 {'extend_existing': True, 'mysql_engine': 'InnoDB',
888 888 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
889 889 )
890 890 __mapper_args__ = {}
891 891
892 892 # ApiKey role
893 893 ROLE_ALL = 'token_role_all'
894 894 ROLE_HTTP = 'token_role_http'
895 895 ROLE_VCS = 'token_role_vcs'
896 896 ROLE_API = 'token_role_api'
897 897 ROLE_FEED = 'token_role_feed'
898 898 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
899 899
900 900 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
901 901 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
902 902 api_key = Column("api_key", String(255), nullable=False, unique=True)
903 903 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
904 904 expires = Column('expires', Float(53), nullable=False)
905 905 role = Column('role', String(255), nullable=True)
906 906 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
907 907
908 908 user = relationship('User', lazy='joined')
909 909
910 910 @classmethod
911 911 def _get_role_name(cls, role):
912 912 return {
913 913 cls.ROLE_ALL: _('all'),
914 914 cls.ROLE_HTTP: _('http/web interface'),
915 915 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
916 916 cls.ROLE_API: _('api calls'),
917 917 cls.ROLE_FEED: _('feed access'),
918 918 }.get(role, role)
919 919
920 920 @property
921 921 def expired(self):
922 922 if self.expires == -1:
923 923 return False
924 924 return time.time() > self.expires
925 925
926 926 @property
927 927 def role_humanized(self):
928 928 return self._get_role_name(self.role)
929 929
930 930
931 931 class UserEmailMap(Base, BaseModel):
932 932 __tablename__ = 'user_email_map'
933 933 __table_args__ = (
934 934 Index('uem_email_idx', 'email'),
935 935 UniqueConstraint('email'),
936 936 {'extend_existing': True, 'mysql_engine': 'InnoDB',
937 937 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
938 938 )
939 939 __mapper_args__ = {}
940 940
941 941 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
942 942 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
943 943 _email = Column("email", String(255), nullable=True, unique=False, default=None)
944 944 user = relationship('User', lazy='joined')
945 945
946 946 @validates('_email')
947 947 def validate_email(self, key, email):
948 948 # check if this email is not the main one
949 949 main_email = Session().query(User).filter(User.email == email).scalar()
950 950 if main_email is not None:
951 951 raise AttributeError('email %s is already present in the user table' % email)
952 952 return email
953 953
954 954 @hybrid_property
955 955 def email(self):
956 956 return self._email
957 957
958 958 @email.setter
959 959 def email(self, val):
960 960 self._email = val.lower() if val else None
961 961
962 962
963 963 class UserIpMap(Base, BaseModel):
964 964 __tablename__ = 'user_ip_map'
965 965 __table_args__ = (
966 966 UniqueConstraint('user_id', 'ip_addr'),
967 967 {'extend_existing': True, 'mysql_engine': 'InnoDB',
968 968 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
969 969 )
970 970 __mapper_args__ = {}
971 971
972 972 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
973 973 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
974 974 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
975 975 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
976 976 description = Column("description", String(10000), nullable=True, unique=None, default=None)
977 977 user = relationship('User', lazy='joined')
978 978
979 979 @classmethod
980 980 def _get_ip_range(cls, ip_addr):
981 981 net = ipaddress.ip_network(ip_addr, strict=False)
982 982 return [str(net.network_address), str(net.broadcast_address)]
983 983
984 984 def __json__(self):
985 985 return {
986 986 'ip_addr': self.ip_addr,
987 987 'ip_range': self._get_ip_range(self.ip_addr),
988 988 }
989 989
990 990 def __unicode__(self):
991 991 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
992 992 self.user_id, self.ip_addr)
993 993
994 994 class UserLog(Base, BaseModel):
995 995 __tablename__ = 'user_logs'
996 996 __table_args__ = (
997 997 {'extend_existing': True, 'mysql_engine': 'InnoDB',
998 998 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
999 999 )
1000 1000 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1001 1001 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1002 1002 username = Column("username", String(255), nullable=True, unique=None, default=None)
1003 1003 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1004 1004 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1005 1005 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1006 1006 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1007 1007 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1008 1008
1009 1009 def __unicode__(self):
1010 1010 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1011 1011 self.repository_name,
1012 1012 self.action)
1013 1013
1014 1014 @property
1015 1015 def action_as_day(self):
1016 1016 return datetime.date(*self.action_date.timetuple()[:3])
1017 1017
1018 1018 user = relationship('User')
1019 1019 repository = relationship('Repository', cascade='')
1020 1020
1021 1021
1022 1022 class UserGroup(Base, BaseModel):
1023 1023 __tablename__ = 'users_groups'
1024 1024 __table_args__ = (
1025 1025 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1026 1026 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1027 1027 )
1028 1028
1029 1029 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1030 1030 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1031 1031 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1032 1032 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1033 1033 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1034 1034 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1035 1035 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1036 1036 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1037 1037
1038 1038 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1039 1039 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1040 1040 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1041 1041 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1042 1042 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1043 1043 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1044 1044
1045 1045 user = relationship('User')
1046 1046
1047 1047 @hybrid_property
1048 1048 def group_data(self):
1049 1049 if not self._group_data:
1050 1050 return {}
1051 1051
1052 1052 try:
1053 1053 return json.loads(self._group_data)
1054 1054 except TypeError:
1055 1055 return {}
1056 1056
1057 1057 @group_data.setter
1058 1058 def group_data(self, val):
1059 1059 try:
1060 1060 self._group_data = json.dumps(val)
1061 1061 except Exception:
1062 1062 log.error(traceback.format_exc())
1063 1063
1064 1064 def __unicode__(self):
1065 1065 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1066 1066 self.users_group_id,
1067 1067 self.users_group_name)
1068 1068
1069 1069 @classmethod
1070 1070 def get_by_group_name(cls, group_name, cache=False,
1071 1071 case_insensitive=False):
1072 1072 if case_insensitive:
1073 1073 q = cls.query().filter(func.lower(cls.users_group_name) ==
1074 1074 func.lower(group_name))
1075 1075
1076 1076 else:
1077 1077 q = cls.query().filter(cls.users_group_name == group_name)
1078 1078 if cache:
1079 1079 q = q.options(FromCache(
1080 1080 "sql_cache_short",
1081 1081 "get_group_%s" % _hash_key(group_name)))
1082 1082 return q.scalar()
1083 1083
1084 1084 @classmethod
1085 1085 def get(cls, user_group_id, cache=False):
1086 1086 user_group = cls.query()
1087 1087 if cache:
1088 1088 user_group = user_group.options(FromCache("sql_cache_short",
1089 1089 "get_users_group_%s" % user_group_id))
1090 1090 return user_group.get(user_group_id)
1091 1091
1092 1092 def permissions(self, with_admins=True, with_owner=True):
1093 1093 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1094 1094 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1095 1095 joinedload(UserUserGroupToPerm.user),
1096 1096 joinedload(UserUserGroupToPerm.permission),)
1097 1097
1098 1098 # get owners and admins and permissions. We do a trick of re-writing
1099 1099 # sqlalchemy objects into plain dict-like rows because the sqlalchemy
1100 1100 # session keeps a global reference and changing one object would
1101 1101 # propagate to all others. This means that if an admin is also the
1102 1102 # owner, a change to the admin_row would propagate to both objects
1103 1103 perm_rows = []
1104 1104 for _usr in q.all():
1105 1105 usr = AttributeDict(_usr.user.get_dict())
1106 1106 usr.permission = _usr.permission.permission_name
1107 1107 perm_rows.append(usr)
1108 1108
1109 1109 # filter the perm rows by 'default' first and then sort them by
1110 1110 # admin,write,read,none permissions sorted again alphabetically in
1111 1111 # each group
1112 1112 perm_rows = sorted(perm_rows, key=display_sort)
1113 1113
1114 1114 _admin_perm = 'usergroup.admin'
1115 1115 owner_row = []
1116 1116 if with_owner:
1117 1117 usr = AttributeDict(self.user.get_dict())
1118 1118 usr.owner_row = True
1119 1119 usr.permission = _admin_perm
1120 1120 owner_row.append(usr)
1121 1121
1122 1122 super_admin_rows = []
1123 1123 if with_admins:
1124 1124 for usr in User.get_all_super_admins():
1125 1125 # if this admin is also owner, don't double the record
1126 1126 if usr.user_id == owner_row[0].user_id:
1127 1127 owner_row[0].admin_row = True
1128 1128 else:
1129 1129 usr = AttributeDict(usr.get_dict())
1130 1130 usr.admin_row = True
1131 1131 usr.permission = _admin_perm
1132 1132 super_admin_rows.append(usr)
1133 1133
1134 1134 return super_admin_rows + owner_row + perm_rows
1135 1135
1136 1136 def permission_user_groups(self):
1137 1137 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1138 1138 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1139 1139 joinedload(UserGroupUserGroupToPerm.target_user_group),
1140 1140 joinedload(UserGroupUserGroupToPerm.permission),)
1141 1141
1142 1142 perm_rows = []
1143 1143 for _user_group in q.all():
1144 1144 usr = AttributeDict(_user_group.user_group.get_dict())
1145 1145 usr.permission = _user_group.permission.permission_name
1146 1146 perm_rows.append(usr)
1147 1147
1148 1148 return perm_rows
1149 1149
1150 1150 def _get_default_perms(self, user_group, suffix=''):
1151 1151 from rhodecode.model.permission import PermissionModel
1152 1152 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1153 1153
1154 1154 def get_default_perms(self, suffix=''):
1155 1155 return self._get_default_perms(self, suffix)
1156 1156
1157 1157 def get_api_data(self, with_group_members=True, include_secrets=False):
1158 1158 """
1159 1159 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1160 1160 basically forwarded.
1161 1161
1162 1162 """
1163 1163 user_group = self
1164 1164
1165 1165 data = {
1166 1166 'users_group_id': user_group.users_group_id,
1167 1167 'group_name': user_group.users_group_name,
1168 1168 'group_description': user_group.user_group_description,
1169 1169 'active': user_group.users_group_active,
1170 1170 'owner': user_group.user.username,
1171 1171 }
1172 1172 if with_group_members:
1173 1173 users = []
1174 1174 for user in user_group.members:
1175 1175 user = user.user
1176 1176 users.append(user.get_api_data(include_secrets=include_secrets))
1177 1177 data['users'] = users
1178 1178
1179 1179 return data
1180 1180
1181 1181
1182 1182 class UserGroupMember(Base, BaseModel):
1183 1183 __tablename__ = 'users_groups_members'
1184 1184 __table_args__ = (
1185 1185 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1186 1186 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1187 1187 )
1188 1188
1189 1189 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1190 1190 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1191 1191 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1192 1192
1193 1193 user = relationship('User', lazy='joined')
1194 1194 users_group = relationship('UserGroup')
1195 1195
1196 1196 def __init__(self, gr_id='', u_id=''):
1197 1197 self.users_group_id = gr_id
1198 1198 self.user_id = u_id
1199 1199
1200 1200
1201 1201 class RepositoryField(Base, BaseModel):
1202 1202 __tablename__ = 'repositories_fields'
1203 1203 __table_args__ = (
1204 1204 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1205 1205 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1206 1206 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1207 1207 )
1208 1208 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1209 1209
1210 1210 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1211 1211 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1212 1212 field_key = Column("field_key", String(250))
1213 1213 field_label = Column("field_label", String(1024), nullable=False)
1214 1214 field_value = Column("field_value", String(10000), nullable=False)
1215 1215 field_desc = Column("field_desc", String(1024), nullable=False)
1216 1216 field_type = Column("field_type", String(255), nullable=False, unique=None)
1217 1217 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1218 1218
1219 1219 repository = relationship('Repository')
1220 1220
1221 1221 @property
1222 1222 def field_key_prefixed(self):
1223 1223 return 'ex_%s' % self.field_key
1224 1224
1225 1225 @classmethod
1226 1226 def un_prefix_key(cls, key):
1227 1227 if key.startswith(cls.PREFIX):
1228 1228 return key[len(cls.PREFIX):]
1229 1229 return key
1230 1230
1231 1231 @classmethod
1232 1232 def get_by_key_name(cls, key, repo):
1233 1233 row = cls.query()\
1234 1234 .filter(cls.repository == repo)\
1235 1235 .filter(cls.field_key == key).scalar()
1236 1236 return row
1237 1237
1238 1238
1239 1239 class Repository(Base, BaseModel):
1240 1240 __tablename__ = 'repositories'
1241 1241 __table_args__ = (
1242 1242 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1243 1243 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1244 1244 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1245 1245 )
1246 1246 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1247 1247 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1248 1248
1249 1249 STATE_CREATED = 'repo_state_created'
1250 1250 STATE_PENDING = 'repo_state_pending'
1251 1251 STATE_ERROR = 'repo_state_error'
1252 1252
1253 1253 LOCK_AUTOMATIC = 'lock_auto'
1254 1254 LOCK_API = 'lock_api'
1255 1255 LOCK_WEB = 'lock_web'
1256 1256 LOCK_PULL = 'lock_pull'
1257 1257
1258 1258 NAME_SEP = URL_SEP
1259 1259
1260 1260 repo_id = Column(
1261 1261 "repo_id", Integer(), nullable=False, unique=True, default=None,
1262 1262 primary_key=True)
1263 1263 _repo_name = Column(
1264 1264 "repo_name", Text(), nullable=False, default=None)
1265 1265 _repo_name_hash = Column(
1266 1266 "repo_name_hash", String(255), nullable=False, unique=True)
1267 1267 repo_state = Column("repo_state", String(255), nullable=True)
1268 1268
1269 1269 clone_uri = Column(
1270 1270 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1271 1271 default=None)
1272 1272 repo_type = Column(
1273 1273 "repo_type", String(255), nullable=False, unique=False, default=None)
1274 1274 user_id = Column(
1275 1275 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1276 1276 unique=False, default=None)
1277 1277 private = Column(
1278 1278 "private", Boolean(), nullable=True, unique=None, default=None)
1279 1279 enable_statistics = Column(
1280 1280 "statistics", Boolean(), nullable=True, unique=None, default=True)
1281 1281 enable_downloads = Column(
1282 1282 "downloads", Boolean(), nullable=True, unique=None, default=True)
1283 1283 description = Column(
1284 1284 "description", String(10000), nullable=True, unique=None, default=None)
1285 1285 created_on = Column(
1286 1286 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1287 1287 default=datetime.datetime.now)
1288 1288 updated_on = Column(
1289 1289 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1290 1290 default=datetime.datetime.now)
1291 1291 _landing_revision = Column(
1292 1292 "landing_revision", String(255), nullable=False, unique=False,
1293 1293 default=None)
1294 1294 enable_locking = Column(
1295 1295 "enable_locking", Boolean(), nullable=False, unique=None,
1296 1296 default=False)
1297 1297 _locked = Column(
1298 1298 "locked", String(255), nullable=True, unique=False, default=None)
1299 1299 _changeset_cache = Column(
1300 1300 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1301 1301
1302 1302 fork_id = Column(
1303 1303 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1304 1304 nullable=True, unique=False, default=None)
1305 1305 group_id = Column(
1306 1306 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1307 1307 unique=False, default=None)
1308 1308
1309 1309 user = relationship('User', lazy='joined')
1310 1310 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1311 1311 group = relationship('RepoGroup', lazy='joined')
1312 1312 repo_to_perm = relationship(
1313 1313 'UserRepoToPerm', cascade='all',
1314 1314 order_by='UserRepoToPerm.repo_to_perm_id')
1315 1315 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1316 1316 stats = relationship('Statistics', cascade='all', uselist=False)
1317 1317
1318 1318 followers = relationship(
1319 1319 'UserFollowing',
1320 1320 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1321 1321 cascade='all')
1322 1322 extra_fields = relationship(
1323 1323 'RepositoryField', cascade="all, delete, delete-orphan")
1324 1324 logs = relationship('UserLog')
1325 1325 comments = relationship(
1326 1326 'ChangesetComment', cascade="all, delete, delete-orphan")
1327 1327 pull_requests_source = relationship(
1328 1328 'PullRequest',
1329 1329 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1330 1330 cascade="all, delete, delete-orphan")
1331 1331 pull_requests_target = relationship(
1332 1332 'PullRequest',
1333 1333 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1334 1334 cascade="all, delete, delete-orphan")
1335 1335 ui = relationship('RepoRhodeCodeUi', cascade="all")
1336 1336 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1337 1337 integrations = relationship('Integration',
1338 1338 cascade="all, delete, delete-orphan")
1339 1339
1340 1340 def __unicode__(self):
1341 1341 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1342 1342 safe_unicode(self.repo_name))
1343 1343
1344 1344 @hybrid_property
1345 1345 def landing_rev(self):
1346 1346 # always should return [rev_type, rev]
1347 1347 if self._landing_revision:
1348 1348 _rev_info = self._landing_revision.split(':')
1349 1349 if len(_rev_info) < 2:
1350 1350 _rev_info.insert(0, 'rev')
1351 1351 return [_rev_info[0], _rev_info[1]]
1352 1352 return [None, None]
1353 1353
1354 1354 @landing_rev.setter
1355 1355 def landing_rev(self, val):
1356 1356 if ':' not in val:
1357 1357 raise ValueError('value must be delimited with `:` and consist '
1358 1358 'of <rev_type>:<rev>, got %s instead' % val)
1359 1359 self._landing_revision = val
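# Example usage (illustrative sketch). The value must be '<rev_type>:<rev>':
#
#   repo.landing_rev = 'branch:default'
#   repo.landing_rev             # -> ['branch', 'default']
#   repo.landing_rev = 'tip'     # raises ValueError, no ':' delimiter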
1360 1360
1361 1361 @hybrid_property
1362 1362 def locked(self):
1363 1363 if self._locked:
1364 1364 user_id, timelocked, reason = self._locked.split(':')
1365 1365 lock_values = int(user_id), timelocked, reason
1366 1366 else:
1367 1367 lock_values = [None, None, None]
1368 1368 return lock_values
1369 1369
1370 1370 @locked.setter
1371 1371 def locked(self, val):
1372 1372 if val and isinstance(val, (list, tuple)):
1373 1373 self._locked = ':'.join(map(str, val))
1374 1374 else:
1375 1375 self._locked = None
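# Example usage (illustrative sketch; the timestamp is hypothetical). The lock
# is stored as 'user_id:time:reason' and read back as a 3-tuple:
#
#   repo.locked = [2, time.time(), Repository.LOCK_API]
#   repo.locked          # -> (2, '1490000000.0', 'lock_api')
#   repo.locked = None   # clears the lock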
1376 1376
1377 1377 @hybrid_property
1378 1378 def changeset_cache(self):
1379 1379 from rhodecode.lib.vcs.backends.base import EmptyCommit
1380 1380 dummy = EmptyCommit().__json__()
1381 1381 if not self._changeset_cache:
1382 1382 return dummy
1383 1383 try:
1384 1384 return json.loads(self._changeset_cache)
1385 1385 except TypeError:
1386 1386 return dummy
1387 1387 except Exception:
1388 1388 log.error(traceback.format_exc())
1389 1389 return dummy
1390 1390
1391 1391 @changeset_cache.setter
1392 1392 def changeset_cache(self, val):
1393 1393 try:
1394 1394 self._changeset_cache = json.dumps(val)
1395 1395 except Exception:
1396 1396 log.error(traceback.format_exc())
1397 1397
1398 1398 @hybrid_property
1399 1399 def repo_name(self):
1400 1400 return self._repo_name
1401 1401
1402 1402 @repo_name.setter
1403 1403 def repo_name(self, value):
1404 1404 self._repo_name = value
1405 1405 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1406 1406
1407 1407 @classmethod
1408 1408 def normalize_repo_name(cls, repo_name):
1409 1409 """
1410 1410 Normalizes an OS specific repo_name to the format stored internally in the
1411 1411 database, using URL_SEP as the separator
1412 1412
1413 1413 :param cls:
1414 1414 :param repo_name:
1415 1415 """
1416 1416 return cls.NAME_SEP.join(repo_name.split(os.sep))
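# Example (illustrative sketch). An OS specific path is stored with URL_SEP,
# so on Windows 'group\\repo' normalizes to 'group/repo':
#
#   Repository.normalize_repo_name(os.path.join('group', 'repo'))  # -> 'group/repo'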
1417 1417
1418 1418 @classmethod
1419 1419 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1420 1420 session = Session()
1421 1421 q = session.query(cls).filter(cls.repo_name == repo_name)
1422 1422
1423 1423 if cache:
1424 1424 if identity_cache:
1425 1425 val = cls.identity_cache(session, 'repo_name', repo_name)
1426 1426 if val:
1427 1427 return val
1428 1428 else:
1429 1429 q = q.options(
1430 1430 FromCache("sql_cache_short",
1431 1431 "get_repo_by_name_%s" % _hash_key(repo_name)))
1432 1432
1433 1433 return q.scalar()
1434 1434
1435 1435 @classmethod
1436 1436 def get_by_full_path(cls, repo_full_path):
1437 1437 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1438 1438 repo_name = cls.normalize_repo_name(repo_name)
1439 1439 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1440 1440
1441 1441 @classmethod
1442 1442 def get_repo_forks(cls, repo_id):
1443 1443 return cls.query().filter(Repository.fork_id == repo_id)
1444 1444
1445 1445 @classmethod
1446 1446 def base_path(cls):
1447 1447 """
1448 1448 Returns the base path where all repos are stored
1449 1449
1450 1450 :param cls:
1451 1451 """
1452 1452 q = Session().query(RhodeCodeUi)\
1453 1453 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1454 1454 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1455 1455 return q.one().ui_value
1456 1456
1457 1457 @classmethod
1458 1458 def is_valid(cls, repo_name):
1459 1459 """
1460 1460 returns True if given repo name is a valid filesystem repository
1461 1461
1462 1462 :param cls:
1463 1463 :param repo_name:
1464 1464 """
1465 1465 from rhodecode.lib.utils import is_valid_repo
1466 1466
1467 1467 return is_valid_repo(repo_name, cls.base_path())
1468 1468
1469 1469 @classmethod
1470 1470 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1471 1471 case_insensitive=True):
1472 1472 q = Repository.query()
1473 1473
1474 1474 if not isinstance(user_id, Optional):
1475 1475 q = q.filter(Repository.user_id == user_id)
1476 1476
1477 1477 if not isinstance(group_id, Optional):
1478 1478 q = q.filter(Repository.group_id == group_id)
1479 1479
1480 1480 if case_insensitive:
1481 1481 q = q.order_by(func.lower(Repository.repo_name))
1482 1482 else:
1483 1483 q = q.order_by(Repository.repo_name)
1484 1484 return q.all()
1485 1485
1486 1486 @property
1487 1487 def forks(self):
1488 1488 """
1489 1489 Return forks of this repo
1490 1490 """
1491 1491 return Repository.get_repo_forks(self.repo_id)
1492 1492
1493 1493 @property
1494 1494 def parent(self):
1495 1495 """
1496 1496 Returns fork parent
1497 1497 """
1498 1498 return self.fork
1499 1499
1500 1500 @property
1501 1501 def just_name(self):
1502 1502 return self.repo_name.split(self.NAME_SEP)[-1]
1503 1503
1504 1504 @property
1505 1505 def groups_with_parents(self):
1506 1506 groups = []
1507 1507 if self.group is None:
1508 1508 return groups
1509 1509
1510 1510 cur_gr = self.group
1511 1511 groups.insert(0, cur_gr)
1512 1512 while 1:
1513 1513 gr = getattr(cur_gr, 'parent_group', None)
1514 1514 cur_gr = cur_gr.parent_group
1515 1515 if gr is None:
1516 1516 break
1517 1517 groups.insert(0, gr)
1518 1518
1519 1519 return groups
1520 1520
1521 1521 @property
1522 1522 def groups_and_repo(self):
1523 1523 return self.groups_with_parents, self
1524 1524
1525 1525 @LazyProperty
1526 1526 def repo_path(self):
1527 1527 """
1528 1528 Returns the full base path for this repository, i.e. where it actually
1529 1529 exists on the filesystem
1530 1530 """
1531 1531 q = Session().query(RhodeCodeUi).filter(
1532 1532 RhodeCodeUi.ui_key == self.NAME_SEP)
1533 1533 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1534 1534 return q.one().ui_value
1535 1535
1536 1536 @property
1537 1537 def repo_full_path(self):
1538 1538 p = [self.repo_path]
1539 1539 # we need to split the name by / since this is how we store the
1540 1540 # names in the database, but that eventually needs to be converted
1541 1541 # into a valid system path
1542 1542 p += self.repo_name.split(self.NAME_SEP)
1543 1543 return os.path.join(*map(safe_unicode, p))
1544 1544
1545 1545 @property
1546 1546 def cache_keys(self):
1547 1547 """
1548 1548 Returns associated cache keys for that repo
1549 1549 """
1550 1550 return CacheKey.query()\
1551 1551 .filter(CacheKey.cache_args == self.repo_name)\
1552 1552 .order_by(CacheKey.cache_key)\
1553 1553 .all()
1554 1554
1555 1555 def get_new_name(self, repo_name):
1556 1556 """
1557 1557 returns the new full repository name based on the assigned group and the new name
1558 1558
1559 1559 :param repo_name:
1560 1560 """
1561 1561 path_prefix = self.group.full_path_splitted if self.group else []
1562 1562 return self.NAME_SEP.join(path_prefix + [repo_name])
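# Example usage (illustrative sketch; names are hypothetical). Renaming keeps
# the prefix of the assigned group:
#
#   repo.get_new_name('renamed')   # -> 'docs/renamed' for a repo in the 'docs' group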
1563 1563
1564 1564 @property
1565 1565 def _config(self):
1566 1566 """
1567 1567 Returns db based config object.
1568 1568 """
1569 1569 from rhodecode.lib.utils import make_db_config
1570 1570 return make_db_config(clear_session=False, repo=self)
1571 1571
1572 1572 def permissions(self, with_admins=True, with_owner=True):
1573 1573 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1574 1574 q = q.options(joinedload(UserRepoToPerm.repository),
1575 1575 joinedload(UserRepoToPerm.user),
1576 1576 joinedload(UserRepoToPerm.permission),)
1577 1577
1578 1578 # get owners and admins and permissions. We do a trick of re-writing
1579 1579 # sqlalchemy objects into plain dict-like rows because the sqlalchemy
1580 1580 # session keeps a global reference and changing one object would
1581 1581 # propagate to all others. This means that if an admin is also the
1582 1582 # owner, a change to the admin_row would propagate to both objects
1583 1583 perm_rows = []
1584 1584 for _usr in q.all():
1585 1585 usr = AttributeDict(_usr.user.get_dict())
1586 1586 usr.permission = _usr.permission.permission_name
1587 1587 perm_rows.append(usr)
1588 1588
1589 1589 # filter the perm rows by 'default' first and then sort them by
1590 1590 # admin,write,read,none permissions sorted again alphabetically in
1591 1591 # each group
1592 1592 perm_rows = sorted(perm_rows, key=display_sort)
1593 1593
1594 1594 _admin_perm = 'repository.admin'
1595 1595 owner_row = []
1596 1596 if with_owner:
1597 1597 usr = AttributeDict(self.user.get_dict())
1598 1598 usr.owner_row = True
1599 1599 usr.permission = _admin_perm
1600 1600 owner_row.append(usr)
1601 1601
1602 1602 super_admin_rows = []
1603 1603 if with_admins:
1604 1604 for usr in User.get_all_super_admins():
1605 1605 # if this admin is also owner, don't double the record
1606 1606 if usr.user_id == owner_row[0].user_id:
1607 1607 owner_row[0].admin_row = True
1608 1608 else:
1609 1609 usr = AttributeDict(usr.get_dict())
1610 1610 usr.admin_row = True
1611 1611 usr.permission = _admin_perm
1612 1612 super_admin_rows.append(usr)
1613 1613
1614 1614 return super_admin_rows + owner_row + perm_rows
1615 1615
1616 1616 def permission_user_groups(self):
1617 1617 q = UserGroupRepoToPerm.query().filter(
1618 1618 UserGroupRepoToPerm.repository == self)
1619 1619 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1620 1620 joinedload(UserGroupRepoToPerm.users_group),
1621 1621 joinedload(UserGroupRepoToPerm.permission),)
1622 1622
1623 1623 perm_rows = []
1624 1624 for _user_group in q.all():
1625 1625 usr = AttributeDict(_user_group.users_group.get_dict())
1626 1626 usr.permission = _user_group.permission.permission_name
1627 1627 perm_rows.append(usr)
1628 1628
1629 1629 return perm_rows
1630 1630
1631 1631 def get_api_data(self, include_secrets=False):
1632 1632 """
1633 1633 Common function for generating repo api data
1634 1634
1635 1635 :param include_secrets: See :meth:`User.get_api_data`.
1636 1636
1637 1637 """
1638 1638 # TODO: mikhail: there is an anti-pattern here, we probably need to
1639 1639 # move these methods to the model level.
1640 1640 from rhodecode.model.settings import SettingsModel
1641 1641
1642 1642 repo = self
1643 1643 _user_id, _time, _reason = self.locked
1644 1644
1645 1645 data = {
1646 1646 'repo_id': repo.repo_id,
1647 1647 'repo_name': repo.repo_name,
1648 1648 'repo_type': repo.repo_type,
1649 1649 'clone_uri': repo.clone_uri or '',
1650 1650 'url': url('summary_home', repo_name=self.repo_name, qualified=True),
1651 1651 'private': repo.private,
1652 1652 'created_on': repo.created_on,
1653 1653 'description': repo.description,
1654 1654 'landing_rev': repo.landing_rev,
1655 1655 'owner': repo.user.username,
1656 1656 'fork_of': repo.fork.repo_name if repo.fork else None,
1657 1657 'enable_statistics': repo.enable_statistics,
1658 1658 'enable_locking': repo.enable_locking,
1659 1659 'enable_downloads': repo.enable_downloads,
1660 1660 'last_changeset': repo.changeset_cache,
1661 1661 'locked_by': User.get(_user_id).get_api_data(
1662 1662 include_secrets=include_secrets) if _user_id else None,
1663 1663 'locked_date': time_to_datetime(_time) if _time else None,
1664 1664 'lock_reason': _reason if _reason else None,
1665 1665 }
1666 1666
1667 1667 # TODO: mikhail: should be per-repo settings here
1668 1668 rc_config = SettingsModel().get_all_settings()
1669 1669 repository_fields = str2bool(
1670 1670 rc_config.get('rhodecode_repository_fields'))
1671 1671 if repository_fields:
1672 1672 for f in self.extra_fields:
1673 1673 data[f.field_key_prefixed] = f.field_value
1674 1674
1675 1675 return data
1676 1676
1677 1677 @classmethod
1678 1678 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1679 1679 if not lock_time:
1680 1680 lock_time = time.time()
1681 1681 if not lock_reason:
1682 1682 lock_reason = cls.LOCK_AUTOMATIC
1683 1683 repo.locked = [user_id, lock_time, lock_reason]
1684 1684 Session().add(repo)
1685 1685 Session().commit()
1686 1686
1687 1687 @classmethod
1688 1688 def unlock(cls, repo):
1689 1689 repo.locked = None
1690 1690 Session().add(repo)
1691 1691 Session().commit()
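# Example usage (illustrative sketch). Both helpers commit the session right away:
#
#   Repository.lock(repo, user.user_id, lock_reason=Repository.LOCK_API)
#   Repository.unlock(repo)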
1692 1692
1693 1693 @classmethod
1694 1694 def getlock(cls, repo):
1695 1695 return repo.locked
1696 1696
1697 1697 def is_user_lock(self, user_id):
1698 1698 if self.locked[0]:
1699 1699 lock_user_id = safe_int(self.locked[0])
1700 1700 user_id = safe_int(user_id)
1701 1701 # both are ints, and they are equal
1702 1702 return all([lock_user_id, user_id]) and lock_user_id == user_id
1703 1703
1704 1704 return False
1705 1705
1706 1706 def get_locking_state(self, action, user_id, only_when_enabled=True):
1707 1707 """
1708 1708 Checks locking on this repository. If locking is enabled and a lock is
1709 1709 present, returns a tuple of make_lock, locked, locked_by.
1710 1710 make_lock can have 3 states: None (do nothing), True (make a lock) and
1711 1711 False (release a lock). This value is later propagated to the hooks,
1712 1712 which do the actual locking. Think of it as a signal telling the hooks what to do.
1713 1713
1714 1714 """
1715 1715 # TODO: johbo: This is part of the business logic and should be moved
1716 1716 # into the RepositoryModel.
1717 1717
1718 1718 if action not in ('push', 'pull'):
1719 1719 raise ValueError("Invalid action value: %s" % repr(action))
1720 1720
1721 1721 # defines if locked error should be thrown to user
1722 1722 currently_locked = False
1723 1723 # defines if new lock should be made, tri-state
1724 1724 make_lock = None
1725 1725 repo = self
1726 1726 user = User.get(user_id)
1727 1727
1728 1728 lock_info = repo.locked
1729 1729
1730 1730 if repo and (repo.enable_locking or not only_when_enabled):
1731 1731 if action == 'push':
1732 1732 # check if it's already locked !, if it is compare users
1733 1733 locked_by_user_id = lock_info[0]
1734 1734 if user.user_id == locked_by_user_id:
1735 1735 log.debug(
1736 1736 'Got `push` action from user %s, now unlocking', user)
1737 1737 # unlock if we have push from user who locked
1738 1738 make_lock = False
1739 1739 else:
1740 1740 # we're not the same user who locked, ban with
1741 1741 # code defined in settings (default is 423 HTTP Locked) !
1742 1742 log.debug('Repo %s is currently locked by %s', repo, user)
1743 1743 currently_locked = True
1744 1744 elif action == 'pull':
1745 1745 # [0] user [1] date
1746 1746 if lock_info[0] and lock_info[1]:
1747 1747 log.debug('Repo %s is currently locked by %s', repo, user)
1748 1748 currently_locked = True
1749 1749 else:
1750 1750 log.debug('Setting lock on repo %s by %s', repo, user)
1751 1751 make_lock = True
1752 1752
1753 1753 else:
1754 1754 log.debug('Repository %s does not have locking enabled', repo)
1755 1755
1756 1756 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1757 1757 make_lock, currently_locked, lock_info)
1758 1758
1759 1759 from rhodecode.lib.auth import HasRepoPermissionAny
1760 1760 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1761 1761 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1762 1762 # if we don't have at least write permission we cannot make a lock
1763 1763 log.debug('lock state reset back to FALSE due to lack '
1764 1764 'of at least write permission')
1765 1765 make_lock = False
1766 1766
1767 1767 return make_lock, currently_locked, lock_info
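# Example usage (illustrative sketch). The first element is the tri-state signal
# later passed to the hooks, the last one is the raw (user_id, time, reason) info:
#
#   make_lock, currently_locked, lock_info = repo.get_locking_state(
#       'push', user.user_id)
#   # make_lock: True -> set a lock, False -> release it, None -> do nothing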
1768 1768
1769 1769 @property
1770 1770 def last_db_change(self):
1771 1771 return self.updated_on
1772 1772
1773 1773 @property
1774 1774 def clone_uri_hidden(self):
1775 1775 clone_uri = self.clone_uri
1776 1776 if clone_uri:
1777 1777 import urlobject
1778 1778 url_obj = urlobject.URLObject(clone_uri)
1779 1779 if url_obj.password:
1780 1780 clone_uri = url_obj.with_password('*****')
1781 1781 return clone_uri
1782 1782
1783 1783 def clone_url(self, **override):
1784 1784 qualified_home_url = url('home', qualified=True)
1785 1785
1786 1786 uri_tmpl = None
1787 1787 if 'with_id' in override:
1788 1788 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1789 1789 del override['with_id']
1790 1790
1791 1791 if 'uri_tmpl' in override:
1792 1792 uri_tmpl = override['uri_tmpl']
1793 1793 del override['uri_tmpl']
1794 1794
1795 1795 # we didn't override our tmpl from **overrides
1796 1796 if not uri_tmpl:
1797 1797 uri_tmpl = self.DEFAULT_CLONE_URI
1798 1798 try:
1799 1799 from pylons import tmpl_context as c
1800 1800 uri_tmpl = c.clone_uri_tmpl
1801 1801 except Exception:
1802 1802 # in any case if we call this outside of request context,
1803 1803 # ie, not having tmpl_context set up
1804 1804 pass
1805 1805
1806 1806 return get_clone_url(uri_tmpl=uri_tmpl,
1807 1807 qualifed_home_url=qualified_home_url,
1808 1808 repo_name=self.repo_name,
1809 1809 repo_id=self.repo_id, **override)
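# Example usage (illustrative sketch; host and repo id are hypothetical):
#
#   repo.clone_url()               # e.g. http://user@example.com/group/repo
#   repo.clone_url(with_id=True)   # e.g. http://user@example.com/_42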
1810 1810
1811 1811 def set_state(self, state):
1812 1812 self.repo_state = state
1813 1813 Session().add(self)
1814 1814 #==========================================================================
1815 1815 # SCM PROPERTIES
1816 1816 #==========================================================================
1817 1817
1818 1818 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1819 1819 return get_commit_safe(
1820 1820 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1821 1821
1822 1822 def get_changeset(self, rev=None, pre_load=None):
1823 1823 warnings.warn("Use get_commit", DeprecationWarning)
1824 1824 commit_id = None
1825 1825 commit_idx = None
1826 1826 if isinstance(rev, basestring):
1827 1827 commit_id = rev
1828 1828 else:
1829 1829 commit_idx = rev
1830 1830 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1831 1831 pre_load=pre_load)
1832 1832
1833 1833 def get_landing_commit(self):
1834 1834 """
1835 1835 Returns landing commit, or if that doesn't exist returns the tip
1836 1836 """
1837 1837 _rev_type, _rev = self.landing_rev
1838 1838 commit = self.get_commit(_rev)
1839 1839 if isinstance(commit, EmptyCommit):
1840 1840 return self.get_commit()
1841 1841 return commit
1842 1842
1843 1843 def update_commit_cache(self, cs_cache=None, config=None):
1844 1844 """
1845 1845 Update cache of last changeset for repository, keys should be::
1846 1846
1847 1847 short_id
1848 1848 raw_id
1849 1849 revision
1850 1850 parents
1851 1851 message
1852 1852 date
1853 1853 author
1854 1854
1855 1855 :param cs_cache:
1856 1856 """
1857 1857 from rhodecode.lib.vcs.backends.base import BaseChangeset
1858 1858 if cs_cache is None:
1859 1859 # use no-cache version here
1860 1860 scm_repo = self.scm_instance(cache=False, config=config)
1861 1861 if scm_repo:
1862 1862 cs_cache = scm_repo.get_commit(
1863 1863 pre_load=["author", "date", "message", "parents"])
1864 1864 else:
1865 1865 cs_cache = EmptyCommit()
1866 1866
1867 1867 if isinstance(cs_cache, BaseChangeset):
1868 1868 cs_cache = cs_cache.__json__()
1869 1869
1870 1870 def is_outdated(new_cs_cache):
1871 1871 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1872 1872 new_cs_cache['revision'] != self.changeset_cache['revision']):
1873 1873 return True
1874 1874 return False
1875 1875
1876 1876 # check if we have maybe already latest cached revision
1877 1877 if is_outdated(cs_cache) or not self.changeset_cache:
1878 1878 _default = datetime.datetime.fromtimestamp(0)
1879 1879 last_change = cs_cache.get('date') or _default
1880 1880 log.debug('updated repo %s with new cs cache %s',
1881 1881 self.repo_name, cs_cache)
1882 1882 self.updated_on = last_change
1883 1883 self.changeset_cache = cs_cache
1884 1884 Session().add(self)
1885 1885 Session().commit()
1886 1886 else:
1887 1887 log.debug('Skipping update_commit_cache for repo:`%s` '
1888 1888 'commit already with latest changes', self.repo_name)
1889 1889
1890 1890 @property
1891 1891 def tip(self):
1892 1892 return self.get_commit('tip')
1893 1893
1894 1894 @property
1895 1895 def author(self):
1896 1896 return self.tip.author
1897 1897
1898 1898 @property
1899 1899 def last_change(self):
1900 1900 return self.scm_instance().last_change
1901 1901
1902 1902 def get_comments(self, revisions=None):
1903 1903 """
1904 1904 Returns comments for this repository grouped by revisions
1905 1905
1906 1906 :param revisions: filter query by revisions only
1907 1907 """
1908 1908 cmts = ChangesetComment.query()\
1909 1909 .filter(ChangesetComment.repo == self)
1910 1910 if revisions:
1911 1911 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1912 1912 grouped = collections.defaultdict(list)
1913 1913 for cmt in cmts.all():
1914 1914 grouped[cmt.revision].append(cmt)
1915 1915 return grouped
1916 1916
1917 1917 def statuses(self, revisions=None):
1918 1918 """
1919 1919 Returns statuses for this repository
1920 1920
1921 1921 :param revisions: list of revisions to get statuses for
1922 1922 """
1923 1923 statuses = ChangesetStatus.query()\
1924 1924 .filter(ChangesetStatus.repo == self)\
1925 1925 .filter(ChangesetStatus.version == 0)
1926 1926
1927 1927 if revisions:
1928 1928 # Try doing the filtering in chunks to avoid hitting limits
1929 1929 size = 500
1930 1930 status_results = []
1931 1931 for chunk in xrange(0, len(revisions), size):
1932 1932 status_results += statuses.filter(
1933 1933 ChangesetStatus.revision.in_(
1934 1934 revisions[chunk: chunk+size])
1935 1935 ).all()
1936 1936 else:
1937 1937 status_results = statuses.all()
1938 1938
1939 1939 grouped = {}
1940 1940
1941 1941 # maybe we have an open pull request without a status yet?
1942 1942 stat = ChangesetStatus.STATUS_UNDER_REVIEW
1943 1943 status_lbl = ChangesetStatus.get_status_lbl(stat)
1944 1944 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
1945 1945 for rev in pr.revisions:
1946 1946 pr_id = pr.pull_request_id
1947 1947 pr_repo = pr.target_repo.repo_name
1948 1948 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
1949 1949
1950 1950 for stat in status_results:
1951 1951 pr_id = pr_repo = None
1952 1952 if stat.pull_request:
1953 1953 pr_id = stat.pull_request.pull_request_id
1954 1954 pr_repo = stat.pull_request.target_repo.repo_name
1955 1955 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
1956 1956 pr_id, pr_repo]
1957 1957 return grouped
1958 1958
1959 1959 # ==========================================================================
1960 1960 # SCM CACHE INSTANCE
1961 1961 # ==========================================================================
1962 1962
1963 1963 def scm_instance(self, **kwargs):
1964 1964 import rhodecode
1965 1965
1966 1966 # Passing a config will not hit the cache; currently this is only
1967 1967 # used for repo2dbmapper
1968 1968 config = kwargs.pop('config', None)
1969 1969 cache = kwargs.pop('cache', None)
1970 1970 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1971 1971 # if cache is NOT defined use default global, else we have a full
1972 1972 # control over cache behaviour
1973 1973 if cache is None and full_cache and not config:
1974 1974 return self._get_instance_cached()
1975 1975 return self._get_instance(cache=bool(cache), config=config)
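# Example usage (illustrative sketch). Without arguments the global
# vcs_full_cache setting decides, an explicit cache flag overrides it:
#
#   repo.scm_instance()              # cached instance when vcs_full_cache is on
#   repo.scm_instance(cache=False)   # always builds a fresh backend instance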
1976 1976
1977 1977 def _get_instance_cached(self):
1978 1978 @cache_region('long_term')
1979 1979 def _get_repo(cache_key):
1980 1980 return self._get_instance()
1981 1981
1982 1982 invalidator_context = CacheKey.repo_context_cache(
1983 1983 _get_repo, self.repo_name, None, thread_scoped=True)
1984 1984
1985 1985 with invalidator_context as context:
1986 1986 context.invalidate()
1987 1987 repo = context.compute()
1988 1988
1989 1989 return repo
1990 1990
1991 1991 def _get_instance(self, cache=True, config=None):
1992 1992 config = config or self._config
1993 1993 custom_wire = {
1994 1994 'cache': cache # controls the vcs.remote cache
1995 1995 }
1996 1996 repo = get_vcs_instance(
1997 1997 repo_path=safe_str(self.repo_full_path),
1998 1998 config=config,
1999 1999 with_wire=custom_wire,
2000 2000 create=False,
2001 2001 _vcs_alias=self.repo_type)
2002 2002
2003 2003 return repo
2004 2004
2005 2005 def __json__(self):
2006 2006 return {'landing_rev': self.landing_rev}
2007 2007
2008 2008 def get_dict(self):
2009 2009
2010 2010 # Since we transformed `repo_name` to a hybrid property, we need to
2011 2011 # keep compatibility with the code which uses `repo_name` field.
2012 2012
2013 2013 result = super(Repository, self).get_dict()
2014 2014 result['repo_name'] = result.pop('_repo_name', None)
2015 2015 return result
2016 2016
2017 2017
2018 2018 class RepoGroup(Base, BaseModel):
2019 2019 __tablename__ = 'groups'
2020 2020 __table_args__ = (
2021 2021 UniqueConstraint('group_name', 'group_parent_id'),
2022 2022 CheckConstraint('group_id != group_parent_id'),
2023 2023 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2024 2024 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2025 2025 )
2026 2026 __mapper_args__ = {'order_by': 'group_name'}
2027 2027
2028 2028 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2029 2029
2030 2030 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2031 2031 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2032 2032 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2033 2033 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2034 2034 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2035 2035 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2036 2036 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2037 2037 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2038 2038
2039 2039 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2040 2040 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2041 2041 parent_group = relationship('RepoGroup', remote_side=group_id)
2042 2042 user = relationship('User')
2043 2043 integrations = relationship('Integration',
2044 2044 cascade="all, delete, delete-orphan")
2045 2045
2046 2046 def __init__(self, group_name='', parent_group=None):
2047 2047 self.group_name = group_name
2048 2048 self.parent_group = parent_group
2049 2049
2050 2050 def __unicode__(self):
2051 2051 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2052 2052 self.group_name)
2053 2053
2054 2054 @classmethod
2055 2055 def _generate_choice(cls, repo_group):
2056 2056 from webhelpers.html import literal as _literal
2057 2057 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2058 2058 return repo_group.group_id, _name(repo_group.full_path_splitted)
2059 2059
2060 2060 @classmethod
2061 2061 def groups_choices(cls, groups=None, show_empty_group=True):
2062 2062 if not groups:
2063 2063 groups = cls.query().all()
2064 2064
2065 2065 repo_groups = []
2066 2066 if show_empty_group:
2067 2067 repo_groups = [('-1', u'-- %s --' % _('No parent'))]
2068 2068
2069 2069 repo_groups.extend([cls._generate_choice(x) for x in groups])
2070 2070
2071 2071 repo_groups = sorted(
2072 2072 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2073 2073 return repo_groups
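# Example (illustrative sketch; group names are hypothetical). Returns choices
# suitable for a select2 widget, optionally with the '-- No parent --' entry first:
#
#   RepoGroup.groups_choices()
#   # -> [('-1', u'-- No parent --'), (3, u'docs'), (7, u'docs/manuals'), ...]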
2074 2074
2075 2075 @classmethod
2076 2076 def url_sep(cls):
2077 2077 return URL_SEP
2078 2078
2079 2079 @classmethod
2080 2080 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2081 2081 if case_insensitive:
2082 2082 gr = cls.query().filter(func.lower(cls.group_name)
2083 2083 == func.lower(group_name))
2084 2084 else:
2085 2085 gr = cls.query().filter(cls.group_name == group_name)
2086 2086 if cache:
2087 2087 gr = gr.options(FromCache(
2088 2088 "sql_cache_short",
2089 2089 "get_group_%s" % _hash_key(group_name)))
2090 2090 return gr.scalar()
2091 2091
2092 2092 @classmethod
2093 2093 def get_user_personal_repo_group(cls, user_id):
2094 2094 user = User.get(user_id)
2095 2095 return cls.query()\
2096 2096 .filter(cls.personal == true())\
2097 2097 .filter(cls.user == user).scalar()
2098 2098
2099 2099 @classmethod
2100 2100 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2101 2101 case_insensitive=True):
2102 2102 q = RepoGroup.query()
2103 2103
2104 2104 if not isinstance(user_id, Optional):
2105 2105 q = q.filter(RepoGroup.user_id == user_id)
2106 2106
2107 2107 if not isinstance(group_id, Optional):
2108 2108 q = q.filter(RepoGroup.group_parent_id == group_id)
2109 2109
2110 2110 if case_insensitive:
2111 2111 q = q.order_by(func.lower(RepoGroup.group_name))
2112 2112 else:
2113 2113 q = q.order_by(RepoGroup.group_name)
2114 2114 return q.all()
2115 2115
2116 2116 @property
2117 2117 def parents(self):
2118 2118 parents_recursion_limit = 10
2119 2119 groups = []
2120 2120 if self.parent_group is None:
2121 2121 return groups
2122 2122 cur_gr = self.parent_group
2123 2123 groups.insert(0, cur_gr)
2124 2124 cnt = 0
2125 2125 while 1:
2126 2126 cnt += 1
2127 2127 gr = getattr(cur_gr, 'parent_group', None)
2128 2128 cur_gr = cur_gr.parent_group
2129 2129 if gr is None:
2130 2130 break
2131 2131 if cnt == parents_recursion_limit:
2132 2132 # this will prevent accidental infinite loops
2133 2133 log.error(('more than %s parents found for group %s, stopping '
2134 2134 'recursive parent fetching' % (parents_recursion_limit, self)))
2135 2135 break
2136 2136
2137 2137 groups.insert(0, gr)
2138 2138 return groups
2139 2139
2140 2140 @property
2141 2141 def children(self):
2142 2142 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2143 2143
2144 2144 @property
2145 2145 def name(self):
2146 2146 return self.group_name.split(RepoGroup.url_sep())[-1]
2147 2147
2148 2148 @property
2149 2149 def full_path(self):
2150 2150 return self.group_name
2151 2151
2152 2152 @property
2153 2153 def full_path_splitted(self):
2154 2154 return self.group_name.split(RepoGroup.url_sep())
2155 2155
2156 2156 @property
2157 2157 def repositories(self):
2158 2158 return Repository.query()\
2159 2159 .filter(Repository.group == self)\
2160 2160 .order_by(Repository.repo_name)
2161 2161
2162 2162 @property
2163 2163 def repositories_recursive_count(self):
2164 2164 cnt = self.repositories.count()
2165 2165
2166 2166 def children_count(group):
2167 2167 cnt = 0
2168 2168 for child in group.children:
2169 2169 cnt += child.repositories.count()
2170 2170 cnt += children_count(child)
2171 2171 return cnt
2172 2172
2173 2173 return cnt + children_count(self)
2174 2174
2175 2175 def _recursive_objects(self, include_repos=True):
2176 2176 all_ = []
2177 2177
2178 2178 def _get_members(root_gr):
2179 2179 if include_repos:
2180 2180 for r in root_gr.repositories:
2181 2181 all_.append(r)
2182 2182 childs = root_gr.children.all()
2183 2183 if childs:
2184 2184 for gr in childs:
2185 2185 all_.append(gr)
2186 2186 _get_members(gr)
2187 2187
2188 2188 _get_members(self)
2189 2189 return [self] + all_
2190 2190
2191 2191 def recursive_groups_and_repos(self):
2192 2192 """
2193 2193 Recursively return all groups, with repositories in those groups
2194 2194 """
2195 2195 return self._recursive_objects()
2196 2196
2197 2197 def recursive_groups(self):
2198 2198 """
2199 2199 Returns all child groups of this group, including children of children
2200 2200 """
2201 2201 return self._recursive_objects(include_repos=False)
2202 2202
2203 2203 def get_new_name(self, group_name):
2204 2204 """
2205 2205 Returns the new full group name based on the parent path and the given name
2206 2206
2207 2207 :param group_name: new name for the last path segment of this group
2208 2208 """
2209 2209 path_prefix = (self.parent_group.full_path_splitted if
2210 2210 self.parent_group else [])
2211 2211 return RepoGroup.url_sep().join(path_prefix + [group_name])
2212 2212
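A short sketch of the renaming helper above, assuming a group stored at 'libs/backend' whose parent is 'libs' (names are illustrative):

group = RepoGroup.get_by_group_name('libs/backend')
group.get_new_name('core')  # -> 'libs/core', the parent path is preserved
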
2213 2213 def permissions(self, with_admins=True, with_owner=True):
2214 2214 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2215 2215 q = q.options(joinedload(UserRepoGroupToPerm.group),
2216 2216 joinedload(UserRepoGroupToPerm.user),
2217 2217 joinedload(UserRepoGroupToPerm.permission),)
2218 2218
2219 2219 # get owners, admins and their permissions. We copy the sqlalchemy
2220 2220 # objects into plain AttributeDict rows because objects held by the
2221 2221 # session are shared references and changing one object propagates to
2222 2222 # all others. E.g. if an admin is also the owner, setting admin_row
2223 2223 # on the shared object would change both rows.
2224 2224 perm_rows = []
2225 2225 for _usr in q.all():
2226 2226 usr = AttributeDict(_usr.user.get_dict())
2227 2227 usr.permission = _usr.permission.permission_name
2228 2228 perm_rows.append(usr)
2229 2229
2230 2230 # sort the perm rows: the 'default' user first, then by
2231 2231 # admin, write, read, none permission, sorted again alphabetically
2232 2232 # within each group
2233 2233 perm_rows = sorted(perm_rows, key=display_sort)
2234 2234
2235 2235 _admin_perm = 'group.admin'
2236 2236 owner_row = []
2237 2237 if with_owner:
2238 2238 usr = AttributeDict(self.user.get_dict())
2239 2239 usr.owner_row = True
2240 2240 usr.permission = _admin_perm
2241 2241 owner_row.append(usr)
2242 2242
2243 2243 super_admin_rows = []
2244 2244 if with_admins:
2245 2245 for usr in User.get_all_super_admins():
2246 2246 # if this admin is also owner, don't double the record
2247 2247 if usr.user_id == owner_row[0].user_id:
2248 2248 owner_row[0].admin_row = True
2249 2249 else:
2250 2250 usr = AttributeDict(usr.get_dict())
2251 2251 usr.admin_row = True
2252 2252 usr.permission = _admin_perm
2253 2253 super_admin_rows.append(usr)
2254 2254
2255 2255 return super_admin_rows + owner_row + perm_rows
2256 2256
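A hedged sketch of consuming the permission rows returned above, given some already-loaded repo_group instance (user names and permissions are illustrative; the row fields come from User.get_dict()):

rows = repo_group.permissions(with_admins=True, with_owner=True)
[(r.username, r.permission) for r in rows]
# e.g. [(u'admin', u'group.admin'), (u'owner', u'group.admin'),
#       (u'dev1', u'group.write')]
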
2257 2257 def permission_user_groups(self):
2258 2258 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2259 2259 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2260 2260 joinedload(UserGroupRepoGroupToPerm.users_group),
2261 2261 joinedload(UserGroupRepoGroupToPerm.permission),)
2262 2262
2263 2263 perm_rows = []
2264 2264 for _user_group in q.all():
2265 2265 usr = AttributeDict(_user_group.users_group.get_dict())
2266 2266 usr.permission = _user_group.permission.permission_name
2267 2267 perm_rows.append(usr)
2268 2268
2269 2269 return perm_rows
2270 2270
2271 2271 def get_api_data(self):
2272 2272 """
2273 2273 Common function for generating api data
2274 2274
2275 2275 """
2276 2276 group = self
2277 2277 data = {
2278 2278 'group_id': group.group_id,
2279 2279 'group_name': group.group_name,
2280 2280 'group_description': group.group_description,
2281 2281 'parent_group': group.parent_group.group_name if group.parent_group else None,
2282 2282 'repositories': [x.repo_name for x in group.repositories],
2283 2283 'owner': group.user.username,
2284 2284 }
2285 2285 return data
2286 2286
2287 2287
2288 2288 class Permission(Base, BaseModel):
2289 2289 __tablename__ = 'permissions'
2290 2290 __table_args__ = (
2291 2291 Index('p_perm_name_idx', 'permission_name'),
2292 2292 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2293 2293 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2294 2294 )
2295 2295 PERMS = [
2296 2296 ('hg.admin', _('RhodeCode Super Administrator')),
2297 2297
2298 2298 ('repository.none', _('Repository no access')),
2299 2299 ('repository.read', _('Repository read access')),
2300 2300 ('repository.write', _('Repository write access')),
2301 2301 ('repository.admin', _('Repository admin access')),
2302 2302
2303 2303 ('group.none', _('Repository group no access')),
2304 2304 ('group.read', _('Repository group read access')),
2305 2305 ('group.write', _('Repository group write access')),
2306 2306 ('group.admin', _('Repository group admin access')),
2307 2307
2308 2308 ('usergroup.none', _('User group no access')),
2309 2309 ('usergroup.read', _('User group read access')),
2310 2310 ('usergroup.write', _('User group write access')),
2311 2311 ('usergroup.admin', _('User group admin access')),
2312 2312
2313 2313 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2314 2314 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2315 2315
2316 2316 ('hg.usergroup.create.false', _('User Group creation disabled')),
2317 2317 ('hg.usergroup.create.true', _('User Group creation enabled')),
2318 2318
2319 2319 ('hg.create.none', _('Repository creation disabled')),
2320 2320 ('hg.create.repository', _('Repository creation enabled')),
2321 2321 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2322 2322 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2323 2323
2324 2324 ('hg.fork.none', _('Repository forking disabled')),
2325 2325 ('hg.fork.repository', _('Repository forking enabled')),
2326 2326
2327 2327 ('hg.register.none', _('Registration disabled')),
2328 2328 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2329 2329 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2330 2330
2331 2331 ('hg.password_reset.enabled', _('Password reset enabled')),
2332 2332 ('hg.password_reset.hidden', _('Password reset hidden')),
2333 2333 ('hg.password_reset.disabled', _('Password reset disabled')),
2334 2334
2335 2335 ('hg.extern_activate.manual', _('Manual activation of external account')),
2336 2336 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2337 2337
2338 2338 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2339 2339 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2340 2340 ]
2341 2341
2342 2342 # definition of system default permissions for DEFAULT user
2343 2343 DEFAULT_USER_PERMISSIONS = [
2344 2344 'repository.read',
2345 2345 'group.read',
2346 2346 'usergroup.read',
2347 2347 'hg.create.repository',
2348 2348 'hg.repogroup.create.false',
2349 2349 'hg.usergroup.create.false',
2350 2350 'hg.create.write_on_repogroup.true',
2351 2351 'hg.fork.repository',
2352 2352 'hg.register.manual_activate',
2353 2353 'hg.password_reset.enabled',
2354 2354 'hg.extern_activate.auto',
2355 2355 'hg.inherit_default_perms.true',
2356 2356 ]
2357 2357
2358 2358 # Weight defines which permissions are more important;
2359 2359 # the higher the number, the more important the
2360 2360 # permission.
2361 2361 PERM_WEIGHTS = {
2362 2362 'repository.none': 0,
2363 2363 'repository.read': 1,
2364 2364 'repository.write': 3,
2365 2365 'repository.admin': 4,
2366 2366
2367 2367 'group.none': 0,
2368 2368 'group.read': 1,
2369 2369 'group.write': 3,
2370 2370 'group.admin': 4,
2371 2371
2372 2372 'usergroup.none': 0,
2373 2373 'usergroup.read': 1,
2374 2374 'usergroup.write': 3,
2375 2375 'usergroup.admin': 4,
2376 2376
2377 2377 'hg.repogroup.create.false': 0,
2378 2378 'hg.repogroup.create.true': 1,
2379 2379
2380 2380 'hg.usergroup.create.false': 0,
2381 2381 'hg.usergroup.create.true': 1,
2382 2382
2383 2383 'hg.fork.none': 0,
2384 2384 'hg.fork.repository': 1,
2385 2385 'hg.create.none': 0,
2386 2386 'hg.create.repository': 1
2387 2387 }
2388 2388
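A small sketch of how these weights can be used to pick the most powerful of several candidate permissions (the candidate list is illustrative):

candidates = ['repository.read', 'repository.write', 'repository.none']
strongest = max(candidates, key=Permission.PERM_WEIGHTS.get)
# -> 'repository.write'
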
2389 2389 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2390 2390 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2391 2391 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2392 2392
2393 2393 def __unicode__(self):
2394 2394 return u"<%s('%s:%s')>" % (
2395 2395 self.__class__.__name__, self.permission_id, self.permission_name
2396 2396 )
2397 2397
2398 2398 @classmethod
2399 2399 def get_by_key(cls, key):
2400 2400 return cls.query().filter(cls.permission_name == key).scalar()
2401 2401
2402 2402 @classmethod
2403 2403 def get_default_repo_perms(cls, user_id, repo_id=None):
2404 2404 q = Session().query(UserRepoToPerm, Repository, Permission)\
2405 2405 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2406 2406 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2407 2407 .filter(UserRepoToPerm.user_id == user_id)
2408 2408 if repo_id:
2409 2409 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2410 2410 return q.all()
2411 2411
2412 2412 @classmethod
2413 2413 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2414 2414 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2415 2415 .join(
2416 2416 Permission,
2417 2417 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2418 2418 .join(
2419 2419 Repository,
2420 2420 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2421 2421 .join(
2422 2422 UserGroup,
2423 2423 UserGroupRepoToPerm.users_group_id ==
2424 2424 UserGroup.users_group_id)\
2425 2425 .join(
2426 2426 UserGroupMember,
2427 2427 UserGroupRepoToPerm.users_group_id ==
2428 2428 UserGroupMember.users_group_id)\
2429 2429 .filter(
2430 2430 UserGroupMember.user_id == user_id,
2431 2431 UserGroup.users_group_active == true())
2432 2432 if repo_id:
2433 2433 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2434 2434 return q.all()
2435 2435
2436 2436 @classmethod
2437 2437 def get_default_group_perms(cls, user_id, repo_group_id=None):
2438 2438 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2439 2439 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2440 2440 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2441 2441 .filter(UserRepoGroupToPerm.user_id == user_id)
2442 2442 if repo_group_id:
2443 2443 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2444 2444 return q.all()
2445 2445
2446 2446 @classmethod
2447 2447 def get_default_group_perms_from_user_group(
2448 2448 cls, user_id, repo_group_id=None):
2449 2449 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2450 2450 .join(
2451 2451 Permission,
2452 2452 UserGroupRepoGroupToPerm.permission_id ==
2453 2453 Permission.permission_id)\
2454 2454 .join(
2455 2455 RepoGroup,
2456 2456 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2457 2457 .join(
2458 2458 UserGroup,
2459 2459 UserGroupRepoGroupToPerm.users_group_id ==
2460 2460 UserGroup.users_group_id)\
2461 2461 .join(
2462 2462 UserGroupMember,
2463 2463 UserGroupRepoGroupToPerm.users_group_id ==
2464 2464 UserGroupMember.users_group_id)\
2465 2465 .filter(
2466 2466 UserGroupMember.user_id == user_id,
2467 2467 UserGroup.users_group_active == true())
2468 2468 if repo_group_id:
2469 2469 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2470 2470 return q.all()
2471 2471
2472 2472 @classmethod
2473 2473 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2474 2474 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2475 2475 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2476 2476 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2477 2477 .filter(UserUserGroupToPerm.user_id == user_id)
2478 2478 if user_group_id:
2479 2479 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2480 2480 return q.all()
2481 2481
2482 2482 @classmethod
2483 2483 def get_default_user_group_perms_from_user_group(
2484 2484 cls, user_id, user_group_id=None):
2485 2485 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2486 2486 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2487 2487 .join(
2488 2488 Permission,
2489 2489 UserGroupUserGroupToPerm.permission_id ==
2490 2490 Permission.permission_id)\
2491 2491 .join(
2492 2492 TargetUserGroup,
2493 2493 UserGroupUserGroupToPerm.target_user_group_id ==
2494 2494 TargetUserGroup.users_group_id)\
2495 2495 .join(
2496 2496 UserGroup,
2497 2497 UserGroupUserGroupToPerm.user_group_id ==
2498 2498 UserGroup.users_group_id)\
2499 2499 .join(
2500 2500 UserGroupMember,
2501 2501 UserGroupUserGroupToPerm.user_group_id ==
2502 2502 UserGroupMember.users_group_id)\
2503 2503 .filter(
2504 2504 UserGroupMember.user_id == user_id,
2505 2505 UserGroup.users_group_active == true())
2506 2506 if user_group_id:
2507 2507 q = q.filter(
2508 2508 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2509 2509
2510 2510 return q.all()
2511 2511
2512 2512
2513 2513 class UserRepoToPerm(Base, BaseModel):
2514 2514 __tablename__ = 'repo_to_perm'
2515 2515 __table_args__ = (
2516 2516 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2517 2517 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2518 2518 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2519 2519 )
2520 2520 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2521 2521 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2522 2522 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2523 2523 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2524 2524
2525 2525 user = relationship('User')
2526 2526 repository = relationship('Repository')
2527 2527 permission = relationship('Permission')
2528 2528
2529 2529 @classmethod
2530 2530 def create(cls, user, repository, permission):
2531 2531 n = cls()
2532 2532 n.user = user
2533 2533 n.repository = repository
2534 2534 n.permission = permission
2535 2535 Session().add(n)
2536 2536 return n
2537 2537
2538 2538 def __unicode__(self):
2539 2539 return u'<%s => %s >' % (self.user, self.repository)
2540 2540
2541 2541
2542 2542 class UserUserGroupToPerm(Base, BaseModel):
2543 2543 __tablename__ = 'user_user_group_to_perm'
2544 2544 __table_args__ = (
2545 2545 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2546 2546 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2547 2547 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2548 2548 )
2549 2549 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2550 2550 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2551 2551 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2552 2552 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2553 2553
2554 2554 user = relationship('User')
2555 2555 user_group = relationship('UserGroup')
2556 2556 permission = relationship('Permission')
2557 2557
2558 2558 @classmethod
2559 2559 def create(cls, user, user_group, permission):
2560 2560 n = cls()
2561 2561 n.user = user
2562 2562 n.user_group = user_group
2563 2563 n.permission = permission
2564 2564 Session().add(n)
2565 2565 return n
2566 2566
2567 2567 def __unicode__(self):
2568 2568 return u'<%s => %s >' % (self.user, self.user_group)
2569 2569
2570 2570
2571 2571 class UserToPerm(Base, BaseModel):
2572 2572 __tablename__ = 'user_to_perm'
2573 2573 __table_args__ = (
2574 2574 UniqueConstraint('user_id', 'permission_id'),
2575 2575 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2576 2576 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2577 2577 )
2578 2578 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2579 2579 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2580 2580 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2581 2581
2582 2582 user = relationship('User')
2583 2583 permission = relationship('Permission', lazy='joined')
2584 2584
2585 2585 def __unicode__(self):
2586 2586 return u'<%s => %s >' % (self.user, self.permission)
2587 2587
2588 2588
2589 2589 class UserGroupRepoToPerm(Base, BaseModel):
2590 2590 __tablename__ = 'users_group_repo_to_perm'
2591 2591 __table_args__ = (
2592 2592 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2593 2593 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2594 2594 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2595 2595 )
2596 2596 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2597 2597 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2598 2598 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2599 2599 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2600 2600
2601 2601 users_group = relationship('UserGroup')
2602 2602 permission = relationship('Permission')
2603 2603 repository = relationship('Repository')
2604 2604
2605 2605 @classmethod
2606 2606 def create(cls, users_group, repository, permission):
2607 2607 n = cls()
2608 2608 n.users_group = users_group
2609 2609 n.repository = repository
2610 2610 n.permission = permission
2611 2611 Session().add(n)
2612 2612 return n
2613 2613
2614 2614 def __unicode__(self):
2615 2615 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2616 2616
2617 2617
2618 2618 class UserGroupUserGroupToPerm(Base, BaseModel):
2619 2619 __tablename__ = 'user_group_user_group_to_perm'
2620 2620 __table_args__ = (
2621 2621 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2622 2622 CheckConstraint('target_user_group_id != user_group_id'),
2623 2623 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2624 2624 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2625 2625 )
2626 2626 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2627 2627 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2628 2628 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2629 2629 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2630 2630
2631 2631 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2632 2632 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2633 2633 permission = relationship('Permission')
2634 2634
2635 2635 @classmethod
2636 2636 def create(cls, target_user_group, user_group, permission):
2637 2637 n = cls()
2638 2638 n.target_user_group = target_user_group
2639 2639 n.user_group = user_group
2640 2640 n.permission = permission
2641 2641 Session().add(n)
2642 2642 return n
2643 2643
2644 2644 def __unicode__(self):
2645 2645 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2646 2646
2647 2647
2648 2648 class UserGroupToPerm(Base, BaseModel):
2649 2649 __tablename__ = 'users_group_to_perm'
2650 2650 __table_args__ = (
2651 2651 UniqueConstraint('users_group_id', 'permission_id',),
2652 2652 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2653 2653 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2654 2654 )
2655 2655 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2656 2656 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2657 2657 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2658 2658
2659 2659 users_group = relationship('UserGroup')
2660 2660 permission = relationship('Permission')
2661 2661
2662 2662
2663 2663 class UserRepoGroupToPerm(Base, BaseModel):
2664 2664 __tablename__ = 'user_repo_group_to_perm'
2665 2665 __table_args__ = (
2666 2666 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2667 2667 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2668 2668 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2669 2669 )
2670 2670
2671 2671 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2672 2672 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2673 2673 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2674 2674 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2675 2675
2676 2676 user = relationship('User')
2677 2677 group = relationship('RepoGroup')
2678 2678 permission = relationship('Permission')
2679 2679
2680 2680 @classmethod
2681 2681 def create(cls, user, repository_group, permission):
2682 2682 n = cls()
2683 2683 n.user = user
2684 2684 n.group = repository_group
2685 2685 n.permission = permission
2686 2686 Session().add(n)
2687 2687 return n
2688 2688
2689 2689
2690 2690 class UserGroupRepoGroupToPerm(Base, BaseModel):
2691 2691 __tablename__ = 'users_group_repo_group_to_perm'
2692 2692 __table_args__ = (
2693 2693 UniqueConstraint('users_group_id', 'group_id'),
2694 2694 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2695 2695 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2696 2696 )
2697 2697
2698 2698 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2699 2699 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2700 2700 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2701 2701 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2702 2702
2703 2703 users_group = relationship('UserGroup')
2704 2704 permission = relationship('Permission')
2705 2705 group = relationship('RepoGroup')
2706 2706
2707 2707 @classmethod
2708 2708 def create(cls, user_group, repository_group, permission):
2709 2709 n = cls()
2710 2710 n.users_group = user_group
2711 2711 n.group = repository_group
2712 2712 n.permission = permission
2713 2713 Session().add(n)
2714 2714 return n
2715 2715
2716 2716 def __unicode__(self):
2717 2717 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2718 2718
2719 2719
2720 2720 class Statistics(Base, BaseModel):
2721 2721 __tablename__ = 'statistics'
2722 2722 __table_args__ = (
2723 2723 UniqueConstraint('repository_id'),
2724 2724 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2725 2725 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2726 2726 )
2727 2727 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2728 2728 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2729 2729 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2730 2730 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
2731 2731 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
2732 2732 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
2733 2733
2734 2734 repository = relationship('Repository', single_parent=True)
2735 2735
2736 2736
2737 2737 class UserFollowing(Base, BaseModel):
2738 2738 __tablename__ = 'user_followings'
2739 2739 __table_args__ = (
2740 2740 UniqueConstraint('user_id', 'follows_repository_id'),
2741 2741 UniqueConstraint('user_id', 'follows_user_id'),
2742 2742 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2743 2743 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2744 2744 )
2745 2745
2746 2746 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2747 2747 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2748 2748 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2749 2749 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2750 2750 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2751 2751
2752 2752 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2753 2753
2754 2754 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2755 2755 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2756 2756
2757 2757 @classmethod
2758 2758 def get_repo_followers(cls, repo_id):
2759 2759 return cls.query().filter(cls.follows_repo_id == repo_id)
2760 2760
2761 2761
2762 2762 class CacheKey(Base, BaseModel):
2763 2763 __tablename__ = 'cache_invalidation'
2764 2764 __table_args__ = (
2765 2765 UniqueConstraint('cache_key'),
2766 2766 Index('key_idx', 'cache_key'),
2767 2767 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2768 2768 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2769 2769 )
2770 2770 CACHE_TYPE_ATOM = 'ATOM'
2771 2771 CACHE_TYPE_RSS = 'RSS'
2772 2772 CACHE_TYPE_README = 'README'
2773 2773
2774 2774 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2775 2775 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2776 2776 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2777 2777 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2778 2778
2779 2779 def __init__(self, cache_key, cache_args=''):
2780 2780 self.cache_key = cache_key
2781 2781 self.cache_args = cache_args
2782 2782 self.cache_active = False
2783 2783
2784 2784 def __unicode__(self):
2785 2785 return u"<%s('%s:%s[%s]')>" % (
2786 2786 self.__class__.__name__,
2787 2787 self.cache_id, self.cache_key, self.cache_active)
2788 2788
2789 2789 def _cache_key_partition(self):
2790 2790 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2791 2791 return prefix, repo_name, suffix
2792 2792
2793 2793 def get_prefix(self):
2794 2794 """
2795 2795 Try to extract the prefix from an existing cache key. The key
2796 2796 may consist of prefix, repo_name and suffix
2797 2797 """
2798 2798 # this returns prefix, repo_name, suffix
2799 2799 return self._cache_key_partition()[0]
2800 2800
2801 2801 def get_suffix(self):
2802 2802 """
2803 2803 get suffix that might have been used in _get_cache_key to
2804 2804 generate self.cache_key. Only used for informational purposes
2805 2805 in repo_edit.mako.
2806 2806 """
2807 2807 # prefix, repo_name, suffix
2808 2808 return self._cache_key_partition()[2]
2809 2809
2810 2810 @classmethod
2811 2811 def delete_all_cache(cls):
2812 2812 """
2813 2813 Delete all cache keys from database.
2814 2814 Should only be run when all instances are down and all entries
2815 2815 thus stale.
2816 2816 """
2817 2817 cls.query().delete()
2818 2818 Session().commit()
2819 2819
2820 2820 @classmethod
2821 2821 def get_cache_key(cls, repo_name, cache_type):
2822 2822 """
2823 2823 Generate a cache key for this process of the RhodeCode instance.
2824 2824
2825 2825 The prefix will most likely be the process id, or an explicitly
2826 2826 set instance_id from the .ini file.
2827 2827 """
2828 2828 import rhodecode
2829 2829 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2830 2830
2831 2831 repo_as_unicode = safe_unicode(repo_name)
2832 2832 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2833 2833 if cache_type else repo_as_unicode
2834 2834
2835 2835 return u'{}{}'.format(prefix, key)
2836 2836
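A sketch of the resulting key format, assuming instance_id is not set in the configuration (the repository name is illustrative):

CacheKey.get_cache_key('docs/api', CacheKey.CACHE_TYPE_ATOM)
# -> u'docs/api_ATOM'
CacheKey.get_cache_key('docs/api', None)
# -> u'docs/api'
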
2837 2837 @classmethod
2838 2838 def set_invalidate(cls, repo_name, delete=False):
2839 2839 """
2840 2840 Mark all caches of a repo as invalid in the database.
2841 2841 """
2842 2842
2843 2843 try:
2844 2844 qry = Session().query(cls).filter(cls.cache_args == repo_name)
2845 2845 if delete:
2846 2846 log.debug('cache objects deleted for repo %s',
2847 2847 safe_str(repo_name))
2848 2848 qry.delete()
2849 2849 else:
2850 2850 log.debug('cache objects marked as invalid for repo %s',
2851 2851 safe_str(repo_name))
2852 2852 qry.update({"cache_active": False})
2853 2853
2854 2854 Session().commit()
2855 2855 except Exception:
2856 2856 log.exception(
2857 2857 'Cache key invalidation failed for repository %s',
2858 2858 safe_str(repo_name))
2859 2859 Session().rollback()
2860 2860
2861 2861 @classmethod
2862 2862 def get_active_cache(cls, cache_key):
2863 2863 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2864 2864 if inv_obj:
2865 2865 return inv_obj
2866 2866 return None
2867 2867
2868 2868 @classmethod
2869 2869 def repo_context_cache(cls, compute_func, repo_name, cache_type,
2870 2870 thread_scoped=False):
2871 2871 """
2872 2872 @cache_region('long_term')
2873 2873 def _heavy_calculation(cache_key):
2874 2874 return 'result'
2875 2875
2876 2876 cache_context = CacheKey.repo_context_cache(
2877 2877 _heavy_calculation, repo_name, cache_type)
2878 2878
2879 2879 with cache_context as context:
2880 2880 context.invalidate()
2881 2881 computed = context.compute()
2882 2882
2883 2883 assert computed == 'result'
2884 2884 """
2885 2885 from rhodecode.lib import caches
2886 2886 return caches.InvalidationContext(
2887 2887 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
2888 2888
2889 2889
2890 2890 class ChangesetComment(Base, BaseModel):
2891 2891 __tablename__ = 'changeset_comments'
2892 2892 __table_args__ = (
2893 2893 Index('cc_revision_idx', 'revision'),
2894 2894 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2895 2895 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2896 2896 )
2897 2897
2898 2898 COMMENT_OUTDATED = u'comment_outdated'
2899 2899 COMMENT_TYPE_NOTE = u'note'
2900 2900 COMMENT_TYPE_TODO = u'todo'
2901 2901 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
2902 2902
2903 2903 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
2904 2904 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2905 2905 revision = Column('revision', String(40), nullable=True)
2906 2906 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2907 2907 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
2908 2908 line_no = Column('line_no', Unicode(10), nullable=True)
2909 2909 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
2910 2910 f_path = Column('f_path', Unicode(1000), nullable=True)
2911 2911 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2912 2912 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
2913 2913 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2914 2914 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2915 2915 renderer = Column('renderer', Unicode(64), nullable=True)
2916 2916 display_state = Column('display_state', Unicode(128), nullable=True)
2917 2917
2918 2918 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
2919 2919 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
2920 2920 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
2921 2921 author = relationship('User', lazy='joined')
2922 2922 repo = relationship('Repository')
2923 2923 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
2924 2924 pull_request = relationship('PullRequest', lazy='joined')
2925 2925 pull_request_version = relationship('PullRequestVersion')
2926 2926
2927 2927 @classmethod
2928 2928 def get_users(cls, revision=None, pull_request_id=None):
2929 2929 """
2930 2930 Returns users associated with this ChangesetComment, i.e. those
2931 2931 who actually commented
2932 2932
2933 2933 :param cls:
2934 2934 :param revision:
2935 2935 """
2936 2936 q = Session().query(User)\
2937 2937 .join(ChangesetComment.author)
2938 2938 if revision:
2939 2939 q = q.filter(cls.revision == revision)
2940 2940 elif pull_request_id:
2941 2941 q = q.filter(cls.pull_request_id == pull_request_id)
2942 2942 return q.all()
2943 2943
2944 2944 @classmethod
2945 2945 def get_index_from_version(cls, pr_version, versions):
2946 2946 num_versions = [x.pull_request_version_id for x in versions]
2947 2947 try:
2948 2948 return num_versions.index(pr_version) + 1
2949 2949 except (IndexError, ValueError):
2950 2950 return
2951 2951
2952 2952 @property
2953 2953 def outdated(self):
2954 2954 return self.display_state == self.COMMENT_OUTDATED
2955 2955
2956 2956 def outdated_at_version(self, version):
2957 2957 """
2958 2958 Checks if comment is outdated for given pull request version
2959 2959 """
2960 2960 return self.outdated and self.pull_request_version_id != version
2961 2961
2962 2962 def older_than_version(self, version):
2963 2963 """
2964 2964 Checks if the comment was made on an earlier version than the given one
2965 2965 """
2966 2966 if version is None:
2967 2967 return self.pull_request_version_id is not None
2968 2968
2969 2969 return self.pull_request_version_id < version
2970 2970
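A brief sketch of how the two checks above behave for a comment whose pull_request_version_id is 2 (version ids are illustrative):

comment.older_than_version(3)     # True, made before version 3
comment.older_than_version(None)  # True, any versioned comment predates the latest view
comment.outdated_at_version(3)    # True only if display_state == COMMENT_OUTDATED
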
2971 2971 @property
2972 2972 def resolved(self):
2973 2973 return self.resolved_by[0] if self.resolved_by else None
2974 2974
2975 2975 @property
2976 2976 def is_todo(self):
2977 2977 return self.comment_type == self.COMMENT_TYPE_TODO
2978 2978
2979 2979 def get_index_version(self, versions):
2980 2980 return self.get_index_from_version(
2981 2981 self.pull_request_version_id, versions)
2982 2982
2983 2983 def render(self, mentions=False):
2984 2984 from rhodecode.lib import helpers as h
2985 2985 return h.render(self.text, renderer=self.renderer, mentions=mentions)
2986 2986
2987 2987 def __repr__(self):
2988 2988 if self.comment_id:
2989 2989 return '<DB:Comment #%s>' % self.comment_id
2990 2990 else:
2991 2991 return '<DB:Comment at %#x>' % id(self)
2992 2992
2993 2993
2994 2994 class ChangesetStatus(Base, BaseModel):
2995 2995 __tablename__ = 'changeset_statuses'
2996 2996 __table_args__ = (
2997 2997 Index('cs_revision_idx', 'revision'),
2998 2998 Index('cs_version_idx', 'version'),
2999 2999 UniqueConstraint('repo_id', 'revision', 'version'),
3000 3000 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3001 3001 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3002 3002 )
3003 3003 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3004 3004 STATUS_APPROVED = 'approved'
3005 3005 STATUS_REJECTED = 'rejected'
3006 3006 STATUS_UNDER_REVIEW = 'under_review'
3007 3007
3008 3008 STATUSES = [
3009 3009 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3010 3010 (STATUS_APPROVED, _("Approved")),
3011 3011 (STATUS_REJECTED, _("Rejected")),
3012 3012 (STATUS_UNDER_REVIEW, _("Under Review")),
3013 3013 ]
3014 3014
3015 3015 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3016 3016 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3017 3017 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3018 3018 revision = Column('revision', String(40), nullable=False)
3019 3019 status = Column('status', String(128), nullable=False, default=DEFAULT)
3020 3020 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3021 3021 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3022 3022 version = Column('version', Integer(), nullable=False, default=0)
3023 3023 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3024 3024
3025 3025 author = relationship('User', lazy='joined')
3026 3026 repo = relationship('Repository')
3027 3027 comment = relationship('ChangesetComment', lazy='joined')
3028 3028 pull_request = relationship('PullRequest', lazy='joined')
3029 3029
3030 3030 def __unicode__(self):
3031 return u"<%s('%s[%s]:%s')>" % (
3031 return u"<%s('%s[v%s]:%s')>" % (
3032 3032 self.__class__.__name__,
3033 3033 self.status, self.version, self.author
3034 3034 )
3035 3035
3036 3036 @classmethod
3037 3037 def get_status_lbl(cls, value):
3038 3038 return dict(cls.STATUSES).get(value)
3039 3039
3040 3040 @property
3041 3041 def status_lbl(self):
3042 3042 return ChangesetStatus.get_status_lbl(self.status)
3043 3043
3044 3044
3045 3045 class _PullRequestBase(BaseModel):
3046 3046 """
3047 3047 Common attributes of pull request and version entries.
3048 3048 """
3049 3049
3050 3050 # .status values
3051 3051 STATUS_NEW = u'new'
3052 3052 STATUS_OPEN = u'open'
3053 3053 STATUS_CLOSED = u'closed'
3054 3054
3055 3055 title = Column('title', Unicode(255), nullable=True)
3056 3056 description = Column(
3057 3057 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3058 3058 nullable=True)
3059 3059 # new/open/closed status of pull request (not approve/reject/etc)
3060 3060 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3061 3061 created_on = Column(
3062 3062 'created_on', DateTime(timezone=False), nullable=False,
3063 3063 default=datetime.datetime.now)
3064 3064 updated_on = Column(
3065 3065 'updated_on', DateTime(timezone=False), nullable=False,
3066 3066 default=datetime.datetime.now)
3067 3067
3068 3068 @declared_attr
3069 3069 def user_id(cls):
3070 3070 return Column(
3071 3071 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3072 3072 unique=None)
3073 3073
3074 3074 # 500 revisions max
3075 3075 _revisions = Column(
3076 3076 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3077 3077
3078 3078 @declared_attr
3079 3079 def source_repo_id(cls):
3080 3080 # TODO: dan: rename column to source_repo_id
3081 3081 return Column(
3082 3082 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3083 3083 nullable=False)
3084 3084
3085 3085 source_ref = Column('org_ref', Unicode(255), nullable=False)
3086 3086
3087 3087 @declared_attr
3088 3088 def target_repo_id(cls):
3089 3089 # TODO: dan: rename column to target_repo_id
3090 3090 return Column(
3091 3091 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3092 3092 nullable=False)
3093 3093
3094 3094 target_ref = Column('other_ref', Unicode(255), nullable=False)
3095 3095 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3096 3096
3097 3097 # TODO: dan: rename column to last_merge_source_rev
3098 3098 _last_merge_source_rev = Column(
3099 3099 'last_merge_org_rev', String(40), nullable=True)
3100 3100 # TODO: dan: rename column to last_merge_target_rev
3101 3101 _last_merge_target_rev = Column(
3102 3102 'last_merge_other_rev', String(40), nullable=True)
3103 3103 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3104 3104 merge_rev = Column('merge_rev', String(40), nullable=True)
3105 3105
3106 3106 @hybrid_property
3107 3107 def revisions(self):
3108 3108 return self._revisions.split(':') if self._revisions else []
3109 3109
3110 3110 @revisions.setter
3111 3111 def revisions(self, val):
3112 3112 self._revisions = ':'.join(val)
3113 3113
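A minimal sketch of the storage format handled by the hybrid property above (commit ids are illustrative):

pr = PullRequest()
pr.revisions = ['deadbeef', 'cafebabe']
pr._revisions  # -> 'deadbeef:cafebabe', stored as a single ':'-joined string
pr.revisions   # -> ['deadbeef', 'cafebabe']
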
3114 3114 @declared_attr
3115 3115 def author(cls):
3116 3116 return relationship('User', lazy='joined')
3117 3117
3118 3118 @declared_attr
3119 3119 def source_repo(cls):
3120 3120 return relationship(
3121 3121 'Repository',
3122 3122 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3123 3123
3124 3124 @property
3125 3125 def source_ref_parts(self):
3126 3126 return self.unicode_to_reference(self.source_ref)
3127 3127
3128 3128 @declared_attr
3129 3129 def target_repo(cls):
3130 3130 return relationship(
3131 3131 'Repository',
3132 3132 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3133 3133
3134 3134 @property
3135 3135 def target_ref_parts(self):
3136 3136 return self.unicode_to_reference(self.target_ref)
3137 3137
3138 3138 @property
3139 3139 def shadow_merge_ref(self):
3140 3140 return self.unicode_to_reference(self._shadow_merge_ref)
3141 3141
3142 3142 @shadow_merge_ref.setter
3143 3143 def shadow_merge_ref(self, ref):
3144 3144 self._shadow_merge_ref = self.reference_to_unicode(ref)
3145 3145
3146 3146 def unicode_to_reference(self, raw):
3147 3147 """
3148 3148 Convert a unicode (or string) to a reference object.
3149 3149 If unicode evaluates to False it returns None.
3150 3150 """
3151 3151 if raw:
3152 3152 refs = raw.split(':')
3153 3153 return Reference(*refs)
3154 3154 else:
3155 3155 return None
3156 3156
3157 3157 def reference_to_unicode(self, ref):
3158 3158 """
3159 3159 Convert a reference object to unicode.
3160 3160 If reference is None it returns None.
3161 3161 """
3162 3162 if ref:
3163 3163 return u':'.join(ref)
3164 3164 else:
3165 3165 return None
3166 3166
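A short sketch of the ref serialization round-trip, given any pull request object pr (the commit id is illustrative):

ref = pr.unicode_to_reference(u'branch:default:1e6f3f9fedf9')
(ref.type, ref.name, ref.commit_id)
# -> (u'branch', u'default', u'1e6f3f9fedf9')
pr.reference_to_unicode(ref)
# -> u'branch:default:1e6f3f9fedf9'
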
3167 3167 def get_api_data(self):
3168 3168 from rhodecode.model.pull_request import PullRequestModel
3169 3169 pull_request = self
3170 3170 merge_status = PullRequestModel().merge_status(pull_request)
3171 3171
3172 3172 pull_request_url = url(
3173 3173 'pullrequest_show', repo_name=self.target_repo.repo_name,
3174 3174 pull_request_id=self.pull_request_id, qualified=True)
3175 3175
3176 3176 merge_data = {
3177 3177 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3178 3178 'reference': (
3179 3179 pull_request.shadow_merge_ref._asdict()
3180 3180 if pull_request.shadow_merge_ref else None),
3181 3181 }
3182 3182
3183 3183 data = {
3184 3184 'pull_request_id': pull_request.pull_request_id,
3185 3185 'url': pull_request_url,
3186 3186 'title': pull_request.title,
3187 3187 'description': pull_request.description,
3188 3188 'status': pull_request.status,
3189 3189 'created_on': pull_request.created_on,
3190 3190 'updated_on': pull_request.updated_on,
3191 3191 'commit_ids': pull_request.revisions,
3192 3192 'review_status': pull_request.calculated_review_status(),
3193 3193 'mergeable': {
3194 3194 'status': merge_status[0],
3195 3195 'message': unicode(merge_status[1]),
3196 3196 },
3197 3197 'source': {
3198 3198 'clone_url': pull_request.source_repo.clone_url(),
3199 3199 'repository': pull_request.source_repo.repo_name,
3200 3200 'reference': {
3201 3201 'name': pull_request.source_ref_parts.name,
3202 3202 'type': pull_request.source_ref_parts.type,
3203 3203 'commit_id': pull_request.source_ref_parts.commit_id,
3204 3204 },
3205 3205 },
3206 3206 'target': {
3207 3207 'clone_url': pull_request.target_repo.clone_url(),
3208 3208 'repository': pull_request.target_repo.repo_name,
3209 3209 'reference': {
3210 3210 'name': pull_request.target_ref_parts.name,
3211 3211 'type': pull_request.target_ref_parts.type,
3212 3212 'commit_id': pull_request.target_ref_parts.commit_id,
3213 3213 },
3214 3214 },
3215 3215 'merge': merge_data,
3216 3216 'author': pull_request.author.get_api_data(include_secrets=False,
3217 3217 details='basic'),
3218 3218 'reviewers': [
3219 3219 {
3220 3220 'user': reviewer.get_api_data(include_secrets=False,
3221 3221 details='basic'),
3222 3222 'reasons': reasons,
3223 3223 'review_status': st[0][1].status if st else 'not_reviewed',
3224 3224 }
3225 3225 for reviewer, reasons, st in pull_request.reviewers_statuses()
3226 3226 ]
3227 3227 }
3228 3228
3229 3229 return data
3230 3230
3231 3231
3232 3232 class PullRequest(Base, _PullRequestBase):
3233 3233 __tablename__ = 'pull_requests'
3234 3234 __table_args__ = (
3235 3235 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3236 3236 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3237 3237 )
3238 3238
3239 3239 pull_request_id = Column(
3240 3240 'pull_request_id', Integer(), nullable=False, primary_key=True)
3241 3241
3242 3242 def __repr__(self):
3243 3243 if self.pull_request_id:
3244 3244 return '<DB:PullRequest #%s>' % self.pull_request_id
3245 3245 else:
3246 3246 return '<DB:PullRequest at %#x>' % id(self)
3247 3247
3248 3248 reviewers = relationship('PullRequestReviewers',
3249 3249 cascade="all, delete, delete-orphan")
3250 3250 statuses = relationship('ChangesetStatus')
3251 3251 comments = relationship('ChangesetComment',
3252 3252 cascade="all, delete, delete-orphan")
3253 3253 versions = relationship('PullRequestVersion',
3254 3254 cascade="all, delete, delete-orphan",
3255 3255 lazy='dynamic')
3256 3256
3257
3258 3257 @classmethod
3259 3258 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3260 3259 internal_methods=None):
3261 3260
3262 3261 class PullRequestDisplay(object):
3263 3262 """
3264 3263 Special object wrapper for showing PullRequest data via Versions.
3265 3264 It mimics the PR object as closely as possible. This is a read-only
3266 3265 object intended just for display.
3267 3266 """
3268 3267
3269 3268 def __init__(self, attrs, internal=None):
3270 3269 self.attrs = attrs
3271 3270 # internal attributes take priority over the ones given via attrs
3272 3271 self.internal = internal or ['versions']
3273 3272
3274 3273 def __getattr__(self, item):
3275 3274 if item in self.internal:
3276 3275 return getattr(self, item)
3277 3276 try:
3278 3277 return self.attrs[item]
3279 3278 except KeyError:
3280 3279 raise AttributeError(
3281 3280 '%s object has no attribute %s' % (self, item))
3282 3281
3283 3282 def __repr__(self):
3284 3283 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3285 3284
3286 3285 def versions(self):
3287 3286 return pull_request_obj.versions.order_by(
3288 3287 PullRequestVersion.pull_request_version_id).all()
3289 3288
3290 3289 def is_closed(self):
3291 3290 return pull_request_obj.is_closed()
3292 3291
3292 @property
3293 def pull_request_version_id(self):
3294 return getattr(pull_request_obj, 'pull_request_version_id', None)
3295
3293 3296 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3294 3297
3295 3298 attrs.author = StrictAttributeDict(
3296 3299 pull_request_obj.author.get_api_data())
3297 3300 if pull_request_obj.target_repo:
3298 3301 attrs.target_repo = StrictAttributeDict(
3299 3302 pull_request_obj.target_repo.get_api_data())
3300 3303 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3301 3304
3302 3305 if pull_request_obj.source_repo:
3303 3306 attrs.source_repo = StrictAttributeDict(
3304 3307 pull_request_obj.source_repo.get_api_data())
3305 3308 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3306 3309
3307 3310 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3308 3311 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3309 3312 attrs.revisions = pull_request_obj.revisions
3310 3313
3311 3314 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3312 3315
3313 3316 return PullRequestDisplay(attrs, internal=internal_methods)
3314 3317
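A hedged sketch of using the display wrapper above; pr_at_version may be the PullRequest itself or one of its PullRequestVersion entries, and pull_request is the original pull request (both assumed to be loaded already):

display_pr = PullRequest.get_pr_display_object(pr_at_version, pull_request)
display_pr.title        # proxied from pr_at_version.get_api_data()
display_pr.versions()   # full version history, ordered by version id
display_pr.is_closed()  # delegated back to pr_at_version
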
3315 3318 def is_closed(self):
3316 3319 return self.status == self.STATUS_CLOSED
3317 3320
3318 3321 def __json__(self):
3319 3322 return {
3320 3323 'revisions': self.revisions,
3321 3324 }
3322 3325
3323 3326 def calculated_review_status(self):
3324 3327 from rhodecode.model.changeset_status import ChangesetStatusModel
3325 3328 return ChangesetStatusModel().calculated_review_status(self)
3326 3329
3327 3330 def reviewers_statuses(self):
3328 3331 from rhodecode.model.changeset_status import ChangesetStatusModel
3329 3332 return ChangesetStatusModel().reviewers_statuses(self)
3330 3333
3331 3334 @property
3332 3335 def workspace_id(self):
3333 3336 from rhodecode.model.pull_request import PullRequestModel
3334 3337 return PullRequestModel()._workspace_id(self)
3335 3338
3336 3339 def get_shadow_repo(self):
3337 3340 workspace_id = self.workspace_id
3338 3341 vcs_obj = self.target_repo.scm_instance()
3339 3342 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3340 3343 workspace_id)
3341 3344 return vcs_obj._get_shadow_instance(shadow_repository_path)
3342 3345
3343 3346
3344 3347 class PullRequestVersion(Base, _PullRequestBase):
3345 3348 __tablename__ = 'pull_request_versions'
3346 3349 __table_args__ = (
3347 3350 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3348 3351 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3349 3352 )
3350 3353
3351 3354 pull_request_version_id = Column(
3352 3355 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3353 3356 pull_request_id = Column(
3354 3357 'pull_request_id', Integer(),
3355 3358 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3356 3359 pull_request = relationship('PullRequest')
3357 3360
3358 3361 def __repr__(self):
3359 3362 if self.pull_request_version_id:
3360 3363 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3361 3364 else:
3362 3365 return '<DB:PullRequestVersion at %#x>' % id(self)
3363 3366
3364 3367 @property
3365 3368 def reviewers(self):
3366 3369 return self.pull_request.reviewers
3367 3370
3368 3371 @property
3369 3372 def versions(self):
3370 3373 return self.pull_request.versions
3371 3374
3372 3375 def is_closed(self):
3373 3376 # calculate from original
3374 3377 return self.pull_request.status == self.STATUS_CLOSED
3375 3378
3376 3379 def calculated_review_status(self):
3377 3380 return self.pull_request.calculated_review_status()
3378 3381
3379 3382 def reviewers_statuses(self):
3380 3383 return self.pull_request.reviewers_statuses()
3381 3384
3382 3385
3383 3386 class PullRequestReviewers(Base, BaseModel):
3384 3387 __tablename__ = 'pull_request_reviewers'
3385 3388 __table_args__ = (
3386 3389 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3387 3390 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3388 3391 )
3389 3392
3390 3393 def __init__(self, user=None, pull_request=None, reasons=None):
3391 3394 self.user = user
3392 3395 self.pull_request = pull_request
3393 3396 self.reasons = reasons or []
3394 3397
3395 3398 @hybrid_property
3396 3399 def reasons(self):
3397 3400 if not self._reasons:
3398 3401 return []
3399 3402 return self._reasons
3400 3403
3401 3404 @reasons.setter
3402 3405 def reasons(self, val):
3403 3406 val = val or []
3404 3407 if any(not isinstance(x, basestring) for x in val):
3405 3408 raise Exception('invalid reasons type, must be list of strings')
3406 3409 self._reasons = val
3407 3410
3408 3411 pull_requests_reviewers_id = Column(
3409 3412 'pull_requests_reviewers_id', Integer(), nullable=False,
3410 3413 primary_key=True)
3411 3414 pull_request_id = Column(
3412 3415 "pull_request_id", Integer(),
3413 3416 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3414 3417 user_id = Column(
3415 3418 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3416 3419 _reasons = Column(
3417 3420 'reason', MutationList.as_mutable(
3418 3421 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3419 3422
3420 3423 user = relationship('User')
3421 3424 pull_request = relationship('PullRequest')
3422 3425
3423 3426
3424 3427 class Notification(Base, BaseModel):
3425 3428 __tablename__ = 'notifications'
3426 3429 __table_args__ = (
3427 3430 Index('notification_type_idx', 'type'),
3428 3431 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3429 3432 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3430 3433 )
3431 3434
3432 3435 TYPE_CHANGESET_COMMENT = u'cs_comment'
3433 3436 TYPE_MESSAGE = u'message'
3434 3437 TYPE_MENTION = u'mention'
3435 3438 TYPE_REGISTRATION = u'registration'
3436 3439 TYPE_PULL_REQUEST = u'pull_request'
3437 3440 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3438 3441
3439 3442 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3440 3443 subject = Column('subject', Unicode(512), nullable=True)
3441 3444 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3442 3445 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3443 3446 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3444 3447 type_ = Column('type', Unicode(255))
3445 3448
3446 3449 created_by_user = relationship('User')
3447 3450 notifications_to_users = relationship('UserNotification', lazy='joined',
3448 3451 cascade="all, delete, delete-orphan")
3449 3452
3450 3453 @property
3451 3454 def recipients(self):
3452 3455 return [x.user for x in UserNotification.query()\
3453 3456 .filter(UserNotification.notification == self)\
3454 3457 .order_by(UserNotification.user_id.asc()).all()]
3455 3458
3456 3459 @classmethod
3457 3460 def create(cls, created_by, subject, body, recipients, type_=None):
3458 3461 if type_ is None:
3459 3462 type_ = Notification.TYPE_MESSAGE
3460 3463
3461 3464 notification = cls()
3462 3465 notification.created_by_user = created_by
3463 3466 notification.subject = subject
3464 3467 notification.body = body
3465 3468 notification.type_ = type_
3466 3469 notification.created_on = datetime.datetime.now()
3467 3470
3468 3471 for u in recipients:
3469 3472 assoc = UserNotification()
3470 3473 assoc.notification = notification
3471 3474
3472 3475 # if created_by is among the recipients, mark their own
3473 3476 # notification as read
3474 3477 if u.user_id == created_by.user_id:
3475 3478 assoc.read = True
3476 3479
3477 3480 u.notifications.append(assoc)
3478 3481 Session().add(notification)
3479 3482
3480 3483 return notification
3481 3484
3482 3485 @property
3483 3486 def description(self):
3484 3487 from rhodecode.model.notification import NotificationModel
3485 3488 return NotificationModel().make_description(self)
3486 3489
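# Minimal illustrative sketch: Notification.create() attaches a
# UserNotification row per recipient and marks the creator's own copy as
# read; the caller is expected to commit. `admin_user` and `users` are
# assumed inputs and the helper name is hypothetical.
def _example_notify(admin_user, users):
    notification = Notification.create(
        created_by=admin_user, subject=u'Heads up',
        body=u'Please have a look', recipients=users,
        type_=Notification.TYPE_MESSAGE)
    Session().commit()
    return notification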
3487 3490
3488 3491 class UserNotification(Base, BaseModel):
3489 3492 __tablename__ = 'user_to_notification'
3490 3493 __table_args__ = (
3491 3494 UniqueConstraint('user_id', 'notification_id'),
3492 3495 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3493 3496 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3494 3497 )
3495 3498 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3496 3499 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3497 3500 read = Column('read', Boolean, default=False)
3498 3501 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3499 3502
3500 3503 user = relationship('User', lazy="joined")
3501 3504 notification = relationship('Notification', lazy="joined",
3502 3505 order_by=lambda: Notification.created_on.desc(),)
3503 3506
3504 3507 def mark_as_read(self):
3505 3508 self.read = True
3506 3509 Session().add(self)
3507 3510
3508 3511
3509 3512 class Gist(Base, BaseModel):
3510 3513 __tablename__ = 'gists'
3511 3514 __table_args__ = (
3512 3515 Index('g_gist_access_id_idx', 'gist_access_id'),
3513 3516 Index('g_created_on_idx', 'created_on'),
3514 3517 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3515 3518 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3516 3519 )
3517 3520 GIST_PUBLIC = u'public'
3518 3521 GIST_PRIVATE = u'private'
3519 3522 DEFAULT_FILENAME = u'gistfile1.txt'
3520 3523
3521 3524 ACL_LEVEL_PUBLIC = u'acl_public'
3522 3525 ACL_LEVEL_PRIVATE = u'acl_private'
3523 3526
3524 3527 gist_id = Column('gist_id', Integer(), primary_key=True)
3525 3528 gist_access_id = Column('gist_access_id', Unicode(250))
3526 3529 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3527 3530 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3528 3531 gist_expires = Column('gist_expires', Float(53), nullable=False)
3529 3532 gist_type = Column('gist_type', Unicode(128), nullable=False)
3530 3533 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3531 3534 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3532 3535 acl_level = Column('acl_level', Unicode(128), nullable=True)
3533 3536
3534 3537 owner = relationship('User')
3535 3538
3536 3539 def __repr__(self):
3537 3540 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3538 3541
3539 3542 @classmethod
3540 3543 def get_or_404(cls, id_):
3541 3544 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3542 3545 if not res:
3543 3546 raise HTTPNotFound
3544 3547 return res
3545 3548
3546 3549 @classmethod
3547 3550 def get_by_access_id(cls, gist_access_id):
3548 3551 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3549 3552
3550 3553 def gist_url(self):
3551 3554 import rhodecode
3552 3555 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3553 3556 if alias_url:
3554 3557 return alias_url.replace('{gistid}', self.gist_access_id)
3555 3558
3556 3559 return url('gist', gist_id=self.gist_access_id, qualified=True)
3557 3560
3558 3561 @classmethod
3559 3562 def base_path(cls):
3560 3563 """
3561 3564 Returns the base path where all gists are stored
3562 3565
3563 3566 :param cls:
3564 3567 """
3565 3568 from rhodecode.model.gist import GIST_STORE_LOC
3566 3569 q = Session().query(RhodeCodeUi)\
3567 3570 .filter(RhodeCodeUi.ui_key == URL_SEP)
3568 3571 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3569 3572 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3570 3573
3571 3574 def get_api_data(self):
3572 3575 """
3573 3576 Common function for generating gist related data for API
3574 3577 """
3575 3578 gist = self
3576 3579 data = {
3577 3580 'gist_id': gist.gist_id,
3578 3581 'type': gist.gist_type,
3579 3582 'access_id': gist.gist_access_id,
3580 3583 'description': gist.gist_description,
3581 3584 'url': gist.gist_url(),
3582 3585 'expires': gist.gist_expires,
3583 3586 'created_on': gist.created_on,
3584 3587 'modified_at': gist.modified_at,
3585 3588 'content': None,
3586 3589 'acl_level': gist.acl_level,
3587 3590 }
3588 3591 return data
3589 3592
3590 3593 def __json__(self):
3591 3594 data = dict(
3592 3595 )
3593 3596 data.update(self.get_api_data())
3594 3597 return data
3595 3598 # SCM functions
3596 3599
3597 3600 def scm_instance(self, **kwargs):
3598 3601 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3599 3602 return get_vcs_instance(
3600 3603 repo_path=safe_str(full_repo_path), create=False)
3601 3604
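# Minimal illustrative sketch: resolving a gist by its access id and building
# the API payload; `get_or_404` raises HTTPNotFound for unknown ids. The
# helper name is hypothetical.
def _example_gist_api_payload(gist_access_id):
    gist = Gist.get_or_404(gist_access_id)
    data = gist.get_api_data()
    # 'content' stays None here; callers that need it read the gist repo
    # via gist.scm_instance()
    return data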
3602 3605
3603 3606 class ExternalIdentity(Base, BaseModel):
3604 3607 __tablename__ = 'external_identities'
3605 3608 __table_args__ = (
3606 3609 Index('local_user_id_idx', 'local_user_id'),
3607 3610 Index('external_id_idx', 'external_id'),
3608 3611 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3609 3612 'mysql_charset': 'utf8'})
3610 3613
3611 3614 external_id = Column('external_id', Unicode(255), default=u'',
3612 3615 primary_key=True)
3613 3616 external_username = Column('external_username', Unicode(1024), default=u'')
3614 3617 local_user_id = Column('local_user_id', Integer(),
3615 3618 ForeignKey('users.user_id'), primary_key=True)
3616 3619 provider_name = Column('provider_name', Unicode(255), default=u'',
3617 3620 primary_key=True)
3618 3621 access_token = Column('access_token', String(1024), default=u'')
3619 3622 alt_token = Column('alt_token', String(1024), default=u'')
3620 3623 token_secret = Column('token_secret', String(1024), default=u'')
3621 3624
3622 3625 @classmethod
3623 3626 def by_external_id_and_provider(cls, external_id, provider_name,
3624 3627 local_user_id=None):
3625 3628 """
3626 3629 Returns ExternalIdentity instance based on search params
3627 3630
3628 3631 :param external_id:
3629 3632 :param provider_name:
3630 3633 :return: ExternalIdentity
3631 3634 """
3632 3635 query = cls.query()
3633 3636 query = query.filter(cls.external_id == external_id)
3634 3637 query = query.filter(cls.provider_name == provider_name)
3635 3638 if local_user_id:
3636 3639 query = query.filter(cls.local_user_id == local_user_id)
3637 3640 return query.first()
3638 3641
3639 3642 @classmethod
3640 3643 def user_by_external_id_and_provider(cls, external_id, provider_name):
3641 3644 """
3642 3645 Returns User instance based on search params
3643 3646
3644 3647 :param external_id:
3645 3648 :param provider_name:
3646 3649 :return: User
3647 3650 """
3648 3651 query = User.query()
3649 3652 query = query.filter(cls.external_id == external_id)
3650 3653 query = query.filter(cls.provider_name == provider_name)
3651 3654 query = query.filter(User.user_id == cls.local_user_id)
3652 3655 return query.first()
3653 3656
3654 3657 @classmethod
3655 3658 def by_local_user_id(cls, local_user_id):
3656 3659 """
3657 3660 Returns all external identities for the given user
3658 3661
3659 3662 :param local_user_id:
3660 3663 :return: ExternalIdentity
3661 3664 """
3662 3665 query = cls.query()
3663 3666 query = query.filter(cls.local_user_id == local_user_id)
3664 3667 return query
3665 3668
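# Minimal illustrative sketch: mapping an identity from an external provider
# back to a local User. The provider name 'github' is an example value only;
# the helper name is hypothetical.
def _example_resolve_external_user(external_id):
    return ExternalIdentity.user_by_external_id_and_provider(
        external_id, provider_name=u'github')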
3666 3669
3667 3670 class Integration(Base, BaseModel):
3668 3671 __tablename__ = 'integrations'
3669 3672 __table_args__ = (
3670 3673 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3671 3674 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3672 3675 )
3673 3676
3674 3677 integration_id = Column('integration_id', Integer(), primary_key=True)
3675 3678 integration_type = Column('integration_type', String(255))
3676 3679 enabled = Column('enabled', Boolean(), nullable=False)
3677 3680 name = Column('name', String(255), nullable=False)
3678 3681 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
3679 3682 default=False)
3680 3683
3681 3684 settings = Column(
3682 3685 'settings_json', MutationObj.as_mutable(
3683 3686 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3684 3687 repo_id = Column(
3685 3688 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3686 3689 nullable=True, unique=None, default=None)
3687 3690 repo = relationship('Repository', lazy='joined')
3688 3691
3689 3692 repo_group_id = Column(
3690 3693 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3691 3694 nullable=True, unique=None, default=None)
3692 3695 repo_group = relationship('RepoGroup', lazy='joined')
3693 3696
3694 3697 @property
3695 3698 def scope(self):
3696 3699 if self.repo:
3697 3700 return repr(self.repo)
3698 3701 if self.repo_group:
3699 3702 if self.child_repos_only:
3700 3703 return repr(self.repo_group) + ' (child repos only)'
3701 3704 else:
3702 3705 return repr(self.repo_group) + ' (recursive)'
3703 3706 if self.child_repos_only:
3704 3707 return 'root_repos'
3705 3708 return 'global'
3706 3709
3707 3710 def __repr__(self):
3708 3711 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
3709 3712
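# Minimal illustrative sketch: the `scope` property folds repo, repo group and
# the child_repos_only flag into one label, e.g. 'global' or
# "<RepoGroup(...)> (recursive)". The helper name is hypothetical.
def _example_integration_label(integration):
    return '%s (%s): %s' % (
        integration.name, integration.integration_type, integration.scope)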
3710 3713
3711 3714 class RepoReviewRuleUser(Base, BaseModel):
3712 3715 __tablename__ = 'repo_review_rules_users'
3713 3716 __table_args__ = (
3714 3717 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3715 3718 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3716 3719 )
3717 3720 repo_review_rule_user_id = Column(
3718 3721 'repo_review_rule_user_id', Integer(), primary_key=True)
3719 3722 repo_review_rule_id = Column("repo_review_rule_id",
3720 3723 Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3721 3724 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'),
3722 3725 nullable=False)
3723 3726 user = relationship('User')
3724 3727
3725 3728
3726 3729 class RepoReviewRuleUserGroup(Base, BaseModel):
3727 3730 __tablename__ = 'repo_review_rules_users_groups'
3728 3731 __table_args__ = (
3729 3732 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3730 3733 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3731 3734 )
3732 3735 repo_review_rule_users_group_id = Column(
3733 3736 'repo_review_rule_users_group_id', Integer(), primary_key=True)
3734 3737 repo_review_rule_id = Column("repo_review_rule_id",
3735 3738 Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3736 3739 users_group_id = Column("users_group_id", Integer(),
3737 3740 ForeignKey('users_groups.users_group_id'), nullable=False)
3738 3741 users_group = relationship('UserGroup')
3739 3742
3740 3743
3741 3744 class RepoReviewRule(Base, BaseModel):
3742 3745 __tablename__ = 'repo_review_rules'
3743 3746 __table_args__ = (
3744 3747 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3745 3748 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3746 3749 )
3747 3750
3748 3751 repo_review_rule_id = Column(
3749 3752 'repo_review_rule_id', Integer(), primary_key=True)
3750 3753 repo_id = Column(
3751 3754 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
3752 3755 repo = relationship('Repository', backref='review_rules')
3753 3756
3754 3757 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'),
3755 3758 default=u'*') # glob
3756 3759 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'),
3757 3760 default=u'*') # glob
3758 3761
3759 3762 use_authors_for_review = Column("use_authors_for_review", Boolean(),
3760 3763 nullable=False, default=False)
3761 3764 rule_users = relationship('RepoReviewRuleUser')
3762 3765 rule_user_groups = relationship('RepoReviewRuleUserGroup')
3763 3766
3764 3767 @hybrid_property
3765 3768 def branch_pattern(self):
3766 3769 return self._branch_pattern or '*'
3767 3770
3768 3771 def _validate_glob(self, value):
3769 3772 re.compile('^' + glob2re(value) + '$')
3770 3773
3771 3774 @branch_pattern.setter
3772 3775 def branch_pattern(self, value):
3773 3776 self._validate_glob(value)
3774 3777 self._branch_pattern = value or '*'
3775 3778
3776 3779 @hybrid_property
3777 3780 def file_pattern(self):
3778 3781 return self._file_pattern or '*'
3779 3782
3780 3783 @file_pattern.setter
3781 3784 def file_pattern(self, value):
3782 3785 self._validate_glob(value)
3783 3786 self._file_pattern = value or '*'
3784 3787
3785 3788 def matches(self, branch, files_changed):
3786 3789 """
3787 3790 Check if this review rule matches a branch/files in a pull request
3788 3791
3789 3792 :param branch: branch name for the commit
3790 3793 :param files_changed: list of file paths changed in the pull request
3791 3794 """
3792 3795
3793 3796 branch = branch or ''
3794 3797 files_changed = files_changed or []
3795 3798
3796 3799 branch_matches = True
3797 3800 if branch:
3798 3801 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
3799 3802 branch_matches = bool(branch_regex.search(branch))
3800 3803
3801 3804 files_matches = True
3802 3805 if self.file_pattern != '*':
3803 3806 files_matches = False
3804 3807 file_regex = re.compile(glob2re(self.file_pattern))
3805 3808 for filename in files_changed:
3806 3809 if file_regex.search(filename):
3807 3810 files_matches = True
3808 3811 break
3809 3812
3810 3813 return branch_matches and files_matches
3811 3814
3812 3815 @property
3813 3816 def review_users(self):
3814 3817 """ Returns the users that this rule applies to """
3815 3818
3816 3819 users = set()
3817 3820 users |= set([
3818 3821 rule_user.user for rule_user in self.rule_users
3819 3822 if rule_user.user.active])
3820 3823 users |= set(
3821 3824 member.user
3822 3825 for rule_user_group in self.rule_user_groups
3823 3826 for member in rule_user_group.users_group.members
3824 3827 if member.user.active
3825 3828 )
3826 3829 return users
3827 3830
3828 3831 def __repr__(self):
3829 3832 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
3830 3833 self.repo_review_rule_id, self.repo)
3831 3834
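# Minimal illustrative sketch: branch and file patterns are globs compiled via
# glob2re, with '*' matching everything. The branch and file names below are
# example values; the helper name is hypothetical.
def _example_reviewers_for_change(rule):
    if rule.matches(branch='feature/login',
                    files_changed=['docs/index.rst', 'setup.py']):
        return rule.review_users  # set of active User objects
    return set()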
3832 3835
3833 3836 class DbMigrateVersion(Base, BaseModel):
3834 3837 __tablename__ = 'db_migrate_version'
3835 3838 __table_args__ = (
3836 3839 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3837 3840 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3838 3841 )
3839 3842 repository_id = Column('repository_id', String(250), primary_key=True)
3840 3843 repository_path = Column('repository_path', Text)
3841 3844 version = Column('version', Integer)
3842 3845
3843 3846
3844 3847 class DbSession(Base, BaseModel):
3845 3848 __tablename__ = 'db_session'
3846 3849 __table_args__ = (
3847 3850 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3848 3851 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3849 3852 )
3850 3853
3851 3854 def __repr__(self):
3852 3855 return '<DB:DbSession({})>'.format(self.id)
3853 3856
3854 3857 id = Column('id', Integer())
3855 3858 namespace = Column('namespace', String(255), primary_key=True)
3856 3859 accessed = Column('accessed', DateTime, nullable=False)
3857 3860 created = Column('created', DateTime, nullable=False)
3858 3861 data = Column('data', PickleType, nullable=False)
@@ -1,376 +1,378 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Model for notifications
24 24 """
25 25
26 26
27 27 import logging
28 28 import traceback
29 29
30 30 from pylons.i18n.translation import _, ungettext
31 31 from sqlalchemy.sql.expression import false, true
32 32 from mako import exceptions
33 33
34 34 import rhodecode
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.lib.utils import PartialRenderer
37 37 from rhodecode.model import BaseModel
38 38 from rhodecode.model.db import Notification, User, UserNotification
39 39 from rhodecode.model.meta import Session
40 40 from rhodecode.model.settings import SettingsModel
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 class NotificationModel(BaseModel):
46 46
47 47 cls = Notification
48 48
49 49 def __get_notification(self, notification):
50 50 if isinstance(notification, Notification):
51 51 return notification
52 52 elif isinstance(notification, (int, long)):
53 53 return Notification.get(notification)
54 54 else:
55 55 if notification:
56 56 raise Exception('notification must be int, long or an instance'
57 57 ' of Notification, got %s' % type(notification))
58 58
59 59 def create(
60 60 self, created_by, notification_subject, notification_body,
61 61 notification_type=Notification.TYPE_MESSAGE, recipients=None,
62 62 mention_recipients=None, with_email=True, email_kwargs=None):
63 63 """
64 64
65 65 Creates notification of given type
66 66
67 67 :param created_by: int, str or User instance. User who created this
68 68 notification
69 69 :param notification_subject: subject of notification itself
70 70 :param notification_body: body of notification text
71 71 :param notification_type: type of notification, based on that we
72 72 pick templates
73 73
74 74 :param recipients: list of int, str or User objects, when None
75 75 is given send to all admins
76 76 :param mention_recipients: list of int, str or User objects,
77 77 that were mentioned
78 78 :param with_email: send email with this notification
79 79 :param email_kwargs: dict with arguments to generate email
80 80 """
81 81
82 82 from rhodecode.lib.celerylib import tasks, run_task
83 83
84 84 if recipients and not getattr(recipients, '__iter__', False):
85 85 raise Exception('recipients must be an iterable object')
86 86
87 87 created_by_obj = self._get_user(created_by)
88 88 # default MAIN body if not given
89 89 email_kwargs = email_kwargs or {'body': notification_body}
90 90 mention_recipients = mention_recipients or set()
91 91
92 92 if not created_by_obj:
93 93 raise Exception('unknown user %s' % created_by)
94 94
95 95 if recipients is None:
96 96 # recipients=None means send to all admins
97 97 recipients_objs = User.query().filter(User.admin == true()).all()
98 98 log.debug('sending notifications %s to admins: %s',
99 99 notification_type, recipients_objs)
100 100 else:
101 101 recipients_objs = []
102 102 for u in recipients:
103 103 obj = self._get_user(u)
104 104 if obj:
105 105 recipients_objs.append(obj)
106 106 else: # we didn't find this user, log the error and carry on
107 107 log.error('cannot notify unknown user %r', u)
108 108
109 109 recipients_objs = set(recipients_objs)
110 110 if not recipients_objs:
111 111 raise Exception('no valid recipients specified')
112 112
113 113 log.debug('sending notifications %s to %s',
114 114 notification_type, recipients_objs)
115 115
116 116 # add mentioned users into recipients
117 117 final_recipients = set(recipients_objs).union(mention_recipients)
118 118 notification = Notification.create(
119 119 created_by=created_by_obj, subject=notification_subject,
120 120 body=notification_body, recipients=final_recipients,
121 121 type_=notification_type
122 122 )
123 123
124 124 if not with_email: # skip sending email, and just create notification
125 125 return notification
126 126
127 127 # don't send email to person who created this comment
128 128 rec_objs = set(recipients_objs).difference(set([created_by_obj]))
129 129
130 130 # now notify all recipients in question
131 131
132 132 for recipient in rec_objs.union(mention_recipients):
133 133 # inject current recipient
134 134 email_kwargs['recipient'] = recipient
135 135 email_kwargs['mention'] = recipient in mention_recipients
136 136 (subject, headers, email_body,
137 137 email_body_plaintext) = EmailNotificationModel().render_email(
138 138 notification_type, **email_kwargs)
139 139
140 140 log.debug(
141 141 'Creating notification email task for user:`%s`', recipient)
142 142 task = run_task(
143 143 tasks.send_email, recipient.email, subject,
144 144 email_body_plaintext, email_body)
145 145 log.debug('Created email task: %s', task)
146 146
147 147 return notification
148 148
149 149 def delete(self, user, notification):
150 150 # we don't want to remove actual notification just the assignment
151 151 try:
152 152 notification = self.__get_notification(notification)
153 153 user = self._get_user(user)
154 154 if notification and user:
155 155 obj = UserNotification.query()\
156 156 .filter(UserNotification.user == user)\
157 157 .filter(UserNotification.notification == notification)\
158 158 .one()
159 159 Session().delete(obj)
160 160 return True
161 161 except Exception:
162 162 log.error(traceback.format_exc())
163 163 raise
164 164
165 165 def get_for_user(self, user, filter_=None):
166 166 """
167 167 Get notifications for the given user; filter them if a filter list is given
168 168
169 169 :param user:
170 170 :param filter_:
171 171 """
172 172 user = self._get_user(user)
173 173
174 174 q = UserNotification.query()\
175 175 .filter(UserNotification.user == user)\
176 176 .join((
177 177 Notification, UserNotification.notification_id ==
178 178 Notification.notification_id))
179 179
180 180 if filter_:
181 181 q = q.filter(Notification.type_.in_(filter_))
182 182
183 183 return q.all()
184 184
185 185 def mark_read(self, user, notification):
186 186 try:
187 187 notification = self.__get_notification(notification)
188 188 user = self._get_user(user)
189 189 if notification and user:
190 190 obj = UserNotification.query()\
191 191 .filter(UserNotification.user == user)\
192 192 .filter(UserNotification.notification == notification)\
193 193 .one()
194 194 obj.read = True
195 195 Session().add(obj)
196 196 return True
197 197 except Exception:
198 198 log.error(traceback.format_exc())
199 199 raise
200 200
201 201 def mark_all_read_for_user(self, user, filter_=None):
202 202 user = self._get_user(user)
203 203 q = UserNotification.query()\
204 204 .filter(UserNotification.user == user)\
205 205 .filter(UserNotification.read == false())\
206 206 .join((
207 207 Notification, UserNotification.notification_id ==
208 208 Notification.notification_id))
209 209 if filter_:
210 210 q = q.filter(Notification.type_.in_(filter_))
211 211
212 212 # this is a little inefficient but sqlalchemy doesn't support
213 213 # update on joined tables :(
214 214 for obj in q.all():
215 215 obj.read = True
216 216 Session().add(obj)
217 217
218 218 def get_unread_cnt_for_user(self, user):
219 219 user = self._get_user(user)
220 220 return UserNotification.query()\
221 221 .filter(UserNotification.read == false())\
222 222 .filter(UserNotification.user == user).count()
223 223
224 224 def get_unread_for_user(self, user):
225 225 user = self._get_user(user)
226 226 return [x.notification for x in UserNotification.query()
227 227 .filter(UserNotification.read == false())
228 228 .filter(UserNotification.user == user).all()]
229 229
230 230 def get_user_notification(self, user, notification):
231 231 user = self._get_user(user)
232 232 notification = self.__get_notification(notification)
233 233
234 234 return UserNotification.query()\
235 235 .filter(UserNotification.notification == notification)\
236 236 .filter(UserNotification.user == user).scalar()
237 237
238 def make_description(self, notification, show_age=True):
238 def make_description(self, notification, show_age=True, translate=None):
239 239 """
240 240 Creates a human-readable description based on properties
241 241 of the notification object
242 242 """
243 243
244 244 _map = {
245 245 notification.TYPE_CHANGESET_COMMENT: [
246 246 _('%(user)s commented on commit %(date_or_age)s'),
247 247 _('%(user)s commented on commit at %(date_or_age)s'),
248 248 ],
249 249 notification.TYPE_MESSAGE: [
250 250 _('%(user)s sent message %(date_or_age)s'),
251 251 _('%(user)s sent message at %(date_or_age)s'),
252 252 ],
253 253 notification.TYPE_MENTION: [
254 254 _('%(user)s mentioned you %(date_or_age)s'),
255 255 _('%(user)s mentioned you at %(date_or_age)s'),
256 256 ],
257 257 notification.TYPE_REGISTRATION: [
258 258 _('%(user)s registered in RhodeCode %(date_or_age)s'),
259 259 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
260 260 ],
261 261 notification.TYPE_PULL_REQUEST: [
262 262 _('%(user)s opened new pull request %(date_or_age)s'),
263 263 _('%(user)s opened new pull request at %(date_or_age)s'),
264 264 ],
265 265 notification.TYPE_PULL_REQUEST_COMMENT: [
266 266 _('%(user)s commented on pull request %(date_or_age)s'),
267 267 _('%(user)s commented on pull request at %(date_or_age)s'),
268 268 ],
269 269 }
270 270
271 271 templates = _map[notification.type_]
272 272
273 273 if show_age:
274 274 template = templates[0]
275 275 date_or_age = h.age(notification.created_on)
276 if translate:
277 date_or_age = translate(date_or_age)
276 278 else:
277 279 template = templates[1]
278 280 date_or_age = h.format_date(notification.created_on)
279 281
280 282 return template % {
281 283 'user': notification.created_by_user.username,
282 284 'date_or_age': date_or_age,
283 285 }
284 286
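# Minimal illustrative sketch: the new optional `translate` callable lets a
# caller localize the age string produced by h.age(); passing the pylons
# translator `_` is one assumed use. The helper name is hypothetical.
def _example_describe(notification):
    return NotificationModel().make_description(
        notification, show_age=True, translate=_)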
285 287
286 288 class EmailNotificationModel(BaseModel):
287 289 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
288 290 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
289 291 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
290 292 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
291 293 TYPE_MAIN = Notification.TYPE_MESSAGE
292 294
293 295 TYPE_PASSWORD_RESET = 'password_reset'
294 296 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
295 297 TYPE_EMAIL_TEST = 'email_test'
296 298 TYPE_TEST = 'test'
297 299
298 300 email_types = {
299 301 TYPE_MAIN: 'email_templates/main.mako',
300 302 TYPE_TEST: 'email_templates/test.mako',
301 303 TYPE_EMAIL_TEST: 'email_templates/email_test.mako',
302 304 TYPE_REGISTRATION: 'email_templates/user_registration.mako',
303 305 TYPE_PASSWORD_RESET: 'email_templates/password_reset.mako',
304 306 TYPE_PASSWORD_RESET_CONFIRMATION: 'email_templates/password_reset_confirmation.mako',
305 307 TYPE_COMMIT_COMMENT: 'email_templates/commit_comment.mako',
306 308 TYPE_PULL_REQUEST: 'email_templates/pull_request_review.mako',
307 309 TYPE_PULL_REQUEST_COMMENT: 'email_templates/pull_request_comment.mako',
308 310 }
309 311
310 312 def __init__(self):
311 313 """
312 314 Example usage::
313 315
314 316 (subject, headers, email_body,
315 317 email_body_plaintext) = EmailNotificationModel().render_email(
316 318 EmailNotificationModel.TYPE_TEST, **email_kwargs)
317 319
318 320 """
319 321 super(EmailNotificationModel, self).__init__()
320 322 self.rhodecode_instance_name = None
321 323
322 324 def _update_kwargs_for_render(self, kwargs):
323 325 """
324 326 Inject params required for Mako rendering
325 327
326 328 :param kwargs:
327 329 :return:
328 330 """
329 331 rhodecode_name = self.rhodecode_instance_name
330 332 if not rhodecode_name:
331 333 try:
332 334 rc_config = SettingsModel().get_all_settings()
333 335 except Exception:
334 336 log.exception('failed to fetch settings')
335 337 rc_config = {}
336 338 rhodecode_name = rc_config.get('rhodecode_title', '')
337 339 kwargs['rhodecode_instance_name'] = rhodecode_name
338 340
339 341 _kwargs = {
340 342 'instance_url': h.url('home', qualified=True),
341 343 }
342 344 _kwargs.update(kwargs)
343 345 return _kwargs
344 346
345 347 def get_renderer(self, type_):
346 348 template_name = self.email_types[type_]
347 349 return PartialRenderer(template_name)
348 350
349 351 def render_email(self, type_, **kwargs):
350 352 """
351 353 renders template for email, and returns a tuple of
352 354 (subject, email_headers, email_html_body, email_plaintext_body)
353 355 """
354 356 # translator and helpers inject
355 357 _kwargs = self._update_kwargs_for_render(kwargs)
356 358
357 359 email_template = self.get_renderer(type_)
358 360
359 361 subject = email_template.render('subject', **_kwargs)
360 362
361 363 try:
362 364 headers = email_template.render('headers', **_kwargs)
363 365 except AttributeError:
364 366 # it's not defined in template, ok we can skip it
365 367 headers = ''
366 368
367 369 try:
368 370 body_plaintext = email_template.render('body_plaintext', **_kwargs)
369 371 except AttributeError:
370 372 # it's not defined in template, ok we can skip it
371 373 body_plaintext = ''
372 374
373 375 # render WHOLE template
374 376 body = email_template.render(None, **_kwargs)
375 377
376 378 return subject, headers, body, body_plaintext
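# Minimal illustrative sketch: rendering the test email type and handing the
# result to the celery email task, mirroring how NotificationModel.create()
# sends mail. The kwargs and helper name are assumptions; real templates may
# require additional context.
def _example_send_test_email(recipient_email):
    from rhodecode.lib.celerylib import tasks, run_task
    (subject, headers, email_body,
     email_body_plaintext) = EmailNotificationModel().render_email(
        EmailNotificationModel.TYPE_EMAIL_TEST, body=u'test body')
    return run_task(tasks.send_email, recipient_email, subject,
                    email_body_plaintext, email_body)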
@@ -1,1409 +1,1420 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from sqlalchemy import or_
35 35
36 36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 39 from rhodecode.lib.markup_renderer import (
40 40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 41 from rhodecode.lib.utils import action_logger
42 42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 CommitDoesNotExistError, EmptyRepositoryError)
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import CommentsModel
51 51 from rhodecode.model.db import (
52 52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 PullRequestVersion, ChangesetComment)
53 PullRequestVersion, ChangesetComment, Repository)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.notification import NotificationModel, \
56 56 EmailNotificationModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.settings import VcsSettingsModel
59 59
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 # Data structure to hold the response data when updating commits during a pull
65 65 # request update.
66 66 UpdateResponse = namedtuple(
67 67 'UpdateResponse', 'executed, reason, new, old, changes')
68 68
69 69
70 70 class PullRequestModel(BaseModel):
71 71
72 72 cls = PullRequest
73 73
74 74 DIFF_CONTEXT = 3
75 75
76 76 MERGE_STATUS_MESSAGES = {
77 77 MergeFailureReason.NONE: lazy_ugettext(
78 78 'This pull request can be automatically merged.'),
79 79 MergeFailureReason.UNKNOWN: lazy_ugettext(
80 80 'This pull request cannot be merged because of an unhandled'
81 81 ' exception.'),
82 82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
83 83 'This pull request cannot be merged because of merge conflicts.'),
84 84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
85 85 'This pull request could not be merged because push to target'
86 86 ' failed.'),
87 87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
88 88 'This pull request cannot be merged because the target is not a'
89 89 ' head.'),
90 90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
91 91 'This pull request cannot be merged because the source contains'
92 92 ' more branches than the target.'),
93 93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
94 94 'This pull request cannot be merged because the target has'
95 95 ' multiple heads.'),
96 96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
97 97 'This pull request cannot be merged because the target repository'
98 98 ' is locked.'),
99 99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
100 100 'This pull request cannot be merged because the target or the '
101 101 'source reference is missing.'),
102 102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
103 103 'This pull request cannot be merged because the target '
104 104 'reference is missing.'),
105 105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the source '
107 107 'reference is missing.'),
108 108 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
109 109 'This pull request cannot be merged because of conflicts related '
110 110 'to sub repositories.'),
111 111 }
112 112
113 113 UPDATE_STATUS_MESSAGES = {
114 114 UpdateFailureReason.NONE: lazy_ugettext(
115 115 'Pull request update successful.'),
116 116 UpdateFailureReason.UNKNOWN: lazy_ugettext(
117 117 'Pull request update failed because of an unknown error.'),
118 118 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
119 119 'No update needed because the source reference is already '
120 120 'up to date.'),
121 121 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
122 122 'Pull request cannot be updated because the reference type is '
123 123 'not supported for an update.'),
124 124 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 125 'This pull request cannot be updated because the target '
126 126 'reference is missing.'),
127 127 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 128 'This pull request cannot be updated because the source '
129 129 'reference is missing.'),
130 130 }
131 131
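    # Illustrative usage sketch (assumes a merge failure reason obtained
    # elsewhere); the lazy strings above resolve when rendered:
    #
    #   msg = PullRequestModel.MERGE_STATUS_MESSAGES[
    #       MergeFailureReason.TARGET_IS_NOT_HEAD]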
132 132 def __get_pull_request(self, pull_request):
133 133 return self._get_instance((
134 134 PullRequest, PullRequestVersion), pull_request)
135 135
136 136 def _check_perms(self, perms, pull_request, user, api=False):
137 137 if not api:
138 138 return h.HasRepoPermissionAny(*perms)(
139 139 user=user, repo_name=pull_request.target_repo.repo_name)
140 140 else:
141 141 return h.HasRepoPermissionAnyApi(*perms)(
142 142 user=user, repo_name=pull_request.target_repo.repo_name)
143 143
144 144 def check_user_read(self, pull_request, user, api=False):
145 145 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 146 return self._check_perms(_perms, pull_request, user, api)
147 147
148 148 def check_user_merge(self, pull_request, user, api=False):
149 149 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 150 return self._check_perms(_perms, pull_request, user, api)
151 151
152 152 def check_user_update(self, pull_request, user, api=False):
153 153 owner = user.user_id == pull_request.user_id
154 154 return self.check_user_merge(pull_request, user, api) or owner
155 155
156 156 def check_user_delete(self, pull_request, user):
157 157 owner = user.user_id == pull_request.user_id
158 158 _perms = ('repository.admin')
159 159 return self._check_perms(_perms, pull_request, user) or owner
160 160
161 161 def check_user_change_status(self, pull_request, user, api=False):
162 162 reviewer = user.user_id in [x.user_id for x in
163 163 pull_request.reviewers]
164 164 return self.check_user_update(pull_request, user, api) or reviewer
165 165
166 166 def get(self, pull_request):
167 167 return self.__get_pull_request(pull_request)
168 168
169 169 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
170 170 opened_by=None, order_by=None,
171 171 order_dir='desc'):
172 172 repo = None
173 173 if repo_name:
174 174 repo = self._get_repo(repo_name)
175 175
176 176 q = PullRequest.query()
177 177
178 178 # source or target
179 179 if repo and source:
180 180 q = q.filter(PullRequest.source_repo == repo)
181 181 elif repo:
182 182 q = q.filter(PullRequest.target_repo == repo)
183 183
184 184 # closed,opened
185 185 if statuses:
186 186 q = q.filter(PullRequest.status.in_(statuses))
187 187
188 188 # opened by filter
189 189 if opened_by:
190 190 q = q.filter(PullRequest.user_id.in_(opened_by))
191 191
192 192 if order_by:
193 193 order_map = {
194 194 'name_raw': PullRequest.pull_request_id,
195 195 'title': PullRequest.title,
196 196 'updated_on_raw': PullRequest.updated_on,
197 197 'target_repo': PullRequest.target_repo_id
198 198 }
199 199 if order_dir == 'asc':
200 200 q = q.order_by(order_map[order_by].asc())
201 201 else:
202 202 q = q.order_by(order_map[order_by].desc())
203 203
204 204 return q
205 205
206 206 def count_all(self, repo_name, source=False, statuses=None,
207 207 opened_by=None):
208 208 """
209 209 Count the number of pull requests for a specific repository.
210 210
211 211 :param repo_name: target or source repo
212 212 :param source: boolean flag to specify if repo_name refers to source
213 213 :param statuses: list of pull request statuses
214 214 :param opened_by: author user of the pull request
215 215 :returns: int number of pull requests
216 216 """
217 217 q = self._prepare_get_all_query(
218 218 repo_name, source=source, statuses=statuses, opened_by=opened_by)
219 219
220 220 return q.count()
221 221
222 222 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
223 223 offset=0, length=None, order_by=None, order_dir='desc'):
224 224 """
225 225 Get all pull requests for a specific repository.
226 226
227 227 :param repo_name: target or source repo
228 228 :param source: boolean flag to specify if repo_name refers to source
229 229 :param statuses: list of pull request statuses
230 230 :param opened_by: author user of the pull request
231 231 :param offset: pagination offset
232 232 :param length: length of returned list
233 233 :param order_by: order of the returned list
234 234 :param order_dir: 'asc' or 'desc' ordering direction
235 235 :returns: list of pull requests
236 236 """
237 237 q = self._prepare_get_all_query(
238 238 repo_name, source=source, statuses=statuses, opened_by=opened_by,
239 239 order_by=order_by, order_dir=order_dir)
240 240
241 241 if length:
242 242 pull_requests = q.limit(length).offset(offset).all()
243 243 else:
244 244 pull_requests = q.all()
245 245
246 246 return pull_requests
247 247
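    # Illustrative usage sketch (repo name, status value and paging numbers
    # are example values only):
    #
    #   open_prs = PullRequestModel().get_all(
    #       'example/repo', statuses=['new'],
    #       order_by='updated_on_raw', order_dir='desc',
    #       offset=0, length=20)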
248 248 def count_awaiting_review(self, repo_name, source=False, statuses=None,
249 249 opened_by=None):
250 250 """
251 251 Count the number of pull requests for a specific repository that are
252 252 awaiting review.
253 253
254 254 :param repo_name: target or source repo
255 255 :param source: boolean flag to specify if repo_name refers to source
256 256 :param statuses: list of pull request statuses
257 257 :param opened_by: author user of the pull request
258 258 :returns: int number of pull requests
259 259 """
260 260 pull_requests = self.get_awaiting_review(
261 261 repo_name, source=source, statuses=statuses, opened_by=opened_by)
262 262
263 263 return len(pull_requests)
264 264
265 265 def get_awaiting_review(self, repo_name, source=False, statuses=None,
266 266 opened_by=None, offset=0, length=None,
267 267 order_by=None, order_dir='desc'):
268 268 """
269 269 Get all pull requests for a specific repository that are awaiting
270 270 review.
271 271
272 272 :param repo_name: target or source repo
273 273 :param source: boolean flag to specify if repo_name refers to source
274 274 :param statuses: list of pull request statuses
275 275 :param opened_by: author user of the pull request
276 276 :param offset: pagination offset
277 277 :param length: length of returned list
278 278 :param order_by: order of the returned list
279 279 :param order_dir: 'asc' or 'desc' ordering direction
280 280 :returns: list of pull requests
281 281 """
282 282 pull_requests = self.get_all(
283 283 repo_name, source=source, statuses=statuses, opened_by=opened_by,
284 284 order_by=order_by, order_dir=order_dir)
285 285
286 286 _filtered_pull_requests = []
287 287 for pr in pull_requests:
288 288 status = pr.calculated_review_status()
289 289 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
290 290 ChangesetStatus.STATUS_UNDER_REVIEW]:
291 291 _filtered_pull_requests.append(pr)
292 292 if length:
293 293 return _filtered_pull_requests[offset:offset+length]
294 294 else:
295 295 return _filtered_pull_requests
296 296
297 297 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
298 298 opened_by=None, user_id=None):
299 299 """
300 300 Count the number of pull requests for a specific repository that are
301 301 awaiting review from a specific user.
302 302
303 303 :param repo_name: target or source repo
304 304 :param source: boolean flag to specify if repo_name refers to source
305 305 :param statuses: list of pull request statuses
306 306 :param opened_by: author user of the pull request
307 307 :param user_id: reviewer user of the pull request
308 308 :returns: int number of pull requests
309 309 """
310 310 pull_requests = self.get_awaiting_my_review(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 user_id=user_id)
313 313
314 314 return len(pull_requests)
315 315
316 316 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
317 317 opened_by=None, user_id=None, offset=0,
318 318 length=None, order_by=None, order_dir='desc'):
319 319 """
320 320 Get all pull requests for a specific repository that are awaiting
321 321 review from a specific user.
322 322
323 323 :param repo_name: target or source repo
324 324 :param source: boolean flag to specify if repo_name refers to source
325 325 :param statuses: list of pull request statuses
326 326 :param opened_by: author user of the pull request
327 327 :param user_id: reviewer user of the pull request
328 328 :param offset: pagination offset
329 329 :param length: length of returned list
330 330 :param order_by: order of the returned list
331 331 :param order_dir: 'asc' or 'desc' ordering direction
332 332 :returns: list of pull requests
333 333 """
334 334 pull_requests = self.get_all(
335 335 repo_name, source=source, statuses=statuses, opened_by=opened_by,
336 336 order_by=order_by, order_dir=order_dir)
337 337
338 338 _my = PullRequestModel().get_not_reviewed(user_id)
339 339 my_participation = []
340 340 for pr in pull_requests:
341 341 if pr in _my:
342 342 my_participation.append(pr)
343 343 _filtered_pull_requests = my_participation
344 344 if length:
345 345 return _filtered_pull_requests[offset:offset+length]
346 346 else:
347 347 return _filtered_pull_requests
348 348
349 349 def get_not_reviewed(self, user_id):
350 350 return [
351 351 x.pull_request for x in PullRequestReviewers.query().filter(
352 352 PullRequestReviewers.user_id == user_id).all()
353 353 ]
354 354
355 355 def _prepare_participating_query(self, user_id=None, statuses=None,
356 356 order_by=None, order_dir='desc'):
357 357 q = PullRequest.query()
358 358 if user_id:
359 359 reviewers_subquery = Session().query(
360 360 PullRequestReviewers.pull_request_id).filter(
361 361 PullRequestReviewers.user_id == user_id).subquery()
362 362 user_filter= or_(
363 363 PullRequest.user_id == user_id,
364 364 PullRequest.pull_request_id.in_(reviewers_subquery)
365 365 )
366 366 q = PullRequest.query().filter(user_filter)
367 367
368 368 # closed,opened
369 369 if statuses:
370 370 q = q.filter(PullRequest.status.in_(statuses))
371 371
372 372 if order_by:
373 373 order_map = {
374 374 'name_raw': PullRequest.pull_request_id,
375 375 'title': PullRequest.title,
376 376 'updated_on_raw': PullRequest.updated_on,
377 377 'target_repo': PullRequest.target_repo_id
378 378 }
379 379 if order_dir == 'asc':
380 380 q = q.order_by(order_map[order_by].asc())
381 381 else:
382 382 q = q.order_by(order_map[order_by].desc())
383 383
384 384 return q
385 385
386 386 def count_im_participating_in(self, user_id=None, statuses=None):
387 387 q = self._prepare_participating_query(user_id, statuses=statuses)
388 388 return q.count()
389 389
390 390 def get_im_participating_in(
391 391 self, user_id=None, statuses=None, offset=0,
392 392 length=None, order_by=None, order_dir='desc'):
393 393 """
394 394 Get all pull requests that I'm participating in or have opened
395 395 """
396 396
397 397 q = self._prepare_participating_query(
398 398 user_id, statuses=statuses, order_by=order_by,
399 399 order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def get_versions(self, pull_request):
409 409 """
410 410 returns versions of the pull request sorted by version ID ascending
411 411 """
412 412 return PullRequestVersion.query()\
413 413 .filter(PullRequestVersion.pull_request == pull_request)\
414 414 .order_by(PullRequestVersion.pull_request_version_id.asc())\
415 415 .all()
416 416
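    # Illustrative usage sketch: versions come back oldest first, so the most
    # recent snapshot is the last element (assumes an existing `pull_request`):
    #
    #   versions = PullRequestModel().get_versions(pull_request)
    #   latest_version = versions[-1] if versions else None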
417 417 def create(self, created_by, source_repo, source_ref, target_repo,
418 418 target_ref, revisions, reviewers, title, description=None):
419 419 created_by_user = self._get_user(created_by)
420 420 source_repo = self._get_repo(source_repo)
421 421 target_repo = self._get_repo(target_repo)
422 422
423 423 pull_request = PullRequest()
424 424 pull_request.source_repo = source_repo
425 425 pull_request.source_ref = source_ref
426 426 pull_request.target_repo = target_repo
427 427 pull_request.target_ref = target_ref
428 428 pull_request.revisions = revisions
429 429 pull_request.title = title
430 430 pull_request.description = description
431 431 pull_request.author = created_by_user
432 432
433 433 Session().add(pull_request)
434 434 Session().flush()
435 435
436 436 reviewer_ids = set()
437 437 # members / reviewers
438 438 for reviewer_object in reviewers:
439 439 if isinstance(reviewer_object, tuple):
440 440 user_id, reasons = reviewer_object
441 441 else:
442 442 user_id, reasons = reviewer_object, []
443 443
444 444 user = self._get_user(user_id)
445 445 reviewer_ids.add(user.user_id)
446 446
447 447 reviewer = PullRequestReviewers(user, pull_request, reasons)
448 448 Session().add(reviewer)
449 449
450 450 # Set approval status to "Under Review" for all commits which are
451 451 # part of this pull request.
452 452 ChangesetStatusModel().set_status(
453 453 repo=target_repo,
454 454 status=ChangesetStatus.STATUS_UNDER_REVIEW,
455 455 user=created_by_user,
456 456 pull_request=pull_request
457 457 )
458 458
459 459 self.notify_reviewers(pull_request, reviewer_ids)
460 460 self._trigger_pull_request_hook(
461 461 pull_request, created_by_user, 'create')
462 462
463 463 return pull_request
464 464
465 465 def _trigger_pull_request_hook(self, pull_request, user, action):
466 466 pull_request = self.__get_pull_request(pull_request)
467 467 target_scm = pull_request.target_repo.scm_instance()
468 468 if action == 'create':
469 469 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
470 470 elif action == 'merge':
471 471 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
472 472 elif action == 'close':
473 473 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
474 474 elif action == 'review_status_change':
475 475 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
476 476 elif action == 'update':
477 477 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
478 478 else:
479 479 return
480 480
481 481 trigger_hook(
482 482 username=user.username,
483 483 repo_name=pull_request.target_repo.repo_name,
484 484 repo_alias=target_scm.alias,
485 485 pull_request=pull_request)
486 486
487 487 def _get_commit_ids(self, pull_request):
488 488 """
489 489 Return the commit ids of the merged pull request.
490 490
491 491 This method does not yet correctly handle the lack of autoupdates
492 492 or implicit target updates.
493 493 For example, if a commit in the source repo is already in the target, it
494 494 will still be reported.
495 495 """
496 496 merge_rev = pull_request.merge_rev
497 497 if merge_rev is None:
498 498 raise ValueError('This pull request was not merged yet')
499 499
500 500 commit_ids = list(pull_request.revisions)
501 501 if merge_rev not in commit_ids:
502 502 commit_ids.append(merge_rev)
503 503
504 504 return commit_ids
505 505
506 506 def merge(self, pull_request, user, extras):
507 507 log.debug("Merging pull request %s", pull_request.pull_request_id)
508 508 merge_state = self._merge_pull_request(pull_request, user, extras)
509 509 if merge_state.executed:
510 510 log.debug(
511 511 "Merge was successful, updating the pull request comments.")
512 512 self._comment_and_close_pr(pull_request, user, merge_state)
513 513 self._log_action('user_merged_pull_request', user, pull_request)
514 514 else:
515 515 log.warn("Merge failed, not updating the pull request.")
516 516 return merge_state
517 517
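    # Illustrative usage sketch (assumes an existing `pull_request`, `user`
    # and vcs_operation_context-style `extras`):
    #
    #   model = PullRequestModel()
    #   if model.check_user_merge(pull_request, user):
    #       state = model.merge(pull_request, user, extras=extras)
    #       merged = state.executed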
518 518 def _merge_pull_request(self, pull_request, user, extras):
519 519 target_vcs = pull_request.target_repo.scm_instance()
520 520 source_vcs = pull_request.source_repo.scm_instance()
521 521 target_ref = self._refresh_reference(
522 522 pull_request.target_ref_parts, target_vcs)
523 523
524 524 message = _(
525 525 'Merge pull request #%(pr_id)s from '
526 526 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
527 527 'pr_id': pull_request.pull_request_id,
528 528 'source_repo': source_vcs.name,
529 529 'source_ref_name': pull_request.source_ref_parts.name,
530 530 'pr_title': pull_request.title
531 531 }
532 532
533 533 workspace_id = self._workspace_id(pull_request)
534 534 use_rebase = self._use_rebase_for_merging(pull_request)
535 535
536 536 callback_daemon, extras = prepare_callback_daemon(
537 537 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
538 538 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
539 539
540 540 with callback_daemon:
541 541 # TODO: johbo: Implement a clean way to run a config_override
542 542 # for a single call.
543 543 target_vcs.config.set(
544 544 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
545 545 merge_state = target_vcs.merge(
546 546 target_ref, source_vcs, pull_request.source_ref_parts,
547 547 workspace_id, user_name=user.username,
548 548 user_email=user.email, message=message, use_rebase=use_rebase)
549 549 return merge_state
550 550
551 551 def _comment_and_close_pr(self, pull_request, user, merge_state):
552 552 pull_request.merge_rev = merge_state.merge_ref.commit_id
553 553 pull_request.updated_on = datetime.datetime.now()
554 554
555 555 CommentsModel().create(
556 556 text=unicode(_('Pull request merged and closed')),
557 557 repo=pull_request.target_repo.repo_id,
558 558 user=user.user_id,
559 559 pull_request=pull_request.pull_request_id,
560 560 f_path=None,
561 561 line_no=None,
562 562 closing_pr=True
563 563 )
564 564
565 565 Session().add(pull_request)
566 566 Session().flush()
567 567 # TODO: paris: replace invalidation with less radical solution
568 568 ScmModel().mark_for_invalidation(
569 569 pull_request.target_repo.repo_name)
570 570 self._trigger_pull_request_hook(pull_request, user, 'merge')
571 571
572 572 def has_valid_update_type(self, pull_request):
573 573 source_ref_type = pull_request.source_ref_parts.type
574 574 return source_ref_type in ['book', 'branch', 'tag']
575 575
576 576 def update_commits(self, pull_request):
577 577 """
578 578 Get the updated list of commits for the pull request
579 579 and return the new pull request version and the list
580 580 of commits processed by this update action
581 581 """
582 582 pull_request = self.__get_pull_request(pull_request)
583 583 source_ref_type = pull_request.source_ref_parts.type
584 584 source_ref_name = pull_request.source_ref_parts.name
585 585 source_ref_id = pull_request.source_ref_parts.commit_id
586 586
587 587 if not self.has_valid_update_type(pull_request):
588 588 log.debug(
589 589 "Skipping update of pull request %s due to ref type: %s",
590 590 pull_request, source_ref_type)
591 591 return UpdateResponse(
592 592 executed=False,
593 593 reason=UpdateFailureReason.WRONG_REF_TPYE,
594 594 old=pull_request, new=None, changes=None)
595 595
596 596 source_repo = pull_request.source_repo.scm_instance()
597 597 try:
598 598 source_commit = source_repo.get_commit(commit_id=source_ref_name)
599 599 except CommitDoesNotExistError:
600 600 return UpdateResponse(
601 601 executed=False,
602 602 reason=UpdateFailureReason.MISSING_SOURCE_REF,
603 603 old=pull_request, new=None, changes=None)
604 604
605 605 if source_ref_id == source_commit.raw_id:
606 606 log.debug("Nothing changed in pull request %s", pull_request)
607 607 return UpdateResponse(
608 608 executed=False,
609 609 reason=UpdateFailureReason.NO_CHANGE,
610 610 old=pull_request, new=None, changes=None)
611 611
612 612 # Finally there is a need for an update
613 613 pull_request_version = self._create_version_from_snapshot(pull_request)
614 614 self._link_comments_to_version(pull_request_version)
615 615
616 616 target_ref_type = pull_request.target_ref_parts.type
617 617 target_ref_name = pull_request.target_ref_parts.name
618 618 target_ref_id = pull_request.target_ref_parts.commit_id
619 619 target_repo = pull_request.target_repo.scm_instance()
620 620
621 621 try:
622 622 if target_ref_type in ('tag', 'branch', 'book'):
623 623 target_commit = target_repo.get_commit(target_ref_name)
624 624 else:
625 625 target_commit = target_repo.get_commit(target_ref_id)
626 626 except CommitDoesNotExistError:
627 627 return UpdateResponse(
628 628 executed=False,
629 629 reason=UpdateFailureReason.MISSING_TARGET_REF,
630 630 old=pull_request, new=None, changes=None)
631 631
632 632 # re-compute commit ids
633 633 old_commit_ids = set(pull_request.revisions)
634 634 pre_load = ["author", "branch", "date", "message"]
635 635 commit_ranges = target_repo.compare(
636 636 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
637 637 pre_load=pre_load)
638 638
639 639 ancestor = target_repo.get_common_ancestor(
640 640 target_commit.raw_id, source_commit.raw_id, source_repo)
641 641
642 642 pull_request.source_ref = '%s:%s:%s' % (
643 643 source_ref_type, source_ref_name, source_commit.raw_id)
644 644 pull_request.target_ref = '%s:%s:%s' % (
645 645 target_ref_type, target_ref_name, ancestor)
646 646 pull_request.revisions = [
647 647 commit.raw_id for commit in reversed(commit_ranges)]
648 648 pull_request.updated_on = datetime.datetime.now()
649 649 Session().add(pull_request)
650 650 new_commit_ids = set(pull_request.revisions)
651 651
652 652 changes = self._calculate_commit_id_changes(
653 653 old_commit_ids, new_commit_ids)
654 654
655 655 old_diff_data, new_diff_data = self._generate_update_diffs(
656 656 pull_request, pull_request_version)
657 657
658 658 CommentsModel().outdate_comments(
659 659 pull_request, old_diff_data=old_diff_data,
660 660 new_diff_data=new_diff_data)
661 661
662 662 file_changes = self._calculate_file_changes(
663 663 old_diff_data, new_diff_data)
664 664
665 665 # Add an automatic comment to the pull request
666 666 update_comment = CommentsModel().create(
667 667 text=self._render_update_message(changes, file_changes),
668 668 repo=pull_request.target_repo,
669 669 user=pull_request.author,
670 670 pull_request=pull_request,
671 671 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
672 672
673 673 # Update status to "Under Review" for added commits
674 674 for commit_id in changes.added:
675 675 ChangesetStatusModel().set_status(
676 676 repo=pull_request.source_repo,
677 677 status=ChangesetStatus.STATUS_UNDER_REVIEW,
678 678 comment=update_comment,
679 679 user=pull_request.author,
680 680 pull_request=pull_request,
681 681 revision=commit_id)
682 682
683 683 log.debug(
684 684 'Updated pull request %s, added_ids: %s, common_ids: %s, '
685 685 'removed_ids: %s', pull_request.pull_request_id,
686 686 changes.added, changes.common, changes.removed)
687 687 log.debug('Updated pull request with the following file changes: %s',
688 688 file_changes)
689 689
690 690 log.info(
691 691 "Updated pull request %s from commit %s to commit %s, "
692 692 "stored new version %s of this pull request.",
693 693 pull_request.pull_request_id, source_ref_id,
694 694 pull_request.source_ref_parts.commit_id,
695 695 pull_request_version.pull_request_version_id)
696 696 Session().commit()
697 697 self._trigger_pull_request_hook(pull_request, pull_request.author,
698 698 'update')
699 699
700 700 return UpdateResponse(
701 701 executed=True, reason=UpdateFailureReason.NONE,
702 702 old=pull_request, new=pull_request_version, changes=changes)
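 # --- Editor's aside (illustrative only, not part of this changeset) ------
 # Minimal caller-side sketch of the update flow above, assuming a
 # PullRequestModel instance; the variable names are hypothetical.
 #
 # resp = PullRequestModel().update_commits(pull_request_id)
 # if resp.executed:
 #     print('stored version %s, added commits: %s'
 #           % (resp.new.pull_request_version_id, resp.changes.added))
 # elif resp.reason == UpdateFailureReason.NO_CHANGE:
 #     print('source reference unchanged, nothing to update')
 # --------------------------------------------------------------------------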
703 703
704 704 def _create_version_from_snapshot(self, pull_request):
705 705 version = PullRequestVersion()
706 706 version.title = pull_request.title
707 707 version.description = pull_request.description
708 708 version.status = pull_request.status
709 709 version.created_on = datetime.datetime.now()
710 710 version.updated_on = pull_request.updated_on
711 711 version.user_id = pull_request.user_id
712 712 version.source_repo = pull_request.source_repo
713 713 version.source_ref = pull_request.source_ref
714 714 version.target_repo = pull_request.target_repo
715 715 version.target_ref = pull_request.target_ref
716 716
717 717 version._last_merge_source_rev = pull_request._last_merge_source_rev
718 718 version._last_merge_target_rev = pull_request._last_merge_target_rev
719 719 version._last_merge_status = pull_request._last_merge_status
720 720 version.shadow_merge_ref = pull_request.shadow_merge_ref
721 721 version.merge_rev = pull_request.merge_rev
722 722
723 723 version.revisions = pull_request.revisions
724 724 version.pull_request = pull_request
725 725 Session().add(version)
726 726 Session().flush()
727 727
728 728 return version
729 729
730 730 def _generate_update_diffs(self, pull_request, pull_request_version):
731
731 732 diff_context = (
732 733 self.DIFF_CONTEXT +
733 734 CommentsModel.needed_extra_diff_context())
735
736 source_repo = pull_request_version.source_repo
737 source_ref_id = pull_request_version.source_ref_parts.commit_id
738 target_ref_id = pull_request_version.target_ref_parts.commit_id
734 739 old_diff = self._get_diff_from_pr_or_version(
735 pull_request_version, context=diff_context)
740 source_repo, source_ref_id, target_ref_id, context=diff_context)
741
742 source_repo = pull_request.source_repo
743 source_ref_id = pull_request.source_ref_parts.commit_id
744 target_ref_id = pull_request.target_ref_parts.commit_id
745
736 746 new_diff = self._get_diff_from_pr_or_version(
737 pull_request, context=diff_context)
747 source_repo, source_ref_id, target_ref_id, context=diff_context)
738 748
739 749 old_diff_data = diffs.DiffProcessor(old_diff)
740 750 old_diff_data.prepare()
741 751 new_diff_data = diffs.DiffProcessor(new_diff)
742 752 new_diff_data.prepare()
743 753
744 754 return old_diff_data, new_diff_data
745 755
746 756 def _link_comments_to_version(self, pull_request_version):
747 757 """
748 758 Link all unlinked comments of this pull request to the given version.
749 759
750 760 :param pull_request_version: The `PullRequestVersion` to which
751 761 the comments shall be linked.
752 762
753 763 """
754 764 pull_request = pull_request_version.pull_request
755 765 comments = ChangesetComment.query().filter(
756 766 # TODO: johbo: Should we query for the repo at all here?
757 767 # Pending decision on how comments of PRs are to be related
758 768 # to either the source repo, the target repo or no repo at all.
759 769 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
760 770 ChangesetComment.pull_request == pull_request,
761 771 ChangesetComment.pull_request_version == None)
762 772
763 773 # TODO: johbo: Find out why this breaks if it is done in a bulk
764 774 # operation.
765 775 for comment in comments:
766 776 comment.pull_request_version_id = (
767 777 pull_request_version.pull_request_version_id)
768 778 Session().add(comment)
769 779
770 780 def _calculate_commit_id_changes(self, old_ids, new_ids):
771 added = new_ids.difference(old_ids)
772 common = old_ids.intersection(new_ids)
773 removed = old_ids.difference(new_ids)
774 return ChangeTuple(added, common, removed)
781 added = [x for x in new_ids if x not in old_ids]
782 common = [x for x in new_ids if x in old_ids]
783 removed = [x for x in old_ids if x not in new_ids]
784 total = new_ids
785 return ChangeTuple(added, common, removed, total)
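 # --- Editor's aside (illustrative only, not part of this changeset) ------
 # Standalone sketch of the commit-id classification above, with hypothetical
 # ids; `total` simply mirrors the new revision list.
 #
 # from collections import namedtuple
 # ChangeTuple = namedtuple('ChangeTuple', ['added', 'common', 'removed', 'total'])
 # old_ids = ['c1', 'c2', 'c3']            # revisions before the update
 # new_ids = ['c2', 'c3', 'c4']            # revisions after the update
 # changes = ChangeTuple(
 #     added=[x for x in new_ids if x not in old_ids],    # ['c4']
 #     common=[x for x in new_ids if x in old_ids],       # ['c2', 'c3']
 #     removed=[x for x in old_ids if x not in new_ids],  # ['c1']
 #     total=new_ids)
 # --------------------------------------------------------------------------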
775 786
776 787 def _calculate_file_changes(self, old_diff_data, new_diff_data):
777 788
778 789 old_files = OrderedDict()
779 790 for diff_data in old_diff_data.parsed_diff:
780 791 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
781 792
782 793 added_files = []
783 794 modified_files = []
784 795 removed_files = []
785 796 for diff_data in new_diff_data.parsed_diff:
786 797 new_filename = diff_data['filename']
787 798 new_hash = md5_safe(diff_data['raw_diff'])
788 799
789 800 old_hash = old_files.get(new_filename)
790 801 if not old_hash:
791 802 # file is not present in the old diff, which means it was added
792 803 added_files.append(new_filename)
793 804 else:
794 805 if new_hash != old_hash:
795 806 modified_files.append(new_filename)
796 807 # now remove a file from old, since we have seen it already
797 808 del old_files[new_filename]
798 809
799 810 # removed files are those present in old but not in new; since we
800 811 # drop every old entry that also appears in the new diff, whatever is
801 812 # left over in `old_files` is exactly the set of removed files
802 813 removed_files.extend(old_files.keys())
803 814
804 815 return FileChangeTuple(added_files, modified_files, removed_files)
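 # --- Editor's aside (illustrative only, not part of this changeset) ------
 # Standalone sketch of the per-file comparison above: each raw diff is
 # reduced to an md5 digest and filenames are classified by comparing the
 # old and new digests. File names and diff bodies are hypothetical.
 #
 # import hashlib
 # digest = lambda raw: hashlib.md5(raw).hexdigest()
 # old = {'setup.py': digest('-a\n+b'), 'README.rst': digest('+x')}
 # new = {'setup.py': digest('-a\n+c'), 'docs/index.rst': digest('+y')}
 # added    = [f for f in new if f not in old]                    # ['docs/index.rst']
 # modified = [f for f in new if f in old and new[f] != old[f]]   # ['setup.py']
 # removed  = [f for f in old if f not in new]                    # ['README.rst']
 # --------------------------------------------------------------------------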
805 816
806 817 def _render_update_message(self, changes, file_changes):
807 818 """
808 819 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
809 820 so it always looks the same regardless of which default renderer
810 821 the system is using.
811 822
812 823 :param changes: changes named tuple
813 824 :param file_changes: file changes named tuple
814 825
815 826 """
816 827 new_status = ChangesetStatus.get_status_lbl(
817 828 ChangesetStatus.STATUS_UNDER_REVIEW)
818 829
819 830 changed_files = (
820 831 file_changes.added + file_changes.modified + file_changes.removed)
821 832
822 833 params = {
823 834 'under_review_label': new_status,
824 835 'added_commits': changes.added,
825 836 'removed_commits': changes.removed,
826 837 'changed_files': changed_files,
827 838 'added_files': file_changes.added,
828 839 'modified_files': file_changes.modified,
829 840 'removed_files': file_changes.removed,
830 841 }
831 842 renderer = RstTemplateRenderer()
832 843 return renderer.render('pull_request_update.mako', **params)
833 844
834 845 def edit(self, pull_request, title, description):
835 846 pull_request = self.__get_pull_request(pull_request)
836 847 if pull_request.is_closed():
837 848 raise ValueError('This pull request is closed')
838 849 if title:
839 850 pull_request.title = title
840 851 pull_request.description = description
841 852 pull_request.updated_on = datetime.datetime.now()
842 853 Session().add(pull_request)
843 854
844 855 def update_reviewers(self, pull_request, reviewer_data):
845 856 """
846 857 Update the reviewers in the pull request
847 858
848 859 :param pull_request: the pr to update
849 860 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
850 861 """
851 862
852 863 reviewers_reasons = {}
853 864 for user_id, reasons in reviewer_data:
854 865 if isinstance(user_id, (int, basestring)):
855 866 user_id = self._get_user(user_id).user_id
856 867 reviewers_reasons[user_id] = reasons
857 868
858 869 reviewers_ids = set(reviewers_reasons.keys())
859 870 pull_request = self.__get_pull_request(pull_request)
860 871 current_reviewers = PullRequestReviewers.query()\
861 872 .filter(PullRequestReviewers.pull_request ==
862 873 pull_request).all()
863 874 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
864 875
865 876 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
866 877 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
867 878
868 879 log.debug("Adding %s reviewers", ids_to_add)
869 880 log.debug("Removing %s reviewers", ids_to_remove)
870 881 changed = False
871 882 for uid in ids_to_add:
872 883 changed = True
873 884 _usr = self._get_user(uid)
874 885 reasons = reviewers_reasons[uid]
875 886 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
876 887 Session().add(reviewer)
877 888
878 889 self.notify_reviewers(pull_request, ids_to_add)
879 890
880 891 for uid in ids_to_remove:
881 892 changed = True
882 893 reviewer = PullRequestReviewers.query()\
883 894 .filter(PullRequestReviewers.user_id == uid,
884 895 PullRequestReviewers.pull_request == pull_request)\
885 896 .scalar()
886 897 if reviewer:
887 898 Session().delete(reviewer)
888 899 if changed:
889 900 pull_request.updated_on = datetime.datetime.now()
890 901 Session().add(pull_request)
891 902
892 903 return ids_to_add, ids_to_remove
893 904
894 905 def get_url(self, pull_request):
895 906 return h.url('pullrequest_show',
896 907 repo_name=safe_str(pull_request.target_repo.repo_name),
897 908 pull_request_id=pull_request.pull_request_id,
898 909 qualified=True)
899 910
900 911 def get_shadow_clone_url(self, pull_request):
901 912 """
902 913 Returns qualified url pointing to the shadow repository. If this pull
903 914 request is closed there is no shadow repository and ``None`` will be
904 915 returned.
905 916 """
906 917 if pull_request.is_closed():
907 918 return None
908 919 else:
909 920 pr_url = urllib.unquote(self.get_url(pull_request))
910 921 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
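 # --- Editor's aside (illustrative only, not part of this changeset) ------
 # For an open pull request the shadow clone url is simply the qualified pull
 # request url with '/repository' appended, e.g. (hypothetical host and id):
 #
 # 'https://code.example.com/myrepo/pull-request/42/repository'
 # --------------------------------------------------------------------------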
911 922
912 923 def notify_reviewers(self, pull_request, reviewers_ids):
913 924 # notification to reviewers
914 925 if not reviewers_ids:
915 926 return
916 927
917 928 pull_request_obj = pull_request
918 929 # get the current participants of this pull request
919 930 recipients = reviewers_ids
920 931 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
921 932
922 933 pr_source_repo = pull_request_obj.source_repo
923 934 pr_target_repo = pull_request_obj.target_repo
924 935
925 936 pr_url = h.url(
926 937 'pullrequest_show',
927 938 repo_name=pr_target_repo.repo_name,
928 939 pull_request_id=pull_request_obj.pull_request_id,
929 940 qualified=True,)
930 941
931 942 # set some variables for email notification
932 943 pr_target_repo_url = h.url(
933 944 'summary_home',
934 945 repo_name=pr_target_repo.repo_name,
935 946 qualified=True)
936 947
937 948 pr_source_repo_url = h.url(
938 949 'summary_home',
939 950 repo_name=pr_source_repo.repo_name,
940 951 qualified=True)
941 952
942 953 # pull request specifics
943 954 pull_request_commits = [
944 955 (x.raw_id, x.message)
945 956 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
946 957
947 958 kwargs = {
948 959 'user': pull_request.author,
949 960 'pull_request': pull_request_obj,
950 961 'pull_request_commits': pull_request_commits,
951 962
952 963 'pull_request_target_repo': pr_target_repo,
953 964 'pull_request_target_repo_url': pr_target_repo_url,
954 965
955 966 'pull_request_source_repo': pr_source_repo,
956 967 'pull_request_source_repo_url': pr_source_repo_url,
957 968
958 969 'pull_request_url': pr_url,
959 970 }
960 971
961 972 # pre-generate the subject for notification itself
962 973 (subject,
963 974 _h, _e, # we don't care about those
964 975 body_plaintext) = EmailNotificationModel().render_email(
965 976 notification_type, **kwargs)
966 977
967 978 # create notification objects, and emails
968 979 NotificationModel().create(
969 980 created_by=pull_request.author,
970 981 notification_subject=subject,
971 982 notification_body=body_plaintext,
972 983 notification_type=notification_type,
973 984 recipients=recipients,
974 985 email_kwargs=kwargs,
975 986 )
976 987
977 988 def delete(self, pull_request):
978 989 pull_request = self.__get_pull_request(pull_request)
979 990 self._cleanup_merge_workspace(pull_request)
980 991 Session().delete(pull_request)
981 992
982 993 def close_pull_request(self, pull_request, user):
983 994 pull_request = self.__get_pull_request(pull_request)
984 995 self._cleanup_merge_workspace(pull_request)
985 996 pull_request.status = PullRequest.STATUS_CLOSED
986 997 pull_request.updated_on = datetime.datetime.now()
987 998 Session().add(pull_request)
988 999 self._trigger_pull_request_hook(
989 1000 pull_request, pull_request.author, 'close')
990 1001 self._log_action('user_closed_pull_request', user, pull_request)
991 1002
992 1003 def close_pull_request_with_comment(self, pull_request, user, repo,
993 1004 message=None):
994 1005 status = ChangesetStatus.STATUS_REJECTED
995 1006
996 1007 if not message:
997 1008 message = (
998 1009 _('Status change %(transition_icon)s %(status)s') % {
999 1010 'transition_icon': '>',
1000 1011 'status': ChangesetStatus.get_status_lbl(status)})
1001 1012
1002 1013 internal_message = _('Closing with') + ' ' + message
1003 1014
1004 1015 comm = CommentsModel().create(
1005 1016 text=internal_message,
1006 1017 repo=repo.repo_id,
1007 1018 user=user.user_id,
1008 1019 pull_request=pull_request.pull_request_id,
1009 1020 f_path=None,
1010 1021 line_no=None,
1011 1022 status_change=ChangesetStatus.get_status_lbl(status),
1012 1023 status_change_type=status,
1013 1024 closing_pr=True
1014 1025 )
1015 1026
1016 1027 ChangesetStatusModel().set_status(
1017 1028 repo.repo_id,
1018 1029 status,
1019 1030 user.user_id,
1020 1031 comm,
1021 1032 pull_request=pull_request.pull_request_id
1022 1033 )
1023 1034 Session().flush()
1024 1035
1025 1036 PullRequestModel().close_pull_request(
1026 1037 pull_request.pull_request_id, user)
1027 1038
1028 1039 def merge_status(self, pull_request):
1029 1040 if not self._is_merge_enabled(pull_request):
1030 1041 return False, _('Server-side pull request merging is disabled.')
1031 1042 if pull_request.is_closed():
1032 1043 return False, _('This pull request is closed.')
1033 1044 merge_possible, msg = self._check_repo_requirements(
1034 1045 target=pull_request.target_repo, source=pull_request.source_repo)
1035 1046 if not merge_possible:
1036 1047 return merge_possible, msg
1037 1048
1038 1049 try:
1039 1050 resp = self._try_merge(pull_request)
1040 1051 log.debug("Merge response: %s", resp)
1041 1052 status = resp.possible, self.merge_status_message(
1042 1053 resp.failure_reason)
1043 1054 except NotImplementedError:
1044 1055 status = False, _('Pull request merging is not supported.')
1045 1056
1046 1057 return status
1047 1058
1048 1059 def _check_repo_requirements(self, target, source):
1049 1060 """
1050 1061 Check if `target` and `source` have compatible requirements.
1051 1062
1052 1063 Currently this is just checking for largefiles.
1053 1064 """
1054 1065 target_has_largefiles = self._has_largefiles(target)
1055 1066 source_has_largefiles = self._has_largefiles(source)
1056 1067 merge_possible = True
1057 1068 message = u''
1058 1069
1059 1070 if target_has_largefiles != source_has_largefiles:
1060 1071 merge_possible = False
1061 1072 if source_has_largefiles:
1062 1073 message = _(
1063 1074 'Target repository large files support is disabled.')
1064 1075 else:
1065 1076 message = _(
1066 1077 'Source repository large files support is disabled.')
1067 1078
1068 1079 return merge_possible, message
1069 1080
1070 1081 def _has_largefiles(self, repo):
1071 1082 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1072 1083 'extensions', 'largefiles')
1073 1084 return largefiles_ui and largefiles_ui[0].active
1074 1085
1075 1086 def _try_merge(self, pull_request):
1076 1087 """
1077 1088 Try to merge the pull request and return the merge status.
1078 1089 """
1079 1090 log.debug(
1080 1091 "Trying out if the pull request %s can be merged.",
1081 1092 pull_request.pull_request_id)
1082 1093 target_vcs = pull_request.target_repo.scm_instance()
1083 1094
1084 1095 # Refresh the target reference.
1085 1096 try:
1086 1097 target_ref = self._refresh_reference(
1087 1098 pull_request.target_ref_parts, target_vcs)
1088 1099 except CommitDoesNotExistError:
1089 1100 merge_state = MergeResponse(
1090 1101 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1091 1102 return merge_state
1092 1103
1093 1104 target_locked = pull_request.target_repo.locked
1094 1105 if target_locked and target_locked[0]:
1095 1106 log.debug("The target repository is locked.")
1096 1107 merge_state = MergeResponse(
1097 1108 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1098 1109 elif self._needs_merge_state_refresh(pull_request, target_ref):
1099 1110 log.debug("Refreshing the merge status of the repository.")
1100 1111 merge_state = self._refresh_merge_state(
1101 1112 pull_request, target_vcs, target_ref)
1102 1113 else:
1103 1114 possible = pull_request.\
1104 1115 _last_merge_status == MergeFailureReason.NONE
1105 1116 merge_state = MergeResponse(
1106 1117 possible, False, None, pull_request._last_merge_status)
1107 1118
1108 1119 return merge_state
1109 1120
1110 1121 def _refresh_reference(self, reference, vcs_repository):
1111 1122 if reference.type in ('branch', 'book'):
1112 1123 name_or_id = reference.name
1113 1124 else:
1114 1125 name_or_id = reference.commit_id
1115 1126 refreshed_commit = vcs_repository.get_commit(name_or_id)
1116 1127 refreshed_reference = Reference(
1117 1128 reference.type, reference.name, refreshed_commit.raw_id)
1118 1129 return refreshed_reference
1119 1130
1120 1131 def _needs_merge_state_refresh(self, pull_request, target_reference):
1121 1132 return not(
1122 1133 pull_request.revisions and
1123 1134 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1124 1135 target_reference.commit_id == pull_request._last_merge_target_rev)
1125 1136
1126 1137 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1127 1138 workspace_id = self._workspace_id(pull_request)
1128 1139 source_vcs = pull_request.source_repo.scm_instance()
1129 1140 use_rebase = self._use_rebase_for_merging(pull_request)
1130 1141 merge_state = target_vcs.merge(
1131 1142 target_reference, source_vcs, pull_request.source_ref_parts,
1132 1143 workspace_id, dry_run=True, use_rebase=use_rebase)
1133 1144
1134 1145 # Do not store the response if there was an unknown error.
1135 1146 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1136 1147 pull_request._last_merge_source_rev = \
1137 1148 pull_request.source_ref_parts.commit_id
1138 1149 pull_request._last_merge_target_rev = target_reference.commit_id
1139 1150 pull_request._last_merge_status = merge_state.failure_reason
1140 1151 pull_request.shadow_merge_ref = merge_state.merge_ref
1141 1152 Session().add(pull_request)
1142 1153 Session().commit()
1143 1154
1144 1155 return merge_state
1145 1156
1146 1157 def _workspace_id(self, pull_request):
1147 1158 workspace_id = 'pr-%s' % pull_request.pull_request_id
1148 1159 return workspace_id
1149 1160
1150 1161 def merge_status_message(self, status_code):
1151 1162 """
1152 1163 Return a human friendly error message for the given merge status code.
1153 1164 """
1154 1165 return self.MERGE_STATUS_MESSAGES[status_code]
1155 1166
1156 1167 def generate_repo_data(self, repo, commit_id=None, branch=None,
1157 1168 bookmark=None):
1158 1169 all_refs, selected_ref = \
1159 1170 self._get_repo_pullrequest_sources(
1160 1171 repo.scm_instance(), commit_id=commit_id,
1161 1172 branch=branch, bookmark=bookmark)
1162 1173
1163 1174 refs_select2 = []
1164 1175 for element in all_refs:
1165 1176 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1166 1177 refs_select2.append({'text': element[1], 'children': children})
1167 1178
1168 1179 return {
1169 1180 'user': {
1170 1181 'user_id': repo.user.user_id,
1171 1182 'username': repo.user.username,
1172 1183 'firstname': repo.user.firstname,
1173 1184 'lastname': repo.user.lastname,
1174 1185 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1175 1186 },
1176 1187 'description': h.chop_at_smart(repo.description, '\n'),
1177 1188 'refs': {
1178 1189 'all_refs': all_refs,
1179 1190 'selected_ref': selected_ref,
1180 1191 'select2_refs': refs_select2
1181 1192 }
1182 1193 }
1183 1194
1184 1195 def generate_pullrequest_title(self, source, source_ref, target):
1185 1196 return u'{source}#{at_ref} to {target}'.format(
1186 1197 source=source,
1187 1198 at_ref=source_ref,
1188 1199 target=target,
1189 1200 )
1190 1201
1191 1202 def _cleanup_merge_workspace(self, pull_request):
1192 1203 # Merging related cleanup
1193 1204 target_scm = pull_request.target_repo.scm_instance()
1194 1205 workspace_id = 'pr-%s' % pull_request.pull_request_id
1195 1206
1196 1207 try:
1197 1208 target_scm.cleanup_merge_workspace(workspace_id)
1198 1209 except NotImplementedError:
1199 1210 pass
1200 1211
1201 1212 def _get_repo_pullrequest_sources(
1202 1213 self, repo, commit_id=None, branch=None, bookmark=None):
1203 1214 """
1204 1215 Return a structure with repo's interesting commits, suitable for
1205 1216 the selectors in pullrequest controller
1206 1217
1207 1218 :param commit_id: a commit that must be in the list somehow
1208 1219 and selected by default
1209 1220 :param branch: a branch that must be in the list and selected
1210 1221 by default - even if closed
1211 1222 :param bookmark: a bookmark that must be in the list and selected
1212 1223 """
1213 1224
1214 1225 commit_id = safe_str(commit_id) if commit_id else None
1215 1226 branch = safe_str(branch) if branch else None
1216 1227 bookmark = safe_str(bookmark) if bookmark else None
1217 1228
1218 1229 selected = None
1219 1230
1220 1231 # order matters: first source that has commit_id in it will be selected
1221 1232 sources = []
1222 1233 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1223 1234 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1224 1235
1225 1236 if commit_id:
1226 1237 ref_commit = (h.short_id(commit_id), commit_id)
1227 1238 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1228 1239
1229 1240 sources.append(
1230 1241 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1231 1242 )
1232 1243
1233 1244 groups = []
1234 1245 for group_key, ref_list, group_name, match in sources:
1235 1246 group_refs = []
1236 1247 for ref_name, ref_id in ref_list:
1237 1248 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1238 1249 group_refs.append((ref_key, ref_name))
1239 1250
1240 1251 if not selected:
1241 1252 if set([commit_id, match]) & set([ref_id, ref_name]):
1242 1253 selected = ref_key
1243 1254
1244 1255 if group_refs:
1245 1256 groups.append((group_refs, group_name))
1246 1257
1247 1258 if not selected:
1248 1259 ref = commit_id or branch or bookmark
1249 1260 if ref:
1250 1261 raise CommitDoesNotExistError(
1251 1262 'No commit refs could be found matching: %s' % ref)
1252 1263 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1253 1264 selected = 'branch:%s:%s' % (
1254 1265 repo.DEFAULT_BRANCH_NAME,
1255 1266 repo.branches[repo.DEFAULT_BRANCH_NAME]
1256 1267 )
1257 1268 elif repo.commit_ids:
1258 1269 rev = repo.commit_ids[0]
1259 1270 selected = 'rev:%s:%s' % (rev, rev)
1260 1271 else:
1261 1272 raise EmptyRepositoryError()
1262 1273 return groups, selected
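 # --- Editor's aside (illustrative only, not part of this changeset) ------
 # Shape of the value returned above, with hypothetical refs; `selected` is a
 # '<type>:<name>:<commit_id>' key that matches one entry in the groups.
 #
 # groups = [
 #     ([('book:feature-x:abc123', 'feature-x')], u'Bookmarks'),
 #     ([('branch:default:de4dbeef', 'default')], u'Branches'),
 # ]
 # selected = 'branch:default:de4dbeef'
 # --------------------------------------------------------------------------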
1263 1274
1264 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1265 pull_request = self.__get_pull_request(pull_request)
1266 return self._get_diff_from_pr_or_version(pull_request, context=context)
1275 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1276 return self._get_diff_from_pr_or_version(
1277 source_repo, source_ref_id, target_ref_id, context=context)
1267 1278
1268 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1269 source_repo = pr_or_version.source_repo
1270
1271 # we swap org/other ref since we run a simple diff on one repo
1272 target_ref_id = pr_or_version.target_ref_parts.commit_id
1273 source_ref_id = pr_or_version.source_ref_parts.commit_id
1279 def _get_diff_from_pr_or_version(
1280 self, source_repo, source_ref_id, target_ref_id, context):
1274 1281 target_commit = source_repo.get_commit(
1275 1282 commit_id=safe_str(target_ref_id))
1276 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1283 source_commit = source_repo.get_commit(
1284 commit_id=safe_str(source_ref_id))
1285 if isinstance(source_repo, Repository):
1277 1286 vcs_repo = source_repo.scm_instance()
1287 else:
1288 vcs_repo = source_repo
1278 1289
1279 1290 # TODO: johbo: In the context of an update, we cannot reach
1280 1291 # the old commit anymore with our normal mechanisms. It needs
1281 1292 # some sort of special support in the vcs layer to avoid this
1282 1293 # workaround.
1283 1294 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1284 1295 vcs_repo.alias == 'git'):
1285 1296 source_commit.raw_id = safe_str(source_ref_id)
1286 1297
1287 1298 log.debug('calculating diff between '
1288 1299 'source_ref:%s and target_ref:%s for repo `%s`',
1289 1300 target_ref_id, source_ref_id,
1290 1301 safe_unicode(vcs_repo.path))
1291 1302
1292 1303 vcs_diff = vcs_repo.get_diff(
1293 1304 commit1=target_commit, commit2=source_commit, context=context)
1294 1305 return vcs_diff
1295 1306
1296 1307 def _is_merge_enabled(self, pull_request):
1297 1308 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1298 1309 settings = settings_model.get_general_settings()
1299 1310 return settings.get('rhodecode_pr_merge_enabled', False)
1300 1311
1301 1312 def _use_rebase_for_merging(self, pull_request):
1302 1313 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1303 1314 settings = settings_model.get_general_settings()
1304 1315 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1305 1316
1306 1317 def _log_action(self, action, user, pull_request):
1307 1318 action_logger(
1308 1319 user,
1309 1320 '{action}:{pr_id}'.format(
1310 1321 action=action, pr_id=pull_request.pull_request_id),
1311 1322 pull_request.target_repo)
1312 1323
1313 1324
1314 1325 class MergeCheck(object):
1315 1326 """
1316 1327 Perform merge checks and return a check object which stores
1317 1328 information about merge errors and merge conditions
1318 1329 """
1319 1330 TODO_CHECK = 'todo'
1320 1331 PERM_CHECK = 'perm'
1321 1332 REVIEW_CHECK = 'review'
1322 1333 MERGE_CHECK = 'merge'
1323 1334
1324 1335 def __init__(self):
1325 1336 self.merge_possible = None
1326 1337 self.merge_msg = ''
1327 1338 self.failed = None
1328 1339 self.errors = []
1329 1340 self.error_details = OrderedDict()
1330 1341
1331 1342 def push_error(self, error_type, message, error_key, details):
1332 1343 self.failed = True
1333 1344 self.errors.append([error_type, message])
1334 1345 self.error_details[error_key] = dict(
1335 1346 details=details,
1336 1347 error_type=error_type,
1337 1348 message=message
1338 1349 )
1339 1350
1340 1351 @classmethod
1341 1352 def validate(cls, pull_request, user, fail_early=False, translator=None):
1342 1353 # if migrated to pyramid...
1343 1354 # _ = lambda: translator or _ # use passed in translator if any
1344 1355
1345 1356 merge_check = cls()
1346 1357
1347 1358 # permissions
1348 1359 user_allowed_to_merge = PullRequestModel().check_user_merge(
1349 1360 pull_request, user)
1350 1361 if not user_allowed_to_merge:
1351 1362 log.debug("MergeCheck: cannot merge, approval is pending.")
1352 1363
1353 1364 msg = _('User `{}` not allowed to perform merge').format(user)
1354 1365 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1355 1366 if fail_early:
1356 1367 return merge_check
1357 1368
1358 1369 # review status
1359 1370 review_status = pull_request.calculated_review_status()
1360 1371 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1361 1372 if not status_approved:
1362 1373 log.debug("MergeCheck: cannot merge, approval is pending.")
1363 1374
1364 1375 msg = _('Pull request reviewer approval is pending.')
1365 1376
1366 1377 merge_check.push_error(
1367 1378 'warning', msg, cls.REVIEW_CHECK, review_status)
1368 1379
1369 1380 if fail_early:
1370 1381 return merge_check
1371 1382
1372 1383 # left over TODOs
1373 1384 todos = CommentsModel().get_unresolved_todos(pull_request)
1374 1385 if todos:
1375 1386 log.debug("MergeCheck: cannot merge, {} "
1376 1387 "unresolved todos left.".format(len(todos)))
1377 1388
1378 1389 if len(todos) == 1:
1379 1390 msg = _('Cannot merge, {} TODO still not resolved.').format(
1380 1391 len(todos))
1381 1392 else:
1382 1393 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1383 1394 len(todos))
1384 1395
1385 1396 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1386 1397
1387 1398 if fail_early:
1388 1399 return merge_check
1389 1400
1390 1401 # merge possible
1391 1402 merge_status, msg = PullRequestModel().merge_status(pull_request)
1392 1403 merge_check.merge_possible = merge_status
1393 1404 merge_check.merge_msg = msg
1394 1405 if not merge_status:
1395 1406 log.debug(
1396 1407 "MergeCheck: cannot merge, pull request merge not possible.")
1397 1408 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1398 1409
1399 1410 if fail_early:
1400 1411 return merge_check
1401 1412
1402 1413 return merge_check
1403 1414
1404 1415
1405 1416 ChangeTuple = namedtuple('ChangeTuple',
1406 ['added', 'common', 'removed'])
1417 ['added', 'common', 'removed', 'total'])
1407 1418
1408 1419 FileChangeTuple = namedtuple('FileChangeTuple',
1409 1420 ['added', 'modified', 'removed'])
@@ -1,2261 +1,2337 b''
1 1 //Primary CSS
2 2
3 3 //--- IMPORTS ------------------//
4 4
5 5 @import 'helpers';
6 6 @import 'mixins';
7 7 @import 'rcicons';
8 8 @import 'fonts';
9 9 @import 'variables';
10 10 @import 'bootstrap-variables';
11 11 @import 'form-bootstrap';
12 12 @import 'codemirror';
13 13 @import 'legacy_code_styles';
14 14 @import 'progress-bar';
15 15
16 16 @import 'type';
17 17 @import 'alerts';
18 18 @import 'buttons';
19 19 @import 'tags';
20 20 @import 'code-block';
21 21 @import 'examples';
22 22 @import 'login';
23 23 @import 'main-content';
24 24 @import 'select2';
25 25 @import 'comments';
26 26 @import 'panels-bootstrap';
27 27 @import 'panels';
28 28 @import 'deform';
29 29
30 30 //--- BASE ------------------//
31 31 .noscript-error {
32 32 top: 0;
33 33 left: 0;
34 34 width: 100%;
35 35 z-index: 101;
36 36 text-align: center;
37 37 font-family: @text-semibold;
38 38 font-size: 120%;
39 39 color: white;
40 40 background-color: @alert2;
41 41 padding: 5px 0 5px 0;
42 42 }
43 43
44 44 html {
45 45 display: table;
46 46 height: 100%;
47 47 width: 100%;
48 48 }
49 49
50 50 body {
51 51 display: table-cell;
52 52 width: 100%;
53 53 }
54 54
55 55 //--- LAYOUT ------------------//
56 56
57 57 .hidden{
58 58 display: none !important;
59 59 }
60 60
61 61 .box{
62 62 float: left;
63 63 width: 100%;
64 64 }
65 65
66 66 .browser-header {
67 67 clear: both;
68 68 }
69 69 .main {
70 70 clear: both;
71 71 padding:0 0 @pagepadding;
72 72 height: auto;
73 73
74 74 &:after { //clearfix
75 75 content:"";
76 76 clear:both;
77 77 width:100%;
78 78 display:block;
79 79 }
80 80 }
81 81
82 82 .action-link{
83 83 margin-left: @padding;
84 84 padding-left: @padding;
85 85 border-left: @border-thickness solid @border-default-color;
86 86 }
87 87
88 88 input + .action-link, .action-link.first{
89 89 border-left: none;
90 90 }
91 91
92 92 .action-link.last{
93 93 margin-right: @padding;
94 94 padding-right: @padding;
95 95 }
96 96
97 97 .action-link.active,
98 98 .action-link.active a{
99 99 color: @grey4;
100 100 }
101 101
102 102 ul.simple-list{
103 103 list-style: none;
104 104 margin: 0;
105 105 padding: 0;
106 106 }
107 107
108 108 .main-content {
109 109 padding-bottom: @pagepadding;
110 110 }
111 111
112 112 .wide-mode-wrapper {
113 113 max-width:4000px !important;
114 114 }
115 115
116 116 .wrapper {
117 117 position: relative;
118 118 max-width: @wrapper-maxwidth;
119 119 margin: 0 auto;
120 120 }
121 121
122 122 #content {
123 123 clear: both;
124 124 padding: 0 @contentpadding;
125 125 }
126 126
127 127 .advanced-settings-fields{
128 128 input{
129 129 margin-left: @textmargin;
130 130 margin-right: @padding/2;
131 131 }
132 132 }
133 133
134 134 .cs_files_title {
135 135 margin: @pagepadding 0 0;
136 136 }
137 137
138 138 input.inline[type="file"] {
139 139 display: inline;
140 140 }
141 141
142 142 .error_page {
143 143 margin: 10% auto;
144 144
145 145 h1 {
146 146 color: @grey2;
147 147 }
148 148
149 149 .alert {
150 150 margin: @padding 0;
151 151 }
152 152
153 153 .error-branding {
154 154 font-family: @text-semibold;
155 155 color: @grey4;
156 156 }
157 157
158 158 .error_message {
159 159 font-family: @text-regular;
160 160 }
161 161
162 162 .sidebar {
163 163 min-height: 275px;
164 164 margin: 0;
165 165 padding: 0 0 @sidebarpadding @sidebarpadding;
166 166 border: none;
167 167 }
168 168
169 169 .main-content {
170 170 position: relative;
171 171 margin: 0 @sidebarpadding @sidebarpadding;
172 172 padding: 0 0 0 @sidebarpadding;
173 173 border-left: @border-thickness solid @grey5;
174 174
175 175 @media (max-width:767px) {
176 176 clear: both;
177 177 width: 100%;
178 178 margin: 0;
179 179 border: none;
180 180 }
181 181 }
182 182
183 183 .inner-column {
184 184 float: left;
185 185 width: 29.75%;
186 186 min-height: 150px;
187 187 margin: @sidebarpadding 2% 0 0;
188 188 padding: 0 2% 0 0;
189 189 border-right: @border-thickness solid @grey5;
190 190
191 191 @media (max-width:767px) {
192 192 clear: both;
193 193 width: 100%;
194 194 border: none;
195 195 }
196 196
197 197 ul {
198 198 padding-left: 1.25em;
199 199 }
200 200
201 201 &:last-child {
202 202 margin: @sidebarpadding 0 0;
203 203 border: none;
204 204 }
205 205
206 206 h4 {
207 207 margin: 0 0 @padding;
208 208 font-family: @text-semibold;
209 209 }
210 210 }
211 211 }
212 212 .error-page-logo {
213 213 width: 130px;
214 214 height: 160px;
215 215 }
216 216
217 217 // HEADER
218 218 .header {
219 219
220 220 // TODO: johbo: Fix login pages, so that they work without a min-height
221 221 // for the header and then remove the min-height. I chose a smaller value
222 222 // intentionally here to avoid rendering issues in the main navigation.
223 223 min-height: 49px;
224 224
225 225 position: relative;
226 226 vertical-align: bottom;
227 227 padding: 0 @header-padding;
228 228 background-color: @grey2;
229 229 color: @grey5;
230 230
231 231 .title {
232 232 overflow: visible;
233 233 }
234 234
235 235 &:before,
236 236 &:after {
237 237 content: "";
238 238 clear: both;
239 239 width: 100%;
240 240 }
241 241
242 242 // TODO: johbo: Avoids breaking "Repositories" chooser
243 243 .select2-container .select2-choice .select2-arrow {
244 244 display: none;
245 245 }
246 246 }
247 247
248 248 #header-inner {
249 249 &.title {
250 250 margin: 0;
251 251 }
252 252 &:before,
253 253 &:after {
254 254 content: "";
255 255 clear: both;
256 256 }
257 257 }
258 258
259 259 // Gists
260 260 #files_data {
261 261 clear: both; //for firefox
262 262 }
263 263 #gistid {
264 264 margin-right: @padding;
265 265 }
266 266
267 267 // Global Settings Editor
268 268 .textarea.editor {
269 269 float: left;
270 270 position: relative;
271 271 max-width: @texteditor-width;
272 272
273 273 select {
274 274 position: absolute;
275 275 top:10px;
276 276 right:0;
277 277 }
278 278
279 279 .CodeMirror {
280 280 margin: 0;
281 281 }
282 282
283 283 .help-block {
284 284 margin: 0 0 @padding;
285 285 padding:.5em;
286 286 background-color: @grey6;
287 287 }
288 288 }
289 289
290 290 ul.auth_plugins {
291 291 margin: @padding 0 @padding @legend-width;
292 292 padding: 0;
293 293
294 294 li {
295 295 margin-bottom: @padding;
296 296 line-height: 1em;
297 297 list-style-type: none;
298 298
299 299 .auth_buttons .btn {
300 300 margin-right: @padding;
301 301 }
302 302
303 303 &:before { content: none; }
304 304 }
305 305 }
306 306
307 307
308 308 // My Account PR list
309 309
310 310 #show_closed {
311 311 margin: 0 1em 0 0;
312 312 }
313 313
314 314 .pullrequestlist {
315 315 .closed {
316 316 background-color: @grey6;
317 317 }
318 318 .td-status {
319 319 padding-left: .5em;
320 320 }
321 321 .log-container .truncate {
322 322 height: 2.75em;
323 323 white-space: pre-line;
324 324 }
325 325 table.rctable .user {
326 326 padding-left: 0;
327 327 }
328 328 table.rctable {
329 329 td.td-description,
330 330 .rc-user {
331 331 min-width: auto;
332 332 }
333 333 }
334 334 }
335 335
336 336 // Pull Requests
337 337
338 338 .pullrequests_section_head {
339 339 display: block;
340 340 clear: both;
341 341 margin: @padding 0;
342 342 font-family: @text-bold;
343 343 }
344 344
345 345 .pr-origininfo, .pr-targetinfo {
346 346 position: relative;
347 347
348 348 .tag {
349 349 display: inline-block;
350 350 margin: 0 1em .5em 0;
351 351 }
352 352
353 353 .clone-url {
354 354 display: inline-block;
355 355 margin: 0 0 .5em 0;
356 356 padding: 0;
357 357 line-height: 1.2em;
358 358 }
359 359 }
360 360
361 361 .pr-pullinfo {
362 362 clear: both;
363 363 margin: .5em 0;
364 364 }
365 365
366 366 #pr-title-input {
367 367 width: 72%;
368 368 font-size: 1em;
369 369 font-family: @text-bold;
370 370 margin: 0;
371 371 padding: 0 0 0 @padding/4;
372 372 line-height: 1.7em;
373 373 color: @text-color;
374 374 letter-spacing: .02em;
375 375 }
376 376
377 377 #pullrequest_title {
378 378 width: 100%;
379 379 box-sizing: border-box;
380 380 }
381 381
382 382 #pr_open_message {
383 383 border: @border-thickness solid #fff;
384 384 border-radius: @border-radius;
385 385 padding: @padding-large-vertical @padding-large-vertical @padding-large-vertical 0;
386 386 text-align: right;
387 387 overflow: hidden;
388 388 }
389 389
390 390 .pr-submit-button {
391 391 float: right;
392 392 margin: 0 0 0 5px;
393 393 }
394 394
395 395 .pr-spacing-container {
396 396 padding: 20px;
397 397 clear: both
398 398 }
399 399
400 400 #pr-description-input {
401 401 margin-bottom: 0;
402 402 }
403 403
404 404 .pr-description-label {
405 405 vertical-align: top;
406 406 }
407 407
408 408 .perms_section_head {
409 409 min-width: 625px;
410 410
411 411 h2 {
412 412 margin-bottom: 0;
413 413 }
414 414
415 415 .label-checkbox {
416 416 float: left;
417 417 }
418 418
419 419 &.field {
420 420 margin: @space 0 @padding;
421 421 }
422 422
423 423 &:first-child.field {
424 424 margin-top: 0;
425 425
426 426 .label {
427 427 margin-top: 0;
428 428 padding-top: 0;
429 429 }
430 430
431 431 .radios {
432 432 padding-top: 0;
433 433 }
434 434 }
435 435
436 436 .radios {
437 437 float: right;
438 438 position: relative;
439 439 width: 405px;
440 440 }
441 441 }
442 442
443 443 //--- MODULES ------------------//
444 444
445 445
446 446 // Server Announcement
447 447 #server-announcement {
448 448 width: 95%;
449 449 margin: @padding auto;
450 450 padding: @padding;
451 451 border-width: 2px;
452 452 border-style: solid;
453 453 .border-radius(2px);
454 454 font-family: @text-bold;
455 455
456 456 &.info { border-color: @alert4; background-color: @alert4-inner; }
457 457 &.warning { border-color: @alert3; background-color: @alert3-inner; }
458 458 &.error { border-color: @alert2; background-color: @alert2-inner; }
459 459 &.success { border-color: @alert1; background-color: @alert1-inner; }
460 460 &.neutral { border-color: @grey3; background-color: @grey6; }
461 461 }
462 462
463 463 // Fixed Sidebar Column
464 464 .sidebar-col-wrapper {
465 465 padding-left: @sidebar-all-width;
466 466
467 467 .sidebar {
468 468 width: @sidebar-width;
469 469 margin-left: -@sidebar-all-width;
470 470 }
471 471 }
472 472
473 473 .sidebar-col-wrapper.scw-small {
474 474 padding-left: @sidebar-small-all-width;
475 475
476 476 .sidebar {
477 477 width: @sidebar-small-width;
478 478 margin-left: -@sidebar-small-all-width;
479 479 }
480 480 }
481 481
482 482
483 483 // FOOTER
484 484 #footer {
485 485 padding: 0;
486 486 text-align: center;
487 487 vertical-align: middle;
488 488 color: @grey2;
489 489 background-color: @grey6;
490 490
491 491 p {
492 492 margin: 0;
493 493 padding: 1em;
494 494 line-height: 1em;
495 495 }
496 496
497 497 .server-instance { //server instance
498 498 display: none;
499 499 }
500 500
501 501 .title {
502 502 float: none;
503 503 margin: 0 auto;
504 504 }
505 505 }
506 506
507 507 button.close {
508 508 padding: 0;
509 509 cursor: pointer;
510 510 background: transparent;
511 511 border: 0;
512 512 .box-shadow(none);
513 513 -webkit-appearance: none;
514 514 }
515 515
516 516 .close {
517 517 float: right;
518 518 font-size: 21px;
519 519 font-family: @text-bootstrap;
520 520 line-height: 1em;
521 521 font-weight: bold;
522 522 color: @grey2;
523 523
524 524 &:hover,
525 525 &:focus {
526 526 color: @grey1;
527 527 text-decoration: none;
528 528 cursor: pointer;
529 529 }
530 530 }
531 531
532 532 // GRID
533 533 .sorting,
534 534 .sorting_desc,
535 535 .sorting_asc {
536 536 cursor: pointer;
537 537 }
538 538 .sorting_desc:after {
539 539 content: "\00A0\25B2";
540 540 font-size: .75em;
541 541 }
542 542 .sorting_asc:after {
543 543 content: "\00A0\25BC";
544 544 font-size: .68em;
545 545 }
546 546
547 547
548 548 .user_auth_tokens {
549 549
550 550 &.truncate {
551 551 white-space: nowrap;
552 552 overflow: hidden;
553 553 text-overflow: ellipsis;
554 554 }
555 555
556 556 .fields .field .input {
557 557 margin: 0;
558 558 }
559 559
560 560 input#description {
561 561 width: 100px;
562 562 margin: 0;
563 563 }
564 564
565 565 .drop-menu {
566 566 // TODO: johbo: Remove this, should work out of the box when
567 567 // having multiple inputs inline
568 568 margin: 0 0 0 5px;
569 569 }
570 570 }
571 571 #user_list_table {
572 572 .closed {
573 573 background-color: @grey6;
574 574 }
575 575 }
576 576
577 577
578 578 input {
579 579 &.disabled {
580 580 opacity: .5;
581 581 }
582 582 }
583 583
584 584 // remove extra padding in firefox
585 585 input::-moz-focus-inner { border:0; padding:0 }
586 586
587 587 .adjacent input {
588 588 margin-bottom: @padding;
589 589 }
590 590
591 591 .permissions_boxes {
592 592 display: block;
593 593 }
594 594
595 595 //TODO: lisa: this should be in tables
596 596 .show_more_col {
597 597 width: 20px;
598 598 }
599 599
600 600 //FORMS
601 601
602 602 .medium-inline,
603 603 input#description.medium-inline {
604 604 display: inline;
605 605 width: @medium-inline-input-width;
606 606 min-width: 100px;
607 607 }
608 608
609 609 select {
610 610 //reset
611 611 -webkit-appearance: none;
612 612 -moz-appearance: none;
613 613
614 614 display: inline-block;
615 615 height: 28px;
616 616 width: auto;
617 617 margin: 0 @padding @padding 0;
618 618 padding: 0 18px 0 8px;
619 619 line-height:1em;
620 620 font-size: @basefontsize;
621 621 border: @border-thickness solid @rcblue;
622 622 background:white url("../images/dt-arrow-dn.png") no-repeat 100% 50%;
623 623 color: @rcblue;
624 624
625 625 &:after {
626 626 content: "\00A0\25BE";
627 627 }
628 628
629 629 &:focus {
630 630 outline: none;
631 631 }
632 632 }
633 633
634 634 option {
635 635 &:focus {
636 636 outline: none;
637 637 }
638 638 }
639 639
640 640 input,
641 641 textarea {
642 642 padding: @input-padding;
643 643 border: @input-border-thickness solid @border-highlight-color;
644 644 .border-radius (@border-radius);
645 645 font-family: @text-light;
646 646 font-size: @basefontsize;
647 647
648 648 &.input-sm {
649 649 padding: 5px;
650 650 }
651 651
652 652 &#description {
653 653 min-width: @input-description-minwidth;
654 654 min-height: 1em;
655 655 padding: 10px;
656 656 }
657 657 }
658 658
659 659 .field-sm {
660 660 input,
661 661 textarea {
662 662 padding: 5px;
663 663 }
664 664 }
665 665
666 666 textarea {
667 667 display: block;
668 668 clear: both;
669 669 width: 100%;
670 670 min-height: 100px;
671 671 margin-bottom: @padding;
672 672 .box-sizing(border-box);
673 673 overflow: auto;
674 674 }
675 675
676 676 label {
677 677 font-family: @text-light;
678 678 }
679 679
680 680 // GRAVATARS
681 681 // centers gravatar on username to the right
682 682
683 683 .gravatar {
684 684 display: inline;
685 685 min-width: 16px;
686 686 min-height: 16px;
687 687 margin: -5px 0;
688 688 padding: 0;
689 689 line-height: 1em;
690 690 border: 1px solid @grey4;
691 691 box-sizing: content-box;
692 692
693 693 &.gravatar-large {
694 694 margin: -0.5em .25em -0.5em 0;
695 695 }
696 696
697 697 & + .user {
698 698 display: inline;
699 699 margin: 0;
700 700 padding: 0 0 0 .17em;
701 701 line-height: 1em;
702 702 }
703 703 }
704 704
705 705 .user-inline-data {
706 706 display: inline-block;
707 707 float: left;
708 708 padding-left: .5em;
709 709 line-height: 1.3em;
710 710 }
711 711
712 712 .rc-user { // gravatar + user wrapper
713 713 float: left;
714 714 position: relative;
715 715 min-width: 100px;
716 716 max-width: 200px;
717 717 min-height: (@gravatar-size + @border-thickness * 2); // account for border
718 718 display: block;
719 719 padding: 0 0 0 (@gravatar-size + @basefontsize/2 + @border-thickness * 2);
720 720
721 721
722 722 .gravatar {
723 723 display: block;
724 724 position: absolute;
725 725 top: 0;
726 726 left: 0;
727 727 min-width: @gravatar-size;
728 728 min-height: @gravatar-size;
729 729 margin: 0;
730 730 }
731 731
732 732 .user {
733 733 display: block;
734 734 max-width: 175px;
735 735 padding-top: 2px;
736 736 overflow: hidden;
737 737 text-overflow: ellipsis;
738 738 }
739 739 }
740 740
741 741 .gist-gravatar,
742 742 .journal_container {
743 743 .gravatar-large {
744 744 margin: 0 .5em -10px 0;
745 745 }
746 746 }
747 747
748 748
749 749 // ADMIN SETTINGS
750 750
751 751 // Tag Patterns
752 752 .tag_patterns {
753 753 .tag_input {
754 754 margin-bottom: @padding;
755 755 }
756 756 }
757 757
758 758 .locked_input {
759 759 position: relative;
760 760
761 761 input {
762 762 display: inline;
763 763 margin-top: 3px;
764 764 }
765 765
766 766 br {
767 767 display: none;
768 768 }
769 769
770 770 .error-message {
771 771 float: left;
772 772 width: 100%;
773 773 }
774 774
775 775 .lock_input_button {
776 776 display: inline;
777 777 }
778 778
779 779 .help-block {
780 780 clear: both;
781 781 }
782 782 }
783 783
784 784 // Notifications
785 785
786 786 .notifications_buttons {
787 787 margin: 0 0 @space 0;
788 788 padding: 0;
789 789
790 790 .btn {
791 791 display: inline-block;
792 792 }
793 793 }
794 794
795 795 .notification-list {
796 796
797 797 div {
798 798 display: inline-block;
799 799 vertical-align: middle;
800 800 }
801 801
802 802 .container {
803 803 display: block;
804 804 margin: 0 0 @padding 0;
805 805 }
806 806
807 807 .delete-notifications {
808 808 margin-left: @padding;
809 809 text-align: right;
810 810 cursor: pointer;
811 811 }
812 812
813 813 .read-notifications {
814 814 margin-left: @padding/2;
815 815 text-align: right;
816 816 width: 35px;
817 817 cursor: pointer;
818 818 }
819 819
820 820 .icon-minus-sign {
821 821 color: @alert2;
822 822 }
823 823
824 824 .icon-ok-sign {
825 825 color: @alert1;
826 826 }
827 827 }
828 828
829 829 .user_settings {
830 830 float: left;
831 831 clear: both;
832 832 display: block;
833 833 width: 100%;
834 834
835 835 .gravatar_box {
836 836 margin-bottom: @padding;
837 837
838 838 &:after {
839 839 content: " ";
840 840 clear: both;
841 841 width: 100%;
842 842 }
843 843 }
844 844
845 845 .fields .field {
846 846 clear: both;
847 847 }
848 848 }
849 849
850 850 .advanced_settings {
851 851 margin-bottom: @space;
852 852
853 853 .help-block {
854 854 margin-left: 0;
855 855 }
856 856
857 857 button + .help-block {
858 858 margin-top: @padding;
859 859 }
860 860 }
861 861
862 862 // admin settings radio buttons and labels
863 863 .label-2 {
864 864 float: left;
865 865 width: @label2-width;
866 866
867 867 label {
868 868 color: @grey1;
869 869 }
870 870 }
871 871 .checkboxes {
872 872 float: left;
873 873 width: @checkboxes-width;
874 874 margin-bottom: @padding;
875 875
876 876 .checkbox {
877 877 width: 100%;
878 878
879 879 label {
880 880 margin: 0;
881 881 padding: 0;
882 882 }
883 883 }
884 884
885 885 .checkbox + .checkbox {
886 886 display: inline-block;
887 887 }
888 888
889 889 label {
890 890 margin-right: 1em;
891 891 }
892 892 }
893 893
894 894 // CHANGELOG
895 895 .container_header {
896 896 float: left;
897 897 display: block;
898 898 width: 100%;
899 899 margin: @padding 0 @padding;
900 900
901 901 #filter_changelog {
902 902 float: left;
903 903 margin-right: @padding;
904 904 }
905 905
906 906 .breadcrumbs_light {
907 907 display: inline-block;
908 908 }
909 909 }
910 910
911 911 .info_box {
912 912 float: right;
913 913 }
914 914
915 915
916 916 #graph_nodes {
917 917 padding-top: 43px;
918 918 }
919 919
920 920 #graph_content{
921 921
922 922 // adjust for table headers so that graph renders properly
923 923 // #graph_nodes padding - table cell padding
924 924 padding-top: (@space - (@basefontsize * 2.4));
925 925
926 926 &.graph_full_width {
927 927 width: 100%;
928 928 max-width: 100%;
929 929 }
930 930 }
931 931
932 932 #graph {
933 933 .flag_status {
934 934 margin: 0;
935 935 }
936 936
937 937 .pagination-left {
938 938 float: left;
939 939 clear: both;
940 940 }
941 941
942 942 .log-container {
943 943 max-width: 345px;
944 944
945 945 .message{
946 946 max-width: 340px;
947 947 }
948 948 }
949 949
950 950 .graph-col-wrapper {
951 951 padding-left: 110px;
952 952
953 953 #graph_nodes {
954 954 width: 100px;
955 955 margin-left: -110px;
956 956 float: left;
957 957 clear: left;
958 958 }
959 959 }
960 960 }
961 961
962 962 #filter_changelog {
963 963 float: left;
964 964 }
965 965
966 966
967 967 //--- THEME ------------------//
968 968
969 969 #logo {
970 970 float: left;
971 971 margin: 9px 0 0 0;
972 972
973 973 .header {
974 974 background-color: transparent;
975 975 }
976 976
977 977 a {
978 978 display: inline-block;
979 979 }
980 980
981 981 img {
982 982 height:30px;
983 983 }
984 984 }
985 985
986 986 .logo-wrapper {
987 987 float:left;
988 988 }
989 989
990 990 .branding{
991 991 float: left;
992 992 padding: 9px 2px;
993 993 line-height: 1em;
994 994 font-size: @navigation-fontsize;
995 995 }
996 996
997 997 img {
998 998 border: none;
999 999 outline: none;
1000 1000 }
1001 1001 user-profile-header
1002 1002 label {
1003 1003
1004 1004 input[type="checkbox"] {
1005 1005 margin-right: 1em;
1006 1006 }
1007 1007 input[type="radio"] {
1008 1008 margin-right: 1em;
1009 1009 }
1010 1010 }
1011 1011
1012 1012 .flag_status {
1013 1013 margin: 2px 8px 6px 2px;
1014 1014 &.under_review {
1015 1015 .circle(5px, @alert3);
1016 1016 }
1017 1017 &.approved {
1018 1018 .circle(5px, @alert1);
1019 1019 }
1020 1020 &.rejected,
1021 1021 &.forced_closed{
1022 1022 .circle(5px, @alert2);
1023 1023 }
1024 1024 &.not_reviewed {
1025 1025 .circle(5px, @grey5);
1026 1026 }
1027 1027 }
1028 1028
1029 1029 .flag_status_comment_box {
1030 1030 margin: 5px 6px 0px 2px;
1031 1031 }
1032 1032 .test_pattern_preview {
1033 1033 margin: @space 0;
1034 1034
1035 1035 p {
1036 1036 margin-bottom: 0;
1037 1037 border-bottom: @border-thickness solid @border-default-color;
1038 1038 color: @grey3;
1039 1039 }
1040 1040
1041 1041 .btn {
1042 1042 margin-bottom: @padding;
1043 1043 }
1044 1044 }
1045 1045 #test_pattern_result {
1046 1046 display: none;
1047 1047 &:extend(pre);
1048 1048 padding: .9em;
1049 1049 color: @grey3;
1050 1050 background-color: @grey7;
1051 1051 border-right: @border-thickness solid @border-default-color;
1052 1052 border-bottom: @border-thickness solid @border-default-color;
1053 1053 border-left: @border-thickness solid @border-default-color;
1054 1054 }
1055 1055
1056 1056 #repo_vcs_settings {
1057 1057 #inherit_overlay_vcs_default {
1058 1058 display: none;
1059 1059 }
1060 1060 #inherit_overlay_vcs_custom {
1061 1061 display: custom;
1062 1062 }
1063 1063 &.inherited {
1064 1064 #inherit_overlay_vcs_default {
1065 1065 display: block;
1066 1066 }
1067 1067 #inherit_overlay_vcs_custom {
1068 1068 display: none;
1069 1069 }
1070 1070 }
1071 1071 }
1072 1072
1073 1073 .issue-tracker-link {
1074 1074 color: @rcblue;
1075 1075 }
1076 1076
1077 1077 // Issue Tracker Table Show/Hide
1078 1078 #repo_issue_tracker {
1079 1079 #inherit_overlay {
1080 1080 display: none;
1081 1081 }
1082 1082 #custom_overlay {
1083 1083 display: custom;
1084 1084 }
1085 1085 &.inherited {
1086 1086 #inherit_overlay {
1087 1087 display: block;
1088 1088 }
1089 1089 #custom_overlay {
1090 1090 display: none;
1091 1091 }
1092 1092 }
1093 1093 }
1094 1094 table.issuetracker {
1095 1095 &.readonly {
1096 1096 tr, td {
1097 1097 color: @grey3;
1098 1098 }
1099 1099 }
1100 1100 .edit {
1101 1101 display: none;
1102 1102 }
1103 1103 .editopen {
1104 1104 .edit {
1105 1105 display: inline;
1106 1106 }
1107 1107 .entry {
1108 1108 display: none;
1109 1109 }
1110 1110 }
1111 1111 tr td.td-action {
1112 1112 min-width: 117px;
1113 1113 }
1114 1114 td input {
1115 1115 max-width: none;
1116 1116 min-width: 30px;
1117 1117 width: 80%;
1118 1118 }
1119 1119 .issuetracker_pref input {
1120 1120 width: 40%;
1121 1121 }
1122 1122 input.edit_issuetracker_update {
1123 1123 margin-right: 0;
1124 1124 width: auto;
1125 1125 }
1126 1126 }
1127 1127
1128 1128 table.integrations {
1129 1129 .td-icon {
1130 1130 width: 20px;
1131 1131 .integration-icon {
1132 1132 height: 20px;
1133 1133 width: 20px;
1134 1134 }
1135 1135 }
1136 1136 }
1137 1137
1138 1138 .integrations {
1139 1139 a.integration-box {
1140 1140 color: @text-color;
1141 1141 &:hover {
1142 1142 .panel {
1143 1143 background: #fbfbfb;
1144 1144 }
1145 1145 }
1146 1146 .integration-icon {
1147 1147 width: 30px;
1148 1148 height: 30px;
1149 1149 margin-right: 20px;
1150 1150 float: left;
1151 1151 }
1152 1152
1153 1153 .panel-body {
1154 1154 padding: 10px;
1155 1155 }
1156 1156 .panel {
1157 1157 margin-bottom: 10px;
1158 1158 }
1159 1159 h2 {
1160 1160 display: inline-block;
1161 1161 margin: 0;
1162 1162 min-width: 140px;
1163 1163 }
1164 1164 }
1165 1165 }
1166 1166
1167 1167 //Permissions Settings
1168 1168 #add_perm {
1169 1169 margin: 0 0 @padding;
1170 1170 cursor: pointer;
1171 1171 }
1172 1172
1173 1173 .perm_ac {
1174 1174 input {
1175 1175 width: 95%;
1176 1176 }
1177 1177 }
1178 1178
1179 1179 .autocomplete-suggestions {
1180 1180 width: auto !important; // overrides autocomplete.js
1181 1181 margin: 0;
1182 1182 border: @border-thickness solid @rcblue;
1183 1183 border-radius: @border-radius;
1184 1184 color: @rcblue;
1185 1185 background-color: white;
1186 1186 }
1187 1187 .autocomplete-selected {
1188 1188 background: #F0F0F0;
1189 1189 }
1190 1190 .ac-container-wrap {
1191 1191 margin: 0;
1192 1192 padding: 8px;
1193 1193 border-bottom: @border-thickness solid @rclightblue;
1194 1194 list-style-type: none;
1195 1195 cursor: pointer;
1196 1196
1197 1197 &:hover {
1198 1198 background-color: @rclightblue;
1199 1199 }
1200 1200
1201 1201 img {
1202 1202 height: @gravatar-size;
1203 1203 width: @gravatar-size;
1204 1204 margin-right: 1em;
1205 1205 }
1206 1206
1207 1207 strong {
1208 1208 font-weight: normal;
1209 1209 }
1210 1210 }
1211 1211
1212 1212 // Settings Dropdown
1213 1213 .user-menu .container {
1214 1214 padding: 0 4px;
1215 1215 margin: 0;
1216 1216 }
1217 1217
1218 1218 .user-menu .gravatar {
1219 1219 cursor: pointer;
1220 1220 }
1221 1221
1222 1222 .codeblock {
1223 1223 margin-bottom: @padding;
1224 1224 clear: both;
1225 1225
1226 1226 .stats{
1227 1227 overflow: hidden;
1228 1228 }
1229 1229
1230 1230 .message{
1231 1231 textarea{
1232 1232 margin: 0;
1233 1233 }
1234 1234 }
1235 1235
1236 1236 .code-header {
1237 1237 .stats {
1238 1238 line-height: 2em;
1239 1239
1240 1240 .revision_id {
1241 1241 margin-left: 0;
1242 1242 }
1243 1243 .buttons {
1244 1244 padding-right: 0;
1245 1245 }
1246 1246 }
1247 1247
1248 1248 .item{
1249 1249 margin-right: 0.5em;
1250 1250 }
1251 1251 }
1252 1252
1253 1253 #editor_container{
1254 1254 position: relative;
1255 1255 margin: @padding;
1256 1256 }
1257 1257 }
1258 1258
1259 1259 #file_history_container {
1260 1260 display: none;
1261 1261 }
1262 1262
1263 1263 .file-history-inner {
1264 1264 margin-bottom: 10px;
1265 1265 }
1266 1266
1267 1267 // Pull Requests
1268 1268 .summary-details {
1269 1269 width: 72%;
1270 1270 }
1271 1271 .pr-summary {
1272 1272 border-bottom: @border-thickness solid @grey5;
1273 1273 margin-bottom: @space;
1274 1274 }
1275 1275 .reviewers-title {
1276 1276 width: 25%;
1277 1277 min-width: 200px;
1278 1278 }
1279 1279 .reviewers {
1280 1280 width: 25%;
1281 1281 min-width: 200px;
1282 1282 }
1283 1283 .reviewers ul li {
1284 1284 position: relative;
1285 1285 width: 100%;
1286 1286 margin-bottom: 8px;
1287 1287 }
1288 1288 .reviewers_member {
1289 1289 width: 100%;
1290 1290 overflow: auto;
1291 1291 }
1292 1292 .reviewer_reason {
1293 1293 padding-left: 20px;
1294 1294 }
1295 1295 .reviewer_status {
1296 1296 display: inline-block;
1297 1297 vertical-align: top;
1298 1298 width: 7%;
1299 1299 min-width: 20px;
1300 1300 height: 1.2em;
1301 1301 margin-top: 3px;
1302 1302 line-height: 1em;
1303 1303 }
1304 1304
1305 1305 .reviewer_name {
1306 1306 display: inline-block;
1307 1307 max-width: 83%;
1308 1308 padding-right: 20px;
1309 1309 vertical-align: middle;
1310 1310 line-height: 1;
1311 1311
1312 1312 .rc-user {
1313 1313 min-width: 0;
1314 1314 margin: -2px 1em 0 0;
1315 1315 }
1316 1316
1317 1317 .reviewer {
1318 1318 float: left;
1319 1319 }
1320 1320
1321 1321 &.to-delete {
1322 1322 .user,
1323 1323 .reviewer {
1324 1324 text-decoration: line-through;
1325 1325 }
1326 1326 }
1327 1327 }
1328 1328
1329 1329 .reviewer_member_remove {
1330 1330 position: absolute;
1331 1331 right: 0;
1332 1332 top: 0;
1333 1333 width: 16px;
1334 1334 margin-bottom: 10px;
1335 1335 padding: 0;
1336 1336 color: black;
1337 1337 }
1338 1338 .reviewer_member_status {
1339 1339 margin-top: 5px;
1340 1340 }
1341 1341 .pr-summary #summary{
1342 1342 width: 100%;
1343 1343 }
1344 1344 .pr-summary .action_button:hover {
1345 1345 border: 0;
1346 1346 cursor: pointer;
1347 1347 }
1348 1348 .pr-details-title {
1349 1349 padding-bottom: 8px;
1350 1350 border-bottom: @border-thickness solid @grey5;
1351 1351
1352 1352 .action_button.disabled {
1353 1353 color: @grey4;
1354 1354 cursor: inherit;
1355 1355 }
1356 1356 .action_button {
1357 1357 color: @rcblue;
1358 1358 }
1359 1359 }
1360 1360 .pr-details-content {
1361 1361 margin-top: @textmargin;
1362 1362 margin-bottom: @textmargin;
1363 1363 }
1364 1364 .pr-description {
1365 1365 white-space:pre-wrap;
1366 1366 }
1367 1367 .group_members {
1368 1368 margin-top: 0;
1369 1369 padding: 0;
1370 1370 list-style: outside none none;
1371 1371
1372 1372 img {
1373 1373 height: @gravatar-size;
1374 1374 width: @gravatar-size;
1375 1375 margin-right: .5em;
1376 1376 margin-left: 3px;
1377 1377 }
1378 1378
1379 1379 .to-delete {
1380 1380 .user {
1381 1381 text-decoration: line-through;
1382 1382 }
1383 1383 }
1384 1384 }
1385 1385
1386 1386 .compare_view_commits_title {
1387 1387 .disabled {
1388 1388 cursor: inherit;
1389 1389 &:hover{
1390 1390 background-color: inherit;
1391 1391 color: inherit;
1392 1392 }
1393 1393 }
1394 1394 }
1395 1395
1396 .subtitle-compare {
1397 margin: -15px 0px 0px 0px;
1398 }
1399
1400 .comments-summary-td {
1401 border-top: 1px dashed @grey5;
1402 }
1403
1396 1404 // new entry in group_members
1397 1405 .td-author-new-entry {
1398 1406 background-color: rgba(red(@alert1), green(@alert1), blue(@alert1), 0.3);
1399 1407 }
1400 1408
1401 1409 .usergroup_member_remove {
1402 1410 width: 16px;
1403 1411 margin-bottom: 10px;
1404 1412 padding: 0;
1405 1413 color: black !important;
1406 1414 cursor: pointer;
1407 1415 }
1408 1416
1409 1417 .reviewer_ac .ac-input {
1410 1418 width: 92%;
1411 1419 margin-bottom: 1em;
1412 1420 }
1413 1421
1414 1422 .compare_view_commits tr{
1415 1423 height: 20px;
1416 1424 }
1417 1425 .compare_view_commits td {
1418 1426 vertical-align: top;
1419 1427 padding-top: 10px;
1420 1428 }
1421 1429 .compare_view_commits .author {
1422 1430 margin-left: 5px;
1423 1431 }
1424 1432
1433 .compare_view_commits {
1434 .color-a {
1435 color: @alert1;
1436 }
1437
1438 .color-c {
1439 color: @color3;
1440 }
1441
1442 .color-r {
1443 color: @color5;
1444 }
1445
1446 .color-a-bg {
1447 background-color: @alert1;
1448 }
1449
1450 .color-c-bg {
1451 background-color: @alert3;
1452 }
1453
1454 .color-r-bg {
1455 background-color: @alert2;
1456 }
1457
1458 .color-a-border {
1459 border: 1px solid @alert1;
1460 }
1461
1462 .color-c-border {
1463 border: 1px solid @alert3;
1464 }
1465
1466 .color-r-border {
1467 border: 1px solid @alert2;
1468 }
1469
1470 .commit-change-indicator {
1471 width: 15px;
1472 height: 15px;
1473 position: relative;
1474 left: 15px;
1475 }
1476
1477 .commit-change-content {
1478 text-align: center;
1479 vertical-align: middle;
1480 line-height: 15px;
1481 }
1482 }
1483
1425 1484 .compare_view_files {
1426 1485 width: 100%;
1427 1486
1428 1487 td {
1429 1488 vertical-align: middle;
1430 1489 }
1431 1490 }
1432 1491
1433 1492 .compare_view_filepath {
1434 1493 color: @grey1;
1435 1494 }
1436 1495
1437 1496 .show_more {
1438 1497 display: inline-block;
1439 1498 position: relative;
1440 1499 vertical-align: middle;
1441 1500 width: 4px;
1442 1501 height: @basefontsize;
1443 1502
1444 1503 &:after {
1445 1504 content: "\00A0\25BE";
1446 1505 display: inline-block;
1447 1506 width:10px;
1448 1507 line-height: 5px;
1449 1508 font-size: 12px;
1450 1509 cursor: pointer;
1451 1510 }
1452 1511 }
1453 1512
1454 1513 .journal_more .show_more {
1455 1514 display: inline;
1456 1515
1457 1516 &:after {
1458 1517 content: none;
1459 1518 }
1460 1519 }
1461 1520
1462 1521 .open .show_more:after,
1463 1522 .select2-dropdown-open .show_more:after {
1464 1523 .rotate(180deg);
1465 1524 margin-left: 4px;
1466 1525 }
1467 1526
1468 1527
1469 1528 .compare_view_commits .collapse_commit:after {
1470 1529 cursor: pointer;
1471 1530 content: "\00A0\25B4";
1472 1531 margin-left: -3px;
1473 1532 font-size: 17px;
1474 1533 color: @grey4;
1475 1534 }
1476 1535
1477 1536 .diff_links {
1478 1537 margin-left: 8px;
1479 1538 }
1480 1539
1481 1540 div.ancestor {
1482 1541 margin: -30px 0px;
1483 1542 }
1484 1543
1485 1544 .cs_icon_td input[type="checkbox"] {
1486 1545 display: none;
1487 1546 }
1488 1547
1489 1548 .cs_icon_td .expand_file_icon:after {
1490 1549 cursor: pointer;
1491 1550 content: "\00A0\25B6";
1492 1551 font-size: 12px;
1493 1552 color: @grey4;
1494 1553 }
1495 1554
1496 1555 .cs_icon_td .collapse_file_icon:after {
1497 1556 cursor: pointer;
1498 1557 content: "\00A0\25BC";
1499 1558 font-size: 12px;
1500 1559 color: @grey4;
1501 1560 }
1502 1561
1503 1562 /*new binary
1504 1563 NEW_FILENODE = 1
1505 1564 DEL_FILENODE = 2
1506 1565 MOD_FILENODE = 3
1507 1566 RENAMED_FILENODE = 4
1508 1567 COPIED_FILENODE = 5
1509 1568 CHMOD_FILENODE = 6
1510 1569 BIN_FILENODE = 7
1511 1570 */
1512 1571 .cs_files_expand {
1513 1572 font-size: @basefontsize + 5px;
1514 1573 line-height: 1.8em;
1515 1574 float: right;
1516 1575 }
1517 1576
1518 1577 .cs_files_expand span{
1519 1578 color: @rcblue;
1520 1579 cursor: pointer;
1521 1580 }
1522 1581 .cs_files {
1523 1582 clear: both;
1524 1583 padding-bottom: @padding;
1525 1584
1526 1585 .cur_cs {
1527 1586 margin: 10px 2px;
1528 1587 font-weight: bold;
1529 1588 }
1530 1589
1531 1590 .node {
1532 1591 float: left;
1533 1592 }
1534 1593
1535 1594 .changes {
1536 1595 float: right;
1537 1596 color: white;
1538 1597 font-size: @basefontsize - 4px;
1539 1598 margin-top: 4px;
1540 1599 opacity: 0.6;
1541 1600 filter: Alpha(opacity=60); /* IE8 and earlier */
1542 1601
1543 1602 .added {
1544 1603 background-color: @alert1;
1545 1604 float: left;
1546 1605 text-align: center;
1547 1606 }
1548 1607
1549 1608 .deleted {
1550 1609 background-color: @alert2;
1551 1610 float: left;
1552 1611 text-align: center;
1553 1612 }
1554 1613
1555 1614 .bin {
1556 1615 background-color: @alert1;
1557 1616 text-align: center;
1558 1617 }
1559 1618
1560 1619 /*new binary*/
1561 1620 .bin.bin1 {
1562 1621 background-color: @alert1;
1563 1622 text-align: center;
1564 1623 }
1565 1624
1566 1625 /*deleted binary*/
1567 1626 .bin.bin2 {
1568 1627 background-color: @alert2;
1569 1628 text-align: center;
1570 1629 }
1571 1630
1572 1631 /*mod binary*/
1573 1632 .bin.bin3 {
1574 1633 background-color: @grey2;
1575 1634 text-align: center;
1576 1635 }
1577 1636
1578 1637 /*rename file*/
1579 1638 .bin.bin4 {
1580 1639 background-color: @alert4;
1581 1640 text-align: center;
1582 1641 }
1583 1642
1584 1643 /*copied file*/
1585 1644 .bin.bin5 {
1586 1645 background-color: @alert4;
1587 1646 text-align: center;
1588 1647 }
1589 1648
1590 1649 /*chmod file*/
1591 1650 .bin.bin6 {
1592 1651 background-color: @grey2;
1593 1652 text-align: center;
1594 1653 }
1595 1654 }
1596 1655 }
1597 1656
1598 1657 .cs_files .cs_added, .cs_files .cs_A,
1599 1658 .cs_files .cs_added, .cs_files .cs_M,
1600 1659 .cs_files .cs_added, .cs_files .cs_D {
1601 1660 height: 16px;
1602 1661 padding-right: 10px;
1603 1662 margin-top: 7px;
1604 1663 text-align: left;
1605 1664 }
1606 1665
1607 1666 .cs_icon_td {
1608 1667 min-width: 16px;
1609 1668 width: 16px;
1610 1669 }
1611 1670
1612 1671 .pull-request-merge {
1613 1672 border: 1px solid @grey5;
1614 1673 padding: 10px 0px 20px;
1615 1674 margin-top: 10px;
1616 1675 margin-bottom: 20px;
1617 1676 }
1618 1677
1619 1678 .pull-request-merge ul {
1620 1679 padding: 0px 0px;
1621 1680 }
1622 1681
1623 1682 .pull-request-merge li:before{
1624 1683 content:none;
1625 1684 }
1626 1685
1627 1686 .pull-request-merge .pull-request-wrap {
1628 1687 height: auto;
1629 1688 padding: 0px 0px;
1630 1689 text-align: right;
1631 1690 }
1632 1691
1633 1692 .pull-request-merge span {
1634 1693 margin-right: 5px;
1635 1694 }
1636 1695
1637 1696 .pull-request-merge-actions {
1638 1697 height: 30px;
1639 1698 padding: 0px 0px;
1640 1699 }
1641 1700
1642 1701 .merge-status {
1643 1702 margin-right: 5px;
1644 1703 }
1645 1704
1646 1705 .merge-message {
1647 1706 font-size: 1.2em;
1648 1707 }
1649 1708
1650 1709 .merge-message.success i,
1651 1710 .merge-icon.success i {
1652 1711 color:@alert1;
1653 1712 }
1654 1713
1655 1714 .merge-message.warning i,
1656 1715 .merge-icon.warning i {
1657 1716 color: @alert3;
1658 1717 }
1659 1718
1660 1719 .merge-message.error i,
1661 1720 .merge-icon.error i {
1662 1721 color:@alert2;
1663 1722 }
1664 1723
1665 1724 .pr-versions {
1725 font-size: 1.1em;
1726
1727 table {
1728 padding: 0px 5px;
1729 }
1730
1731 td {
1732 line-height: 15px;
1733 }
1734
1735 .flag_status {
1736 margin: 0;
1737 }
1738
1739 .compare-radio-button {
1666 1740 position: relative;
1667 top: 6px;
1668 }
1741 top: -3px;
1742 }
1743 }
1744
1669 1745
1670 1746 #close_pull_request {
1671 1747 margin-right: 0px;
1672 1748 }
1673 1749
1674 1750 .empty_data {
1675 1751 color: @grey4;
1676 1752 }
1677 1753
1678 1754 #changeset_compare_view_content {
1679 1755 margin-bottom: @space;
1680 1756 clear: both;
1681 1757 width: 100%;
1682 1758 box-sizing: border-box;
1683 1759 .border-radius(@border-radius);
1684 1760
1685 1761 .help-block {
1686 1762 margin: @padding 0;
1687 1763 color: @text-color;
1688 1764 }
1689 1765
1690 1766 .empty_data {
1691 1767 margin: @padding 0;
1692 1768 }
1693 1769
1694 1770 .alert {
1695 1771 margin-bottom: @space;
1696 1772 }
1697 1773 }
1698 1774
1699 1775 .table_disp {
1700 1776 .status {
1701 1777 width: auto;
1702 1778
1703 1779 .flag_status {
1704 1780 float: left;
1705 1781 }
1706 1782 }
1707 1783 }
1708 1784
1709 1785 .status_box_menu {
1710 1786 margin: 0;
1711 1787 }
1712 1788
1713 1789 .notification-table{
1714 1790 margin-bottom: @space;
1715 1791 display: table;
1716 1792 width: 100%;
1717 1793
1718 1794 .container{
1719 1795 display: table-row;
1720 1796
1721 1797 .notification-header{
1722 1798 border-bottom: @border-thickness solid @border-default-color;
1723 1799 }
1724 1800
1725 1801 .notification-subject{
1726 1802 display: table-cell;
1727 1803 }
1728 1804 }
1729 1805 }
1730 1806
1731 1807 // Notifications
1732 1808 .notification-header{
1733 1809 display: table;
1734 1810 width: 100%;
1735 1811 padding: floor(@basefontsize/2) 0;
1736 1812 line-height: 1em;
1737 1813
1738 1814 .desc, .delete-notifications, .read-notifications{
1739 1815 display: table-cell;
1740 1816 text-align: left;
1741 1817 }
1742 1818
1743 1819 .desc{
1744 1820 width: 1163px;
1745 1821 }
1746 1822
1747 1823 .delete-notifications, .read-notifications{
1748 1824 width: 35px;
1749 1825 min-width: 35px; //fixes when only one button is displayed
1750 1826 }
1751 1827 }
1752 1828
1753 1829 .notification-body {
1754 1830 .markdown-block,
1755 1831 .rst-block {
1756 1832 padding: @padding 0;
1757 1833 }
1758 1834
1759 1835 .notification-subject {
1760 1836 padding: @textmargin 0;
1761 1837 border-bottom: @border-thickness solid @border-default-color;
1762 1838 }
1763 1839 }
1764 1840
1765 1841
1766 1842 .notifications_buttons{
1767 1843 float: right;
1768 1844 }
1769 1845
1770 1846 #notification-status{
1771 1847 display: inline;
1772 1848 }
1773 1849
1774 1850 // Repositories
1775 1851
1776 1852 #summary.fields{
1777 1853 display: table;
1778 1854
1779 1855 .field{
1780 1856 display: table-row;
1781 1857
1782 1858 .label-summary{
1783 1859 display: table-cell;
1784 1860 min-width: @label-summary-minwidth;
1785 1861 padding-top: @padding/2;
1786 1862 padding-bottom: @padding/2;
1787 1863 padding-right: @padding/2;
1788 1864 }
1789 1865
1790 1866 .input{
1791 1867 display: table-cell;
1792 1868 padding: @padding/2;
1793 1869
1794 1870 input{
1795 1871 min-width: 29em;
1796 1872 padding: @padding/4;
1797 1873 }
1798 1874 }
1799 1875 .statistics, .downloads{
1800 1876 .disabled{
1801 1877 color: @grey4;
1802 1878 }
1803 1879 }
1804 1880 }
1805 1881 }
1806 1882
1807 1883 #summary{
1808 1884 width: 70%;
1809 1885 }
1810 1886
1811 1887
1812 1888 // Journal
1813 1889 .journal.title {
1814 1890 h5 {
1815 1891 float: left;
1816 1892 margin: 0;
1817 1893 width: 70%;
1818 1894 }
1819 1895
1820 1896 ul {
1821 1897 float: right;
1822 1898 display: inline-block;
1823 1899 margin: 0;
1824 1900 width: 30%;
1825 1901 text-align: right;
1826 1902
1827 1903 li {
1828 1904 display: inline;
1829 1905 font-size: @journal-fontsize;
1830 1906 line-height: 1em;
1831 1907
1832 1908 &:before { content: none; }
1833 1909 }
1834 1910 }
1835 1911 }
1836 1912
1837 1913 .filterexample {
1838 1914 position: absolute;
1839 1915 top: 95px;
1840 1916 left: @contentpadding;
1841 1917 color: @rcblue;
1842 1918 font-size: 11px;
1843 1919 font-family: @text-regular;
1844 1920 cursor: help;
1845 1921
1846 1922 &:hover {
1847 1923 color: @rcdarkblue;
1848 1924 }
1849 1925
1850 1926 @media (max-width:768px) {
1851 1927 position: relative;
1852 1928 top: auto;
1853 1929 left: auto;
1854 1930 display: block;
1855 1931 }
1856 1932 }
1857 1933
1858 1934
1859 1935 #journal{
1860 1936 margin-bottom: @space;
1861 1937
1862 1938 .journal_day{
1863 1939 margin-bottom: @textmargin/2;
1864 1940 padding-bottom: @textmargin/2;
1865 1941 font-size: @journal-fontsize;
1866 1942 border-bottom: @border-thickness solid @border-default-color;
1867 1943 }
1868 1944
1869 1945 .journal_container{
1870 1946 margin-bottom: @space;
1871 1947
1872 1948 .journal_user{
1873 1949 display: inline-block;
1874 1950 }
1875 1951 .journal_action_container{
1876 1952 display: block;
1877 1953 margin-top: @textmargin;
1878 1954
1879 1955 div{
1880 1956 display: inline;
1881 1957 }
1882 1958
1883 1959 div.journal_action_params{
1884 1960 display: block;
1885 1961 }
1886 1962
1887 1963 div.journal_repo:after{
1888 1964 content: "\A";
1889 1965 white-space: pre;
1890 1966 }
1891 1967
1892 1968 div.date{
1893 1969 display: block;
1894 1970 margin-bottom: @textmargin;
1895 1971 }
1896 1972 }
1897 1973 }
1898 1974 }
1899 1975
1900 1976 // Files
1901 1977 .edit-file-title {
1902 1978 border-bottom: @border-thickness solid @border-default-color;
1903 1979
1904 1980 .breadcrumbs {
1905 1981 margin-bottom: 0;
1906 1982 }
1907 1983 }
1908 1984
1909 1985 .edit-file-fieldset {
1910 1986 margin-top: @sidebarpadding;
1911 1987
1912 1988 .fieldset {
1913 1989 .left-label {
1914 1990 width: 13%;
1915 1991 }
1916 1992 .right-content {
1917 1993 width: 87%;
1918 1994 max-width: 100%;
1919 1995 }
1920 1996 .filename-label {
1921 1997 margin-top: 13px;
1922 1998 }
1923 1999 .commit-message-label {
1924 2000 margin-top: 4px;
1925 2001 }
1926 2002 .file-upload-input {
1927 2003 input {
1928 2004 display: none;
1929 2005 }
1930 2006 }
1931 2007 p {
1932 2008 margin-top: 5px;
1933 2009 }
1934 2010
1935 2011 }
1936 2012 .custom-path-link {
1937 2013 margin-left: 5px;
1938 2014 }
1939 2015 #commit {
1940 2016 resize: vertical;
1941 2017 }
1942 2018 }
1943 2019
1944 2020 .delete-file-preview {
1945 2021 max-height: 250px;
1946 2022 }
1947 2023
1948 2024 .new-file,
1949 2025 #filter_activate,
1950 2026 #filter_deactivate {
1951 2027 float: left;
1952 2028 margin: 0 0 0 15px;
1953 2029 }
1954 2030
1955 2031 h3.files_location{
1956 2032 line-height: 2.4em;
1957 2033 }
1958 2034
1959 2035 .browser-nav {
1960 2036 display: table;
1961 2037 margin-bottom: @space;
1962 2038
1963 2039
1964 2040 .info_box {
1965 2041 display: inline-table;
1966 2042 height: 2.5em;
1967 2043
1968 2044 .browser-cur-rev, .info_box_elem {
1969 2045 display: table-cell;
1970 2046 vertical-align: middle;
1971 2047 }
1972 2048
1973 2049 .info_box_elem {
1974 2050 border-top: @border-thickness solid @rcblue;
1975 2051 border-bottom: @border-thickness solid @rcblue;
1976 2052
1977 2053 #at_rev, a {
1978 2054 padding: 0.6em 0.9em;
1979 2055 margin: 0;
1980 2056 .box-shadow(none);
1981 2057 border: 0;
1982 2058 height: 12px;
1983 2059 }
1984 2060
1985 2061 input#at_rev {
1986 2062 max-width: 50px;
1987 2063 text-align: right;
1988 2064 }
1989 2065
1990 2066 &.previous {
1991 2067 border: @border-thickness solid @rcblue;
1992 2068 .disabled {
1993 2069 color: @grey4;
1994 2070 cursor: not-allowed;
1995 2071 }
1996 2072 }
1997 2073
1998 2074 &.next {
1999 2075 border: @border-thickness solid @rcblue;
2000 2076 .disabled {
2001 2077 color: @grey4;
2002 2078 cursor: not-allowed;
2003 2079 }
2004 2080 }
2005 2081 }
2006 2082
2007 2083 .browser-cur-rev {
2008 2084
2009 2085 span{
2010 2086 margin: 0;
2011 2087 color: @rcblue;
2012 2088 height: 12px;
2013 2089 display: inline-block;
2014 2090 padding: 0.7em 1em ;
2015 2091 border: @border-thickness solid @rcblue;
2016 2092 margin-right: @padding;
2017 2093 }
2018 2094 }
2019 2095 }
2020 2096
2021 2097 .search_activate {
2022 2098 display: table-cell;
2023 2099 vertical-align: middle;
2024 2100
2025 2101 input, label{
2026 2102 margin: 0;
2027 2103 padding: 0;
2028 2104 }
2029 2105
2030 2106 input{
2031 2107 margin-left: @textmargin;
2032 2108 }
2033 2109
2034 2110 }
2035 2111 }
2036 2112
2037 2113 .browser-cur-rev{
2038 2114 margin-bottom: @textmargin;
2039 2115 }
2040 2116
2041 2117 #node_filter_box_loading{
2042 2118 .info_text;
2043 2119 }
2044 2120
2045 2121 .browser-search {
2046 2122 margin: -25px 0px 5px 0px;
2047 2123 }
2048 2124
2049 2125 .node-filter {
2050 2126 font-size: @repo-title-fontsize;
2051 2127 padding: 4px 0px 0px 0px;
2052 2128
2053 2129 .node-filter-path {
2054 2130 float: left;
2055 2131 color: @grey4;
2056 2132 }
2057 2133 .node-filter-input {
2058 2134 float: left;
2059 2135 margin: -2px 0px 0px 2px;
2060 2136 input {
2061 2137 padding: 2px;
2062 2138 border: none;
2063 2139 font-size: @repo-title-fontsize;
2064 2140 }
2065 2141 }
2066 2142 }
2067 2143
2068 2144
2069 2145 .browser-result{
2070 2146 td a{
2071 2147 margin-left: 0.5em;
2072 2148 display: inline-block;
2073 2149
2074 2150 em{
2075 2151 font-family: @text-bold;
2076 2152 }
2077 2153 }
2078 2154 }
2079 2155
2080 2156 .browser-highlight{
2081 2157 background-color: @grey5-alpha;
2082 2158 }
2083 2159
2084 2160
2085 2161 // Search
2086 2162
2087 2163 .search-form{
2088 2164 #q {
2089 2165 width: @search-form-width;
2090 2166 }
2091 2167 .fields{
2092 2168 margin: 0 0 @space;
2093 2169 }
2094 2170
2095 2171 label{
2096 2172 display: inline-block;
2097 2173 margin-right: @textmargin;
2098 2174 padding-top: 0.25em;
2099 2175 }
2100 2176
2101 2177
2102 2178 .results{
2103 2179 clear: both;
2104 2180 margin: 0 0 @padding;
2105 2181 }
2106 2182 }
2107 2183
2108 2184 div.search-feedback-items {
2109 2185 display: inline-block;
2110 2186 padding:0px 0px 0px 96px;
2111 2187 }
2112 2188
2113 2189 div.search-code-body {
2114 2190 background-color: #ffffff; padding: 5px 0 5px 10px;
2115 2191 pre {
2116 2192 .match { background-color: #faffa6;}
2117 2193 .break { display: block; width: 100%; background-color: #DDE7EF; color: #747474; }
2118 2194 }
2119 2195 }
2120 2196
2121 2197 .expand_commit.search {
2122 2198 .show_more.open {
2123 2199 height: auto;
2124 2200 max-height: none;
2125 2201 }
2126 2202 }
2127 2203
2128 2204 .search-results {
2129 2205
2130 2206 h2 {
2131 2207 margin-bottom: 0;
2132 2208 }
2133 2209 .codeblock {
2134 2210 border: none;
2135 2211 background: transparent;
2136 2212 }
2137 2213
2138 2214 .codeblock-header {
2139 2215 border: none;
2140 2216 background: transparent;
2141 2217 }
2142 2218
2143 2219 .code-body {
2144 2220 border: @border-thickness solid @border-default-color;
2145 2221 .border-radius(@border-radius);
2146 2222 }
2147 2223
2148 2224 .td-commit {
2149 2225 &:extend(pre);
2150 2226 border-bottom: @border-thickness solid @border-default-color;
2151 2227 }
2152 2228
2153 2229 .message {
2154 2230 height: auto;
2155 2231 max-width: 350px;
2156 2232 white-space: normal;
2157 2233 text-overflow: initial;
2158 2234 overflow: visible;
2159 2235
2160 2236 .match { background-color: #faffa6;}
2161 2237 .break { background-color: #DDE7EF; width: 100%; color: #747474; display: block; }
2162 2238 }
2163 2239
2164 2240 }
2165 2241
2166 2242 table.rctable td.td-search-results div {
2167 2243 max-width: 100%;
2168 2244 }
2169 2245
2170 2246 #tip-box, .tip-box{
2171 2247 padding: @menupadding/2;
2172 2248 display: block;
2173 2249 border: @border-thickness solid @border-highlight-color;
2174 2250 .border-radius(@border-radius);
2175 2251 background-color: white;
2176 2252 z-index: 99;
2177 2253 white-space: pre-wrap;
2178 2254 }
2179 2255
2180 2256 #linktt {
2181 2257 width: 79px;
2182 2258 }
2183 2259
2184 2260 #help_kb .modal-content{
2185 2261 max-width: 750px;
2186 2262 margin: 10% auto;
2187 2263
2188 2264 table{
2189 2265 td,th{
2190 2266 border-bottom: none;
2191 2267 line-height: 2.5em;
2192 2268 }
2193 2269 th{
2194 2270 padding-bottom: @textmargin/2;
2195 2271 }
2196 2272 td.keys{
2197 2273 text-align: center;
2198 2274 }
2199 2275 }
2200 2276
2201 2277 .block-left{
2202 2278 width: 45%;
2203 2279 margin-right: 5%;
2204 2280 }
2205 2281 .modal-footer{
2206 2282 clear: both;
2207 2283 }
2208 2284 .key.tag{
2209 2285 padding: 0.5em;
2210 2286 background-color: @rcblue;
2211 2287 color: white;
2212 2288 border-color: @rcblue;
2213 2289 .box-shadow(none);
2214 2290 }
2215 2291 }
2216 2292
2217 2293
2218 2294
2219 2295 //--- IMPORTS FOR REFACTORED STYLES ------------------//
2220 2296
2221 2297 @import 'statistics-graph';
2222 2298 @import 'tables';
2223 2299 @import 'forms';
2224 2300 @import 'diff';
2225 2301 @import 'summary';
2226 2302 @import 'navigation';
2227 2303
2228 2304 //--- SHOW/HIDE SECTIONS --//
2229 2305
2230 2306 .btn-collapse {
2231 2307 float: right;
2232 2308 text-align: right;
2233 2309 font-family: @text-light;
2234 2310 font-size: @basefontsize;
2235 2311 cursor: pointer;
2236 2312 border: none;
2237 2313 color: @rcblue;
2238 2314 }
2239 2315
2240 2316 table.rctable,
2241 2317 table.dataTable {
2242 2318 .btn-collapse {
2243 2319 float: right;
2244 2320 text-align: right;
2245 2321 }
2246 2322 }
2247 2323
2248 2324
2249 2325 // TODO: johbo: Fix for IE10, this avoids that we see a border
2250 2326 // and padding around checkboxes and radio boxes. Move to the right place,
2251 2327 // or better: Remove this once we did the form refactoring.
2252 2328 input[type=checkbox],
2253 2329 input[type=radio] {
2254 2330 padding: 0;
2255 2331 border: none;
2256 2332 }
2257 2333
2258 2334 .toggle-ajax-spinner{
2259 2335 height: 16px;
2260 2336 width: 16px;
2261 2337 }
@@ -1,418 +1,440 b''
1 1 ## -*- coding: utf-8 -*-
2 2
3 3 <%inherit file="/base/base.mako"/>
4 4
5 5 <%def name="title()">
6 6 ${_('%s Changelog') % c.repo_name}
7 7 %if c.changelog_for_path:
8 8 /${c.changelog_for_path}
9 9 %endif
10 10 %if c.rhodecode_name:
11 11 &middot; ${h.branding(c.rhodecode_name)}
12 12 %endif
13 13 </%def>
14 14
15 15 <%def name="breadcrumbs_links()">
16 16 %if c.changelog_for_path:
17 17 /${c.changelog_for_path}
18 18 %endif
19 19 ${ungettext('showing %d out of %d commit', 'showing %d out of %d commits', c.showing_commits) % (c.showing_commits, c.total_cs)}
20 20 </%def>
21 21
22 22 <%def name="menu_bar_nav()">
23 23 ${self.menu_items(active='repositories')}
24 24 </%def>
25 25
26 26 <%def name="menu_bar_subnav()">
27 27 ${self.repo_menu(active='changelog')}
28 28 </%def>
29 29
30 30 <%def name="main()">
31 31
32 32 <div class="box">
33 33 <div class="title">
34 34 ${self.repo_page_title(c.rhodecode_db_repo)}
35 35 <ul class="links">
36 36 <li>
37 37 <a href="#" class="btn btn-small" id="rev_range_container" style="display:none;"></a>
38 38 %if c.rhodecode_db_repo.fork:
39 39 <span>
40 40 <a id="compare_fork_button"
41 41 title="${_('Compare fork with %s' % c.rhodecode_db_repo.fork.repo_name)}"
42 42 class="btn btn-small"
43 43 href="${h.url('compare_url',
44 44 repo_name=c.rhodecode_db_repo.fork.repo_name,
45 45 source_ref_type=c.rhodecode_db_repo.landing_rev[0],
46 46 source_ref=c.rhodecode_db_repo.landing_rev[1],
47 47 target_repo=c.repo_name,
48 48 target_ref_type='branch' if request.GET.get('branch') else c.rhodecode_db_repo.landing_rev[0],
49 49 target_ref=request.GET.get('branch') or c.rhodecode_db_repo.landing_rev[1],
50 50 merge=1)}"
51 51 >
52 52 <i class="icon-loop"></i>
53 53 ${_('Compare fork with Parent (%s)' % c.rhodecode_db_repo.fork.repo_name)}
54 54 </a>
55 55 </span>
56 56 %endif
57 57
58 58 ## pr open link
59 59 %if h.is_hg(c.rhodecode_repo) or h.is_git(c.rhodecode_repo):
60 60 <span>
61 61 <a id="open_new_pull_request" class="btn btn-small btn-success" href="${h.url('pullrequest_home',repo_name=c.repo_name)}">
62 62 ${_('Open new pull request')}
63 63 </a>
64 64 </span>
65 65 %endif
66 66
67 67 ## clear selection
68 68 <div title="${_('Clear selection')}" class="btn" id="rev_range_clear" style="display:none">
69 69 ${_('Clear selection')}
70 70 </div>
71 71
72 72 </li>
73 73 </ul>
74 74 </div>
75 75
76 76 % if c.pagination:
77 77
78 78 <div class="graph-header">
79 79 <div id="filter_changelog">
80 80 ${h.hidden('branch_filter')}
81 81 %if c.selected_name:
82 82 <div class="btn btn-default" id="clear_filter" >
83 83 ${_('Clear filter')}
84 84 </div>
85 85 %endif
86 86 </div>
87 87 ${self.breadcrumbs('breadcrumbs_light')}
88 88 </div>
89 89
90 90 <div id="graph">
91 91 <div class="graph-col-wrapper">
92 92 <div id="graph_nodes">
93 93 <div id="graph_canvas" data-graph='${c.jsdata|n}'></div>
94 94 </div>
95 95 <div id="graph_content" class="main-content graph_full_width">
96 96
97 97 <div class="table">
98 98 <table id="changesets" class="rctable">
99 99 <tr>
100 100 ## checkbox
101 101 <th></th>
102 102 <th colspan="2"></th>
103 103
104 104 <th>${_('Commit')}</th>
105 105 ## commit message expand arrow
106 106 <th></th>
107 107 <th>${_('Commit Message')}</th>
108 108
109 109 <th>${_('Age')}</th>
110 110 <th>${_('Author')}</th>
111 111
112 112 <th>${_('Refs')}</th>
113 113 </tr>
114 114 <tbody>
115 115 %for cnt,commit in enumerate(c.pagination):
116 116 <tr id="chg_${cnt+1}" class="container ${'tablerow%s' % (cnt%2)}">
117 117
118 118 <td class="td-checkbox">
119 119 ${h.checkbox(commit.raw_id,class_="commit-range")}
120 120 </td>
121 121 <td class="td-status">
122 122
123 123 %if c.statuses.get(commit.raw_id):
124 124 <div class="changeset-status-ico">
125 125 %if c.statuses.get(commit.raw_id)[2]:
126 126 <a class="tooltip" title="${_('Commit status: %s\nClick to open associated pull request #%s') % (h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]), c.statuses.get(commit.raw_id)[2])}" href="${h.url('pullrequest_show',repo_name=c.statuses.get(commit.raw_id)[3],pull_request_id=c.statuses.get(commit.raw_id)[2])}">
127 127 <div class="${'flag_status %s' % c.statuses.get(commit.raw_id)[0]}"></div>
128 128 </a>
129 129 %else:
130 130 <a class="tooltip" title="${_('Commit status: %s') % h.commit_status_lbl(c.statuses.get(commit.raw_id)[0])}" href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id,anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}">
131 131 <div class="${'flag_status %s' % c.statuses.get(commit.raw_id)[0]}"></div>
132 132 </a>
133 133 %endif
134 134 </div>
135 135 %else:
136 136 <div class="tooltip flag_status not_reviewed" title="${_('Commit status: Not Reviewed')}"></div>
137 137 %endif
138 138 </td>
139 139 <td class="td-comments comments-col">
140 140 %if c.comments.get(commit.raw_id):
141 141 <a title="${_('Commit has comments')}" href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id,anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}">
142 142 <i class="icon-comment"></i> ${len(c.comments[commit.raw_id])}
143 143 </a>
144 144 %endif
145 145 </td>
146 146 <td class="td-hash">
147 147 <code>
148 148 <a href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id)}">
149 149 <span class="commit_hash">${h.show_id(commit)}</span>
150 150 </a>
151 151 </code>
152 152 </td>
153 153 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}">
154 154 <div class="show_more_col">
155 155 <i class="show_more"></i>&nbsp;
156 156 </div>
157 157 </td>
158 158 <td class="td-description mid">
159 159 <div class="log-container truncate-wrap">
160 160 <div class="message truncate" id="c-${commit.raw_id}">${h.urlify_commit_message(commit.message, c.repo_name)}</div>
161 161 </div>
162 162 </td>
163 163
164 164 <td class="td-time">
165 165 ${h.age_component(commit.date)}
166 166 </td>
167 167 <td class="td-user">
168 168 ${self.gravatar_with_user(commit.author)}
169 169 </td>
170 170
171 171 <td class="td-tags tags-col">
172 172 <div id="t-${commit.raw_id}">
173 173 ## branch
174 174 %if commit.branch:
175 175 <span class="branchtag tag" title="${_('Branch %s') % commit.branch}">
176 176 <a href="${h.url('changelog_home',repo_name=c.repo_name,branch=commit.branch)}"><i class="icon-code-fork"></i>${h.shorter(commit.branch)}</a>
177 177 </span>
178 178 %endif
179 179
180 180 ## bookmarks
181 181 %if h.is_hg(c.rhodecode_repo):
182 182 %for book in commit.bookmarks:
183 183 <span class="tag booktag" title="${_('Bookmark %s') % book}">
184 184 <a href="${h.url('files_home',repo_name=c.repo_name,revision=commit.raw_id)}"><i class="icon-bookmark"></i>${h.shorter(book)}</a>
185 185 </span>
186 186 %endfor
187 187 %endif
188 188
189 189 ## tags
190 190 %for tag in commit.tags:
191 191 <span class="tagtag tag" title="${_('Tag %s') % tag}">
192 192 <a href="${h.url('files_home',repo_name=c.repo_name,revision=commit.raw_id)}"><i class="icon-tag"></i>${h.shorter(tag)}</a>
193 193 </span>
194 194 %endfor
195 195
196 196 </div>
197 197 </td>
198 198 </tr>
199 199 %endfor
200 200 </tbody>
201 201 </table>
202 202 </div>
203 203 </div>
204 204 </div>
205 205 <div class="pagination-wh pagination-left">
206 206 ${c.pagination.pager('$link_previous ~2~ $link_next')}
207 207 </div>
208 208
209 209 <script type="text/javascript" src="${h.asset('js/jquery.commits-graph.js')}"></script>
210 210 <script type="text/javascript">
211 211 var cache = {};
212 212 $(function(){
213 213
214 214 // Create links to commit ranges when range checkboxes are selected
215 215 var $commitCheckboxes = $('.commit-range');
216 216 // cache elements
217 217 var $commitRangeContainer = $('#rev_range_container');
218 218 var $commitRangeClear = $('#rev_range_clear');
219 219
220 220 var checkboxRangeSelector = function(e){
221 221 var selectedCheckboxes = [];
222 222 for (pos in $commitCheckboxes){
223 223 if($commitCheckboxes[pos].checked){
224 224 selectedCheckboxes.push($commitCheckboxes[pos]);
225 225 }
226 226 }
227 227 var open_new_pull_request = $('#open_new_pull_request');
228 228 if(open_new_pull_request){
229 229 var selected_changes = selectedCheckboxes.length;
230 230 if (selected_changes > 1 || selected_changes == 1 && templateContext.repo_type != 'hg') {
231 231 open_new_pull_request.hide();
232 232 } else {
233 233 if (selected_changes == 1) {
234 234 open_new_pull_request.html(_gettext('Open new pull request for selected commit'));
235 235 } else if (selected_changes == 0) {
236 236 open_new_pull_request.html(_gettext('Open new pull request'));
237 237 }
238 238 open_new_pull_request.show();
239 239 }
240 240 }
241 241
242 242 if (selectedCheckboxes.length>0){
243 243 var revEnd = selectedCheckboxes[0].name;
244 244 var revStart = selectedCheckboxes[selectedCheckboxes.length-1].name;
245 245 var url = pyroutes.url('changeset_home',
246 246 {'repo_name': '${c.repo_name}',
247 247 'revision': revStart+'...'+revEnd});
248 248
249 249 var link = (revStart == revEnd)
250 250 ? _gettext('Show selected commit __S')
251 251 : _gettext('Show selected commits __S ... __E');
252 252
253 253 link = link.replace('__S', revStart.substr(0,6));
254 254 link = link.replace('__E', revEnd.substr(0,6));
255 255
256 256 $commitRangeContainer
257 257 .attr('href',url)
258 258 .html(link)
259 259 .show();
260 260
261 261 $commitRangeClear.show();
262 262 var _url = pyroutes.url('pullrequest_home',
263 263 {'repo_name': '${c.repo_name}',
264 264 'commit': revEnd});
265 265 open_new_pull_request.attr('href', _url);
266 266 $('#compare_fork_button').hide();
267 267 } else {
268 268 $commitRangeContainer.hide();
269 269 $commitRangeClear.hide();
270 270
271 271 %if c.branch_name:
272 272 var _url = pyroutes.url('pullrequest_home',
273 273 {'repo_name': '${c.repo_name}',
274 274 'branch':'${c.branch_name}'});
275 275 open_new_pull_request.attr('href', _url);
276 276 %else:
277 277 var _url = pyroutes.url('pullrequest_home',
278 278 {'repo_name': '${c.repo_name}'});
279 279 open_new_pull_request.attr('href', _url);
280 280 %endif
281 281 $('#compare_fork_button').show();
282 282 }
283 283 };
284 284
285 285 $commitCheckboxes.on('click', checkboxRangeSelector);
286 286
287 287 $commitRangeClear.on('click',function(e) {
288 288 $commitCheckboxes.attr('checked', false);
289 289 checkboxRangeSelector();
290 290 e.preventDefault();
291 291 });
292 292
293 293 // make sure the buttons are consistent when navigate back and forth
294 294 checkboxRangeSelector();
295 295
296 296
297 297 var msgs = $('.message');
298 298 // get first element height
299 299 var el = $('#graph_content .container')[0];
300 300 var row_h = el.clientHeight;
301 301 for (var i=0; i < msgs.length; i++) {
302 302 var m = msgs[i];
303 303
304 304 var h = m.clientHeight;
305 305 var pad = $(m).css('padding');
306 306 if (h > row_h) {
307 307 var offset = row_h - (h+12);
308 308 $(m.nextElementSibling).css('display','block');
309 309 $(m.nextElementSibling).css('margin-top',offset+'px');
310 310 }
311 311 }
312 312
313 313 $('.expand_commit').on('click',function(e){
314 314 var target_expand = $(this);
315 315 var cid = target_expand.data('commitId');
316 316
317 317 if (target_expand.hasClass('open')){
318 $('#c-'+cid).css({'height': '1.5em', 'white-space': 'nowrap', 'text-overflow': 'ellipsis', 'overflow':'hidden'});
319 $('#t-'+cid).css({'height': 'auto', 'line-height': '.9em', 'text-overflow': 'ellipsis', 'overflow':'hidden', 'white-space':'nowrap'});
318 $('#c-' + cid).css({
319 'height': '1.5em',
320 'white-space': 'nowrap',
321 'text-overflow': 'ellipsis',
322 'overflow': 'hidden'
323 });
324 $('#t-' + cid).css({
325 'height': 'auto',
326 'line-height': '.9em',
327 'text-overflow': 'ellipsis',
328 'overflow': 'hidden',
329 'white-space': 'nowrap'
330 });
320 331 target_expand.removeClass('open');
321 332 }
322 333 else {
323 $('#c-'+cid).css({'height': 'auto', 'white-space': 'pre-line', 'text-overflow': 'initial', 'overflow':'visible'});
324 $('#t-'+cid).css({'height': 'auto', 'max-height': 'none', 'text-overflow': 'initial', 'overflow':'visible', 'white-space':'normal'});
334 $('#c-' + cid).css({
335 'height': 'auto',
336 'white-space': 'pre-line',
337 'text-overflow': 'initial',
338 'overflow': 'visible'
339 });
340 $('#t-' + cid).css({
341 'height': 'auto',
342 'max-height': 'none',
343 'text-overflow': 'initial',
344 'overflow': 'visible',
345 'white-space': 'normal'
346 });
325 347 target_expand.addClass('open');
326 348 }
327 349 // redraw the graph
328 350 graph_options.height = $("#changesets").height();
329 351 $("canvas").remove();
330 352 $("[data-graph]").commits(graph_options);
331 353 });
332 354
333 355 $("#clear_filter").on("click", function() {
334 356 var filter = {'repo_name': '${c.repo_name}'};
335 357 window.location = pyroutes.url('changelog_home', filter);
336 358 });
337 359
338 360 $("#branch_filter").select2({
339 361 'dropdownAutoWidth': true,
340 362 'width': 'resolve',
341 363 'placeholder': "${c.selected_name or _('Filter changelog')}",
342 364 containerCssClass: "drop-menu",
343 365 dropdownCssClass: "drop-menu-dropdown",
344 366 query: function(query){
345 367 var key = 'cache';
346 368 var cached = cache[key] ;
347 369 if(cached) {
348 370 var data = {results: []};
349 371 //filter results
350 372 $.each(cached.results, function(){
351 373 var section = this.text;
352 374 var children = [];
353 375 $.each(this.children, function(){
354 376 if(query.term.length == 0 || this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0 ){
355 377 children.push({'id': this.id, 'text': this.text, 'type': this.type})
356 378 }
357 379 });
358 380 data.results.push({'text': section, 'children': children});
359 381 query.callback({results: data.results});
360 382 });
361 383 }else{
362 384 $.ajax({
363 385 url: pyroutes.url('repo_refs_changelog_data', {'repo_name': '${c.repo_name}'}),
364 386 data: {},
365 387 dataType: 'json',
366 388 type: 'GET',
367 389 success: function(data) {
368 390 cache[key] = data;
369 391 query.callback({results: data.results});
370 392 }
371 393 })
372 394 }
373 395 }
374 396 });
375 397
376 398 $('#branch_filter').on('change', function(e){
377 399 var data = $('#branch_filter').select2('data');
378 400 var selected = data.text;
379 401 var filter = {'repo_name': '${c.repo_name}'};
380 402 if(data.type == 'branch' || data.type == 'branch_closed'){
381 403 filter["branch"] = selected;
382 404 }
383 405 else if (data.type == 'book'){
384 406 filter["bookmark"] = selected;
385 407 }
386 408 window.location = pyroutes.url('changelog_home', filter);
387 409 });
388 410
389 411 // Determine max number of edges per row in graph
390 412 var jsdata = $.parseJSON($("[data-graph]").attr('data-graph'));
391 413 var edgeCount = 1;
392 414 $.each(jsdata, function(i, item){
393 415 $.each(item[2], function(key, value) {
394 416 if (value[1] > edgeCount){
395 417 edgeCount = value[1];
396 418 }
397 419 });
398 420 });
399 421 var x_step = Math.min(18, Math.floor(86 / edgeCount));
400 422 var graph_options = {
401 423 width: 100,
402 424 height: $("#changesets").height(),
403 425 x_step: x_step,
404 426 y_step: 42,
405 427 dotRadius: 3.5,
406 428 lineWidth: 2.5
407 429 };
408 430 $("[data-graph]").commits(graph_options);
409 431
410 432 });
411 433
412 434 </script>
413 435 %else:
414 436 ${_('There are no changes yet')}
415 437 %endif
416 438 </div>
417 439 </div>
418 440 </%def>
@@ -1,668 +1,661 b''
1 1 <%namespace name="commentblock" file="/changeset/changeset_file_comment.mako"/>
2 2
3 3 <%def name="diff_line_anchor(filename, line, type)"><%
4 4 return '%s_%s_%i' % (h.safeid(filename), type, line)
5 5 %></%def>
6 6
7 7 <%def name="action_class(action)">
8 8 <%
9 9 return {
10 10 '-': 'cb-deletion',
11 11 '+': 'cb-addition',
12 12 ' ': 'cb-context',
13 13 }.get(action, 'cb-empty')
14 14 %>
15 15 </%def>
16 16
17 17 <%def name="op_class(op_id)">
18 18 <%
19 19 return {
20 20 DEL_FILENODE: 'deletion', # file deleted
21 21 BIN_FILENODE: 'warning' # binary diff hidden
22 22 }.get(op_id, 'addition')
23 23 %>
24 24 </%def>
25 25
26 26 <%def name="link_for(**kw)">
27 27 <%
28 28 new_args = request.GET.mixed()
29 29 new_args.update(kw)
30 30 return h.url('', **new_args)
31 31 %>
32 32 </%def>
33 33
34 34 <%def name="render_diffset(diffset, commit=None,
35 35
36 36 # collapse all file diff entries when there are more than this amount of files in the diff
37 37 collapse_when_files_over=20,
38 38
39 39 # collapse lines in the diff when more than this amount of lines changed in the file diff
40 40 lines_changed_limit=500,
41 41
42 42 # add a ruler at to the output
43 43 ruler_at_chars=0,
44 44
45 45 # show inline comments
46 46 use_comments=False,
47 47
48 48 # disable new comments
49 49 disable_new_comments=False,
50 50
51 51 # special file-comments that were deleted in previous versions
52 52 # it's used for showing outdated comments for deleted files in a PR
53 53 deleted_files_comments=None
54 54
55 55 )">
56 56
57 57 %if use_comments:
58 58 <div id="cb-comments-inline-container-template" class="js-template">
59 59 ${inline_comments_container([])}
60 60 </div>
61 61 <div class="js-template" id="cb-comment-inline-form-template">
62 62 <div class="comment-inline-form ac">
63 63
64 64 %if c.rhodecode_user.username != h.DEFAULT_USER:
65 65 ## render template for inline comments
66 66 ${commentblock.comment_form(form_type='inline')}
67 67 %else:
68 68 ${h.form('', class_='inline-form comment-form-login', method='get')}
69 69 <div class="pull-left">
70 70 <div class="comment-help pull-right">
71 71 ${_('You need to be logged in to leave comments.')} <a href="${h.route_path('login', _query={'came_from': h.url.current()})}">${_('Login now')}</a>
72 72 </div>
73 73 </div>
74 74 <div class="comment-button pull-right">
75 75 <button type="button" class="cb-comment-cancel" onclick="return Rhodecode.comments.cancelComment(this);">
76 76 ${_('Cancel')}
77 77 </button>
78 78 </div>
79 79 <div class="clearfix"></div>
80 80 ${h.end_form()}
81 81 %endif
82 82 </div>
83 83 </div>
84 84
85 85 %endif
86 86 <%
87 87 collapse_all = len(diffset.files) > collapse_when_files_over
88 88 %>
89 89
90 90 %if c.diffmode == 'sideside':
91 91 <style>
92 92 .wrapper {
93 93 max-width: 1600px !important;
94 94 }
95 95 </style>
96 96 %endif
97 97
98 98 %if ruler_at_chars:
99 99 <style>
100 100 .diff table.cb .cb-content:after {
101 101 content: "";
102 102 border-left: 1px solid blue;
103 103 position: absolute;
104 104 top: 0;
105 105 height: 18px;
106 106 opacity: .2;
107 107 z-index: 10;
108 108 //## +5 to account for diff action (+/-)
109 109 left: ${ruler_at_chars + 5}ch;
110 110 </style>
111 111 %endif
112 112
113 113 <div class="diffset ${disable_new_comments and 'diffset-comments-disabled'}">
114 114 <div class="diffset-heading ${diffset.limited_diff and 'diffset-heading-warning' or ''}">
115 115 %if commit:
116 116 <div class="pull-right">
117 117 <a class="btn tooltip" title="${_('Browse Files at revision {}').format(commit.raw_id)}" href="${h.url('files_home',repo_name=diffset.repo_name, revision=commit.raw_id, f_path='')}">
118 118 ${_('Browse Files')}
119 119 </a>
120 120 </div>
121 121 %endif
122 122 <h2 class="clearinner">
123 123 %if commit:
124 124 <a class="tooltip revision" title="${h.tooltip(commit.message)}" href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id)}">${'r%s:%s' % (commit.revision,h.short_id(commit.raw_id))}</a> -
125 125 ${h.age_component(commit.date)} -
126 126 %endif
127 127 %if diffset.limited_diff:
128 128 ${_('The requested commit is too big and content was truncated.')}
129 129
130 130 ${ungettext('%(num)s file changed.', '%(num)s files changed.', diffset.changed_files) % {'num': diffset.changed_files}}
131 131 <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
132 132 %else:
133 133 ${ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted',
134 134 '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}}
135 135 %endif
136 136
137 <% at_ver = getattr(c, 'at_version_pos', None) %>
138 % if at_ver:
139 <div class="pull-right">
140 ${_('Showing changes at version %d') % at_ver}
141 </div>
142 % endif
143
144 137 </h2>
145 138 </div>
146 139
147 140 %if not diffset.files:
148 141 <p class="empty_data">${_('No files')}</p>
149 142 %endif
150 143
151 144 <div class="filediffs">
152 145 %for i, filediff in enumerate(diffset.files):
153 146
154 147 <%
155 148 lines_changed = filediff['patch']['stats']['added'] + filediff['patch']['stats']['deleted']
156 149 over_lines_changed_limit = lines_changed > lines_changed_limit
157 150 %>
158 151 <input ${collapse_all and 'checked' or ''} class="filediff-collapse-state" id="filediff-collapse-${id(filediff)}" type="checkbox">
159 152 <div
160 153 class="filediff"
161 154 data-f-path="${filediff['patch']['filename']}"
162 155 id="a_${h.FID('', filediff['patch']['filename'])}">
163 156 <label for="filediff-collapse-${id(filediff)}" class="filediff-heading">
164 157 <div class="filediff-collapse-indicator"></div>
165 158 ${diff_ops(filediff)}
166 159 </label>
167 160 ${diff_menu(filediff, use_comments=use_comments)}
168 161 <table class="cb cb-diff-${c.diffmode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
169 162 %if not filediff.hunks:
170 163 %for op_id, op_text in filediff['patch']['stats']['ops'].items():
171 164 <tr>
172 165 <td class="cb-text cb-${op_class(op_id)}" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=6'}>
173 166 %if op_id == DEL_FILENODE:
174 167 ${_('File was deleted')}
175 168 %elif op_id == BIN_FILENODE:
176 169 ${_('Binary file hidden')}
177 170 %else:
178 171 ${op_text}
179 172 %endif
180 173 </td>
181 174 </tr>
182 175 %endfor
183 176 %endif
184 177 %if filediff.patch['is_limited_diff']:
185 178 <tr class="cb-warning cb-collapser">
186 179 <td class="cb-text" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=6'}>
187 180 ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
188 181 </td>
189 182 </tr>
190 183 %else:
191 184 %if over_lines_changed_limit:
192 185 <tr class="cb-warning cb-collapser">
193 186 <td class="cb-text" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=6'}>
194 187 ${_('This diff has been collapsed as it changes many lines, (%i lines changed)' % lines_changed)}
195 188 <a href="#" class="cb-expand"
196 189 onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
197 190 </a>
198 191 <a href="#" class="cb-collapse"
199 192 onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
200 193 </a>
201 194 </td>
202 195 </tr>
203 196 %endif
204 197 %endif
205 198
206 199 %for hunk in filediff.hunks:
207 200 <tr class="cb-hunk">
208 201 <td ${c.diffmode == 'unified' and 'colspan=3' or ''}>
209 202 ## TODO: dan: add ajax loading of more context here
210 203 ## <a href="#">
211 204 <i class="icon-more"></i>
212 205 ## </a>
213 206 </td>
214 207 <td ${c.diffmode == 'sideside' and 'colspan=5' or ''}>
215 208 @@
216 209 -${hunk.source_start},${hunk.source_length}
217 210 +${hunk.target_start},${hunk.target_length}
218 211 ${hunk.section_header}
219 212 </td>
220 213 </tr>
221 214 %if c.diffmode == 'unified':
222 215 ${render_hunk_lines_unified(hunk, use_comments=use_comments)}
223 216 %elif c.diffmode == 'sideside':
224 217 ${render_hunk_lines_sideside(hunk, use_comments=use_comments)}
225 218 %else:
226 219 <tr class="cb-line">
227 220 <td>unknown diff mode</td>
228 221 </tr>
229 222 %endif
230 223 %endfor
231 224
232 225 ## outdated comments that do not fit into currently displayed lines
233 226 % for lineno, comments in filediff.left_comments.items():
234 227
235 228 %if c.diffmode == 'unified':
236 229 <tr class="cb-line">
237 230 <td class="cb-data cb-context"></td>
238 231 <td class="cb-lineno cb-context"></td>
239 232 <td class="cb-lineno cb-context"></td>
240 233 <td class="cb-content cb-context">
241 234 ${inline_comments_container(comments)}
242 235 </td>
243 236 </tr>
244 237 %elif c.diffmode == 'sideside':
245 238 <tr class="cb-line">
246 239 <td class="cb-data cb-context"></td>
247 240 <td class="cb-lineno cb-context"></td>
248 241 <td class="cb-content cb-context"></td>
249 242
250 243 <td class="cb-data cb-context"></td>
251 244 <td class="cb-lineno cb-context"></td>
252 245 <td class="cb-content cb-context">
253 246 ${inline_comments_container(comments)}
254 247 </td>
255 248 </tr>
256 249 %endif
257 250
258 251 % endfor
259 252
260 253 </table>
261 254 </div>
262 255 %endfor
263 256
264 257 ## outdated comments that are made for a file that has been deleted
265 258 % for filename, comments_dict in (deleted_files_comments or {}).items():
266 259
267 260 <div class="filediffs filediff-outdated" style="display: none">
268 261 <input ${collapse_all and 'checked' or ''} class="filediff-collapse-state" id="filediff-collapse-${id(filename)}" type="checkbox">
269 262 <div class="filediff" data-f-path="${filename}" id="a_${h.FID('', filename)}">
270 263 <label for="filediff-collapse-${id(filename)}" class="filediff-heading">
271 264 <div class="filediff-collapse-indicator"></div>
272 265 <span class="pill">
273 266 ## file was deleted
274 267 <strong>${filename}</strong>
275 268 </span>
276 269 <span class="pill-group" style="float: left">
277 270 ## file op, doesn't need translation
278 271 <span class="pill" op="removed">removed in this version</span>
279 272 </span>
280 273 <a class="pill filediff-anchor" href="#a_${h.FID('', filename)}">¶</a>
281 274 <span class="pill-group" style="float: right">
282 275 <span class="pill" op="deleted">-${comments_dict['stats']}</span>
283 276 </span>
284 277 </label>
285 278
286 279 <table class="cb cb-diff-${c.diffmode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
287 280 <tr>
288 281 % if c.diffmode == 'unified':
289 282 <td></td>
290 283 %endif
291 284
292 285 <td></td>
293 286 <td class="cb-text cb-${op_class(BIN_FILENODE)}" ${c.diffmode == 'unified' and 'colspan=4' or 'colspan=5'}>
294 287 ${_('File was deleted in this version, and outdated comments were made on it')}
295 288 </td>
296 289 </tr>
297 290 %if c.diffmode == 'unified':
298 291 <tr class="cb-line">
299 292 <td class="cb-data cb-context"></td>
300 293 <td class="cb-lineno cb-context"></td>
301 294 <td class="cb-lineno cb-context"></td>
302 295 <td class="cb-content cb-context">
303 296 ${inline_comments_container(comments_dict['comments'])}
304 297 </td>
305 298 </tr>
306 299 %elif c.diffmode == 'sideside':
307 300 <tr class="cb-line">
308 301 <td class="cb-data cb-context"></td>
309 302 <td class="cb-lineno cb-context"></td>
310 303 <td class="cb-content cb-context"></td>
311 304
312 305 <td class="cb-data cb-context"></td>
313 306 <td class="cb-lineno cb-context"></td>
314 307 <td class="cb-content cb-context">
315 308 ${inline_comments_container(comments_dict['comments'])}
316 309 </td>
317 310 </tr>
318 311 %endif
319 312 </table>
320 313 </div>
321 314 </div>
322 315 % endfor
323 316
324 317 </div>
325 318 </div>
326 319 </%def>
327 320
328 321 <%def name="diff_ops(filediff)">
329 322 <%
330 323 stats = filediff['patch']['stats']
331 324 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
332 325 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
333 326 %>
334 327 <span class="pill">
335 328 %if filediff.source_file_path and filediff.target_file_path:
336 329 %if filediff.source_file_path != filediff.target_file_path:
337 330 ## file was renamed
338 331 <strong>${filediff.target_file_path}</strong> ⬅ <del>${filediff.source_file_path}</del>
339 332 %else:
340 333 ## file was modified
341 334 <strong>${filediff.source_file_path}</strong>
342 335 %endif
343 336 %else:
344 337 %if filediff.source_file_path:
345 338 ## file was deleted
346 339 <strong>${filediff.source_file_path}</strong>
347 340 %else:
348 341 ## file was added
349 342 <strong>${filediff.target_file_path}</strong>
350 343 %endif
351 344 %endif
352 345 </span>
353 346 <span class="pill-group" style="float: left">
354 347 %if filediff.patch['is_limited_diff']:
355 348 <span class="pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
356 349 %endif
357 350 %if RENAMED_FILENODE in stats['ops']:
358 351 <span class="pill" op="renamed">renamed</span>
359 352 %endif
360 353
361 354 %if NEW_FILENODE in stats['ops']:
362 355 <span class="pill" op="created">created</span>
363 356 %if filediff['target_mode'].startswith('120'):
364 357 <span class="pill" op="symlink">symlink</span>
365 358 %else:
366 359 <span class="pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
367 360 %endif
368 361 %endif
369 362
370 363 %if DEL_FILENODE in stats['ops']:
371 364 <span class="pill" op="removed">removed</span>
372 365 %endif
373 366
374 367 %if CHMOD_FILENODE in stats['ops']:
375 368 <span class="pill" op="mode">
376 369 ${nice_mode(filediff['source_mode'])} ➑ ${nice_mode(filediff['target_mode'])}
377 370 </span>
378 371 %endif
379 372 </span>
380 373
381 374 <a class="pill filediff-anchor" href="#a_${h.FID('', filediff.patch['filename'])}">¶</a>
382 375
383 376 <span class="pill-group" style="float: right">
384 377 %if BIN_FILENODE in stats['ops']:
385 378 <span class="pill" op="binary">binary</span>
386 379 %if MOD_FILENODE in stats['ops']:
387 380 <span class="pill" op="modified">modified</span>
388 381 %endif
389 382 %endif
390 383 %if stats['added']:
391 384 <span class="pill" op="added">+${stats['added']}</span>
392 385 %endif
393 386 %if stats['deleted']:
394 387 <span class="pill" op="deleted">-${stats['deleted']}</span>
395 388 %endif
396 389 </span>
397 390
398 391 </%def>
399 392
400 393 <%def name="nice_mode(filemode)">
401 394 ${filemode.startswith('100') and filemode[3:] or filemode}
402 395 </%def>
403 396
404 397 <%def name="diff_menu(filediff, use_comments=False)">
405 398 <div class="filediff-menu">
406 399 %if filediff.diffset.source_ref:
407 400 %if filediff.patch['operation'] in ['D', 'M']:
408 401 <a
409 402 class="tooltip"
410 403 href="${h.url('files_home',repo_name=filediff.diffset.repo_name,f_path=filediff.source_file_path,revision=filediff.diffset.source_ref)}"
411 404 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
412 405 >
413 406 ${_('Show file before')}
414 407 </a> |
415 408 %else:
416 409 <span
417 410 class="tooltip"
418 411 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
419 412 >
420 413 ${_('Show file before')}
421 414 </span> |
422 415 %endif
423 416 %if filediff.patch['operation'] in ['A', 'M']:
424 417 <a
425 418 class="tooltip"
426 419 href="${h.url('files_home',repo_name=filediff.diffset.source_repo_name,f_path=filediff.target_file_path,revision=filediff.diffset.target_ref)}"
427 420 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
428 421 >
429 422 ${_('Show file after')}
430 423 </a> |
431 424 %else:
432 425 <span
433 426 class="tooltip"
434 427 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
435 428 >
436 429 ${_('Show file after')}
437 430 </span> |
438 431 %endif
439 432 <a
440 433 class="tooltip"
441 434 title="${h.tooltip(_('Raw diff'))}"
442 435 href="${h.url('files_diff_home',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw')}"
443 436 >
444 437 ${_('Raw diff')}
445 438 </a> |
446 439 <a
447 440 class="tooltip"
448 441 title="${h.tooltip(_('Download diff'))}"
449 442 href="${h.url('files_diff_home',repo_name=filediff.diffset.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download')}"
450 443 >
451 444 ${_('Download diff')}
452 445 </a>
453 446 % if use_comments:
454 447 |
455 448 % endif
456 449
457 450 ## TODO: dan: refactor ignorews_url and context_url into the diff renderer same as diffmode=unified/sideside. Also use ajax to load more context (by clicking hunks)
458 451 %if hasattr(c, 'ignorews_url'):
459 452 ${c.ignorews_url(request.GET, h.FID('', filediff['patch']['filename']))}
460 453 %endif
461 454 %if hasattr(c, 'context_url'):
462 455 ${c.context_url(request.GET, h.FID('', filediff['patch']['filename']))}
463 456 %endif
464 457
465 458 %if use_comments:
466 459 <a href="#" onclick="return Rhodecode.comments.toggleComments(this);">
467 460 <span class="show-comment-button">${_('Show comments')}</span><span class="hide-comment-button">${_('Hide comments')}</span>
468 461 </a>
469 462 %endif
470 463 %endif
471 464 </div>
472 465 </%def>
473 466
474 467
475 468 <%def name="inline_comments_container(comments)">
476 469 <div class="inline-comments">
477 470 %for comment in comments:
478 471 ${commentblock.comment_block(comment, inline=True)}
479 472 %endfor
480 473
481 474 % if comments and comments[-1].outdated:
482 475                 <span class="btn btn-secondary cb-comment-add-button comment-outdated"
483 476                       style="display: none;">
484 477 ${_('Add another comment')}
485 478 </span>
486 479 % else:
487 480 <span onclick="return Rhodecode.comments.createComment(this)"
488 481 class="btn btn-secondary cb-comment-add-button">
489 482 ${_('Add another comment')}
490 483 </span>
491 484 % endif
492 485
493 486 </div>
494 487 </%def>
495 488
496 489
497 490 <%def name="render_hunk_lines_sideside(hunk, use_comments=False)">
498 491 %for i, line in enumerate(hunk.sideside):
499 492 <%
500 493 old_line_anchor, new_line_anchor = None, None
501 494 if line.original.lineno:
502 495 old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, line.original.lineno, 'o')
503 496 if line.modified.lineno:
504 497 new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, line.modified.lineno, 'n')
505 498 %>
506 499
507 500 <tr class="cb-line">
508 501 <td class="cb-data ${action_class(line.original.action)}"
509 502 data-line-number="${line.original.lineno}"
510 503 >
511 504 <div>
512 505 %if line.original.comments:
513 506 <i class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
514 507 %endif
515 508 </div>
516 509 </td>
517 510 <td class="cb-lineno ${action_class(line.original.action)}"
518 511 data-line-number="${line.original.lineno}"
519 512 %if old_line_anchor:
520 513 id="${old_line_anchor}"
521 514 %endif
522 515 >
523 516 %if line.original.lineno:
524 517 <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
525 518 %endif
526 519 </td>
527 520 <td class="cb-content ${action_class(line.original.action)}"
528 521 data-line-number="o${line.original.lineno}"
529 522 >
530 523 %if use_comments and line.original.lineno:
531 524 ${render_add_comment_button()}
532 525 %endif
533 526 <span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
534 527 %if use_comments and line.original.lineno and line.original.comments:
535 528 ${inline_comments_container(line.original.comments)}
536 529 %endif
537 530 </td>
538 531 <td class="cb-data ${action_class(line.modified.action)}"
539 532 data-line-number="${line.modified.lineno}"
540 533 >
541 534 <div>
542 535 %if line.modified.comments:
543 536 <i class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
544 537 %endif
545 538 </div>
546 539 </td>
547 540 <td class="cb-lineno ${action_class(line.modified.action)}"
548 541 data-line-number="${line.modified.lineno}"
549 542 %if new_line_anchor:
550 543 id="${new_line_anchor}"
551 544 %endif
552 545 >
553 546 %if line.modified.lineno:
554 547 <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
555 548 %endif
556 549 </td>
557 550 <td class="cb-content ${action_class(line.modified.action)}"
558 551 data-line-number="n${line.modified.lineno}"
559 552 >
560 553 %if use_comments and line.modified.lineno:
561 554 ${render_add_comment_button()}
562 555 %endif
563 556 <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
564 557 %if use_comments and line.modified.lineno and line.modified.comments:
565 558 ${inline_comments_container(line.modified.comments)}
566 559 %endif
567 560 </td>
568 561 </tr>
569 562 %endfor
570 563 </%def>
571 564
572 565
573 566 <%def name="render_hunk_lines_unified(hunk, use_comments=False)">
574 567 %for old_line_no, new_line_no, action, content, comments in hunk.unified:
575 568 <%
576 569 old_line_anchor, new_line_anchor = None, None
577 570 if old_line_no:
578 571 old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, old_line_no, 'o')
579 572 if new_line_no:
580 573 new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, new_line_no, 'n')
581 574 %>
582 575 <tr class="cb-line">
583 576 <td class="cb-data ${action_class(action)}">
584 577 <div>
585 578 %if comments:
586 579 <i class="icon-comment" onclick="return Rhodecode.comments.toggleLineComments(this)"></i>
587 580 %endif
588 581 </div>
589 582 </td>
590 583 <td class="cb-lineno ${action_class(action)}"
591 584 data-line-number="${old_line_no}"
592 585 %if old_line_anchor:
593 586 id="${old_line_anchor}"
594 587 %endif
595 588 >
596 589 %if old_line_anchor:
597 590 <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
598 591 %endif
599 592 </td>
600 593 <td class="cb-lineno ${action_class(action)}"
601 594 data-line-number="${new_line_no}"
602 595 %if new_line_anchor:
603 596 id="${new_line_anchor}"
604 597 %endif
605 598 >
606 599 %if new_line_anchor:
607 600 <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
608 601 %endif
609 602 </td>
610 603 <td class="cb-content ${action_class(action)}"
611 604 data-line-number="${new_line_no and 'n' or 'o'}${new_line_no or old_line_no}"
612 605 >
613 606 %if use_comments:
614 607 ${render_add_comment_button()}
615 608 %endif
616 609 <span class="cb-code">${action} ${content or '' | n}</span>
617 610 %if use_comments and comments:
618 611 ${inline_comments_container(comments)}
619 612 %endif
620 613 </td>
621 614 </tr>
622 615 %endfor
623 616 </%def>
624 617
625 618 <%def name="render_add_comment_button()">
626 619 <button class="btn btn-small btn-primary cb-comment-box-opener" onclick="return Rhodecode.comments.createComment(this)">
627 620 <span><i class="icon-comment"></i></span>
628 621 </button>
629 622 </%def>
630 623
631 624 <%def name="render_diffset_menu()">
632 625
633 626 <div class="diffset-menu clearinner">
634 627 <div class="pull-right">
635 628 <div class="btn-group">
636 629
637 630 <a
638 631             class="btn ${'btn-primary' if c.diffmode == 'sideside' else ''} tooltip"
639 632 title="${_('View side by side')}"
640 633 href="${h.url_replace(diffmode='sideside')}">
641 634 <span>${_('Side by Side')}</span>
642 635 </a>
643 636 <a
644 637             class="btn ${'btn-primary' if c.diffmode == 'unified' else ''} tooltip"
645 638 title="${_('View unified')}" href="${h.url_replace(diffmode='unified')}">
646 639 <span>${_('Unified')}</span>
647 640 </a>
648 641 </div>
649 642 </div>
650 643
651 644 <div class="pull-left">
652 645 <div class="btn-group">
653 646 <a
654 647 class="btn"
655 648 href="#"
656 649 onclick="$('input[class=filediff-collapse-state]').prop('checked', false); return false">${_('Expand All Files')}</a>
657 650 <a
658 651 class="btn"
659 652 href="#"
660 653 onclick="$('input[class=filediff-collapse-state]').prop('checked', true); return false">${_('Collapse All Files')}</a>
661 654 <a
662 655 class="btn"
663 656 href="#"
664 657 onclick="return Rhodecode.comments.toggleWideMode(this)">${_('Wide Mode Diff')}</a>
665 658 </div>
666 659 </div>
667 660 </div>
668 661 </%def>
@@ -1,713 +1,889 b''
1 1 <%inherit file="/base/base.mako"/>
2 <%namespace name="base" file="/base/base.mako"/>
2 3
3 4 <%def name="title()">
4 5 ${_('%s Pull Request #%s') % (c.repo_name, c.pull_request.pull_request_id)}
5 6 %if c.rhodecode_name:
6 7 &middot; ${h.branding(c.rhodecode_name)}
7 8 %endif
8 9 </%def>
9 10
10 11 <%def name="breadcrumbs_links()">
11 12 <span id="pr-title">
12 13 ${c.pull_request.title}
13 14 %if c.pull_request.is_closed():
14 15 (${_('Closed')})
15 16 %endif
16 17 </span>
17 18 <div id="pr-title-edit" class="input" style="display: none;">
18 19 ${h.text('pullrequest_title', id_="pr-title-input", class_="large", value=c.pull_request.title)}
19 20 </div>
20 21 </%def>
21 22
22 23 <%def name="menu_bar_nav()">
23 24 ${self.menu_items(active='repositories')}
24 25 </%def>
25 26
26 27 <%def name="menu_bar_subnav()">
27 28 ${self.repo_menu(active='showpullrequest')}
28 29 </%def>
29 30
30 31 <%def name="main()">
31 32
32 33 <script type="text/javascript">
33 34 // TODO: marcink switch this to pyroutes
34 35 AJAX_COMMENT_DELETE_URL = "${url('pullrequest_comment_delete',repo_name=c.repo_name,comment_id='__COMMENT_ID__')}";
35 36 templateContext.pull_request_data.pull_request_id = ${c.pull_request.pull_request_id};
36 37 </script>
37 38 <div class="box">
38 39
39 40 <div class="title">
40 41 ${self.repo_page_title(c.rhodecode_db_repo)}
41 42 </div>
42 43
43 44 ${self.breadcrumbs()}
44 45
45 46 <div class="box pr-summary">
46 47
47 48 <div class="summary-details block-left">
48 49 <% summary = lambda n:{False:'summary-short'}.get(n) %>
49 50 <div class="pr-details-title">
50 51 <a href="${h.url('pull_requests_global', pull_request_id=c.pull_request.pull_request_id)}">${_('Pull request #%s') % c.pull_request.pull_request_id}</a> ${_('From')} ${h.format_date(c.pull_request.created_on)}
51 52 %if c.allowed_to_update:
52 53 <div id="delete_pullrequest" class="pull-right action_button ${'' if c.allowed_to_delete else 'disabled' }" style="clear:inherit;padding: 0">
53 54 % if c.allowed_to_delete:
54 55 ${h.secure_form(url('pullrequest_delete', repo_name=c.pull_request.target_repo.repo_name, pull_request_id=c.pull_request.pull_request_id),method='delete')}
55 56 ${h.submit('remove_%s' % c.pull_request.pull_request_id, _('Delete'),
56 57 class_="btn btn-link btn-danger",onclick="return confirm('"+_('Confirm to delete this pull request')+"');")}
57 58 ${h.end_form()}
58 59 % else:
59 60 ${_('Delete')}
60 61 % endif
61 62 </div>
62 63 <div id="open_edit_pullrequest" class="pull-right action_button">${_('Edit')}</div>
63 64 <div id="close_edit_pullrequest" class="pull-right action_button" style="display: none;padding: 0">${_('Cancel')}</div>
64 65 %endif
65 66 </div>
66 67
67 68 <div id="summary" class="fields pr-details-content">
68 69 <div class="field">
69 70 <div class="label-summary">
70 71 <label>${_('Origin')}:</label>
71 72 </div>
72 73 <div class="input">
73 74 <div class="pr-origininfo">
74 75 ## branch link is only valid if it is a branch
75 76 <span class="tag">
76 77 %if c.pull_request.source_ref_parts.type == 'branch':
77 78 <a href="${h.url('changelog_home', repo_name=c.pull_request.source_repo.repo_name, branch=c.pull_request.source_ref_parts.name)}">${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}</a>
78 79 %else:
79 80 ${c.pull_request.source_ref_parts.type}: ${c.pull_request.source_ref_parts.name}
80 81 %endif
81 82 </span>
82 83 <span class="clone-url">
83 84 <a href="${h.url('summary_home', repo_name=c.pull_request.source_repo.repo_name)}">${c.pull_request.source_repo.clone_url()}</a>
84 85 </span>
85 86 </div>
86 87 <div class="pr-pullinfo">
87 88 %if h.is_hg(c.pull_request.source_repo):
88 89 <input type="text" value="hg pull -r ${h.short_id(c.source_ref)} ${c.pull_request.source_repo.clone_url()}" readonly="readonly">
89 90 %elif h.is_git(c.pull_request.source_repo):
90 91 <input type="text" value="git pull ${c.pull_request.source_repo.clone_url()} ${c.pull_request.source_ref_parts.name}" readonly="readonly">
91 92 %endif
92 93 </div>
93 94 </div>
94 95 </div>
95 96 <div class="field">
96 97 <div class="label-summary">
97 98 <label>${_('Target')}:</label>
98 99 </div>
99 100 <div class="input">
100 101 <div class="pr-targetinfo">
101 102 ## branch link is only valid if it is a branch
102 103 <span class="tag">
103 104 %if c.pull_request.target_ref_parts.type == 'branch':
104 105 <a href="${h.url('changelog_home', repo_name=c.pull_request.target_repo.repo_name, branch=c.pull_request.target_ref_parts.name)}">${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}</a>
105 106 %else:
106 107 ${c.pull_request.target_ref_parts.type}: ${c.pull_request.target_ref_parts.name}
107 108 %endif
108 109 </span>
109 110 <span class="clone-url">
110 111 <a href="${h.url('summary_home', repo_name=c.pull_request.target_repo.repo_name)}">${c.pull_request.target_repo.clone_url()}</a>
111 112 </span>
112 113 </div>
113 114 </div>
114 115 </div>
115 116
116 117 ## Link to the shadow repository.
117 118 <div class="field">
118 119 <div class="label-summary">
119 120 <label>${_('Merge')}:</label>
120 121 </div>
121 122 <div class="input">
122 123 % if not c.pull_request.is_closed() and c.pull_request.shadow_merge_ref:
123 124 <div class="pr-mergeinfo">
124 125 %if h.is_hg(c.pull_request.target_repo):
125 126 <input type="text" value="hg clone -u ${c.pull_request.shadow_merge_ref.name} ${c.shadow_clone_url} pull-request-${c.pull_request.pull_request_id}" readonly="readonly">
126 127 %elif h.is_git(c.pull_request.target_repo):
127 128 <input type="text" value="git clone --branch ${c.pull_request.shadow_merge_ref.name} ${c.shadow_clone_url} pull-request-${c.pull_request.pull_request_id}" readonly="readonly">
128 129 %endif
129 130 </div>
130 131 % else:
131 132 <div class="">
132 133 ${_('Shadow repository data not available')}.
133 134 </div>
134 135 % endif
135 136 </div>
136 137 </div>
137 138
138 139 <div class="field">
139 140 <div class="label-summary">
140 141 <label>${_('Review')}:</label>
141 142 </div>
142 143 <div class="input">
143 144 %if c.pull_request_review_status:
144 145 <div class="${'flag_status %s' % c.pull_request_review_status} tooltip pull-left"></div>
145 146 <span class="changeset-status-lbl tooltip">
146 147 %if c.pull_request.is_closed():
147 148 ${_('Closed')},
148 149 %endif
149 150 ${h.commit_status_lbl(c.pull_request_review_status)}
150 151 </span>
151 152 - ${ungettext('calculated based on %s reviewer vote', 'calculated based on %s reviewers votes', len(c.pull_request_reviewers)) % len(c.pull_request_reviewers)}
152 153 %endif
153 154 </div>
154 155 </div>
155 156 <div class="field">
156 157 <div class="pr-description-label label-summary">
157 158 <label>${_('Description')}:</label>
158 159 </div>
159 160 <div id="pr-desc" class="input">
160 161 <div class="pr-description">${h.urlify_commit_message(c.pull_request.description, c.repo_name)}</div>
161 162 </div>
162 163 <div id="pr-desc-edit" class="input textarea editor" style="display: none;">
163 164 <textarea id="pr-description-input" size="30">${c.pull_request.description}</textarea>
164 165 </div>
165 166 </div>
166 167
167 168 <div class="field">
168 169 <div class="label-summary">
169 <label>${_('Versions')} (${len(c.versions)+1}):</label>
170 <label>${_('Versions')}:</label>
170 171 </div>
171 172
173 <% outdated_comm_count_ver = len(c.inline_versions[None]['outdated']) %>
174 <% general_outdated_comm_count_ver = len(c.comment_versions[None]['outdated']) %>
175
172 176 <div class="pr-versions">
173 177 % if c.show_version_changes:
178 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
179 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
180 <div id="show-pr-versions" class="input btn btn-link" onclick="return versionController.toggleVersionView(this)"
181 data-toggle-on="${ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}"
182 data-toggle-off="${_('Hide all versions of this pull request')}">
183 ${ungettext('{} version available for this pull request, show it.', '{} versions available for this pull request, show them.', len(c.versions)).format(len(c.versions))}
184 </div>
174 185 <table>
175 ## CURRENTLY SELECT PR VERSION
176 <tr class="version-pr" style="display: ${'' if c.at_version_num is None else 'none'}">
177 <td>
178 % if c.at_version_num is None:
179 <i class="icon-ok link"></i>
180 % else:
181 <i class="icon-comment"></i>
182 <code>
183 ${len(c.comment_versions[None]['at'])}/${len(c.inline_versions[None]['at'])}
184 </code>
185 % endif
186 </td>
187 <td>
188 <code>
189 % if c.versions:
190 <a href="${h.url.current(version='latest')}">${_('latest')}</a>
191 % else:
192 ${_('initial')}
193 % endif
194 </code>
195 </td>
196 <td>
197 <code>${c.pull_request_latest.source_ref_parts.commit_id[:6]}</code>
198 </td>
199 <td>
200 ${_('created')} ${h.age_component(c.pull_request_latest.updated_on)}
201 </td>
202 <td align="right">
203 % if c.versions and c.at_version_num in [None, 'latest']:
204 <span id="show-pr-versions" class="btn btn-link" onclick="$('.version-pr').show(); $(this).hide(); return false">${_('Show all versions')}</span>
205 % endif
206 </td>
207 </tr>
208
209 186 ## SHOW ALL VERSIONS OF PR
210 187 <% ver_pr = None %>
211 188
212 189 % for data in reversed(list(enumerate(c.versions, 1))):
213 190 <% ver_pos = data[0] %>
214 191 <% ver = data[1] %>
215 192 <% ver_pr = ver.pull_request_version_id %>
193 <% display_row = '' if c.at_version and (c.at_version_num == ver_pr or c.from_version_num == ver_pr) else 'none' %>
216 194
217 <tr class="version-pr" style="display: ${'' if c.at_version_num == ver_pr else 'none'}">
195 <tr class="version-pr" style="display: ${display_row}">
196 <td>
197 <code>
198 <a href="${h.url.current(version=ver_pr or 'latest')}">v${ver_pos}</a>
199 </code>
200 </td>
218 201 <td>
219 % if c.at_version_num == ver_pr:
220 <i class="icon-ok link"></i>
221 % else:
202 <input ${'checked="checked"' if c.from_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_source" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
203 <input ${'checked="checked"' if c.at_version_num == ver_pr else ''} class="compare-radio-button" type="radio" name="ver_target" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
204 </td>
205 <td>
206 <% review_status = c.review_versions[ver_pr].status if ver_pr in c.review_versions else 'not_reviewed' %>
207 <div class="${'flag_status %s' % review_status} tooltip pull-left" title="${_('Your review status at this version')}">
208 </div>
209 </td>
210 <td>
211 % if c.at_version_num != ver_pr:
222 212 <i class="icon-comment"></i>
223 <code class="tooltip" title="${_('Comment from pull request version {0}, general:{1} inline{2}').format(ver_pos, len(c.comment_versions[ver_pr]['at']), len(c.inline_versions[ver_pr]['at']))}">
213 <code class="tooltip" title="${_('Comment from pull request version {0}, general:{1} inline:{2}').format(ver_pos, len(c.comment_versions[ver_pr]['at']), len(c.inline_versions[ver_pr]['at']))}">
224 214 ${len(c.comment_versions[ver_pr]['at'])}/${len(c.inline_versions[ver_pr]['at'])}
225 215 </code>
226 216 % endif
227 217 </td>
228 218 <td>
229 <code>
230 <a href="${h.url.current(version=ver_pr)}">v${ver_pos}</a>
231 </code>
219 ##<code>${ver.source_ref_parts.commit_id[:6]}</code>
232 220 </td>
233 221 <td>
234 <code>${ver.source_ref_parts.commit_id[:6]}</code>
235 </td>
236 <td>
237 ${_('created')} ${h.age_component(ver.updated_on)}
238 </td>
239 <td align="right">
240 % if c.at_version_num == ver_pr:
241 <span id="show-pr-versions" class="btn btn-link" onclick="$('.version-pr').show(); $(this).hide(); return false">${_('Show all versions')}</span>
242 % endif
222 ${h.age_component(ver.updated_on)}
243 223 </td>
244 224 </tr>
245 225 % endfor
246 226
247 ## show comment/inline comments summary
248 227 <tr>
249 <td>
228 <td colspan="5">
229 <button id="show-version-diff" onclick="return versionController.showVersionDiff()" class="btn btn-sm" style="display: none" data-label-text="${_('show changes between versions')}">
230 ${_('show changes between versions')}
231 </button>
250 232 </td>
233 </tr>
251 234
252 <td colspan="4" style="border-top: 1px dashed #dbd9da">
253 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
254 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
255
235 ## show comment/inline comments summary
236 <%def name="comments_summary()">
237 <tr>
238 <td colspan="6" class="comments-summary-td">
256 239
257 240 % if c.at_version:
258 241 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['display']) %>
259 242 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['display']) %>
260 243 ${_('Comments at this version')}:
261 244 % else:
262 245 <% inline_comm_count_ver = len(c.inline_versions[c.at_version_num]['until']) %>
263 246 <% general_comm_count_ver = len(c.comment_versions[c.at_version_num]['until']) %>
264 247 ${_('Comments for this pull request')}:
265 248 % endif
266 249
250
267 251 %if general_comm_count_ver:
268 252 <a href="#comments">${_("%d General ") % general_comm_count_ver}</a>
269 253 %else:
270 254 ${_("%d General ") % general_comm_count_ver}
271 255 %endif
272 256
273 257 %if inline_comm_count_ver:
274 258 , <a href="#" onclick="return Rhodecode.comments.nextComment();" id="inline-comments-counter">${_("%d Inline") % inline_comm_count_ver}</a>
275 259 %else:
276 260 , ${_("%d Inline") % inline_comm_count_ver}
277 261 %endif
278 262
279 263 %if outdated_comm_count_ver:
280 264 , <a href="#" onclick="showOutdated(); Rhodecode.comments.nextOutdatedComment(); return false;">${_("%d Outdated") % outdated_comm_count_ver}</a>
281 265 <a href="#" class="showOutdatedComments" onclick="showOutdated(this); return false;"> | ${_('show outdated comments')}</a>
282 266 <a href="#" class="hideOutdatedComments" style="display: none" onclick="hideOutdated(this); return false;"> | ${_('hide outdated comments')}</a>
283 267 %else:
284 268 , ${_("%d Outdated") % outdated_comm_count_ver}
285 269 %endif
286 270 </td>
287 271 </tr>
288
289 <tr>
290 <td></td>
291 <td colspan="4">
292 % if c.at_version:
293 <pre>
294 Changed commits:
295 * added: ${len(c.changes.added)}
296 * removed: ${len(c.changes.removed)}
297
298 % if not (c.file_changes.added+c.file_changes.modified+c.file_changes.removed):
299 No file changes found
300 % else:
301 Changed files:
302 %for file_name in c.file_changes.added:
303 * A <a href="#${'a_' + h.FID('', file_name)}">${file_name}</a>
304 %endfor
305 %for file_name in c.file_changes.modified:
306 * M <a href="#${'a_' + h.FID('', file_name)}">${file_name}</a>
307 %endfor
308 %for file_name in c.file_changes.removed:
309 * R ${file_name}
310 %endfor
311 % endif
312 </pre>
313 % endif
314 </td>
315 </tr>
272 </%def>
273 ${comments_summary()}
316 274 </table>
317 275 % else:
276 <div class="input">
318 277 ${_('Pull request versions not available')}.
278 </div>
279 <div>
280 <table>
281 ${comments_summary()}
282 </table>
283 </div>
319 284 % endif
320 285 </div>
321 286 </div>
322 287
323 288 <div id="pr-save" class="field" style="display: none;">
324 289 <div class="label-summary"></div>
325 290 <div class="input">
326 291 <span id="edit_pull_request" class="btn btn-small">${_('Save Changes')}</span>
327 292 </div>
328 293 </div>
329 294 </div>
330 295 </div>
331 296 <div>
332 297 ## AUTHOR
333 298 <div class="reviewers-title block-right">
334 299 <div class="pr-details-title">
335 300 ${_('Author')}
336 301 </div>
337 302 </div>
338 303 <div class="block-right pr-details-content reviewers">
339 304 <ul class="group_members">
340 305 <li>
341 306 ${self.gravatar_with_user(c.pull_request.author.email, 16)}
342 307 </li>
343 308 </ul>
344 309 </div>
345 310 ## REVIEWERS
346 311 <div class="reviewers-title block-right">
347 312 <div class="pr-details-title">
348 313 ${_('Pull request reviewers')}
349 314 %if c.allowed_to_update:
350 315 <span id="open_edit_reviewers" class="block-right action_button">${_('Edit')}</span>
351 316 <span id="close_edit_reviewers" class="block-right action_button" style="display: none;">${_('Close')}</span>
352 317 %endif
353 318 </div>
354 319 </div>
355 320 <div id="reviewers" class="block-right pr-details-content reviewers">
356 321 ## members goes here !
357 322 <input type="hidden" name="__start__" value="review_members:sequence">
358 323 <ul id="review_members" class="group_members">
359 324 %for member,reasons,status in c.pull_request_reviewers:
360 325 <li id="reviewer_${member.user_id}">
361 326 <div class="reviewers_member">
362 327 <div class="reviewer_status tooltip" title="${h.tooltip(h.commit_status_lbl(status[0][1].status if status else 'not_reviewed'))}">
363 328 <div class="${'flag_status %s' % (status[0][1].status if status else 'not_reviewed')} pull-left reviewer_member_status"></div>
364 329 </div>
365 330 <div id="reviewer_${member.user_id}_name" class="reviewer_name">
366 331 ${self.gravatar_with_user(member.email, 16)}
367 332 </div>
368 333 <input type="hidden" name="__start__" value="reviewer:mapping">
369 334 <input type="hidden" name="__start__" value="reasons:sequence">
370 335 %for reason in reasons:
371 336 <div class="reviewer_reason">- ${reason}</div>
372 337 <input type="hidden" name="reason" value="${reason}">
373 338
374 339 %endfor
375 340 <input type="hidden" name="__end__" value="reasons:sequence">
376 341 <input id="reviewer_${member.user_id}_input" type="hidden" value="${member.user_id}" name="user_id" />
377 342 <input type="hidden" name="__end__" value="reviewer:mapping">
378 343 %if c.allowed_to_update:
379 344 <div class="reviewer_member_remove action_button" onclick="removeReviewMember(${member.user_id}, true)" style="visibility: hidden;">
380 345 <i class="icon-remove-sign" ></i>
381 346 </div>
382 347 %endif
383 348 </div>
384 349 </li>
385 350 %endfor
386 351 </ul>
387 352 <input type="hidden" name="__end__" value="review_members:sequence">
388 353 %if not c.pull_request.is_closed():
389 354 <div id="add_reviewer_input" class='ac' style="display: none;">
390 355 %if c.allowed_to_update:
391 356 <div class="reviewer_ac">
392 357 ${h.text('user', class_='ac-input', placeholder=_('Add reviewer'))}
393 358 <div id="reviewers_container"></div>
394 359 </div>
395 360 <div>
396 361 <span id="update_pull_request" class="btn btn-small">${_('Save Changes')}</span>
397 362 </div>
398 363 %endif
399 364 </div>
400 365 %endif
401 366 </div>
402 367 </div>
403 368 </div>
404 369 <div class="box">
405 370 ##DIFF
406 371 <div class="table" >
407 372 <div id="changeset_compare_view_content">
408 373 ##CS
409 374 % if c.missing_requirements:
410 375 <div class="box">
411 376 <div class="alert alert-warning">
412 377 <div>
413 378 <strong>${_('Missing requirements:')}</strong>
414 379 ${_('These commits cannot be displayed, because this repository uses the Mercurial largefiles extension, which was not enabled.')}
415 380 </div>
416 381 </div>
417 382 </div>
418 383 % elif c.missing_commits:
419 384 <div class="box">
420 385 <div class="alert alert-warning">
421 386 <div>
422 387 <strong>${_('Missing commits')}:</strong>
423 388 ${_('This pull request cannot be displayed, because one or more commits no longer exist in the source repository.')}
424 389 ${_('Please update this pull request, push the commits back into the source repository, or consider closing this pull request.')}
425 390 </div>
426 391 </div>
427 392 </div>
428 393 % endif
394
429 395 <div class="compare_view_commits_title">
396 % if not c.compare_mode:
397
398 % if c.at_version_pos:
399 <h4>
400 ${_('Showing changes at v%d, commenting is disabled.') % c.at_version_pos}
401 </h4>
402 % endif
430 403
431 404 <div class="pull-left">
432 405 <div class="btn-group">
433 406 <a
434 407 class="btn"
435 408 href="#"
436 409 onclick="$('.compare_select').show();$('.compare_select_hidden').hide(); return false">
437 410 ${ungettext('Expand %s commit','Expand %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
438 411 </a>
439 412 <a
440 413 class="btn"
441 414 href="#"
442 415 onclick="$('.compare_select').hide();$('.compare_select_hidden').show(); return false">
443 416 ${ungettext('Collapse %s commit','Collapse %s commits', len(c.commit_ranges)) % len(c.commit_ranges)}
444 417 </a>
445 418 </div>
446 419 </div>
447 420
448 421 <div class="pull-right">
449 422 % if c.allowed_to_update and not c.pull_request.is_closed():
450 423 <a id="update_commits" class="btn btn-primary pull-right">${_('Update commits')}</a>
451 424 % else:
452 425 <a class="tooltip btn disabled pull-right" disabled="disabled" title="${_('Update is disabled for current view')}">${_('Update commits')}</a>
453 426 % endif
454 427
455 428 </div>
456
429 % endif
457 430 </div>
458 431
459 432 % if not c.missing_commits:
433 % if c.compare_mode:
434 % if c.at_version:
435 <h4>
436 ${_('Commits and changes between v{ver_from} and {ver_to} of this pull request, commenting is disabled').format(ver_from=c.from_version_pos, ver_to=c.at_version_pos if c.at_version_pos else 'latest')}:
437 </h4>
438
439 <div class="subtitle-compare">
440 ${_('commits added: {}, removed: {}').format(len(c.commit_changes_summary.added), len(c.commit_changes_summary.removed))}
441 </div>
442
443 <div class="container">
444 <table class="rctable compare_view_commits">
445 <tr>
446 <th></th>
447 <th>${_('Time')}</th>
448 <th>${_('Author')}</th>
449 <th>${_('Commit')}</th>
450 <th></th>
451 <th>${_('Description')}</th>
452 </tr>
453
454 % for c_type, commit in c.commit_changes:
455 % if c_type in ['a', 'r']:
456 <%
457 if c_type == 'a':
458 cc_title = _('Commit added in displayed changes')
459 elif c_type == 'r':
460 cc_title = _('Commit removed in displayed changes')
461 else:
462 cc_title = ''
463 %>
464 <tr id="row-${commit.raw_id}" commit_id="${commit.raw_id}" class="compare_select">
465 <td>
466 <div class="commit-change-indicator color-${c_type}-border">
467 <div class="commit-change-content color-${c_type} tooltip" title="${cc_title}">
468 ${c_type.upper()}
469 </div>
470 </div>
471 </td>
472 <td class="td-time">
473 ${h.age_component(commit.date)}
474 </td>
475 <td class="td-user">
476 ${base.gravatar_with_user(commit.author, 16)}
477 </td>
478 <td class="td-hash">
479 <code>
480 <a href="${h.url('changeset_home', repo_name=c.target_repo.repo_name, revision=commit.raw_id)}">
481 r${commit.revision}:${h.short_id(commit.raw_id)}
482 </a>
483 ${h.hidden('revisions', commit.raw_id)}
484 </code>
485 </td>
486 <td class="expand_commit" data-commit-id="${commit.raw_id}" title="${_( 'Expand commit message')}">
487 <div class="show_more_col">
488 <i class="show_more"></i>
489 </div>
490 </td>
491 <td class="mid td-description">
492 <div class="log-container truncate-wrap">
493 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">
494 ${h.urlify_commit_message(commit.message, c.repo_name)}
495 </div>
496 </div>
497 </td>
498 </tr>
499 % endif
500 % endfor
501 </table>
502 </div>
503
504 <script>
505 $('.expand_commit').on('click',function(e){
506 var target_expand = $(this);
507 var cid = target_expand.data('commitId');
508
509 if (target_expand.hasClass('open')){
510 $('#c-'+cid).css({
511 'height': '1.5em',
512 'white-space': 'nowrap',
513 'text-overflow': 'ellipsis',
514 'overflow':'hidden'
515 });
516 target_expand.removeClass('open');
517 }
518 else {
519 $('#c-'+cid).css({
520 'height': 'auto',
521 'white-space': 'pre-line',
522 'text-overflow': 'initial',
523 'overflow':'visible'
524 });
525 target_expand.addClass('open');
526 }
527 });
528 </script>
529
530 % endif
531
532 % else:
460 533 <%include file="/compare/compare_commits.mako" />
534 % endif
535
461 536 <div class="cs_files">
462 537 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
463 538 ${cbdiffs.render_diffset_menu()}
464 539 ${cbdiffs.render_diffset(
465 540 c.diffset, use_comments=True,
466 541 collapse_when_files_over=30,
467 542 disable_new_comments=not c.allowed_to_comment,
468 543 deleted_files_comments=c.deleted_files_comments)}
469 544 </div>
470 545 % else:
471 546 ## skipping commits we need to clear the view for missing commits
472 547 <div style="clear:both;"></div>
473 548 % endif
474 549
475 550 </div>
476 551 </div>
477 552
478 553 ## template for inline comment form
479 554 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
480 555
481 556 ## render general comments
482 557
483 558 <div id="comment-tr-show">
484 559 <div class="comment">
485 560 % if general_outdated_comm_count_ver:
486 561 <div class="meta">
487 562 % if general_outdated_comm_count_ver == 1:
488 563 ${_('there is {num} general comment from older versions').format(num=general_outdated_comm_count_ver)},
489 564 <a href="#" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show it')}</a>
490 565 % else:
491 566 ${_('there are {num} general comments from older versions').format(num=general_outdated_comm_count_ver)},
492 567 <a href="#" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show them')}</a>
493 568 % endif
494 569 </div>
495 570 % endif
496 571 </div>
497 572 </div>
498 573
499 574 ${comment.generate_comments(c.comments, include_pull_request=True, is_pull_request=True)}
500 575
501 576 % if not c.pull_request.is_closed():
502 577 ## merge status, and merge action
503 578 <div class="pull-request-merge">
504 579 <%include file="/pullrequests/pullrequest_merge_checks.mako"/>
505 580 </div>
506 581
507 582 ## main comment form and it status
508 583 ${comment.comments(h.url('pullrequest_comment', repo_name=c.repo_name,
509 584 pull_request_id=c.pull_request.pull_request_id),
510 585 c.pull_request_review_status,
511 586 is_pull_request=True, change_status=c.allowed_to_change_status)}
512 587 %endif
513 588
514 589 <script type="text/javascript">
515 590 if (location.hash) {
516 591 var result = splitDelimitedHash(location.hash);
517 592 var line = $('html').find(result.loc);
518 593 // show hidden comments if we use location.hash
519 594 if (line.hasClass('comment-general')) {
520 595 $(line).show();
521 596 } else if (line.hasClass('comment-inline')) {
522 597 $(line).show();
523 598 var $cb = $(line).closest('.cb');
524 599 $cb.removeClass('cb-collapsed')
525 600 }
526 601 if (line.length > 0){
527 602 offsetScroll(line, 70);
528 603 }
529 604 }
530 605
606 VersionController = function() {
607 var self = this;
608 this.$verSource = $('input[name=ver_source]');
609 this.$verTarget = $('input[name=ver_target]');
610
611 this.adjustRadioSelectors = function (curNode) {
612 var getVal = function(item) {
613 if (item == 'latest'){
614 return Number.MAX_SAFE_INTEGER
615 }
616 else {
617 return parseInt(item)
618 }
619 };
620
621 var curVal = getVal($(curNode).val());
622 $.each(self.$verSource, function(index, value){
623 var elVal = getVal($(value).val());
624 if(elVal > curVal){
625 $(value).attr('disabled', 'disabled');
626 $(value).removeAttr('checked');
627 }
628 else{
629 $(value).removeAttr('disabled');
630 }
631 });
632
633 self.setLockAction(false, $(curNode).data('verPos'));
634 };
635
636
637 this.attachVersionListener = function () {
638 self.$verTarget.change(function(e){
639 self.adjustRadioSelectors(this)
640 });
641 self.$verSource.change(function(e){
642 self.adjustRadioSelectors(self.$verTarget.filter(':checked'))
643 });
644 };
645
646 this.init = function () {
647
648 var curNode = self.$verTarget.filter(':checked');
649 self.adjustRadioSelectors(curNode);
650 self.setLockAction(true);
651 self.attachVersionListener();
652
653 };
654
655 this.setLockAction = function (state, selectedVersion) {
656 if(state){
657 $('#show-version-diff').attr('disabled','disabled')
658 $('#show-version-diff').addClass('disabled')
659 $('#show-version-diff').html($('#show-version-diff').data('labelText'));
660 }
661 else{
662 $('#show-version-diff').removeAttr('disabled');
663 $('#show-version-diff').removeClass('disabled')
664 //$('#show-version-diff').html(_gettext('show changes for v') + selectedVersion)
665 }
666
667 };
668
669 this.showVersionDiff = function(){
670 var target = self.$verTarget.filter(':checked');
671 var source = self.$verSource.filter(':checked');
672
673 if (target.val() && source.val()) {
674 var params = {
675 'pull_request_id': ${c.pull_request.pull_request_id},
676 'repo_name': templateContext.repo_name,
677 'version': target.val(),
678 'from_version': source.val()
679 };
680 window.location = pyroutes.url('pullrequest_show', params)
681 }
682
683 return false;
684 };
685
686 this.toggleVersionView = function (elem) {
687
688 if ($('#show-version-diff').is(':visible')) {
689 $('.version-pr').hide();
690 $('#show-version-diff').hide();
691 $(elem).html($(elem).data('toggleOn'))
692 } else {
693 $('.version-pr').show();
694 $('#show-version-diff').show();
695 $(elem).html($(elem).data('toggleOff'))
696 }
697
698 return false
699 }
700
701 };
702
703 versionController = new VersionController();
704 versionController.init();
705
706
531 707 $(function(){
532 708 ReviewerAutoComplete('user');
533 709 // custom code mirror
534 710 var codeMirrorInstance = initPullRequestsCodeMirror('#pr-description-input');
535 711
536 712 var PRDetails = {
537 713 editButton: $('#open_edit_pullrequest'),
538 714 closeButton: $('#close_edit_pullrequest'),
539 715 deleteButton: $('#delete_pullrequest'),
540 716 viewFields: $('#pr-desc, #pr-title'),
541 717 editFields: $('#pr-desc-edit, #pr-title-edit, #pr-save'),
542 718
543 719 init: function() {
544 720 var that = this;
545 721 this.editButton.on('click', function(e) { that.edit(); });
546 722 this.closeButton.on('click', function(e) { that.view(); });
547 723 },
548 724
549 725 edit: function(event) {
550 726 this.viewFields.hide();
551 727 this.editButton.hide();
552 728 this.deleteButton.hide();
553 729 this.closeButton.show();
554 730 this.editFields.show();
555 731 codeMirrorInstance.refresh();
556 732 },
557 733
558 734 view: function(event) {
559 735 this.editButton.show();
560 736 this.deleteButton.show();
561 737 this.editFields.hide();
562 738 this.closeButton.hide();
563 739 this.viewFields.show();
564 740 }
565 741 };
566 742
567 743 var ReviewersPanel = {
568 744 editButton: $('#open_edit_reviewers'),
569 745 closeButton: $('#close_edit_reviewers'),
570 746 addButton: $('#add_reviewer_input'),
571 747 removeButtons: $('.reviewer_member_remove'),
572 748
573 749 init: function() {
574 750 var that = this;
575 751 this.editButton.on('click', function(e) { that.edit(); });
576 752 this.closeButton.on('click', function(e) { that.close(); });
577 753 },
578 754
579 755 edit: function(event) {
580 756 this.editButton.hide();
581 757 this.closeButton.show();
582 758 this.addButton.show();
583 759 this.removeButtons.css('visibility', 'visible');
584 760 },
585 761
586 762 close: function(event) {
587 763 this.editButton.show();
588 764 this.closeButton.hide();
589 765 this.addButton.hide();
590 766 this.removeButtons.css('visibility', 'hidden');
591 767 }
592 768 };
593 769
594 770 PRDetails.init();
595 771 ReviewersPanel.init();
596 772
597 773 showOutdated = function(self){
598 774 $('.comment-inline.comment-outdated').show();
599 775 $('.filediff-outdated').show();
600 776 $('.showOutdatedComments').hide();
601 777 $('.hideOutdatedComments').show();
602 778 };
603 779
604 780 hideOutdated = function(self){
605 781 $('.comment-inline.comment-outdated').hide();
606 782 $('.filediff-outdated').hide();
607 783 $('.hideOutdatedComments').hide();
608 784 $('.showOutdatedComments').show();
609 785 };
610 786
611 787 refreshMergeChecks = function(){
612 788 var loadUrl = "${h.url.current(merge_checks=1)}";
613 789 $('.pull-request-merge').css('opacity', 0.3);
614 790 $('.pull-request-merge').load(
615 791 loadUrl,function() {
616 792 $('.pull-request-merge').css('opacity', 1);
617 793 }
618 794 );
619 795 };
620 796
621 797 $('#show-outdated-comments').on('click', function(e){
622 798 var button = $(this);
623 799 var outdated = $('.comment-outdated');
624 800
625 801 if (button.html() === "(Show)") {
626 802 button.html("(Hide)");
627 803 outdated.show();
628 804 } else {
629 805 button.html("(Show)");
630 806 outdated.hide();
631 807 }
632 808 });
633 809
634 810 $('.show-inline-comments').on('change', function(e){
635 811 var show = 'none';
636 812 var target = e.currentTarget;
637 813 if(target.checked){
638 814 show = ''
639 815 }
640 816 var boxid = $(target).attr('id_for');
641 817 var comments = $('#{0} .inline-comments'.format(boxid));
642 818 var fn_display = function(idx){
643 819 $(this).css('display', show);
644 820 };
645 821 $(comments).each(fn_display);
646 822 var btns = $('#{0} .inline-comments-button'.format(boxid));
647 823 $(btns).each(fn_display);
648 824 });
649 825
650 826 $('#merge_pull_request_form').submit(function() {
651 827 if (!$('#merge_pull_request').attr('disabled')) {
652 828 $('#merge_pull_request').attr('disabled', 'disabled');
653 829 }
654 830 return true;
655 831 });
656 832
657 833 $('#edit_pull_request').on('click', function(e){
658 834 var title = $('#pr-title-input').val();
659 835 var description = codeMirrorInstance.getValue();
660 836 editPullRequest(
661 837 "${c.repo_name}", "${c.pull_request.pull_request_id}",
662 838 title, description);
663 839 });
664 840
665 841 $('#update_pull_request').on('click', function(e){
666 842 updateReviewers(undefined, "${c.repo_name}", "${c.pull_request.pull_request_id}");
667 843 });
668 844
669 845 $('#update_commits').on('click', function(e){
670 846 var isDisabled = !$(e.currentTarget).attr('disabled');
671 847 $(e.currentTarget).text(_gettext('Updating...'));
672 848 $(e.currentTarget).attr('disabled', 'disabled');
673 849 if(isDisabled){
674 850 updateCommits("${c.repo_name}", "${c.pull_request.pull_request_id}");
675 851 }
676 852
677 853 });
678 854 // fixing issue with caches on firefox
679 855 $('#update_commits').removeAttr("disabled");
680 856
681 857 $('#close_pull_request').on('click', function(e){
682 858 closePullRequest("${c.repo_name}", "${c.pull_request.pull_request_id}");
683 859 });
684 860
685 861 $('.show-inline-comments').on('click', function(e){
686 862 var boxid = $(this).attr('data-comment-id');
687 863 var button = $(this);
688 864
689 865 if(button.hasClass("comments-visible")) {
690 866 $('#{0} .inline-comments'.format(boxid)).each(function(index){
691 867 $(this).hide();
692 868 });
693 869 button.removeClass("comments-visible");
694 870 } else {
695 871 $('#{0} .inline-comments'.format(boxid)).each(function(index){
696 872 $(this).show();
697 873 });
698 874 button.addClass("comments-visible");
699 875 }
700 876 });
701 877
702 878 // register submit callback on commentForm form to track TODOs
703 879 window.commentFormGlobalSubmitSuccessCallback = function(){
704 880 refreshMergeChecks();
705 881 };
706 882
707 883 })
708 884 </script>
709 885
710 886 </div>
711 887 </div>
712 888
713 889 </%def>