helpers.py
938 lines
| 29.9 KiB
| text/x-python
|
PythonLexer
r547 | """Helper functions | |||
Consists of functions to typically be used within templates, but also | ||||
available to Controllers. This module is available to both as 'h'. | ||||
""" | ||||
r734 | import random | |||
import hashlib | ||||
r966 | import StringIO | |||
r1101 | import urllib | |||
r1422 | import math | |||
r1837 | import logging | |||
r1101 | ||||
r1154 | from datetime import datetime | |||
r1716 | from pygments.formatters.html import HtmlFormatter | |||
r547 | from pygments import highlight as code_highlight | |||
r1110 | from pylons import url, request, config | |||
r547 | from pylons.i18n.translation import _, ungettext | |||
r1832 | from hashlib import md5 | |||
r1022 | ||||
r547 | from webhelpers.html import literal, HTML, escape | |||
from webhelpers.html.tools import * | ||||
from webhelpers.html.builder import make_tag | ||||
from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ | ||||
r1766 | end_form, file, form, hidden, image, javascript_link, link_to, \ | |||
link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ | ||||
submit, text, password, textarea, title, ul, xml_declaration, radio | ||||
from webhelpers.html.tools import auto_link, button_to, highlight, \ | ||||
js_obfuscate, mail_to, strip_links, strip_tags, tag_re | ||||
r547 | from webhelpers.number import format_byte_size, format_bit_size | |||
from webhelpers.pylonslib import Flash as _Flash | ||||
from webhelpers.pylonslib.secure_form import secure_form | ||||
from webhelpers.text import chop_at, collapse, convert_accented_entities, \ | ||||
convert_misc_entities, lchop, plural, rchop, remove_formatting, \ | ||||
replace_whitespace, urlify, truncate, wrap_paragraphs | ||||
r635 | from webhelpers.date import time_ago_in_words | |||
r1098 | from webhelpers.paginate import Page | |||
r698 | from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ | |||
r1676 | convert_boolean_attrs, NotGiven, _make_safe_id_component | |||
r698 | ||||
r1753 | from rhodecode.lib.annotate import annotate_highlight | |||
r1101 | from rhodecode.lib.utils import repo_name_slug | |||
r2109 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ | |||
get_changeset_safe | ||||
r1670 | from rhodecode.lib.markup_renderer import MarkupRenderer | |||
r1837 | log = logging.getLogger(__name__) | |||
r1789 | ||||
r2162 | def shorter(text, size=20): | |||
postfix = '...' | ||||
if len(text) > size: | ||||
return text[:size - len(postfix)] + postfix | ||||
return text | ||||
r698 | def _reset(name, value=None, id=NotGiven, type="reset", **attrs): | |||
r1154 | """ | |||
Reset button | ||||
r899 | """ | |||
r698 | _set_input_attrs(attrs, type, name, value) | |||
_set_id_attr(attrs, id, name) | ||||
convert_boolean_attrs(attrs, ["disabled"]) | ||||
return HTML.input(**attrs) | ||||
reset = _reset | ||||
r1676 | safeid = _make_safe_id_component | |||
r734 | ||||
r1789 | ||||
def FID(raw_id, path):
    """
    Creates a unique ID for a filenode based on its path and revision hash;
    safe to use inside urls.

    :param raw_id: changeset raw id
    :param path: file path inside the repository
    """
    path_hash = md5(safe_str(path)).hexdigest()[:12]
    return 'C-%s-%s' % (short_id(raw_id), path_hash)
r1776 | ||||
r734 | def get_token(): | |||
"""Return the current authentication token, creating one if one doesn't | ||||
already exist. | ||||
""" | ||||
token_key = "_authentication_token" | ||||
from pylons import session | ||||
if not token_key in session: | ||||
try: | ||||
token = hashlib.sha1(str(random.getrandbits(128))).hexdigest() | ||||
except AttributeError: # Python < 2.4 | ||||
token = hashlib.sha1(str(random.randrange(2 ** 128))).hexdigest() | ||||
session[token_key] = token | ||||
if hasattr(session, 'save'): | ||||
session.save() | ||||
return session[token_key] | ||||
r2162 | ||||
r547 | class _GetError(object): | |||
r899 | """Get error from form_errors, and represent it as span wrapped error | |||
message | ||||
r1203 | ||||
r899 | :param field_name: field to fetch errors for | |||
:param form_errors: form errors dict | ||||
""" | ||||
r547 | ||||
def __call__(self, field_name, form_errors): | ||||
tmpl = """<span class="error_msg">%s</span>""" | ||||
if form_errors and form_errors.has_key(field_name): | ||||
return literal(tmpl % form_errors.get(field_name)) | ||||
get_error = _GetError() | ||||
r2162 | ||||
r547 | class _ToolTip(object): | |||
r631 | ||||
r547 | def __call__(self, tooltip_title, trim_at=50): | |||
r1203 | """Special function just to wrap our text into nice formatted | |||
r905 | autowrapped text | |||
r1203 | ||||
r604 | :param tooltip_title: | |||
r547 | """ | |||
r1352 | return escape(tooltip_title) | |||
r547 | tooltip = _ToolTip() | |||
r2162 | ||||
r547 | class _FilesBreadCrumbs(object): | |||
r631 | ||||
r547 | def __call__(self, repo_name, rev, paths): | |||
r955 | if isinstance(paths, str): | |||
r1176 | paths = safe_unicode(paths) | |||
r547 | url_l = [link_to(repo_name, url('files_home', | |||
repo_name=repo_name, | ||||
revision=rev, f_path=''))] | ||||
paths_l = paths.split('/') | ||||
r740 | for cnt, p in enumerate(paths_l): | |||
r547 | if p != '': | |||
r1789 | url_l.append(link_to(p, | |||
r1766 | url('files_home', | |||
repo_name=repo_name, | ||||
revision=rev, | ||||
f_path='/'.join(paths_l[:cnt + 1]) | ||||
) | ||||
) | ||||
) | ||||
r547 | ||||
return literal('/'.join(url_l)) | ||||
files_breadcrumbs = _FilesBreadCrumbs() | ||||
r899 | ||||
r2162 | ||||
r547 | class CodeHtmlFormatter(HtmlFormatter): | |||
r2162 | """ | |||
My code Html Formatter for source codes | ||||
r966 | """ | |||
r547 | ||||
def wrap(self, source, outfile): | ||||
return self._wrap_div(self._wrap_pre(self._wrap_code(source))) | ||||
def _wrap_code(self, source): | ||||
r740 | for cnt, it in enumerate(source): | |||
r547 | i, t = it | |||
r966 | t = '<div id="L%s">%s</div>' % (cnt + 1, t) | |||
r547 | yield i, t | |||
r966 | ||||
def _wrap_tablelinenos(self, inner): | ||||
dummyoutfile = StringIO.StringIO() | ||||
lncount = 0 | ||||
for t, line in inner: | ||||
if t: | ||||
lncount += 1 | ||||
dummyoutfile.write(line) | ||||
fl = self.linenostart | ||||
mw = len(str(lncount + fl - 1)) | ||||
sp = self.linenospecial | ||||
st = self.linenostep | ||||
la = self.lineanchors | ||||
aln = self.anchorlinenos | ||||
nocls = self.noclasses | ||||
if sp: | ||||
lines = [] | ||||
for i in range(fl, fl + lncount): | ||||
if i % st == 0: | ||||
if i % sp == 0: | ||||
if aln: | ||||
lines.append('<a href="#%s%d" class="special">%*d</a>' % | ||||
(la, i, mw, i)) | ||||
else: | ||||
lines.append('<span class="special">%*d</span>' % (mw, i)) | ||||
else: | ||||
if aln: | ||||
lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) | ||||
else: | ||||
lines.append('%*d' % (mw, i)) | ||||
else: | ||||
lines.append('') | ||||
ls = '\n'.join(lines) | ||||
else: | ||||
lines = [] | ||||
for i in range(fl, fl + lncount): | ||||
if i % st == 0: | ||||
if aln: | ||||
lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) | ||||
else: | ||||
lines.append('%*d' % (mw, i)) | ||||
else: | ||||
lines.append('') | ||||
ls = '\n'.join(lines) | ||||
# in case you wonder about the seemingly redundant <div> here: since the | ||||
# content in the other cell also is wrapped in a div, some browsers in | ||||
# some configurations seem to mess up the formatting... | ||||
if nocls: | ||||
yield 0, ('<table class="%stable">' % self.cssclass + | ||||
'<tr><td><div class="linenodiv" ' | ||||
'style="background-color: #f0f0f0; padding-right: 10px">' | ||||
'<pre style="line-height: 125%">' + | ||||
r1320 | ls + '</pre></div></td><td id="hlcode" class="code">') | |||
r966 | else: | |||
yield 0, ('<table class="%stable">' % self.cssclass + | ||||
'<tr><td class="linenos"><div class="linenodiv"><pre>' + | ||||
r1320 | ls + '</pre></div></td><td id="hlcode" class="code">') | |||
r966 | yield 0, dummyoutfile.getvalue() | |||
yield 0, '</td></tr></table>' | ||||
r547 | def pygmentize(filenode, **kwargs): | |||
r899 | """pygmentize function using pygments | |||
r1203 | ||||
r604 | :param filenode: | |||
r547 | """ | |||
r899 | ||||
r547 | return literal(code_highlight(filenode.content, | |||
filenode.lexer, CodeHtmlFormatter(**kwargs))) | ||||
r1781 | ||||
r1171 | def pygmentize_annotation(repo_name, filenode, **kwargs): | |||
r1781 | """ | |||
pygmentize function for annotation | ||||
r1203 | ||||
r604 | :param filenode: | |||
r547 | """ | |||
r631 | ||||
r547 | color_dict = {} | |||
r1781 | ||||
r947 | def gen_color(n=10000): | |||
r1203 | """generator for getting n of evenly distributed colors using | |||
r947 | hsv color and golden ratio. It always return same order of colors | |||
r1203 | ||||
r947 | :returns: RGB tuple | |||
r631 | """ | |||
r1461 | ||||
def hsv_to_rgb(h, s, v): | ||||
r1781 | if s == 0.0: | |||
return v, v, v | ||||
i = int(h * 6.0) # XXX assume int() truncates! | ||||
r1461 | f = (h * 6.0) - i | |||
p = v * (1.0 - s) | ||||
q = v * (1.0 - s * f) | ||||
t = v * (1.0 - s * (1.0 - f)) | ||||
i = i % 6 | ||||
r1781 | if i == 0: | |||
return v, t, p | ||||
if i == 1: | ||||
return q, v, p | ||||
if i == 2: | ||||
return p, v, t | ||||
if i == 3: | ||||
return p, q, v | ||||
if i == 4: | ||||
return t, p, v | ||||
if i == 5: | ||||
return v, p, q | ||||
r1461 | ||||
r547 | golden_ratio = 0.618033988749895 | |||
h = 0.22717784590367374 | ||||
r947 | ||||
r1320 | for _ in xrange(n): | |||
r547 | h += golden_ratio | |||
h %= 1 | ||||
HSV_tuple = [h, 0.95, 0.95] | ||||
r1461 | RGB_tuple = hsv_to_rgb(*HSV_tuple) | |||
r1781 | yield map(lambda x: str(int(x * 256)), RGB_tuple) | |||
r547 | ||||
cgenerator = gen_color() | ||||
r631 | ||||
r547 | def get_color_string(cs): | |||
r1781 | if cs in color_dict: | |||
r547 | col = color_dict[cs] | |||
else: | ||||
col = color_dict[cs] = cgenerator.next() | ||||
return "color: rgb(%s)! important;" % (', '.join(col)) | ||||
r631 | ||||
r1171 | def url_func(repo_name): | |||
r1352 | ||||
r1171 | def _url_func(changeset): | |||
r1352 | author = changeset.author | |||
date = changeset.date | ||||
message = tooltip(changeset.message) | ||||
r631 | ||||
r1352 | tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>" | |||
" %s<br/><b>Date:</b> %s</b><br/><b>Message:" | ||||
"</b> %s<br/></div>") | ||||
tooltip_html = tooltip_html % (author, date, message) | ||||
r1171 | lnk_format = '%5s:%s' % ('r%s' % changeset.revision, | |||
short_id(changeset.raw_id)) | ||||
uri = link_to( | ||||
lnk_format, | ||||
url('changeset_home', repo_name=repo_name, | ||||
revision=changeset.raw_id), | ||||
style=get_color_string(changeset.raw_id), | ||||
class_='tooltip', | ||||
title=tooltip_html | ||||
) | ||||
r631 | ||||
r1171 | uri += '\n' | |||
return uri | ||||
return _url_func | ||||
return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs)) | ||||
r631 | ||||
r1781 | ||||
r999 | def is_following_repo(repo_name, user_id): | |||
from rhodecode.model.scm import ScmModel | ||||
return ScmModel().is_following_repo(repo_name, user_id) | ||||
r547 | flash = _Flash() | |||
r635 | #============================================================================== | |||
r1356 | # SCM FILTERS available via h. | |||
r635 | #============================================================================== | |||
r2007 | from rhodecode.lib.vcs.utils import author_name, author_email | |||
r2109 | from rhodecode.lib.utils2 import credentials_filter, age as _age | |||
r1764 | from rhodecode.model.db import User | |||
r547 | ||||
r1781 | age = lambda x: _age(x) | |||
r547 | capitalize = lambda x: x.capitalize() | |||
r1356 | email = author_email | |||
r636 | short_id = lambda x: x[:12] | |||
r1373 | hide_credentials = lambda x: ''.join(credentials_filter(x)) | |||
r660 | ||||
r1764 | ||||
r1959 | def is_git(repository): | |||
if hasattr(repository, 'alias'): | ||||
_type = repository.alias | ||||
elif hasattr(repository, 'repo_type'): | ||||
_type = repository.repo_type | ||||
else: | ||||
_type = repository | ||||
return _type == 'git' | ||||
def is_hg(repository):
    """Check if the given object is, or names, a mercurial repository.

    :param repository: repo object with ``alias`` or ``repo_type``,
        or a plain type string
    """
    # prefer object attributes; fall back to treating the value as the type
    for attr in ('alias', 'repo_type'):
        if hasattr(repository, attr):
            return getattr(repository, attr) == 'hg'
    return repository == 'hg'
r1767 | def email_or_none(author): | |||
_email = email(author) | ||||
if _email != '': | ||||
return _email | ||||
r1764 | ||||
# See if it contains a username we can get an email from | ||||
r1767 | user = User.get_by_username(author_name(author), case_insensitive=True, | |||
r1764 | cache=True) | |||
if user is not None: | ||||
return user.email | ||||
# No valid email, not a valid user in the system, none! | ||||
return None | ||||
r1781 | ||||
r1767 | def person(author): | |||
r1764 | # attr to return from fetched user | |||
person_getter = lambda usr: usr.username | ||||
r1781 | ||||
r1767 | # Valid email in the attribute passed, see if they're in the system | |||
_email = email(author) | ||||
if _email != '': | ||||
user = User.get_by_email(_email, case_insensitive=True, cache=True) | ||||
r1764 | if user is not None: | |||
return person_getter(user) | ||||
r1767 | return _email | |||
r1764 | ||||
# Maybe it's a username? | ||||
r1767 | _author = author_name(author) | |||
user = User.get_by_username(_author, case_insensitive=True, | ||||
r1764 | cache=True) | |||
if user is not None: | ||||
return person_getter(user) | ||||
# Still nothing? Just pass back the author name then | ||||
r1767 | return _author | |||
r1764 | ||||
r1959 | ||||
r712 | def bool2icon(value): | |||
r899 | """Returns True/False values represented as small html image of true/false | |||
r712 | icons | |||
r1203 | ||||
r712 | :param value: bool value | |||
""" | ||||
if value is True: | ||||
r1050 | return HTML.tag('img', src=url("/images/icons/accept.png"), | |||
alt=_('True')) | ||||
r712 | ||||
if value is False: | ||||
r1050 | return HTML.tag('img', src=url("/images/icons/cancel.png"), | |||
alt=_('False')) | ||||
r712 | ||||
return value | ||||
r1087 | def action_parser(user_log, feed=False): | |||
r2012 | """ | |||
This helper will action_map the specified string action into translated | ||||
r660 | fancy names with icons and links | |||
r1203 | ||||
r899 | :param user_log: user log instance | |||
r1087 | :param feed: use output for feeds (no html and fancy icons) | |||
r660 | """ | |||
r899 | ||||
r660 | action = user_log.action | |||
r840 | action_params = ' ' | |||
r660 | ||||
x = action.split(':') | ||||
if len(x) > 1: | ||||
action, action_params = x | ||||
r718 | def get_cs_links(): | |||
r2012 | revs_limit = 3 # display this amount always | |||
revs_top_limit = 50 # show upto this amount of changesets hidden | ||||
revs_ids = action_params.split(',') | ||||
r2014 | deleted = user_log.repository is None | |||
if deleted: | ||||
return ','.join(revs_ids) | ||||
r953 | repo_name = user_log.repository.repo_name | |||
r1045 | ||||
r1366 | repo = user_log.repository.scm_instance | |||
r1045 | ||||
r2012 | message = lambda rev: rev.message | |||
lnk = lambda rev, repo_name: ( | ||||
link_to('r%s:%s' % (rev.revision, rev.short_id), | ||||
url('changeset_home', repo_name=repo_name, | ||||
revision=rev.raw_id), | ||||
title=tooltip(message(rev)), class_='tooltip') | ||||
) | ||||
# get only max revs_top_limit of changeset for performance/ui reasons | ||||
revs = [ | ||||
x for x in repo.get_changesets(revs_ids[0], | ||||
revs_ids[:revs_top_limit][-1]) | ||||
] | ||||
r1359 | cs_links = [] | |||
r2012 | cs_links.append(" " + ', '.join( | |||
[lnk(rev, repo_name) for rev in revs[:revs_limit]] | ||||
) | ||||
) | ||||
r1009 | ||||
r2012 | compare_view = ( | |||
' <div class="compare_view tooltip" title="%s">' | ||||
'<a href="%s">%s</a> </div>' % ( | ||||
_('Show all combined changesets %s->%s') % ( | ||||
revs_ids[0], revs_ids[-1] | ||||
), | ||||
url('changeset_home', repo_name=repo_name, | ||||
revision='%s...%s' % (revs_ids[0], revs_ids[-1]) | ||||
), | ||||
_('compare view') | ||||
) | ||||
) | ||||
r1009 | ||||
r2012 | # if we have exactly one more than normally displayed | |||
# just display it, takes less space than displaying | ||||
# "and 1 more revisions" | ||||
if len(revs_ids) == revs_limit + 1: | ||||
Aras Pranckevicius
|
r1979 | rev = revs[revs_limit] | ||
r2012 | cs_links.append(", " + lnk(rev, repo_name)) | |||
Aras Pranckevicius
|
r1979 | |||
# hidden-by-default ones | ||||
r2012 | if len(revs_ids) > revs_limit + 1: | |||
uniq_id = revs_ids[0] | ||||
html_tmpl = ( | ||||
'<span> %s <a class="show_more" id="_%s" ' | ||||
'href="#more">%s</a> %s</span>' | ||||
) | ||||
r1087 | if not feed: | |||
r2012 | cs_links.append(html_tmpl % ( | |||
_('and'), | ||||
uniq_id, _('%s more') % (len(revs_ids) - revs_limit), | ||||
_('revisions') | ||||
) | ||||
) | ||||
r808 | ||||
r1087 | if not feed: | |||
Aras Pranckevicius
|
r1979 | html_tmpl = '<span id="%s" style="display:none">, %s </span>' | ||
r1087 | else: | |||
html_tmpl = '<span id="%s"> %s </span>' | ||||
r2012 | morelinks = ', '.join( | |||
[lnk(rev, repo_name) for rev in revs[revs_limit:]] | ||||
) | ||||
Aras Pranckevicius
|
r1979 | |||
r2012 | if len(revs_ids) > revs_top_limit: | |||
Aras Pranckevicius
|
r1979 | morelinks += ', ...' | ||
cs_links.append(html_tmpl % (uniq_id, morelinks)) | ||||
r1024 | if len(revs) > 1: | |||
r1359 | cs_links.append(compare_view) | |||
return ''.join(cs_links) | ||||
r734 | ||||
r718 | def get_fork_name(): | |||
r953 | repo_name = action_params | |||
r1055 | return _('fork name ') + str(link_to(action_params, url('summary_home', | |||
r1045 | repo_name=repo_name,))) | |||
r953 | ||||
r2012 | action_map = {'user_deleted_repo': (_('[deleted] repository'), None), | |||
'user_created_repo': (_('[created] repository'), None), | ||||
'user_created_fork': (_('[created] repository as fork'), None), | ||||
'user_forked_repo': (_('[forked] repository'), get_fork_name), | ||||
'user_updated_repo': (_('[updated] repository'), None), | ||||
'admin_deleted_repo': (_('[delete] repository'), None), | ||||
'admin_created_repo': (_('[created] repository'), None), | ||||
'admin_forked_repo': (_('[forked] repository'), None), | ||||
'admin_updated_repo': (_('[updated] repository'), None), | ||||
'push': (_('[pushed] into'), get_cs_links), | ||||
'push_local': (_('[committed via RhodeCode] into'), get_cs_links), | ||||
'push_remote': (_('[pulled from remote] into'), get_cs_links), | ||||
'pull': (_('[pulled] from'), None), | ||||
'started_following_repo': (_('[started following] repository'), None), | ||||
'stopped_following_repo': (_('[stopped following] repository'), None), | ||||
r735 | } | |||
r660 | ||||
r1087 | action_str = action_map.get(action, action) | |||
if feed: | ||||
action = action_str[0].replace('[', '').replace(']', '') | ||||
else: | ||||
r2012 | action = action_str[0]\ | |||
.replace('[', '<span class="journal_highlight">')\ | ||||
.replace(']', '</span>') | ||||
r1114 | ||||
r2012 | action_params_func = lambda: "" | |||
r1052 | ||||
r1114 | if callable(action_str[1]): | |||
r1052 | action_params_func = action_str[1] | |||
r953 | ||||
r1052 | return [literal(action), action_params_func] | |||
r808 | ||||
r1959 | ||||
r808 | def action_parser_icon(user_log): | |||
action = user_log.action | ||||
action_params = None | ||||
x = action.split(':') | ||||
if len(x) > 1: | ||||
action, action_params = x | ||||
r1114 | tmpl = """<img src="%s%s" alt="%s"/>""" | |||
r808 | map = {'user_deleted_repo':'database_delete.png', | |||
'user_created_repo':'database_add.png', | ||||
r1747 | 'user_created_fork':'arrow_divide.png', | |||
r808 | 'user_forked_repo':'arrow_divide.png', | |||
'user_updated_repo':'database_edit.png', | ||||
'admin_deleted_repo':'database_delete.png', | ||||
r899 | 'admin_created_repo':'database_add.png', | |||
r808 | 'admin_forked_repo':'arrow_divide.png', | |||
'admin_updated_repo':'database_edit.png', | ||||
'push':'script_add.png', | ||||
r1312 | 'push_local':'script_edit.png', | |||
r1114 | 'push_remote':'connect.png', | |||
r808 | 'pull':'down_16.png', | |||
'started_following_repo':'heart_add.png', | ||||
'stopped_following_repo':'heart_delete.png', | ||||
} | ||||
r1050 | return literal(tmpl % ((url('/images/icons/')), | |||
map.get(action, action), action)) | ||||
r660 | ||||
r635 | #============================================================================== | |||
r547 | # PERMS | |||
r635 | #============================================================================== | |||
r547 | from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ | |||
HasRepoPermissionAny, HasRepoPermissionAll | ||||
r1959 | ||||
r635 | #============================================================================== | |||
r547 | # GRAVATAR URL | |||
r635 | #============================================================================== | |||
r547 | ||||
def gravatar_url(email_address, size=30):
    """Build a gravatar url for the given email, or a bundled placeholder
    image when gravatar is disabled or the address is empty/anonymous.

    :param email_address: email to hash into the gravatar url
    :param size: requested image size in pixels
    """
    use_gravatar = str2bool(config['app_conf'].get('use_gravatar'))
    if (not use_gravatar or not email_address or
        email_address == 'anonymous@rhodecode.org'):
        # serve the local placeholder closest to the requested size
        closest = lambda wanted, sizes: min(sizes, key=lambda s: abs(s - wanted))
        return url("/images/user%s.png" % closest(size, [14, 16, 20, 24, 30]))

    ssl_enabled = 'https' == request.environ.get('wsgi.url_scheme')
    default = 'identicon'
    baseurl = ("https://secure.gravatar.com/avatar/" if ssl_enabled
               else "http://www.gravatar.com/avatar/")

    if isinstance(email_address, unicode):
        #hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # construct the url
    gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
    gravatar_url += urllib.urlencode({'d': default, 's': str(size)})

    return gravatar_url
r1098 | ||||
#============================================================================== | ||||
r1342 | # REPO PAGER, PAGER FOR REPOSITORY | |||
r1098 | #============================================================================== | |||
class RepoPage(Page):
    """Pager specialised for repository changelogs: pages are computed from
    the END of the collection and items are returned newest-first."""

    def __init__(self, collection, page=1, items_per_page=20,
                 item_count=None, url=None, **kwargs):

        """Create a "RepoPage" instance. special pager for paging
        repository
        """
        self._url_generator = url
        # Safe the kwargs class-wide so they can be used in the pager() method
        self.kwargs = kwargs
        # Save a reference to the collection
        self.original_collection = collection
        self.collection = collection
        # The self.page is the number of the current page.
        # The first page has the number 1!
        try:
            self.page = int(page)  # make it int() if we get it as a string
        except (ValueError, TypeError):
            self.page = 1
        self.items_per_page = items_per_page
        # Unless the user tells us how many items the collections has
        # we calculate that ourselves.
        if item_count is not None:
            self.item_count = item_count
        else:
            self.item_count = len(self.collection)
        # Compute the number of the first and last available page
        if self.item_count > 0:
            self.first_page = 1
            self.page_count = int(math.ceil(float(self.item_count) /
                                            self.items_per_page))
            self.last_page = self.first_page + self.page_count - 1
            # Make sure that the requested page number is the range of
            # valid pages
            if self.page > self.last_page:
                self.page = self.last_page
            elif self.page < self.first_page:
                self.page = self.first_page
            # Note: the number of items on this page can be less than
            # items_per_page if the last page is not full
            # indices count from the end of the collection, so page 1
            # covers the newest (highest-index) slice
            self.first_item = max(0, (self.item_count) - (self.page *
                                                          items_per_page))
            self.last_item = ((self.item_count - 1) - items_per_page *
                              (self.page - 1))

            self.items = list(self.collection[self.first_item:self.last_item + 1])

            # Links to previous and next page
            if self.page > self.first_page:
                self.previous_page = self.page - 1
            else:
                self.previous_page = None
            if self.page < self.last_page:
                self.next_page = self.page + 1
            else:
                self.next_page = None
        # No items available
        else:
            self.first_page = None
            self.page_count = 0
            self.last_page = None
            self.first_item = None
            self.last_item = None
            self.previous_page = None
            self.next_page = None
            self.items = []
        # This is a subclass of the 'list' type. Initialise the list now.
        # reversed() so the newest item in the slice comes first
        list.__init__(self, reversed(self.items))
r1098 | ||||
r990 | def changed_tooltip(nodes): | |||
r1342 | """ | |||
Generates a html string for changed nodes in changeset page. | ||||
It limits the output to 30 entries | ||||
r1818 | ||||
r1342 | :param nodes: LazyNodesGenerator | |||
""" | ||||
r990 | if nodes: | |||
pref = ': <br/> ' | ||||
suf = '' | ||||
if len(nodes) > 30: | ||||
suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) | ||||
r1257 | return literal(pref + '<br/> '.join([safe_unicode(x.path) | |||
for x in nodes[:30]]) + suf) | ||||
r990 | else: | |||
return ': ' + _('No Files') | ||||
r1159 | ||||
def repo_link(groups_and_repos):
    """
    Makes a breadcrumbs link to repo within a group
    joins &raquo; on each group to create a fancy link

    ex::
        group >> subgroup >> repo

    :param groups_and_repos: tuple of (group list, repo name)
    """
    groups, repo_name = groups_and_repos
    if not groups:
        return repo_name

    def make_link(group):
        return link_to(group.name, url('repos_group_home',
                                       group_name=group.group_name))
    crumbs = [make_link(g) for g in groups]
    return literal(' &raquo; '.join(crumbs) + " &raquo; " + repo_name)
r1257 | ||||
r1959 | ||||
r1257 | def fancy_file_stats(stats): | |||
r1342 | """ | |||
Displays a fancy two colored bar for number of added/deleted | ||||
lines of code on file | ||||
r1818 | ||||
r1342 | :param stats: two element list of added/deleted lines of code | |||
""" | ||||
r1257 | a, d, t = stats[0], stats[1], stats[0] + stats[1] | |||
width = 100 | ||||
r1258 | unit = float(width) / (t or 1) | |||
r1257 | ||||
r1342 | # needs > 9% of width to be visible or 0 to be hidden | |||
a_p = max(9, unit * a) if a > 0 else 0 | ||||
d_p = max(9, unit * d) if d > 0 else 0 | ||||
r1257 | p_sum = a_p + d_p | |||
if p_sum > width: | ||||
#adjust the percentage to be == 100% since we adjusted to 9 | ||||
if a_p > d_p: | ||||
a_p = a_p - (p_sum - width) | ||||
else: | ||||
d_p = d_p - (p_sum - width) | ||||
a_v = a if a > 0 else '' | ||||
d_v = d if d > 0 else '' | ||||
def cgen(l_type): | ||||
r2118 | mapping = {'tr': 'top-right-rounded-corner-mid', | |||
'tl': 'top-left-rounded-corner-mid', | ||||
'br': 'bottom-right-rounded-corner-mid', | ||||
'bl': 'bottom-left-rounded-corner-mid'} | ||||
r1959 | map_getter = lambda x: mapping[x] | |||
r1257 | ||||
if l_type == 'a' and d_v: | ||||
#case when added and deleted are present | ||||
return ' '.join(map(map_getter, ['tl', 'bl'])) | ||||
if l_type == 'a' and not d_v: | ||||
return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl'])) | ||||
if l_type == 'd' and a_v: | ||||
return ' '.join(map(map_getter, ['tr', 'br'])) | ||||
if l_type == 'd' and not a_v: | ||||
return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl'])) | ||||
r1959 | d_a = '<div class="added %s" style="width:%s%%">%s</div>' % ( | |||
r2012 | cgen('a'), a_p, a_v | |||
r1959 | ) | |||
d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % ( | ||||
r2012 | cgen('d'), d_p, d_v | |||
r1959 | ) | |||
r1257 | return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d)) | |||
r1438 | ||||
r1840 | def urlify_text(text_): | |||
r1438 | import re | |||
r1766 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]''' | |||
'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') | ||||
r1438 | ||||
def url_func(match_obj): | ||||
url_full = match_obj.groups()[0] | ||||
r1868 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) | |||
r1438 | ||||
r1840 | return literal(url_pat.sub(url_func, text_)) | |||
r1656 | ||||
r1959 | ||||
r1913 | def urlify_changesets(text_, repository): | |||
r2111 | """ | |||
Extract revision ids from changeset and make link from them | ||||
r2125 | ||||
r2111 | :param text_: | |||
:param repository: | ||||
""" | ||||
r1913 | import re | |||
URL_PAT = re.compile(r'([0-9a-fA-F]{12,})') | ||||
def url_func(match_obj): | ||||
rev = match_obj.groups()[0] | ||||
pref = '' | ||||
if match_obj.group().startswith(' '): | ||||
pref = ' ' | ||||
tmpl = ( | ||||
'%(pref)s<a class="%(cls)s" href="%(url)s">' | ||||
'%(rev)s' | ||||
'</a>' | ||||
) | ||||
return tmpl % { | ||||
'pref': pref, | ||||
'cls': 'revision-link', | ||||
'url': url('changeset_home', repo_name=repository, revision=rev), | ||||
'rev': rev, | ||||
} | ||||
newtext = URL_PAT.sub(url_func, text_) | ||||
return newtext | ||||
r1868 | ||||
r1959 | ||||
Aras Pranckevicius
|
r1878 | def urlify_commit(text_, repository=None, link_=None): | ||
r2028 | """ | |||
Parses given text message and makes proper links. | ||||
issues are linked to given issue-server, and rest is a changeset link | ||||
if link_ is given, in other case it's a plain text | ||||
:param text_: | ||||
:param repository: | ||||
:param link_: changeset link | ||||
""" | ||||
r1837 | import re | |||
import traceback | ||||
r2125 | ||||
r2111 | def escaper(string): | |||
return string.replace('<', '<').replace('>', '>') | ||||
r2125 | ||||
r2012 | def linkify_others(t, l): | |||
r1913 | urls = re.compile(r'(\<a.*?\<\/a\>)',) | |||
links = [] | ||||
for e in urls.split(t): | ||||
if not urls.match(e): | ||||
r2012 | links.append('<a class="message-link" href="%s">%s</a>' % (l, e)) | |||
r1913 | else: | |||
r1944 | links.append(e) | |||
r1913 | return ''.join(links) | |||
r2125 | ||||
r2111 | # urlify changesets - extrac revisions and make link out of them | |||
text_ = urlify_changesets(escaper(text_), repository) | ||||
r1837 | try: | |||
conf = config['app_conf'] | ||||
r1868 | ||||
r1933 | URL_PAT = re.compile(r'%s' % conf.get('issue_pat')) | |||
r1868 | ||||
r1837 | if URL_PAT: | |||
r1868 | ISSUE_SERVER_LNK = conf.get('issue_server_link') | |||
r1837 | ISSUE_PREFIX = conf.get('issue_prefix') | |||
r1868 | ||||
r1837 | def url_func(match_obj): | |||
r1912 | pref = '' | |||
if match_obj.group().startswith(' '): | ||||
pref = ' ' | ||||
r1933 | issue_id = ''.join(match_obj.groups()) | |||
r1840 | tmpl = ( | |||
r1912 | '%(pref)s<a class="%(cls)s" href="%(url)s">' | |||
r1865 | '%(issue-prefix)s%(id-repr)s' | |||
r1840 | '</a>' | |||
) | ||||
r1868 | url = ISSUE_SERVER_LNK.replace('{id}', issue_id) | |||
if repository: | ||||
url = url.replace('{repo}', repository) | ||||
r1913 | return tmpl % { | |||
r1912 | 'pref': pref, | |||
r1868 | 'cls': 'issue-tracker-link', | |||
'url': url, | ||||
'id-repr': issue_id, | ||||
'issue-prefix': ISSUE_PREFIX, | ||||
'serv': ISSUE_SERVER_LNK, | ||||
r1913 | } | |||
r1944 | ||||
Aras Pranckevicius
|
r1878 | newtext = URL_PAT.sub(url_func, text_) | ||
r1944 | ||||
r2028 | if link_: | |||
# wrap not links into final link => link_ | ||||
newtext = linkify_others(newtext, link_) | ||||
r1944 | ||||
Aras Pranckevicius
|
r1878 | return literal(newtext) | ||
r1837 | except: | |||
log.error(traceback.format_exc()) | ||||
pass | ||||
r1840 | return text_ | |||
r1670 | ||||
r1868 | ||||
r1670 | def rst(source): | |||
r1789 | return literal('<div class="rst-block">%s</div>' % | |||
r1766 | MarkupRenderer.rst(source)) | |||
r1789 | ||||
r1868 | ||||
r1769 | def rst_w_mentions(source): | |||
""" | ||||
Wrapped rst renderer with @mention highlighting | ||||
r1818 | ||||
r1769 | :param source: | |||
""" | ||||
r1789 | return literal('<div class="rst-block">%s</div>' % | |||
MarkupRenderer.rst_with_mentions(source)) | ||||