helpers.py
1195 lines
| 40.4 KiB
| text/x-python
|
PythonLexer
r547 | """Helper functions | |||
Consists of functions to typically be used within templates, but also | ||||
available to Controllers. This module is available to both as 'h'. | ||||
""" | ||||
r734 | import random | |||
import hashlib | ||||
r966 | import StringIO | |||
r1101 | import urllib | |||
r1422 | import math | |||
r1837 | import logging | |||
r2674 | import re | |||
r2836 | import urlparse | |||
r3070 | import textwrap | |||
r1101 | ||||
r1154 | from datetime import datetime | |||
r1716 | from pygments.formatters.html import HtmlFormatter | |||
r547 | from pygments import highlight as code_highlight | |||
r1110 | from pylons import url, request, config | |||
r547 | from pylons.i18n.translation import _, ungettext | |||
r1832 | from hashlib import md5 | |||
r1022 | ||||
r547 | from webhelpers.html import literal, HTML, escape | |||
from webhelpers.html.tools import * | ||||
from webhelpers.html.builder import make_tag | ||||
from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ | ||||
r1766 | end_form, file, form, hidden, image, javascript_link, link_to, \ | |||
link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ | ||||
submit, text, password, textarea, title, ul, xml_declaration, radio | ||||
from webhelpers.html.tools import auto_link, button_to, highlight, \ | ||||
js_obfuscate, mail_to, strip_links, strip_tags, tag_re | ||||
r547 | from webhelpers.number import format_byte_size, format_bit_size | |||
from webhelpers.pylonslib import Flash as _Flash | ||||
from webhelpers.pylonslib.secure_form import secure_form | ||||
from webhelpers.text import chop_at, collapse, convert_accented_entities, \ | ||||
convert_misc_entities, lchop, plural, rchop, remove_formatting, \ | ||||
replace_whitespace, urlify, truncate, wrap_paragraphs | ||||
r635 | from webhelpers.date import time_ago_in_words | |||
r1098 | from webhelpers.paginate import Page | |||
r698 | from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ | |||
r1676 | convert_boolean_attrs, NotGiven, _make_safe_id_component | |||
r698 | ||||
r1753 | from rhodecode.lib.annotate import annotate_highlight | |||
r3375 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer | |||
r2109 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ | |||
r2936 | get_changeset_safe, datetime_to_time, time_to_datetime, AttributeDict | |||
r1670 | from rhodecode.lib.markup_renderer import MarkupRenderer | |||
r2323 | from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError | |||
r3012 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset | |||
r2445 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT | |||
r2217 | from rhodecode.model.changeset_status import ChangesetStatusModel | |||
r2532 | from rhodecode.model.db import URL_SEP, Permission | |||
r1670 | ||||
r1837 | log = logging.getLogger(__name__) | |||
r1789 | ||||
# map of HTML-special characters to their entity form
html_escape_table = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&apos;",
    ">": "&gt;",
    "<": "&lt;",
}


def html_escape(text):
    """Produce entities within text.

    Replaces every HTML-special character in *text* (see
    ``html_escape_table``) with its entity; all other characters
    pass through unchanged.
    """
    escaped = [html_escape_table.get(char, char) for char in text]
    return "".join(escaped)
r2466 | ||||
def shorter(text, size=20):
    """Truncate *text* to at most *size* characters.

    When the text is longer than *size*, it is cut so that the result
    (including the trailing ``'...'``) is exactly *size* characters long.
    """
    suffix = '...'
    if len(text) <= size:
        return text
    return text[:size - len(suffix)] + suffix
def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
    """
    Reset button

    Builds an ``<input type="reset">`` tag in the same way as the other
    webhelpers input builders, so it can be used alongside them in forms.

    :param name: name attribute of the input
    :param value: optional value attribute
    :param id: id attribute; derived from ``name`` when not given
    :param type: input type, defaults to ``"reset"``
    """
    _set_input_attrs(attrs, type, name, value)
    _set_id_attr(attrs, id, name)
    # render "disabled" as an HTML boolean attribute when truthy
    convert_boolean_attrs(attrs, ["disabled"])
    return HTML.input(**attrs)

# webhelpers has no reset-button helper; expose ours under a public name
reset = _reset
# sanitize a string into a safe value for an HTML id attribute
safeid = _make_safe_id_component
r734 | ||||
r1789 | ||||
def FID(raw_id, path):
    """
    Create a unique, URL-safe ID for a filenode, derived from the
    revision hash and the md5 of the file path.

    :param raw_id: raw changeset id
    :param path: file path inside the repository
    """
    path_hash = md5(safe_str(path)).hexdigest()[:12]
    return 'C-%s-%s' % (short_id(raw_id), path_hash)
r1776 | ||||
def get_token():
    """Return the current authentication token, creating one if one doesn't
    already exist.

    The token is cached in the beaker session under
    ``_authentication_token`` and persisted via ``session.save()`` when
    the session object supports it.
    """
    token_key = "_authentication_token"
    from pylons import session
    if token_key not in session:
        try:
            token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
        except AttributeError:  # Python < 2.4
            token = hashlib.sha1(str(random.randrange(2 ** 128))).hexdigest()
        session[token_key] = token
        if hasattr(session, 'save'):
            session.save()
    return session[token_key]
r2162 | ||||
class _GetError(object):
    """Render the error message for a form field as a span-wrapped literal.

    :param field_name: field to fetch errors for
    :param form_errors: form errors dict

    Returns ``None`` when there is no error recorded for the field.
    """

    def __call__(self, field_name, form_errors):
        if not form_errors or field_name not in form_errors:
            return None
        msg = form_errors.get(field_name)
        return literal("""<span class="error_msg">%s</span>""" % msg)

get_error = _GetError()
r2162 | ||||
class _ToolTip(object):

    def __call__(self, tooltip_title, trim_at=50):
        """
        Special function just to wrap our text into nice formatted
        autowrapped text

        :param tooltip_title: text to prepare for use in a tooltip
        """
        # escape HTML-special characters first
        tooltip_title = escape(tooltip_title)
        # NOTE(review): this second pass looks redundant after escape() —
        # confirm whether escape() already entity-encodes angle brackets
        tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
        return tooltip_title

tooltip = _ToolTip()
r2162 | ||||
class _FilesBreadCrumbs(object):
    """Build a '/'-joined breadcrumb trail of links for a file path,
    starting at the repository root and linking each path segment to
    the corresponding ``files_home`` view at the given revision.
    """

    def __call__(self, repo_name, rev, paths):
        if isinstance(paths, str):
            # work on unicode internally; incoming path may be a bytestring
            paths = safe_unicode(paths)
        # first crumb: repository root at this revision
        url_l = [link_to(repo_name, url('files_home',
                                        repo_name=repo_name,
                                        revision=rev, f_path=''),
                         class_='ypjax-link')]
        paths_l = paths.split('/')
        for cnt, p in enumerate(paths_l):
            if p != '':
                # each crumb links to the cumulative path up to this segment
                url_l.append(link_to(p,
                                     url('files_home',
                                         repo_name=repo_name,
                                         revision=rev,
                                         f_path='/'.join(paths_l[:cnt + 1])
                                         ),
                                     class_='ypjax-link'
                                     )
                             )

        return literal('/'.join(url_l))

files_breadcrumbs = _FilesBreadCrumbs()
r899 | ||||
r2162 | ||||
class CodeHtmlFormatter(HtmlFormatter):
    """
    My code Html Formatter for source codes

    Customizes pygments' HtmlFormatter so that every source line is
    wrapped in a ``<div id="L<n>">`` (for line anchors/highlighting) and
    the line-number table cell gets ``id="hlcode"``.
    """

    def wrap(self, source, outfile):
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        # wrap each emitted line in a div carrying a stable L<n> id
        for cnt, it in enumerate(source):
            i, t = it
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
            yield i, t

    def _wrap_tablelinenos(self, inner):
        # adapted from pygments' HtmlFormatter._wrap_tablelinenos;
        # buffer the code cell while counting lines for the numbers cell
        dummyoutfile = StringIO.StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))  # width needed for the widest number
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            # every sp-th line gets the "special" styling
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    # skipped by linenostep: keep the row but leave it blank
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'
def pygmentize(filenode, **kwargs):
    """
    Render *filenode*'s content as highlighted HTML using pygments.

    :param filenode: file node providing ``extension``, ``lexer`` and
        ``content``; a custom lexer mapped to the extension wins over
        the node's own lexer
    """
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
    formatter = CodeHtmlFormatter(**kwargs)
    return literal(code_highlight(filenode.content, lexer, formatter))
r547 | ||||
r1781 | ||||
def pygmentize_annotation(repo_name, filenode, **kwargs):
    """
    pygmentize function for annotation

    Highlights *filenode* in annotate (blame) mode; every distinct
    changeset gets a stable, evenly distributed color and a tooltip link
    to its changeset page.

    :param repo_name: repository name used to build changeset links
    :param filenode: file node to annotate
    """

    # changeset raw_id -> RGB color, filled lazily from the generator below
    color_dict = {}

    def gen_color(n=10000):
        """generator for getting n of evenly distributed colors using
        hsv color and golden ratio. It always return same order of colors

        :returns: RGB tuple
        """

        def hsv_to_rgb(h, s, v):
            if s == 0.0:
                return v, v, v
            i = int(h * 6.0)  # XXX assume int() truncates!
            f = (h * 6.0) - i
            p = v * (1.0 - s)
            q = v * (1.0 - s * f)
            t = v * (1.0 - s * (1.0 - f))
            i = i % 6
            if i == 0:
                return v, t, p
            if i == 1:
                return q, v, p
            if i == 2:
                return p, v, t
            if i == 3:
                return p, q, v
            if i == 4:
                return t, p, v
            if i == 5:
                return v, p, q

        golden_ratio = 0.618033988749895
        h = 0.22717784590367374  # fixed seed hue so colors are reproducible

        # NOTE: the loop variable shadows the gettext ``_`` inside this
        # generator's scope only
        for _ in xrange(n):
            h += golden_ratio
            h %= 1
            HSV_tuple = [h, 0.95, 0.95]
            RGB_tuple = hsv_to_rgb(*HSV_tuple)
            yield map(lambda x: str(int(x * 256)), RGB_tuple)

    cgenerator = gen_color()

    def get_color_string(cs):
        # reuse the color already assigned to this changeset, otherwise
        # draw the next one from the generator
        if cs in color_dict:
            col = color_dict[cs]
        else:
            col = color_dict[cs] = cgenerator.next()
        return "color: rgb(%s)! important;" % (', '.join(col))

    def url_func(repo_name):

        def _url_func(changeset):
            author = changeset.author
            date = changeset.date
            message = tooltip(changeset.message)

            tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
                            " %s<br/><b>Date:</b> %s</b><br/><b>Message:"
                            "</b> %s<br/></div>")

            tooltip_html = tooltip_html % (author, date, message)
            lnk_format = '%5s:%s' % ('r%s' % changeset.revision,
                                     short_id(changeset.raw_id))
            uri = link_to(
                    lnk_format,
                    url('changeset_home', repo_name=repo_name,
                        revision=changeset.raw_id),
                    style=get_color_string(changeset.raw_id),
                    class_='tooltip',
                    title=tooltip_html
                  )

            uri += '\n'
            return uri
        return _url_func

    return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs))
r631 | ||||
r1781 | ||||
def is_following_repo(repo_name, user_id):
    """Check via ScmModel whether user *user_id* follows *repo_name*."""
    from rhodecode.model.scm import ScmModel
    model = ScmModel()
    return model.is_following_repo(repo_name, user_id)

flash = _Flash()
r635 | #============================================================================== | |||
r1356 | # SCM FILTERS available via h. | |||
r635 | #============================================================================== | |||
r2007 | from rhodecode.lib.vcs.utils import author_name, author_email | |||
r2109 | from rhodecode.lib.utils2 import credentials_filter, age as _age | |||
r2239 | from rhodecode.model.db import User, ChangesetStatus | |||
r547 | ||||
# short filters exposed to templates via ``h.``
age = lambda x: _age(x)  # human readable age (delegates to utils2.age)
capitalize = lambda x: x.capitalize()
email = author_email  # extract the email part of a commit author string
short_id = lambda x: x[:12]  # shorten a raw changeset id to 12 chars
hide_credentials = lambda x: ''.join(credentials_filter(x))  # mask credentials via credentials_filter
r660 | ||||
r1764 | ||||
Vincent Duvert
|
def fmt_date(date):
    """Format *date* with the localized ``'%a, %d %b %Y %H:%M:%S'``
    pattern; returns an empty string for a falsy date.
    """
    if not date:
        return ""
    _fmt = _(u"%a, %d %b %Y %H:%M:%S").encode('utf8')
    return date.strftime(_fmt).decode('utf8')
def _get_repo_type(repository):
    """Extract the scm alias string from *repository*.

    Accepts a vcs repository (``alias`` attribute), a db Repository
    (``repo_type`` attribute) or a plain alias string.
    """
    if hasattr(repository, 'alias'):
        return repository.alias
    if hasattr(repository, 'repo_type'):
        return repository.repo_type
    return repository


def is_git(repository):
    """Return True when *repository* is a git repository.

    :param repository: vcs repository, db Repository or alias string
    """
    return _get_repo_type(repository) == 'git'


def is_hg(repository):
    """Return True when *repository* is a mercurial repository.

    :param repository: vcs repository, db Repository or alias string
    """
    return _get_repo_type(repository) == 'hg'
def email_or_none(author):
    """Resolve *author* (a commit author string) to an email address.

    Prefers the main email of a matching RhodeCode user; falls back to
    the raw email found in the string, then to a username lookup, and
    finally to ``None``.
    """
    # extract email from the commit string
    _email = email(author)
    if _email != '':
        # check it against RhodeCode database, and use the MAIN email for this
        # user
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user.email
        return _email

    # See if it contains a username we can get an email from
    user = User.get_by_username(author_name(author), case_insensitive=True,
                                cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
r1781 | ||||
def person(author, show_attr="username_and_name"):
    """Resolve *author* (a commit author string) to a display value.

    Tries a RhodeCode user lookup by email, then by username, returning
    the user's ``show_attr`` attribute; otherwise falls back to the raw
    email or author name from the string.
    """
    # attr to return from fetched user
    person_getter = lambda usr: getattr(usr, show_attr)

    # Valid email in the attribute passed, see if they're in the system
    _email = email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return person_getter(user)
        return _email

    # Maybe it's a username?
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True,
                                cache=True)
    if user is not None:
        return person_getter(user)

    # Still nothing? Just pass back the author name then
    return _author
r1764 | ||||
r1959 | ||||
def person_by_id(id_, show_attr="username_and_name"):
    """Resolve a user id to its ``show_attr`` display value; when no
    matching user exists the (int-converted) id is returned as-is.
    """
    # attr to return from fetched user
    def _getter(usr):
        return getattr(usr, show_attr)

    # maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return _getter(user)
    return id_
def desc_stylize(value):
    """
    converts tags from value into html equivalent

    Recognized tags: ``[see => url]``, ``[license => name]``,
    ``[requires|recommends|conflicts|base => repo]``,
    ``[lang|language => name]`` and generic ``[word]``.

    :param value: raw repository description text
    """
    value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    # note: the license href must use plain '//' — backslash-escaped
    # slashes would end up verbatim in the generated link
    value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    # generic one-word tags, applied last so specific forms win
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)
    return value
def bool2icon(value):
    """Returns True/False values represented as small html image of true/false
    icons

    :param value: bool value
    """
    for flag, icon, alt_txt in ((True, "accept.png", _('True')),
                                (False, "cancel.png", _('False'))):
        if value is flag:
            return HTML.tag('img', src=url("/images/icons/%s" % icon),
                            alt=alt_txt)

    # anything that is not strictly True/False is passed back unchanged
    return value
def action_parser(user_log, feed=False, parse_cs=False):
    """
    This helper will action_map the specified string action into translated
    fancy names with icons and links

    Returns a 3-element list of zero-argument callbacks:
    [action label, action params (e.g. changeset links), action icon].
    Callbacks are returned (not values) so callers can skip expensive
    rendering they don't need.

    :param user_log: user log instance
    :param feed: use output for feeds (no html and fancy icons)
    :param parse_cs: parse Changesets into VCS instances
    """

    action = user_log.action
    action_params = ' '

    # actions are stored as 'action:param1,param2,...'
    x = action.split(':')

    if len(x) > 1:
        action, action_params = x

    def get_cs_links():
        revs_limit = 3  # display this amount always
        revs_top_limit = 50  # show upto this amount of changesets hidden
        revs_ids = action_params.split(',')
        deleted = user_log.repository is None
        if deleted:
            # repo is gone — no links can be built, show raw ids
            return ','.join(revs_ids)

        repo_name = user_log.repository.repo_name

        def lnk(rev, repo_name):
            if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
                lazy_cs = True
                if getattr(rev, 'op', None) and getattr(rev, 'ref_name', None):
                    # branch/tag operation entries link nowhere
                    lazy_cs = False
                    lbl = '?'
                    if rev.op == 'delete_branch':
                        lbl = '%s' % _('Deleted branch: %s') % rev.ref_name
                        title = ''
                    elif rev.op == 'tag':
                        lbl = '%s' % _('Created tag: %s') % rev.ref_name
                        title = ''
                    _url = '#'
                else:
                    lbl = '%s' % (rev.short_id[:8])
                    _url = url('changeset_home', repo_name=repo_name,
                               revision=rev.raw_id)
                    title = tooltip(rev.message)
            else:
                ## changeset cannot be found/striped/removed etc.
                lbl = ('%s' % rev)[:12]
                _url = '#'
                title = _('Changeset not found')
            if parse_cs:
                return link_to(lbl, _url, title=title, class_='tooltip')
            # lazy-cs links get their tooltip content loaded client-side
            return link_to(lbl, _url, raw_id=rev.raw_id, repo_name=repo_name,
                           class_='lazy-cs' if lazy_cs else '')

        def _get_op(rev_txt):
            # rev entries may carry an operation prefix: 'op=>name'
            _op = None
            _name = rev_txt
            if len(rev_txt.split('=>')) == 2:
                _op, _name = rev_txt.split('=>')
            return _op, _name

        revs = []
        if len(filter(lambda v: v != '', revs_ids)) > 0:
            repo = None
            for rev in revs_ids[:revs_top_limit]:
                _op, _name = _get_op(rev)

                # we want parsed changesets, or new log store format is bad
                if parse_cs:
                    try:
                        if repo is None:
                            # fetch scm_instance lazily, once per call
                            repo = user_log.repository.scm_instance
                        _rev = repo.get_changeset(rev)
                        revs.append(_rev)
                    except ChangesetDoesNotExistError:
                        log.error('cannot find revision %s in this repo' % rev)
                        revs.append(rev)
                        continue
                else:
                    # lightweight stand-in for a changeset object
                    _rev = AttributeDict({
                        'short_id': rev[:12],
                        'raw_id': rev,
                        'message': '',
                        'op': _op,
                        'ref_name': _name
                    })
                    revs.append(_rev)

        cs_links = []
        # always-visible head of the list
        cs_links.append(" " + ', '.join(
            [lnk(rev, repo_name) for rev in revs[:revs_limit]]
            )
        )
        _op1, _name1 = _get_op(revs_ids[0])
        _op2, _name2 = _get_op(revs_ids[-1])
        _rev = '%s...%s' % (_name1, _name2)

        compare_view = (
            ' <div class="compare_view tooltip" title="%s">'
            '<a href="%s">%s</a> </div>' % (
                _('Show all combined changesets %s->%s') % (
                    revs_ids[0][:12], revs_ids[-1][:12]
                ),
                url('changeset_home', repo_name=repo_name,
                    revision=_rev
                ),
                _('compare view')
            )
        )

        # if we have exactly one more than normally displayed
        # just display it, takes less space than displaying
        # "and 1 more revisions"
        if len(revs_ids) == revs_limit + 1:
            rev = revs[revs_limit]
            cs_links.append(", " + lnk(rev, repo_name))

        # hidden-by-default ones
        if len(revs_ids) > revs_limit + 1:
            uniq_id = revs_ids[0]
            html_tmpl = (
                '<span> %s <a class="show_more" id="_%s" '
                'href="#more">%s</a> %s</span>'
            )
            if not feed:
                cs_links.append(html_tmpl % (
                    _('and'),
                    uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
                    _('revisions')
                    )
                )

            if not feed:
                # hidden until the "show more" anchor is clicked
                html_tmpl = '<span id="%s" style="display:none">, %s </span>'
            else:
                html_tmpl = '<span id="%s"> %s </span>'

            morelinks = ', '.join(
                [lnk(rev, repo_name) for rev in revs[revs_limit:]]
            )

            if len(revs_ids) > revs_top_limit:
                # above the cap — indicate there are even more
                morelinks += ', ...'

            cs_links.append(html_tmpl % (uniq_id, morelinks))
        if len(revs) > 1:
            cs_links.append(compare_view)
        return ''.join(cs_links)

    def get_fork_name():
        repo_name = action_params
        _url = url('summary_home', repo_name=repo_name)
        return _('fork name %s') % link_to(action_params, _url)

    def get_user_name():
        user_name = action_params
        return user_name

    def get_users_group():
        group_name = action_params
        return group_name

    def get_pull_request():
        pull_request_id = action_params
        deleted = user_log.repository is None
        if deleted:
            # fall back to the stored name when the repo row is gone
            repo_name = user_log.repository_name
        else:
            repo_name = user_log.repository.repo_name
        return link_to(_('Pull request #%s') % pull_request_id,
                       url('pullrequest_show', repo_name=repo_name,
                           pull_request_id=pull_request_id))

    # action : translated str, callback(extractor), icon
    action_map = {
        'user_deleted_repo': (_('[deleted] repository'),
                              None, 'database_delete.png'),
        'user_created_repo': (_('[created] repository'),
                              None, 'database_add.png'),
        'user_created_fork': (_('[created] repository as fork'),
                              None, 'arrow_divide.png'),
        'user_forked_repo': (_('[forked] repository'),
                             get_fork_name, 'arrow_divide.png'),
        'user_updated_repo': (_('[updated] repository'),
                              None, 'database_edit.png'),
        'admin_deleted_repo': (_('[delete] repository'),
                               None, 'database_delete.png'),
        'admin_created_repo': (_('[created] repository'),
                               None, 'database_add.png'),
        'admin_forked_repo': (_('[forked] repository'),
                              None, 'arrow_divide.png'),
        'admin_updated_repo': (_('[updated] repository'),
                               None, 'database_edit.png'),
        'admin_created_user': (_('[created] user'),
                               get_user_name, 'user_add.png'),
        'admin_updated_user': (_('[updated] user'),
                               get_user_name, 'user_edit.png'),
        'admin_created_users_group': (_('[created] user group'),
                                      get_users_group, 'group_add.png'),
        'admin_updated_users_group': (_('[updated] user group'),
                                      get_users_group, 'group_edit.png'),
        'user_commented_revision': (_('[commented] on revision in repository'),
                                    get_cs_links, 'comment_add.png'),
        'user_commented_pull_request': (_('[commented] on pull request for'),
                                        get_pull_request, 'comment_add.png'),
        'user_closed_pull_request': (_('[closed] pull request for'),
                                     get_pull_request, 'tick.png'),
        'push': (_('[pushed] into'),
                 get_cs_links, 'script_add.png'),
        'push_local': (_('[committed via RhodeCode] into repository'),
                       get_cs_links, 'script_edit.png'),
        'push_remote': (_('[pulled from remote] into repository'),
                        get_cs_links, 'connect.png'),
        'pull': (_('[pulled] from'),
                 None, 'down_16.png'),
        'started_following_repo': (_('[started following] repository'),
                                   None, 'heart_add.png'),
        'stopped_following_repo': (_('[stopped following] repository'),
                                   None, 'heart_delete.png'),
    }

    # NOTE(review): unknown actions fall back to the raw action string, so
    # action_str[0] below is then just its first character — confirm intended
    action_str = action_map.get(action, action)
    if feed:
        action = action_str[0].replace('[', '').replace(']', '')
    else:
        action = action_str[0]\
            .replace('[', '<span class="journal_highlight">')\
            .replace(']', '</span>')

    action_params_func = lambda: ""

    if callable(action_str[1]):
        action_params_func = action_str[1]

    def action_parser_icon():
        action = user_log.action
        action_params = None
        x = action.split(':')

        if len(x) > 1:
            action, action_params = x

        tmpl = """<img src="%s%s" alt="%s"/>"""
        ico = action_map.get(action, ['', '', ''])[2]
        return literal(tmpl % ((url('/images/icons/')), ico, action))

    # returned callbacks we need to call to get
    return [lambda: literal(action), action_params_func, action_parser_icon]
r660 | ||||
r635 | #============================================================================== | |||
r547 | # PERMS | |||
r635 | #============================================================================== | |||
r547 | from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ | |||
r3222 | HasRepoPermissionAny, HasRepoPermissionAll, HasReposGroupPermissionAll, \ | |||
HasReposGroupPermissionAny | ||||
r547 | ||||
r1959 | ||||
r635 | #============================================================================== | |||
r547 | # GRAVATAR URL | |||
r635 | #============================================================================== | |||
r547 | ||||
def gravatar_url(email_address, size=30):
    """Build an avatar URL for *email_address* at the given pixel *size*.

    Falls back to a bundled placeholder image when gravatar is disabled
    or no email is given; honors an ``alternative_gravatar_url``
    template from the app config before defaulting to gravatar.com.
    """
    from pylons import url  # doh, we need to re-import url to mock it later
    _def = 'anonymous@rhodecode.org'
    use_gravatar = str2bool(config['app_conf'].get('use_gravatar'))
    email_address = email_address or _def
    if (not use_gravatar or not email_address or email_address == _def):
        # pick the bundled placeholder whose size is closest to the request
        f = lambda a, l: min(l, key=lambda x: abs(x - a))
        return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30]))

    if use_gravatar and config['app_conf'].get('alternative_gravatar_url'):
        # custom avatar service: fill in the {placeholders} of the template
        tmpl = config['app_conf'].get('alternative_gravatar_url', '')
        parsed_url = urlparse.urlparse(url.current(qualified=True))
        tmpl = tmpl.replace('{email}', email_address)\
                   .replace('{md5email}', hashlib.md5(email_address.lower()).hexdigest()) \
                   .replace('{netloc}', parsed_url.netloc)\
                   .replace('{scheme}', parsed_url.scheme)\
                   .replace('{size}', str(size))
        return tmpl

    # match the request's scheme so the avatar doesn't trigger mixed content
    ssl_enabled = 'https' == request.environ.get('wsgi.url_scheme')
    default = 'identicon'
    baseurl_nossl = "http://www.gravatar.com/avatar/"
    baseurl_ssl = "https://secure.gravatar.com/avatar/"
    baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl

    if isinstance(email_address, unicode):
        #hashlib crashes on unicode items
        email_address = safe_str(email_address)
    # construct the url
    gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
    gravatar_url += urllib.urlencode({'d': default, 's': str(size)})

    return gravatar_url
r1098 | ||||
#============================================================================== | ||||
r1342 | # REPO PAGER, PAGER FOR REPOSITORY | |||
r1098 | #============================================================================== | |||
class RepoPage(Page):
    """Pager specialized for repository changelogs: pages are computed
    from the end of the collection and items are emitted newest-first
    (note the ``reversed()`` at the bottom of ``__init__``).
    """

    def __init__(self, collection, page=1, items_per_page=20,
                 item_count=None, url=None, **kwargs):

        """Create a "RepoPage" instance. special pager for paging
        repository
        """
        self._url_generator = url

        # Safe the kwargs class-wide so they can be used in the pager() method
        self.kwargs = kwargs

        # Save a reference to the collection
        self.original_collection = collection

        self.collection = collection

        # The self.page is the number of the current page.
        # The first page has the number 1!
        try:
            self.page = int(page)  # make it int() if we get it as a string
        except (ValueError, TypeError):
            self.page = 1

        self.items_per_page = items_per_page

        # Unless the user tells us how many items the collections has
        # we calculate that ourselves.
        if item_count is not None:
            self.item_count = item_count
        else:
            self.item_count = len(self.collection)

        # Compute the number of the first and last available page
        if self.item_count > 0:
            self.first_page = 1
            self.page_count = int(math.ceil(float(self.item_count) /
                                            self.items_per_page))
            self.last_page = self.first_page + self.page_count - 1

            # Make sure that the requested page number is the range of
            # valid pages
            if self.page > self.last_page:
                self.page = self.last_page
            elif self.page < self.first_page:
                self.page = self.first_page

            # Note: the number of items on this page can be less than
            #       items_per_page if the last page is not full
            # slice indexes count from the *end* of the collection, so
            # page 1 holds the most recent items
            self.first_item = max(0, (self.item_count) - (self.page *
                                                          items_per_page))
            self.last_item = ((self.item_count - 1) - items_per_page *
                              (self.page - 1))

            self.items = list(self.collection[self.first_item:self.last_item + 1])

            # Links to previous and next page
            if self.page > self.first_page:
                self.previous_page = self.page - 1
            else:
                self.previous_page = None
            if self.page < self.last_page:
                self.next_page = self.page + 1
            else:
                self.next_page = None

        # No items available
        else:
            self.first_page = None
            self.page_count = 0
            self.last_page = None
            self.first_item = None
            self.last_item = None
            self.previous_page = None
            self.next_page = None
            self.items = []

        # This is a subclass of the 'list' type. Initialise the list now.
        list.__init__(self, reversed(self.items))
r1098 | ||||
def changed_tooltip(nodes):
    """
    Generates a html string for changed nodes in changeset page.
    It limits the output to 30 entries

    :param nodes: LazyNodesGenerator
    """
    if nodes:
        pref = ': <br/> '
        suf = ''
        if len(nodes) > 30:
            # cap the tooltip and summarize the remainder
            suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
        return literal(pref + '<br/> '.join([safe_unicode(x.path)
                                             for x in nodes[:30]]) + suf)
    else:
        return ': ' + _('No Files')
r1159 | ||||
def repo_link(groups_and_repos, last_url=None):
    """
    Makes a breadcrumbs link to repo within a group
    joins &raquo; on each group to create a fancy link

    ex::
        group >> subgroup >> repo

    :param groups_and_repos: tuple of (groups list, repo name)
    :param last_url: optional url for the final (repo) crumb; when not
        given the repo name is rendered as plain text
    """
    groups, repo_name = groups_and_repos
    last_link = link_to(repo_name, last_url) if last_url else repo_name

    if not groups:
        # no group nesting: just the repo crumb itself
        if last_url:
            return last_link
        return repo_name
    else:
        def make_link(group):
            return link_to(group.name,
                           url('repos_group_home', group_name=group.group_name))
        return literal(' &raquo; '.join(map(make_link, groups) + [last_link]))
r1257 | ||||
r1959 | ||||
def fancy_file_stats(stats):
    """
    Display a fancy two-colored bar for number of added/deleted
    lines of code on a file.

    :param stats: two element list of added/deleted lines of code
    """
    def corner_classes(l_type, a_v, d_v):
        # choose rounded-corner css classes depending on which halves
        # of the bar are actually visible
        css = {'tr': 'top-right-rounded-corner-mid',
               'tl': 'top-left-rounded-corner-mid',
               'br': 'bottom-right-rounded-corner-mid',
               'bl': 'bottom-left-rounded-corner-mid'}
        if l_type == 'a':
            corners = ['tl', 'bl'] if d_v else ['tr', 'br', 'tl', 'bl']
        else:  # 'd'
            corners = ['tr', 'br'] if a_v else ['tr', 'br', 'tl', 'bl']
        return ' '.join(css[c] for c in corners)

    added, deleted = stats[0], stats[1]
    width = 100

    if added == 'b':
        # binary mode: fixed-size "bin" bar, no percentages to compute
        bin_del = '<div class="bin%s %s" style="width:100%%">%s</div>' % (
            deleted, corner_classes('a', '', 0), 'bin')
        bin_add = '<div class="bin1" style="width:0%%">%s</div>' % 'bin'
        return literal('<div style="width:%spx">%s%s</div>'
                       % (width, bin_add, bin_del))

    total = stats[0] + stats[1]
    unit = float(width) / (total or 1)
    # needs > 9% of width to be visible or 0 to be hidden
    added_pct = max(9, unit * added) if added > 0 else 0
    deleted_pct = max(9, unit * deleted) if deleted > 0 else 0
    overflow = added_pct + deleted_pct - width
    if overflow > 0:
        # adjust the percentage to be == 100% since we bumped to 9
        if added_pct > deleted_pct:
            added_pct -= overflow
        else:
            deleted_pct -= overflow
    added_lbl = added if added > 0 else ''
    deleted_lbl = deleted if deleted > 0 else ''
    div_added = '<div class="added %s" style="width:%s%%">%s</div>' % (
        corner_classes('a', added_lbl, deleted_lbl), added_pct, added_lbl
    )
    div_deleted = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
        corner_classes('d', added_lbl, deleted_lbl), deleted_pct, deleted_lbl
    )
    return literal('<div style="width:%spx">%s%s</div>'
                   % (width, div_added, div_deleted))
r1438 | ||||
def urlify_text(text_, safe=True):
    """
    Extract urls from text and make html links out of them.

    :param text_: text to process
    :param safe: when True wrap the result in ``literal`` so templates
        render the generated markup
    """
    link_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'''
                          '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def _make_link(m):
        return '<a href="%(url)s">%(url)s</a>' % ({'url': m.group(1)})

    linked = link_pat.sub(_make_link, text_)
    return literal(linked) if safe else linked
r1656 | ||||
r1959 | ||||
def urlify_changesets(text_, repository):
    """
    Extract revision ids (12-40 hex chars) from text and turn them into
    changeset links.

    :param text_: text to process
    :param repository: repo name to build the URL with
    """
    from pylons import url  # doh, we need to re-import url to mock it later
    rev_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def _link(match_obj):
        prefix, rev, suffix = match_obj.groups()
        return ('%(pref)s<a class="%(cls)s" href="%(url)s">'
                '%(rev)s</a>%(suf)s') % {
            'pref': prefix,
            'cls': 'revision-link',
            'url': url('changeset_home', repo_name=repository, revision=rev),
            'rev': rev,
            'suf': suffix
        }

    return rev_pat.sub(_link, text_)
r1868 | ||||
r1959 | ||||
Aras Pranckevicius
|
def urlify_commit(text_, repository=None, link_=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a changeset link
    if link_ is given, in other case it's a plain text

    :param text_: commit message to process
    :param repository: repo name used when building changeset/issue urls
    :param link_: changeset link; when given, any remaining plain text is
        wrapped into this link
    """
    import traceback
    from pylons import url  # doh, we need to re-import url to mock it later

    def escaper(string):
        # escape HTML metacharacters so the raw commit message is safe to
        # embed in markup (the no-op replace('<', '<') form was a bug)
        return string.replace('<', '&lt;').replace('>', '&gt;')

    def linkify_others(t, l):
        # wrap every fragment that is not already an <a> link into link `l`
        urls = re.compile(r'(\<a.*?\<\/a\>)',)
        links = []
        for e in urls.split(t):
            if not urls.match(e):
                links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
            else:
                links.append(e)
        return ''.join(links)

    # urlify changesets - extract revisions and make link out of them
    newtext = urlify_changesets(escaper(text_), repository)

    # extract http/https links and make them real urls
    newtext = urlify_text(newtext, safe=False)

    try:
        from rhodecode import CONFIG
        conf = CONFIG

        # allow multiple issue servers to be used: every `issue_pat<sfx>`
        # key that also has matching server-link and prefix keys is valid
        valid_indices = [
            x.group(1)
            for x in map(lambda x: re.match(r'issue_pat(.*)', x), conf.keys())
            if x and 'issue_server_link%s' % x.group(1) in conf
            and 'issue_prefix%s' % x.group(1) in conf
        ]

        log.debug('found issue server suffixes `%s` during valuation of: %s'
                  % (','.join(valid_indices), newtext))

        for pattern_index in valid_indices:
            ISSUE_PATTERN = conf.get('issue_pat%s' % pattern_index)
            ISSUE_SERVER_LNK = conf.get('issue_server_link%s' % pattern_index)
            ISSUE_PREFIX = conf.get('issue_prefix%s' % pattern_index)

            log.debug('pattern suffix `%s` PAT:%s SERVER_LINK:%s PREFIX:%s'
                      % (pattern_index, ISSUE_PATTERN, ISSUE_SERVER_LNK,
                         ISSUE_PREFIX))

            URL_PAT = re.compile(r'%s' % ISSUE_PATTERN)

            def url_func(match_obj):
                # keep a leading space out of the generated anchor
                pref = ''
                if match_obj.group().startswith(' '):
                    pref = ' '
                issue_id = ''.join(match_obj.groups())
                tmpl = (
                    '%(pref)s<a class="%(cls)s" href="%(url)s">'
                    '%(issue-prefix)s%(id-repr)s'
                    '</a>'
                )
                url = ISSUE_SERVER_LNK.replace('{id}', issue_id)
                if repository:
                    url = url.replace('{repo}', repository)
                    repo_name = repository.split(URL_SEP)[-1]
                    url = url.replace('{repo_name}', repo_name)

                return tmpl % {
                    'pref': pref,
                    'cls': 'issue-tracker-link',
                    'url': url,
                    'id-repr': issue_id,
                    'issue-prefix': ISSUE_PREFIX,
                    'serv': ISSUE_SERVER_LNK,
                }

            newtext = URL_PAT.sub(url_func, newtext)
            log.debug('processed prefix:`%s` => %s' % (pattern_index, newtext))

        # if we actually did something above
        if link_:
            # wrap not links into final link => link_
            newtext = linkify_others(newtext, link_)
    except Exception:
        # issue linking is best-effort: log the failure but never break
        # rendering of the commit message (was a bare `except:` before)
        log.error(traceback.format_exc())
    return literal(newtext)
r1670 | ||||
r1868 | ||||
def rst(source):
    """Render ``source`` as reStructuredText wrapped in an rst-block div."""
    rendered = MarkupRenderer.rst(source)
    return literal('<div class="rst-block">%s</div>' % rendered)
r1789 | ||||
r1868 | ||||
def rst_w_mentions(source):
    """
    Wrapped rst renderer with @mention highlighting.

    :param source: rst text to render
    """
    rendered = MarkupRenderer.rst_with_mentions(source)
    return literal('<div class="rst-block">%s</div>' % rendered)
r2217 | ||||
def changeset_status(repo, revision):
    """Return the changeset status of ``revision`` within ``repo``."""
    model = ChangesetStatusModel()
    return model.get_status(repo, revision)
r2239 | ||||
def changeset_status_lbl(changeset_status):
    """Translate a changeset status code into its human readable label."""
    status_map = dict(ChangesetStatus.STATUSES)
    return status_map.get(changeset_status)
r2532 | ||||
def get_permission_name(key):
    """Resolve a permission ``key`` into its descriptive name."""
    perms_map = dict(Permission.PERMS)
    return perms_map.get(key)
r3070 | ||||
def journal_filter_help():
    """Return translated, multi-line help text for the journal filter box.

    Fixes the "repositroy" typo in the wildcard example shown to users.
    """
    return _(textwrap.dedent('''
            Example filter terms:
                repository:vcs
                username:marcin
                action:*push*
                ip:127.0.0.1
                date:20120101
                date:[20120101100000 TO 20120102]

            Generate wildcards using '*' character:
                "repository:vcs*" - search everything starting with 'vcs'
                "repository:*vcs*" - search for repository containing 'vcs'

            Optional AND / OR operators in queries
                "repository:vcs OR repository:test"
                "username:test AND repository:test*"
            '''))
r3110 | ||||
def not_mapped_error(repo_name):
    """Flash an error telling that ``repo_name`` is missing from the db."""
    msg = _('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name
    flash(msg, category='error')
r3125 | ||||
def ip_range(ip_addr):
    """Return a human readable ``start - end`` range for ``ip_addr``."""
    from rhodecode.model.db import UserIpMap
    start, end = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (start, end)