renderer: Clean up obsolete code...
johbo
r773:0b23acc0 default
@@ -1,306 +1,273 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Renderer for markup languages with ability to parse using rst or markdown
24 24 """
25 25
26 26 import re
27 27 import os
28 28 import logging
29 29 import itertools
30 30
31 31 from mako.lookup import TemplateLookup
32 32
33 33 from docutils.core import publish_parts
34 34 from docutils.parsers.rst import directives
35 35 import markdown
36 36
37 37 from rhodecode.lib.markdown_ext import (
38 38 UrlizeExtension, GithubFlavoredMarkdownExtension)
39 39 from rhodecode.lib.utils2 import safe_unicode, md5_safe, MENTIONS_REGEX
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43 # default renderer used to generate automated comments
44 44 DEFAULT_COMMENTS_RENDERER = 'rst'
45 45
46 46
47 47 class MarkupRenderer(object):
48 48 RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw']
49 49
50 50 MARKDOWN_PAT = re.compile(r'\.(md|mkdn?|mdown|markdown)$', re.IGNORECASE)
51 51 RST_PAT = re.compile(r'\.re?st$', re.IGNORECASE)
52 52 PLAIN_PAT = re.compile(r'^readme$', re.IGNORECASE)
53 53
54 # list of readme files to search in file tree and display in summary
55 # attached weights defines the search order lower is first
56 ALL_READMES = [
57 ('readme', 0), ('README', 0), ('Readme', 0),
58 ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1),
59 ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2),
60 ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2),
61 ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2),
62 ]
63 54 # extensions together with weights. Lower is first, which controls how
64 55 # extensions are attached to readme names.
65 56 PLAIN_EXTS = [
57 # prefer no extension
66 58 ('', 0), # special case that renders READMES names without extension
67 59 ('.text', 2), ('.TEXT', 2),
68 60 ('.txt', 3), ('.TXT', 3)
69 61 ]
70 62
71 63 RST_EXTS = [
72 64 ('.rst', 1), ('.rest', 1),
73 65 ('.RST', 2), ('.REST', 2)
74 66 ]
75 67
76 68 MARKDOWN_EXTS = [
77 69 ('.md', 1), ('.MD', 1),
78 70 ('.mkdn', 2), ('.MKDN', 2),
79 71 ('.mdown', 3), ('.MDOWN', 3),
80 72 ('.markdown', 4), ('.MARKDOWN', 4)
81 73 ]
82 74
83 ALL_EXTS = PLAIN_EXTS + MARKDOWN_EXTS + RST_EXTS
84
85 75 def _detect_renderer(self, source, filename=None):
86 76 """
87 77 runs detection of what renderer should be used for generating html
88 78 from a markup language
89 79
90 80 filename can also be explicitly a renderer name
91 81
92 82 :param source:
93 83 :param filename:
94 84 """
95 85
96 86 if MarkupRenderer.MARKDOWN_PAT.findall(filename):
97 87 detected_renderer = 'markdown'
98 88 elif MarkupRenderer.RST_PAT.findall(filename):
99 89 detected_renderer = 'rst'
100 90 elif MarkupRenderer.PLAIN_PAT.findall(filename):
101 91 detected_renderer = 'rst'
102 92 else:
103 93 detected_renderer = 'plain'
104 94
105 95 return getattr(MarkupRenderer, detected_renderer)
106 96
107 97 @classmethod
108 98 def renderer_from_filename(cls, filename, exclude):
109 99 """
110 100 Detect renderer markdown/rst from filename and optionally use exclude
111 101 list to remove some options. This is mostly used in helpers.
112 102 Returns None when no renderer can be detected.
113 103 """
114 104 def _filter(elements):
115 105 if isinstance(exclude, (list, tuple)):
116 106 return [x for x in elements if x not in exclude]
117 107 return elements
118 108
119 109 if filename.endswith(
120 110 tuple(_filter([x[0] for x in cls.MARKDOWN_EXTS if x[0]]))):
121 111 return 'markdown'
122 112 if filename.endswith(tuple(_filter([x[0] for x in cls.RST_EXTS if x[0]]))):
123 113 return 'rst'
124 114
125 115 return None
126 116
127 @classmethod
128 def generate_readmes(cls, all_readmes, extensions):
129 combined = itertools.product(all_readmes, extensions)
130 # sort by filename weight(y[0][1]) + extensions weight(y[1][1])
131 prioritized_readmes = sorted(combined, key=lambda y: y[0][1] + y[1][1])
132 # filename, extension
133 return [''.join([x[0][0], x[1][0]]) for x in prioritized_readmes]
134
135 def pick_readme_order(self, default_renderer):
136
137 if default_renderer == 'markdown':
138 markdown = self.generate_readmes(self.ALL_READMES, self.MARKDOWN_EXTS)
139 readme_order = markdown + self.generate_readmes(
140 self.ALL_READMES, self.RST_EXTS + self.PLAIN_EXTS)
141 elif default_renderer == 'rst':
142 markdown = self.generate_readmes(self.ALL_READMES, self.RST_EXTS)
143 readme_order = markdown + self.generate_readmes(
144 self.ALL_READMES, self.MARKDOWN_EXTS + self.PLAIN_EXTS)
145 else:
146 readme_order = self.generate_readmes(self.ALL_READMES, self.ALL_EXTS)
147
148 return readme_order
149
150 117 def render(self, source, filename=None):
151 118 """
152 119 Renders the given source using a detected renderer;
153 120 renderers are detected based on file extension or mimetype.
154 121 As a last resort it will just do simple html, replacing new lines with <br/>.
155 122
156 123 :param filename:
157 124 :param source:
158 125 """
159 126
160 127 renderer = self._detect_renderer(source, filename)
161 128 readme_data = renderer(source)
162 129 return readme_data
163 130
164 131 @classmethod
165 132 def _flavored_markdown(cls, text):
166 133 """
167 134 Github style flavored markdown
168 135
169 136 :param text:
170 137 """
171 138
172 139 # Extract pre blocks.
173 140 extractions = {}
174 141
175 142 def pre_extraction_callback(matchobj):
176 143 digest = md5_safe(matchobj.group(0))
177 144 extractions[digest] = matchobj.group(0)
178 145 return "{gfm-extraction-%s}" % digest
179 146 pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
180 147 text = re.sub(pattern, pre_extraction_callback, text)
181 148
182 149 # Prevent foo_bar_baz from ending up with an italic word in the middle.
183 150 def italic_callback(matchobj):
184 151 s = matchobj.group(0)
185 152 if list(s).count('_') >= 2:
186 153 return s.replace('_', r'\_')
187 154 return s
188 155 text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text)
189 156
190 157 # Insert pre block extractions.
191 158 def pre_insert_callback(matchobj):
192 159 return '\n\n' + extractions[matchobj.group(1)]
193 160 text = re.sub(r'\{gfm-extraction-([0-9a-f]{32})\}',
194 161 pre_insert_callback, text)
195 162
196 163 return text
197 164
198 165 @classmethod
199 166 def urlify_text(cls, text):
200 167 url_pat = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'
201 168 r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
202 169
203 170 def url_func(match_obj):
204 171 url_full = match_obj.groups()[0]
205 172 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
206 173
207 174 return url_pat.sub(url_func, text)
208 175
209 176 @classmethod
210 177 def plain(cls, source, universal_newline=True):
211 178 source = safe_unicode(source)
212 179 if universal_newline:
213 180 newline = '\n'
214 181 source = newline.join(source.splitlines())
215 182
216 183 source = cls.urlify_text(source)
217 184 return '<br />' + source.replace("\n", '<br />')
218 185
219 186 @classmethod
220 187 def markdown(cls, source, safe=True, flavored=True, mentions=False):
221 188 # It does not allow inserting inline HTML. In the presence of HTML tags, it
222 189 # will replace them instead with [HTML_REMOVED]. This is controlled by
223 190 # the safe_mode=True parameter of the markdown method.
224 191 extensions = ['codehilite', 'extra', 'def_list', 'sane_lists']
225 192 if flavored:
226 193 extensions.append(GithubFlavoredMarkdownExtension())
227 194
228 195 if mentions:
229 196 mention_pat = re.compile(MENTIONS_REGEX)
230 197
231 198 def wrapp(match_obj):
232 199 uname = match_obj.groups()[0]
233 200 return ' **@%(uname)s** ' % {'uname': uname}
234 201 mention_hl = mention_pat.sub(wrapp, source).strip()
235 202 # we extracted mentions; render the result with mentions disabled
236 203 return cls.markdown(mention_hl, safe=safe, flavored=flavored,
237 204 mentions=False)
238 205
239 206 source = safe_unicode(source)
240 207 try:
241 208 if flavored:
242 209 source = cls._flavored_markdown(source)
243 210 return markdown.markdown(
244 211 source, extensions, safe_mode=True, enable_attributes=False)
245 212 except Exception:
246 213 log.exception('Error when rendering Markdown')
247 214 if safe:
248 215 log.debug('Falling back to render in plain mode')
249 216 return cls.plain(source)
250 217 else:
251 218 raise
252 219
253 220 @classmethod
254 221 def rst(cls, source, safe=True, mentions=False):
255 222 if mentions:
256 223 mention_pat = re.compile(MENTIONS_REGEX)
257 224
258 225 def wrapp(match_obj):
259 226 uname = match_obj.groups()[0]
260 227 return ' **@%(uname)s** ' % {'uname': uname}
261 228 mention_hl = mention_pat.sub(wrapp, source).strip()
262 229 # we extracted mentions; render the result with mentions disabled
263 230 return cls.rst(mention_hl, safe=safe, mentions=False)
264 231
265 232 source = safe_unicode(source)
266 233 try:
267 234 docutils_settings = dict(
268 235 [(alias, None) for alias in
269 236 cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES])
270 237
271 238 docutils_settings.update({'input_encoding': 'unicode',
272 239 'report_level': 4})
273 240
274 241 for k, v in docutils_settings.iteritems():
275 242 directives.register_directive(k, v)
276 243
277 244 parts = publish_parts(source=source,
278 245 writer_name="html4css1",
279 246 settings_overrides=docutils_settings)
280 247
281 248 return parts['html_title'] + parts["fragment"]
282 249 except Exception:
283 250 log.exception('Error when rendering RST')
284 251 if safe:
285 252 log.debug('Falling back to render in plain mode')
286 253 return cls.plain(source)
287 254 else:
288 255 raise
289 256
290 257
291 258 class RstTemplateRenderer(object):
292 259
293 260 def __init__(self):
294 261 base = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
295 262 rst_template_dirs = [os.path.join(base, 'templates', 'rst_templates')]
296 263 self.template_store = TemplateLookup(
297 264 directories=rst_template_dirs,
298 265 input_encoding='utf-8',
299 266 imports=['from rhodecode.lib import helpers as h'])
300 267
301 268 def _get_template(self, templatename):
302 269 return self.template_store.get_template(templatename)
303 270
304 271 def render(self, template_name, **kwargs):
305 272 template = self._get_template(template_name)
306 273 return template.render(**kwargs)
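
For reference, a minimal standalone sketch of the readme-ordering logic this commit removes from MarkupRenderer: generate_readmes combined (name, weight) and (extension, weight) pairs and sorted them by the summed weight. The input lists below are an illustrative subset, not the full ALL_READMES table:

import itertools

def generate_readmes(all_readmes, extensions):
    # combine every (name, weight) with every (extension, weight) and
    # sort by the summed weight; lower weights come first
    combined = itertools.product(all_readmes, extensions)
    prioritized = sorted(combined, key=lambda y: y[0][1] + y[1][1])
    return [''.join([name, ext]) for (name, _), (ext, _) in prioritized]

names = [('readme', 0), ('doc/readme', 1)]   # illustrative subset of ALL_READMES
exts = [('.rst', 1), ('.rest', 1)]           # mirrors RST_EXTS
print(generate_readmes(names, exts))
# ['readme.rst', 'readme.rest', 'doc/readme.rst', 'doc/readme.rest']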
@@ -1,1070 +1,1053 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime
32 32
33 33 from sqlalchemy.sql import func
34 34 from sqlalchemy.sql.expression import true, or_
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 40 from rhodecode.lib.caching_query import FromCache
41 41 from rhodecode.lib.exceptions import AttachedForksError
42 42 from rhodecode.lib.hooks_base import log_delete_repository
43 43 from rhodecode.lib.markup_renderer import MarkupRenderer
44 44 from rhodecode.lib.utils import make_db_config
45 45 from rhodecode.lib.utils2 import (
46 46 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
47 47 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
48 48 from rhodecode.lib.vcs.backends import get_backend
49 49 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.db import (
52 52 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
53 53 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
54 54 RepoGroup, RepositoryField)
55 55 from rhodecode.model.scm import UserGroupList
56 56 from rhodecode.model.settings import VcsSettingsModel
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class RepoModel(BaseModel):
63 63
64 64 cls = Repository
65 65
66 66 def _get_user_group(self, users_group):
67 67 return self._get_instance(UserGroup, users_group,
68 68 callback=UserGroup.get_by_group_name)
69 69
70 70 def _get_repo_group(self, repo_group):
71 71 return self._get_instance(RepoGroup, repo_group,
72 72 callback=RepoGroup.get_by_group_name)
73 73
74 74 def _create_default_perms(self, repository, private):
75 75 # create default permission
76 76 default = 'repository.read'
77 77 def_user = User.get_default_user()
78 78 for p in def_user.user_perms:
79 79 if p.permission.permission_name.startswith('repository.'):
80 80 default = p.permission.permission_name
81 81 break
82 82
83 83 default_perm = 'repository.none' if private else default
84 84
85 85 repo_to_perm = UserRepoToPerm()
86 86 repo_to_perm.permission = Permission.get_by_key(default_perm)
87 87
88 88 repo_to_perm.repository = repository
89 89 repo_to_perm.user_id = def_user.user_id
90 90
91 91 return repo_to_perm
92 92
93 93 @LazyProperty
94 94 def repos_path(self):
95 95 """
96 96 Gets the repositories root path from database
97 97 """
98 98 settings_model = VcsSettingsModel(sa=self.sa)
99 99 return settings_model.get_repos_location()
100 100
101 101 def get(self, repo_id, cache=False):
102 102 repo = self.sa.query(Repository) \
103 103 .filter(Repository.repo_id == repo_id)
104 104
105 105 if cache:
106 106 repo = repo.options(FromCache("sql_cache_short",
107 107 "get_repo_%s" % repo_id))
108 108 return repo.scalar()
109 109
110 110 def get_repo(self, repository):
111 111 return self._get_repo(repository)
112 112
113 113 def get_by_repo_name(self, repo_name, cache=False):
114 114 repo = self.sa.query(Repository) \
115 115 .filter(Repository.repo_name == repo_name)
116 116
117 117 if cache:
118 118 repo = repo.options(FromCache("sql_cache_short",
119 119 "get_repo_%s" % repo_name))
120 120 return repo.scalar()
121 121
122 122 def _extract_id_from_repo_name(self, repo_name):
123 123 if repo_name.startswith('/'):
124 124 repo_name = repo_name.lstrip('/')
125 125 by_id_match = re.match(r'^_(\d{1,})', repo_name)
126 126 if by_id_match:
127 127 return by_id_match.groups()[0]
128 128
129 129 def get_repo_by_id(self, repo_name):
130 130 """
131 131 Extracts repo_name by id from special urls.
132 132 Example url is _11/repo_name
133 133
134 134 :param repo_name:
135 135 :return: repo object if matched else None
136 136 """
137 137 try:
138 138 _repo_id = self._extract_id_from_repo_name(repo_name)
139 139 if _repo_id:
140 140 return self.get(_repo_id)
141 141 except Exception:
142 142 log.exception('Failed to extract repo_name from URL')
143 143
144 144 return None
145 145
146 146 def get_url(self, repo):
147 147 return h.url('summary_home', repo_name=safe_str(repo.repo_name),
148 148 qualified=True)
149 149
150 150 def get_users(self, name_contains=None, limit=20, only_active=True):
151 151 # TODO: mikhail: move this method to the UserModel.
152 152 query = self.sa.query(User)
153 153 if only_active:
154 154 query = query.filter(User.active == true())
155 155
156 156 if name_contains:
157 157 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
158 158 query = query.filter(
159 159 or_(
160 160 User.name.ilike(ilike_expression),
161 161 User.lastname.ilike(ilike_expression),
162 162 User.username.ilike(ilike_expression)
163 163 )
164 164 )
165 165 query = query.limit(limit)
166 166 users = query.all()
167 167
168 168 _users = [
169 169 {
170 170 'id': user.user_id,
171 171 'first_name': user.name,
172 172 'last_name': user.lastname,
173 173 'username': user.username,
174 174 'icon_link': h.gravatar_url(user.email, 14),
175 175 'value_display': h.person(user.email),
176 176 'value': user.username,
177 177 'value_type': 'user',
178 178 'active': user.active,
179 179 }
180 180 for user in users
181 181 ]
182 182 return _users
183 183
184 184 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
185 185 # TODO: mikhail: move this method to the UserGroupModel.
186 186 query = self.sa.query(UserGroup)
187 187 if only_active:
188 188 query = query.filter(UserGroup.users_group_active == true())
189 189
190 190 if name_contains:
191 191 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
192 192 query = query.filter(
193 193 UserGroup.users_group_name.ilike(ilike_expression))\
194 194 .order_by(func.length(UserGroup.users_group_name))\
195 195 .order_by(UserGroup.users_group_name)
196 196
197 197 query = query.limit(limit)
198 198 user_groups = query.all()
199 199 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
200 200 user_groups = UserGroupList(user_groups, perm_set=perm_set)
201 201
202 202 _groups = [
203 203 {
204 204 'id': group.users_group_id,
205 205 # TODO: marcink figure out a way to generate the url for the
206 206 # icon
207 207 'icon_link': '',
208 208 'value_display': 'Group: %s (%d members)' % (
209 209 group.users_group_name, len(group.members),),
210 210 'value': group.users_group_name,
211 211 'value_type': 'user_group',
212 212 'active': group.users_group_active,
213 213 }
214 214 for group in user_groups
215 215 ]
216 216 return _groups
217 217
218 218 @classmethod
219 219 def update_repoinfo(cls, repositories=None):
220 220 if not repositories:
221 221 repositories = Repository.getAll()
222 222 for repo in repositories:
223 223 repo.update_commit_cache()
224 224
225 225 def get_repos_as_dict(self, repo_list=None, admin=False,
226 226 super_user_actions=False):
227 227
228 228 from rhodecode.lib.utils import PartialRenderer
229 229 _render = PartialRenderer('data_table/_dt_elements.html')
230 230 c = _render.c
231 231
232 232 def quick_menu(repo_name):
233 233 return _render('quick_menu', repo_name)
234 234
235 235 def repo_lnk(name, rtype, rstate, private, fork_of):
236 236 return _render('repo_name', name, rtype, rstate, private, fork_of,
237 237 short_name=not admin, admin=False)
238 238
239 239 def last_change(last_change):
240 240 return _render("last_change", last_change)
241 241
242 242 def rss_lnk(repo_name):
243 243 return _render("rss", repo_name)
244 244
245 245 def atom_lnk(repo_name):
246 246 return _render("atom", repo_name)
247 247
248 248 def last_rev(repo_name, cs_cache):
249 249 return _render('revision', repo_name, cs_cache.get('revision'),
250 250 cs_cache.get('raw_id'), cs_cache.get('author'),
251 251 cs_cache.get('message'))
252 252
253 253 def desc(desc):
254 254 if c.visual.stylify_metatags:
255 255 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
256 256 else:
257 257 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
258 258
259 259 def state(repo_state):
260 260 return _render("repo_state", repo_state)
261 261
262 262 def repo_actions(repo_name):
263 263 return _render('repo_actions', repo_name, super_user_actions)
264 264
265 265 def user_profile(username):
266 266 return _render('user_profile', username)
267 267
268 268 repos_data = []
269 269 for repo in repo_list:
270 270 cs_cache = repo.changeset_cache
271 271 row = {
272 272 "menu": quick_menu(repo.repo_name),
273 273
274 274 "name": repo_lnk(repo.repo_name, repo.repo_type,
275 275 repo.repo_state, repo.private, repo.fork),
276 276 "name_raw": repo.repo_name.lower(),
277 277
278 278 "last_change": last_change(repo.last_db_change),
279 279 "last_change_raw": datetime_to_time(repo.last_db_change),
280 280
281 281 "last_changeset": last_rev(repo.repo_name, cs_cache),
282 282 "last_changeset_raw": cs_cache.get('revision'),
283 283
284 284 "desc": desc(repo.description),
285 285 "owner": user_profile(repo.user.username),
286 286
287 287 "state": state(repo.repo_state),
288 288 "rss": rss_lnk(repo.repo_name),
289 289
290 290 "atom": atom_lnk(repo.repo_name),
291 291 }
292 292 if admin:
293 293 row.update({
294 294 "action": repo_actions(repo.repo_name),
295 295 })
296 296 repos_data.append(row)
297 297
298 298 return repos_data
299 299
300 300 def _get_defaults(self, repo_name):
301 301 """
302 302 Gets information about repository, and returns a dict for
303 303 usage in forms
304 304
305 305 :param repo_name:
306 306 """
307 307
308 308 repo_info = Repository.get_by_repo_name(repo_name)
309 309
310 310 if repo_info is None:
311 311 return None
312 312
313 313 defaults = repo_info.get_dict()
314 314 defaults['repo_name'] = repo_info.just_name
315 315
316 316 groups = repo_info.groups_with_parents
317 317 parent_group = groups[-1] if groups else None
318 318
319 319 # we use -1 as this is how we mark an empty group in HTML
320 320 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
321 321
322 322 keys_to_process = (
323 323 {'k': 'repo_type', 'strip': False},
324 324 {'k': 'repo_enable_downloads', 'strip': True},
325 325 {'k': 'repo_description', 'strip': True},
326 326 {'k': 'repo_enable_locking', 'strip': True},
327 327 {'k': 'repo_landing_rev', 'strip': True},
328 328 {'k': 'clone_uri', 'strip': False},
329 329 {'k': 'repo_private', 'strip': True},
330 330 {'k': 'repo_enable_statistics', 'strip': True}
331 331 )
332 332
333 333 for item in keys_to_process:
334 334 attr = item['k']
335 335 if item['strip']:
336 336 attr = remove_prefix(item['k'], 'repo_')
337 337
338 338 val = defaults[attr]
339 339 if item['k'] == 'repo_landing_rev':
340 340 val = ':'.join(defaults[attr])
341 341 defaults[item['k']] = val
342 342 if item['k'] == 'clone_uri':
343 343 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
344 344
345 345 # fill owner
346 346 if repo_info.user:
347 347 defaults.update({'user': repo_info.user.username})
348 348 else:
349 349 replacement_user = User.get_first_super_admin().username
350 350 defaults.update({'user': replacement_user})
351 351
352 352 # fill repository users
353 353 for p in repo_info.repo_to_perm:
354 354 defaults.update({'u_perm_%s' % p.user.user_id:
355 355 p.permission.permission_name})
356 356
357 357 # fill repository groups
358 358 for p in repo_info.users_group_to_perm:
359 359 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
360 360 p.permission.permission_name})
361 361
362 362 return defaults
363 363
364 364 def update(self, repo, **kwargs):
365 365 try:
366 366 cur_repo = self._get_repo(repo)
367 367 source_repo_name = cur_repo.repo_name
368 368 if 'user' in kwargs:
369 369 cur_repo.user = User.get_by_username(kwargs['user'])
370 370
371 371 if 'repo_group' in kwargs:
372 372 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
373 373 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
374 374
375 375 update_keys = [
376 376 (1, 'repo_enable_downloads'),
377 377 (1, 'repo_description'),
378 378 (1, 'repo_enable_locking'),
379 379 (1, 'repo_landing_rev'),
380 380 (1, 'repo_private'),
381 381 (1, 'repo_enable_statistics'),
382 382 (0, 'clone_uri'),
383 383 (0, 'fork_id')
384 384 ]
385 385 for strip, k in update_keys:
386 386 if k in kwargs:
387 387 val = kwargs[k]
388 388 if strip:
389 389 k = remove_prefix(k, 'repo_')
390 390 if k == 'clone_uri':
391 391 from rhodecode.model.validators import Missing
392 392 _change = kwargs.get('clone_uri_change')
393 393 if _change in [Missing, 'OLD']:
394 394 # we don't change the value, so use original one
395 395 val = cur_repo.clone_uri
396 396
397 397 setattr(cur_repo, k, val)
398 398
399 399 new_name = cur_repo.get_new_name(kwargs['repo_name'])
400 400 cur_repo.repo_name = new_name
401 401
402 402 # if private flag is set, reset default permission to NONE
403 403 if kwargs.get('repo_private'):
404 404 EMPTY_PERM = 'repository.none'
405 405 RepoModel().grant_user_permission(
406 406 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
407 407 )
408 408
409 409 # handle extra fields
410 410 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
411 411 kwargs):
412 412 k = RepositoryField.un_prefix_key(field)
413 413 ex_field = RepositoryField.get_by_key_name(
414 414 key=k, repo=cur_repo)
415 415 if ex_field:
416 416 ex_field.field_value = kwargs[field]
417 417 self.sa.add(ex_field)
418 418 self.sa.add(cur_repo)
419 419
420 420 if source_repo_name != new_name:
421 421 # rename repository
422 422 self._rename_filesystem_repo(
423 423 old=source_repo_name, new=new_name)
424 424
425 425 return cur_repo
426 426 except Exception:
427 427 log.error(traceback.format_exc())
428 428 raise
429 429
430 430 def _create_repo(self, repo_name, repo_type, description, owner,
431 431 private=False, clone_uri=None, repo_group=None,
432 432 landing_rev='rev:tip', fork_of=None,
433 433 copy_fork_permissions=False, enable_statistics=False,
434 434 enable_locking=False, enable_downloads=False,
435 435 copy_group_permissions=False,
436 436 state=Repository.STATE_PENDING):
437 437 """
438 438 Create a repository inside the database with PENDING state. This should
439 439 only be executed by create(), with the exception of importing existing
440 440 repos.
441 441 """
442 442 from rhodecode.model.scm import ScmModel
443 443
444 444 owner = self._get_user(owner)
445 445 fork_of = self._get_repo(fork_of)
446 446 repo_group = self._get_repo_group(safe_int(repo_group))
447 447
448 448 try:
449 449 repo_name = safe_unicode(repo_name)
450 450 description = safe_unicode(description)
451 451 # repo_name is just the name of the repository,
452 452 # while repo_name_full is a fully qualified name that combines
453 453 # the name with the path of its group
454 454 repo_name_full = repo_name
455 455 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
456 456
457 457 new_repo = Repository()
458 458 new_repo.repo_state = state
459 459 new_repo.enable_statistics = False
460 460 new_repo.repo_name = repo_name_full
461 461 new_repo.repo_type = repo_type
462 462 new_repo.user = owner
463 463 new_repo.group = repo_group
464 464 new_repo.description = description or repo_name
465 465 new_repo.private = private
466 466 new_repo.clone_uri = clone_uri
467 467 new_repo.landing_rev = landing_rev
468 468
469 469 new_repo.enable_statistics = enable_statistics
470 470 new_repo.enable_locking = enable_locking
471 471 new_repo.enable_downloads = enable_downloads
472 472
473 473 if repo_group:
474 474 new_repo.enable_locking = repo_group.enable_locking
475 475
476 476 if fork_of:
477 477 parent_repo = fork_of
478 478 new_repo.fork = parent_repo
479 479
480 480 events.trigger(events.RepoPreCreateEvent(new_repo))
481 481
482 482 self.sa.add(new_repo)
483 483
484 484 EMPTY_PERM = 'repository.none'
485 485 if fork_of and copy_fork_permissions:
486 486 repo = fork_of
487 487 user_perms = UserRepoToPerm.query() \
488 488 .filter(UserRepoToPerm.repository == repo).all()
489 489 group_perms = UserGroupRepoToPerm.query() \
490 490 .filter(UserGroupRepoToPerm.repository == repo).all()
491 491
492 492 for perm in user_perms:
493 493 UserRepoToPerm.create(
494 494 perm.user, new_repo, perm.permission)
495 495
496 496 for perm in group_perms:
497 497 UserGroupRepoToPerm.create(
498 498 perm.users_group, new_repo, perm.permission)
499 499 # in case we copy permissions and also set this repo to private
500 500 # override the default user permission to make it a private
501 501 # repo
502 502 if private:
503 503 RepoModel(self.sa).grant_user_permission(
504 504 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
505 505
506 506 elif repo_group and copy_group_permissions:
507 507 user_perms = UserRepoGroupToPerm.query() \
508 508 .filter(UserRepoGroupToPerm.group == repo_group).all()
509 509
510 510 group_perms = UserGroupRepoGroupToPerm.query() \
511 511 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
512 512
513 513 for perm in user_perms:
514 514 perm_name = perm.permission.permission_name.replace(
515 515 'group.', 'repository.')
516 516 perm_obj = Permission.get_by_key(perm_name)
517 517 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
518 518
519 519 for perm in group_perms:
520 520 perm_name = perm.permission.permission_name.replace(
521 521 'group.', 'repository.')
522 522 perm_obj = Permission.get_by_key(perm_name)
523 523 UserGroupRepoToPerm.create(
524 524 perm.users_group, new_repo, perm_obj)
525 525
526 526 if private:
527 527 RepoModel(self.sa).grant_user_permission(
528 528 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
529 529
530 530 else:
531 531 perm_obj = self._create_default_perms(new_repo, private)
532 532 self.sa.add(perm_obj)
533 533
534 534 # now automatically start following this repository as owner
535 535 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
536 536 owner.user_id)
537 537
538 538 # we need to flush here in order to check if the database won't
539 539 # throw any exceptions; filesystem dirs are created at the very end
540 540 self.sa.flush()
541 541 events.trigger(events.RepoCreateEvent(new_repo))
542 542 return new_repo
543 543
544 544 except Exception:
545 545 log.error(traceback.format_exc())
546 546 raise
547 547
548 548 def create(self, form_data, cur_user):
549 549 """
550 550 Create repository using celery tasks
551 551
552 552 :param form_data:
553 553 :param cur_user:
554 554 """
555 555 from rhodecode.lib.celerylib import tasks, run_task
556 556 return run_task(tasks.create_repo, form_data, cur_user)
557 557
558 558 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
559 559 perm_deletions=None, check_perms=True,
560 560 cur_user=None):
561 561 if not perm_additions:
562 562 perm_additions = []
563 563 if not perm_updates:
564 564 perm_updates = []
565 565 if not perm_deletions:
566 566 perm_deletions = []
567 567
568 568 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
569 569
570 570 # update permissions
571 571 for member_id, perm, member_type in perm_updates:
572 572 member_id = int(member_id)
573 573 if member_type == 'user':
574 574 # this updates also current one if found
575 575 self.grant_user_permission(
576 576 repo=repo, user=member_id, perm=perm)
577 577 else: # set for user group
578 578 # check if we have permissions to alter this usergroup
579 579 member_name = UserGroup.get(member_id).users_group_name
580 580 if not check_perms or HasUserGroupPermissionAny(
581 581 *req_perms)(member_name, user=cur_user):
582 582 self.grant_user_group_permission(
583 583 repo=repo, group_name=member_id, perm=perm)
584 584
585 585 # set new permissions
586 586 for member_id, perm, member_type in perm_additions:
587 587 member_id = int(member_id)
588 588 if member_type == 'user':
589 589 self.grant_user_permission(
590 590 repo=repo, user=member_id, perm=perm)
591 591 else: # set for user group
592 592 # check if we have permissions to alter this usergroup
593 593 member_name = UserGroup.get(member_id).users_group_name
594 594 if not check_perms or HasUserGroupPermissionAny(
595 595 *req_perms)(member_name, user=cur_user):
596 596 self.grant_user_group_permission(
597 597 repo=repo, group_name=member_id, perm=perm)
598 598
599 599 # delete permissions
600 600 for member_id, perm, member_type in perm_deletions:
601 601 member_id = int(member_id)
602 602 if member_type == 'user':
603 603 self.revoke_user_permission(repo=repo, user=member_id)
604 604 else: # set for user group
605 605 # check if we have permissions to alter this usergroup
606 606 member_name = UserGroup.get(member_id).users_group_name
607 607 if not check_perms or HasUserGroupPermissionAny(
608 608 *req_perms)(member_name, user=cur_user):
609 609 self.revoke_user_group_permission(
610 610 repo=repo, group_name=member_id)
611 611
612 612 def create_fork(self, form_data, cur_user):
613 613 """
614 614 Simple wrapper into executing celery task for fork creation
615 615
616 616 :param form_data:
617 617 :param cur_user:
618 618 """
619 619 from rhodecode.lib.celerylib import tasks, run_task
620 620 return run_task(tasks.create_repo_fork, form_data, cur_user)
621 621
622 622 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
623 623 """
624 624 Delete the given repository; the forks parameter defines what to do with
625 625 attached forks. Throws AttachedForksError if the deleted repo has attached
626 626 forks.
627 627
628 628 :param repo:
629 629 :param forks: str 'delete' or 'detach'
630 630 :param fs_remove: remove(archive) repo from filesystem
631 631 """
632 632 if not cur_user:
633 633 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
634 634 repo = self._get_repo(repo)
635 635 if repo:
636 636 if forks == 'detach':
637 637 for r in repo.forks:
638 638 r.fork = None
639 639 self.sa.add(r)
640 640 elif forks == 'delete':
641 641 for r in repo.forks:
642 642 self.delete(r, forks='delete')
643 643 elif [f for f in repo.forks]:
644 644 raise AttachedForksError()
645 645
646 646 old_repo_dict = repo.get_dict()
647 647 events.trigger(events.RepoPreDeleteEvent(repo))
648 648 try:
649 649 self.sa.delete(repo)
650 650 if fs_remove:
651 651 self._delete_filesystem_repo(repo)
652 652 else:
653 653 log.debug('skipping removal from filesystem')
654 654 old_repo_dict.update({
655 655 'deleted_by': cur_user,
656 656 'deleted_on': time.time(),
657 657 })
658 658 log_delete_repository(**old_repo_dict)
659 659 events.trigger(events.RepoDeleteEvent(repo))
660 660 except Exception:
661 661 log.error(traceback.format_exc())
662 662 raise
663 663
664 664 def grant_user_permission(self, repo, user, perm):
665 665 """
666 666 Grant permission for user on given repository, or update existing one
667 667 if found
668 668
669 669 :param repo: Instance of Repository, repository_id, or repository name
670 670 :param user: Instance of User, user_id or username
671 671 :param perm: Instance of Permission, or permission_name
672 672 """
673 673 user = self._get_user(user)
674 674 repo = self._get_repo(repo)
675 675 permission = self._get_perm(perm)
676 676
677 677 # check if we have that permission already
678 678 obj = self.sa.query(UserRepoToPerm) \
679 679 .filter(UserRepoToPerm.user == user) \
680 680 .filter(UserRepoToPerm.repository == repo) \
681 681 .scalar()
682 682 if obj is None:
683 683 # create new !
684 684 obj = UserRepoToPerm()
685 685 obj.repository = repo
686 686 obj.user = user
687 687 obj.permission = permission
688 688 self.sa.add(obj)
689 689 log.debug('Granted perm %s to %s on %s', perm, user, repo)
690 690 action_logger_generic(
691 691 'granted permission: {} to user: {} on repo: {}'.format(
692 692 perm, user, repo), namespace='security.repo')
693 693 return obj
694 694
695 695 def revoke_user_permission(self, repo, user):
696 696 """
697 697 Revoke permission for user on given repository
698 698
699 699 :param repo: Instance of Repository, repository_id, or repository name
700 700 :param user: Instance of User, user_id or username
701 701 """
702 702
703 703 user = self._get_user(user)
704 704 repo = self._get_repo(repo)
705 705
706 706 obj = self.sa.query(UserRepoToPerm) \
707 707 .filter(UserRepoToPerm.repository == repo) \
708 708 .filter(UserRepoToPerm.user == user) \
709 709 .scalar()
710 710 if obj:
711 711 self.sa.delete(obj)
712 712 log.debug('Revoked perm on %s on %s', repo, user)
713 713 action_logger_generic(
714 714 'revoked permission from user: {} on repo: {}'.format(
715 715 user, repo), namespace='security.repo')
716 716
717 717 def grant_user_group_permission(self, repo, group_name, perm):
718 718 """
719 719 Grant permission for user group on given repository, or update
720 720 existing one if found
721 721
722 722 :param repo: Instance of Repository, repository_id, or repository name
723 723 :param group_name: Instance of UserGroup, users_group_id,
724 724 or user group name
725 725 :param perm: Instance of Permission, or permission_name
726 726 """
727 727 repo = self._get_repo(repo)
728 728 group_name = self._get_user_group(group_name)
729 729 permission = self._get_perm(perm)
730 730
731 731 # check if we have that permission already
732 732 obj = self.sa.query(UserGroupRepoToPerm) \
733 733 .filter(UserGroupRepoToPerm.users_group == group_name) \
734 734 .filter(UserGroupRepoToPerm.repository == repo) \
735 735 .scalar()
736 736
737 737 if obj is None:
738 738 # create new
739 739 obj = UserGroupRepoToPerm()
740 740
741 741 obj.repository = repo
742 742 obj.users_group = group_name
743 743 obj.permission = permission
744 744 self.sa.add(obj)
745 745 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
746 746 action_logger_generic(
747 747 'granted permission: {} to usergroup: {} on repo: {}'.format(
748 748 perm, group_name, repo), namespace='security.repo')
749 749
750 750 return obj
751 751
752 752 def revoke_user_group_permission(self, repo, group_name):
753 753 """
754 754 Revoke permission for user group on given repository
755 755
756 756 :param repo: Instance of Repository, repository_id, or repository name
757 757 :param group_name: Instance of UserGroup, users_group_id,
758 758 or user group name
759 759 """
760 760 repo = self._get_repo(repo)
761 761 group_name = self._get_user_group(group_name)
762 762
763 763 obj = self.sa.query(UserGroupRepoToPerm) \
764 764 .filter(UserGroupRepoToPerm.repository == repo) \
765 765 .filter(UserGroupRepoToPerm.users_group == group_name) \
766 766 .scalar()
767 767 if obj:
768 768 self.sa.delete(obj)
769 769 log.debug('Revoked perm to %s on %s', repo, group_name)
770 770 action_logger_generic(
771 771 'revoked permission from usergroup: {} on repo: {}'.format(
772 772 group_name, repo), namespace='security.repo')
773 773
774 774 def delete_stats(self, repo_name):
775 775 """
776 776 removes stats for given repo
777 777
778 778 :param repo_name:
779 779 """
780 780 repo = self._get_repo(repo_name)
781 781 try:
782 782 obj = self.sa.query(Statistics) \
783 783 .filter(Statistics.repository == repo).scalar()
784 784 if obj:
785 785 self.sa.delete(obj)
786 786 except Exception:
787 787 log.error(traceback.format_exc())
788 788 raise
789 789
790 790 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
791 791 field_type='str', field_desc=''):
792 792
793 793 repo = self._get_repo(repo_name)
794 794
795 795 new_field = RepositoryField()
796 796 new_field.repository = repo
797 797 new_field.field_key = field_key
798 798 new_field.field_type = field_type # python type
799 799 new_field.field_value = field_value
800 800 new_field.field_desc = field_desc
801 801 new_field.field_label = field_label
802 802 self.sa.add(new_field)
803 803 return new_field
804 804
805 805 def delete_repo_field(self, repo_name, field_key):
806 806 repo = self._get_repo(repo_name)
807 807 field = RepositoryField.get_by_key_name(field_key, repo)
808 808 if field:
809 809 self.sa.delete(field)
810 810
811 811 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
812 812 clone_uri=None, repo_store_location=None,
813 813 use_global_config=False):
814 814 """
815 815 makes a repository on the filesystem. It is group aware, meaning it will
816 816 create the repository within a group and alter the paths according to
817 817 the group location
818 818
819 819 :param repo_name:
820 820 :param alias:
821 821 :param parent:
822 822 :param clone_uri:
823 823 :param repo_store_location:
824 824 """
825 825 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
826 826 from rhodecode.model.scm import ScmModel
827 827
828 828 if Repository.NAME_SEP in repo_name:
829 829 raise ValueError(
830 830 'repo_name must not contain groups got `%s`' % repo_name)
831 831
832 832 if isinstance(repo_group, RepoGroup):
833 833 new_parent_path = os.sep.join(repo_group.full_path_splitted)
834 834 else:
835 835 new_parent_path = repo_group or ''
836 836
837 837 if repo_store_location:
838 838 _paths = [repo_store_location]
839 839 else:
840 840 _paths = [self.repos_path, new_parent_path, repo_name]
841 841 # we need to make it str for mercurial
842 842 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
843 843
844 844 # check if this path is not a repository
845 845 if is_valid_repo(repo_path, self.repos_path):
846 846 raise Exception('This path %s is a valid repository' % repo_path)
847 847
848 848 # check if this path is a group
849 849 if is_valid_repo_group(repo_path, self.repos_path):
850 850 raise Exception('This path %s is a valid group' % repo_path)
851 851
852 852 log.info('creating repo %s in %s from url: `%s`',
853 853 repo_name, safe_unicode(repo_path),
854 854 obfuscate_url_pw(clone_uri))
855 855
856 856 backend = get_backend(repo_type)
857 857
858 858 config_repo = None if use_global_config else repo_name
859 859 if config_repo and new_parent_path:
860 860 config_repo = Repository.NAME_SEP.join(
861 861 (new_parent_path, config_repo))
862 862 config = make_db_config(clear_session=False, repo=config_repo)
863 863 config.set('extensions', 'largefiles', '')
864 864
865 865 # patch and reset hooks section of UI config to not run any
866 866 # hooks on creating remote repo
867 867 config.clear_section('hooks')
868 868
869 869 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
870 870 if repo_type == 'git':
871 871 repo = backend(
872 872 repo_path, config=config, create=True, src_url=clone_uri,
873 873 bare=True)
874 874 else:
875 875 repo = backend(
876 876 repo_path, config=config, create=True, src_url=clone_uri)
877 877
878 878 ScmModel().install_hooks(repo, repo_type=repo_type)
879 879
880 880 log.debug('Created repo %s with %s backend',
881 881 safe_unicode(repo_name), safe_unicode(repo_type))
882 882 return repo
883 883
884 884 def _rename_filesystem_repo(self, old, new):
885 885 """
886 886 renames repository on filesystem
887 887
888 888 :param old: old name
889 889 :param new: new name
890 890 """
891 891 log.info('renaming repo from %s to %s', old, new)
892 892
893 893 old_path = os.path.join(self.repos_path, old)
894 894 new_path = os.path.join(self.repos_path, new)
895 895 if os.path.isdir(new_path):
896 896 raise Exception(
897 897 'Was trying to rename to already existing dir %s' % new_path
898 898 )
899 899 shutil.move(old_path, new_path)
900 900
901 901 def _delete_filesystem_repo(self, repo):
902 902 """
903 903 removes the repo from the filesystem. The removal is actually made by
904 904 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs,
905 905 so this repository is no longer valid for rhodecode; it can be undeleted
906 906 later on by reverting the renames on this repository
907 907
908 908 :param repo: repo object
909 909 """
910 910 rm_path = os.path.join(self.repos_path, repo.repo_name)
911 911 repo_group = repo.group
912 912 log.info("Removing repository %s", rm_path)
913 913 # disable hg/git internals so that it doesn't get detected as a repo
914 914 alias = repo.repo_type
915 915
916 916 config = make_db_config(clear_session=False)
917 917 config.set('extensions', 'largefiles', '')
918 918 bare = getattr(repo.scm_instance(config=config), 'bare', False)
919 919
920 920 # skip this for bare git repos
921 921 if not bare:
922 922 # disable VCS repo
923 923 vcs_path = os.path.join(rm_path, '.%s' % alias)
924 924 if os.path.exists(vcs_path):
925 925 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
926 926
927 927 _now = datetime.now()
928 928 _ms = str(_now.microsecond).rjust(6, '0')
929 929 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
930 930 repo.just_name)
931 931 if repo_group:
932 932 # if repository is in group, prefix the removal path with the group
933 933 args = repo_group.full_path_splitted + [_d]
934 934 _d = os.path.join(*args)
935 935
936 936 if os.path.isdir(rm_path):
937 937 shutil.move(rm_path, os.path.join(self.repos_path, _d))
938 938
939 939
940 940 class ReadmeFinder:
941 941 """
942 942 Utility which knows how to find a readme for a specific commit.
943 943
944 944 The main idea is that this is a configurable algorithm. When creating an
945 945 instance you can define parameters, currently only the `default_renderer`.
946 946 Based on this configuration the method :meth:`search` behaves slightly
947 947 different.
948 948 """
949 949
950 950 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
951 951 path_re = re.compile(r'^docs?', re.IGNORECASE)
952 952
953 953 default_priorities = {
954 954 None: 0,
955 955 '.text': 2,
956 956 '.txt': 3,
957 957 '.rst': 1,
958 958 '.rest': 2,
959 959 '.md': 1,
960 960 '.mkdn': 2,
961 961 '.mdown': 3,
962 962 '.markdown': 4,
963 963 }
964 964
965 965 path_priority = {
966 966 'doc': 0,
967 967 'docs': 1,
968 968 }
969 969
970 970 FALLBACK_PRIORITY = 99
971 971
972 972 RENDERER_TO_EXTENSION = {
973 973 'rst': ['.rst', '.rest'],
974 974 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
975 975 }
976 976
977 977 def __init__(self, default_renderer=None):
978 978 self._default_renderer = default_renderer
979 979 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
980 980 default_renderer, [])
981 981
982 982 def search(self, commit, path='/'):
983 983 """
984 984 Find a readme in the given `commit`.
985 985 """
986 986 nodes = commit.get_nodes(path)
987 987 matches = self._match_readmes(nodes)
988 988 matches = self._sort_according_to_priority(matches)
989 989 if matches:
990 990 return matches[0].path
991 991
992 992 paths = self._match_paths(nodes)
993 993 paths = self._sort_paths_according_to_priority(paths)
994 994 for path in paths:
995 995 match = self.search(commit, path=path)
996 996 if match:
997 997 return match
998 998
999 999 return None
1000 1000
1001 1001 def _match_readmes(self, nodes):
1002 1002 for node in nodes:
1003 1003 if not node.is_file():
1004 1004 continue
1005 1005 path = node.path.rsplit('/', 1)[-1]
1006 1006 match = self.readme_re.match(path)
1007 1007 if match:
1008 1008 extension = match.group(1)
1009 1009 yield ReadmeMatch(node, match, self._priority(extension))
1010 1010
1011 1011 def _match_paths(self, nodes):
1012 1012 for node in nodes:
1013 1013 if not node.is_dir():
1014 1014 continue
1015 1015 match = self.path_re.match(node.path)
1016 1016 if match:
1017 1017 yield node.path
1018 1018
1019 1019 def _priority(self, extension):
1020 1020 renderer_priority = (
1021 1021 0 if extension in self._renderer_extensions else 1)
1022 1022 extension_priority = self.default_priorities.get(
1023 1023 extension, self.FALLBACK_PRIORITY)
1024 1024 return (renderer_priority, extension_priority)
1025 1025
1026 1026 def _sort_according_to_priority(self, matches):
1027 1027
1028 1028 def priority_and_path(match):
1029 1029 return (match.priority, match.path)
1030 1030
1031 1031 return sorted(matches, key=priority_and_path)
1032 1032
1033 1033 def _sort_paths_according_to_priority(self, paths):
1034 1034
1035 1035 def priority_and_path(path):
1036 1036 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1037 1037
1038 1038 return sorted(paths, key=priority_and_path)
1039 1039
1040 def search_old(self, commit):
1041 """
1042 Try to find a readme in the given `commit`.
1043 """
1044 renderer = MarkupRenderer()
1045 for f in renderer.pick_readme_order(self._default_renderer):
1046 log.debug("Trying README %s", f)
1047 try:
1048 node = commit.get_node(f)
1049 except NodeDoesNotExistError:
1050 continue
1051
1052 if not node.is_file():
1053 continue
1054
1055 return f
1056
1057 1040
1058 1041 class ReadmeMatch:
1059 1042
1060 1043 def __init__(self, node, match, priority):
1061 1044 self._node = node
1062 1045 self._match = match
1063 1046 self.priority = priority
1064 1047
1065 1048 @property
1066 1049 def path(self):
1067 1050 return self._node.path
1068 1051
1069 1052 def __repr__(self):
1070 1053 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
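
A minimal standalone sketch of how the new ReadmeFinder ranks candidates: each file name gets a (renderer_priority, extension_priority) tuple, so extensions matching the configured default renderer win first, then the per-extension weight decides. The priority tables below are copied from the class above; the candidate file names are illustrative, and default_renderer='rst' is assumed:

import re

readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
default_priorities = {None: 0, '.text': 2, '.txt': 3, '.rst': 1, '.rest': 2,
                      '.md': 1, '.mkdn': 2, '.mdown': 3, '.markdown': 4}
renderer_extensions = ['.rst', '.rest']  # RENDERER_TO_EXTENSION['rst']
FALLBACK_PRIORITY = 99

def priority(filename):
    # mirrors ReadmeFinder._priority: prefer extensions of the default
    # renderer, then fall back to the per-extension weight
    extension = readme_re.match(filename).group(1)  # None for plain 'readme'
    renderer_priority = 0 if extension in renderer_extensions else 1
    extension_priority = default_priorities.get(extension, FALLBACK_PRIORITY)
    return (renderer_priority, extension_priority)

candidates = ['README.md', 'readme.rst', 'readme.txt', 'readme']
print(sorted(candidates, key=priority))
# ['readme.rst', 'readme', 'README.md', 'readme.txt']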
@@ -1,213 +1,179 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.markup_renderer import MarkupRenderer, RstTemplateRenderer
24 24
25 25
26 26 @pytest.mark.parametrize(
27 27 "filename, expected_renderer",
28 28 [
29 29 ('readme.md', 'markdown'),
30 30 ('readme.Md', 'markdown'),
31 31 ('readme.MdoWn', 'markdown'),
32 32 ('readme.rst', 'rst'),
33 33 ('readme.Rst', 'rst'),
34 34 ('readme.rest', 'rst'),
35 35 ('readme.rest', 'rst'),
36 36 ('readme', 'rst'),
37 37 ('README', 'rst'),
38 38
39 39 ('markdown.xml', 'plain'),
40 40 ('rest.xml', 'plain'),
41 41 ('readme.xml', 'plain'),
42 42
43 43 ('readme.mdx', 'plain'),
44 44 ('readme.rstx', 'plain'),
45 45 ('readmex', 'plain'),
46 46 ])
47 47 def test_detect_renderer(filename, expected_renderer):
48 48 detected_renderer = MarkupRenderer()._detect_renderer(
49 49 '', filename=filename).__name__
50 50 assert expected_renderer == detected_renderer
51 51
52 52
53 53 def test_markdown_xss_link():
54 54 xss_md = "[link](javascript:alert('XSS: pwned!'))"
55 55 rendered_html = MarkupRenderer.markdown(xss_md)
56 56 assert 'href="javascript:alert(\'XSS: pwned!\')"' not in rendered_html
57 57
58 58
59 59 def test_markdown_xss_inline_html():
60 60 xss_md = '\n'.join([
61 61 '> <a name="n"',
62 62 '> href="javascript:alert(\'XSS: pwned!\')">link</a>'])
63 63 rendered_html = MarkupRenderer.markdown(xss_md)
64 64 assert 'href="javascript:alert(\'XSS: pwned!\')">' not in rendered_html
65 65
66 66
67 67 def test_markdown_inline_html():
68 68 xss_md = '\n'.join(['> <a name="n"',
69 69 '> href="https://rhodecode.com">link</a>'])
70 70 rendered_html = MarkupRenderer.markdown(xss_md)
71 71 assert '[HTML_REMOVED]link[HTML_REMOVED]' in rendered_html
72 72
73 73
74 74 def test_rst_xss_link():
75 75 xss_rst = "`Link<javascript:alert('XSS: pwned!')>`_"
76 76 rendered_html = MarkupRenderer.rst(xss_rst)
77 77 assert "href=javascript:alert('XSS: pwned!')" not in rendered_html
78 78
79 79
80 80 @pytest.mark.xfail(reason='Bug in docutils. Waiting answer from the author')
81 81 def test_rst_xss_inline_html():
82 82 xss_rst = '<a href="javascript:alert(\'XSS: pwned!\')">link</a>'
83 83 rendered_html = MarkupRenderer.rst(xss_rst)
84 84 assert 'href="javascript:alert(' not in rendered_html
85 85
86 86
87 87 def test_rst_xss_raw_directive():
88 88 xss_rst = '\n'.join([
89 89 '.. raw:: html',
90 90 '',
91 91 ' <a href="javascript:alert(\'XSS: pwned!\')">link</a>'])
92 92 rendered_html = MarkupRenderer.rst(xss_rst)
93 93 assert 'href="javascript:alert(' not in rendered_html
94 94
95 95
96 96 def test_render_rst_template_without_files():
97 97 expected = u'''\
98 98 Auto status change to |under_review|
99 99
100 100 .. role:: added
101 101 .. role:: removed
102 102 .. parsed-literal::
103 103
104 104 Changed commits:
105 105 * :added:`2 added`
106 106 * :removed:`3 removed`
107 107
108 108 No file changes found
109 109
110 110 .. |under_review| replace:: *"NEW STATUS"*'''
111 111
112 112 params = {
113 113 'under_review_label': 'NEW STATUS',
114 114 'added_commits': ['a', 'b'],
115 115 'removed_commits': ['a', 'b', 'c'],
116 116 'changed_files': [],
117 117 'added_files': [],
118 118 'modified_files': [],
119 119 'removed_files': [],
120 120 }
121 121 renderer = RstTemplateRenderer()
122 122 rendered = renderer.render('pull_request_update.mako', **params)
123 123 assert expected == rendered
124 124
125 125
126 126 def test_render_rst_template_with_files():
127 127 expected = u'''\
128 128 Auto status change to |under_review|
129 129
130 130 .. role:: added
131 131 .. role:: removed
132 132 .. parsed-literal::
133 133
134 134 Changed commits:
135 135 * :added:`1 added`
136 136 * :removed:`3 removed`
137 137
138 138 Changed files:
139 139 * `A /path/a.py <#a_c--68ed34923b68>`_
140 140 * `A /path/b.js <#a_c--64f90608b607>`_
141 141 * `M /path/d.js <#a_c--85842bf30c6e>`_
142 142 * `M /path/Δ™.py <#a_c--d713adf009cd>`_
143 143 * R /path/ΕΊ.py
144 144
145 145 .. |under_review| replace:: *"NEW STATUS"*'''
146 146
147 147 added = ['/path/a.py', '/path/b.js']
148 148 modified = ['/path/d.js', u'/path/Δ™.py']
149 149 removed = [u'/path/ΕΊ.py']
150 150
151 151 params = {
152 152 'under_review_label': 'NEW STATUS',
153 153 'added_commits': ['a'],
154 154 'removed_commits': ['a', 'b', 'c'],
155 155 'changed_files': added + modified + removed,
156 156 'added_files': added,
157 157 'modified_files': modified,
158 158 'removed_files': removed,
159 159 }
160 160 renderer = RstTemplateRenderer()
161 161 rendered = renderer.render('pull_request_update.mako', **params)
162 162
163 163 assert expected == rendered
164 164
165 165
166 166 def test_render_rst_auto_status_template():
167 167 expected = u'''\
168 168 Auto status change to |new_status|
169 169
170 170 .. |new_status| replace:: *"NEW STATUS"*'''
171 171
172 172 params = {
173 173 'new_status_label': 'NEW STATUS',
174 174 'pull_request': None,
175 175 'commit_id': None,
176 176 }
177 177 renderer = RstTemplateRenderer()
178 178 rendered = renderer.render('auto_status_change.mako', **params)
179 179 assert expected == rendered
180
181
182 @pytest.mark.parametrize(
183 "readmes, exts, order",
184 [
185 ([], [], []),
186
187 ([('readme1', 0), ('text1', 1)], [('.ext', 0), ('.txt', 1)],
188 ['readme1.ext', 'readme1.txt', 'text1.ext', 'text1.txt']),
189
190 ([('readme2', 0), ('text2', 1)], [('.ext', 2), ('.txt', 1)],
191 ['readme2.txt', 'readme2.ext', 'text2.txt', 'text2.ext']),
192
193 ([('readme3', 0), ('text3', 1)], [('.XXX', 1)],
194 ['readme3.XXX', 'text3.XXX']),
195 ])
196 def test_generate_readmes(readmes, exts, order):
197 assert order == MarkupRenderer.generate_readmes(readmes, exts)
198
199
200 @pytest.mark.parametrize(
201 "renderer, expected_order",
202 [
203 ('plain', ['readme', 'README', 'Readme']),
204 ('text', ['readme', 'README', 'Readme']),
205 ('markdown', MarkupRenderer.generate_readmes(
206 MarkupRenderer.ALL_READMES, MarkupRenderer.MARKDOWN_EXTS)),
207 ('rst', MarkupRenderer.generate_readmes(
208 MarkupRenderer.ALL_READMES, MarkupRenderer.RST_EXTS)),
209 ])
210 def test_order_of_readme_generation(renderer, expected_order):
211 mkd_renderer = MarkupRenderer()
212 assert expected_order == mkd_renderer.pick_readme_order(
213 renderer)[:len(expected_order)]
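
For orientation, a short usage sketch of the two renderer entry points these tests exercise (this assumes a working RhodeCode environment where the module imports; the input strings are illustrative):

from rhodecode.lib.markup_renderer import MarkupRenderer, RstTemplateRenderer

# safe markdown rendering: inline HTML ends up as [HTML_REMOVED]
html = MarkupRenderer.markdown('# Title\n\n<a href="https://rhodecode.com">link</a>')

# render one of the bundled rst_templates with keyword parameters, as the tests do
renderer = RstTemplateRenderer()
text = renderer.render(
    'auto_status_change.mako',
    new_status_label='NEW STATUS', pull_request=None, commit_id=None)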