Show More
@@ -1,229 +1,229 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # |
|
3 | 3 | # Kallithea documentation build configuration file, created by |
|
4 | 4 | # sphinx-quickstart on Sun Oct 10 16:46:37 2010. |
|
5 | 5 | # |
|
6 | 6 | # This file is execfile()d with the current directory set to its containing dir. |
|
7 | 7 | # |
|
8 | 8 | # Note that not all possible configuration values are present in this |
|
9 | 9 | # autogenerated file. |
|
10 | 10 | # |
|
11 | 11 | # All configuration values have a default; values that are commented out |
|
12 | 12 | # serve to show the default. |
|
13 | 13 | |
|
14 | 14 | import os |
|
15 | 15 | import sys |
|
16 | 16 | |
|
17 | from kallithea import __version__ | |
|
17 | import kallithea | |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | # If extensions (or modules to document with autodoc) are in another directory, |
|
21 | 21 | # add these directories to sys.path here. If the directory is relative to the |
|
22 | 22 | # documentation root, use os.path.abspath to make it absolute, like shown here. |
|
23 | 23 | sys.path.insert(0, os.path.abspath('..')) |
|
24 | 24 | |
|
25 | 25 | # -- General configuration ----------------------------------------------------- |
|
26 | 26 | |
|
27 | 27 | # If your documentation needs a minimal Sphinx version, state it here. |
|
28 | 28 | #needs_sphinx = '1.0' |
|
29 | 29 | |
|
30 | 30 | # Add any Sphinx extension module names here, as strings. They can be extensions |
|
31 | 31 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. |
|
32 | 32 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', |
|
33 | 33 | 'sphinx.ext.intersphinx', 'sphinx.ext.todo', |
|
34 | 34 | 'sphinx.ext.viewcode'] |
|
35 | 35 | |
|
36 | 36 | # Add any paths that contain templates here, relative to this directory. |
|
37 | 37 | templates_path = ['_templates'] |
|
38 | 38 | |
|
39 | 39 | # The suffix of source filenames. |
|
40 | 40 | source_suffix = '.rst' |
|
41 | 41 | |
|
42 | 42 | # The encoding of source files. |
|
43 | 43 | #source_encoding = 'utf-8-sig' |
|
44 | 44 | |
|
45 | 45 | # The master toctree document. |
|
46 | 46 | master_doc = 'index' |
|
47 | 47 | |
|
48 | 48 | # General information about the project. |
|
49 | 49 | project = 'Kallithea' |
|
50 | 50 | copyright = '2010-2020 by various authors, licensed as GPLv3.' |
|
51 | 51 | |
|
52 | 52 | # The version info for the project you're documenting, acts as replacement for |
|
53 | 53 | # |version| and |release|, also used in various other places throughout the |
|
54 | 54 | # built documents. |
|
55 | 55 | # |
|
56 | 56 | # The short X.Y version. |
|
57 | 57 | root = os.path.dirname(os.path.dirname(__file__)) |
|
58 | 58 | sys.path.append(root) |
|
59 | version = __version__ | |
|
59 | version = kallithea.__version__ | |
|
60 | 60 | # The full version, including alpha/beta/rc tags. |
|
61 | release = __version__ | |
|
61 | release = kallithea.__version__ | |
|
62 | 62 | |
|
63 | 63 | # The language for content autogenerated by Sphinx. Refer to documentation |
|
64 | 64 | # for a list of supported languages. |
|
65 | 65 | #language = None |
|
66 | 66 | |
|
67 | 67 | # There are two options for replacing |today|: either, you set today to some |
|
68 | 68 | # non-false value, then it is used: |
|
69 | 69 | #today = '' |
|
70 | 70 | # Else, today_fmt is used as the format for a strftime call. |
|
71 | 71 | #today_fmt = '%B %d, %Y' |
|
72 | 72 | |
|
73 | 73 | # List of patterns, relative to source directory, that match files and |
|
74 | 74 | # directories to ignore when looking for source files. |
|
75 | 75 | exclude_patterns = ['_build'] |
|
76 | 76 | |
|
77 | 77 | # The reST default role (used for this markup: `text`) to use for all documents. |
|
78 | 78 | #default_role = None |
|
79 | 79 | |
|
80 | 80 | # If true, '()' will be appended to :func: etc. cross-reference text. |
|
81 | 81 | #add_function_parentheses = True |
|
82 | 82 | |
|
83 | 83 | # If true, the current module name will be prepended to all description |
|
84 | 84 | # unit titles (such as .. function::). |
|
85 | 85 | #add_module_names = True |
|
86 | 86 | |
|
87 | 87 | # If true, sectionauthor and moduleauthor directives will be shown in the |
|
88 | 88 | # output. They are ignored by default. |
|
89 | 89 | #show_authors = False |
|
90 | 90 | |
|
91 | 91 | # The name of the Pygments (syntax highlighting) style to use. |
|
92 | 92 | pygments_style = 'sphinx' |
|
93 | 93 | highlight_language = 'none' |
|
94 | 94 | |
|
95 | 95 | # A list of ignored prefixes for module index sorting. |
|
96 | 96 | #modindex_common_prefix = [] |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | # -- Options for HTML output --------------------------------------------------- |
|
100 | 100 | |
|
101 | 101 | # The theme to use for HTML and HTML Help pages. See the documentation for |
|
102 | 102 | # a list of builtin themes. |
|
103 | 103 | html_theme = 'nature' |
|
104 | 104 | |
|
105 | 105 | # Theme options are theme-specific and customize the look and feel of a theme |
|
106 | 106 | # further. For a list of options available for each theme, see the |
|
107 | 107 | # documentation. |
|
108 | 108 | #html_theme_options = {} |
|
109 | 109 | |
|
110 | 110 | # Add any paths that contain custom themes here, relative to this directory. |
|
111 | 111 | html_theme_path = ['theme'] |
|
112 | 112 | |
|
113 | 113 | # The name for this set of Sphinx documents. If None, it defaults to |
|
114 | 114 | # "<project> v<release> documentation". |
|
115 | 115 | #html_title = None |
|
116 | 116 | |
|
117 | 117 | # A shorter title for the navigation bar. Default is the same as html_title. |
|
118 | 118 | #html_short_title = None |
|
119 | 119 | |
|
120 | 120 | # The name of an image file (relative to this directory) to place at the top |
|
121 | 121 | # of the sidebar. |
|
122 | 122 | #html_logo = None |
|
123 | 123 | |
|
124 | 124 | # The name of an image file (within the static path) to use as favicon of the |
|
125 | 125 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 |
|
126 | 126 | # pixels large. |
|
127 | 127 | #html_favicon = None |
|
128 | 128 | |
|
129 | 129 | # Add any paths that contain custom static files (such as style sheets) here, |
|
130 | 130 | # relative to this directory. They are copied after the builtin static files, |
|
131 | 131 | # so a file named "default.css" will overwrite the builtin "default.css". |
|
132 | 132 | #html_static_path = ['_static'] |
|
133 | 133 | |
|
134 | 134 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, |
|
135 | 135 | # using the given strftime format. |
|
136 | 136 | #html_last_updated_fmt = '%b %d, %Y' |
|
137 | 137 | |
|
138 | 138 | # If true, SmartyPants will be used to convert quotes and dashes to |
|
139 | 139 | # typographically correct entities. |
|
140 | 140 | #html_use_smartypants = True |
|
141 | 141 | |
|
142 | 142 | # Custom sidebar templates, maps document names to template names. |
|
143 | 143 | #html_sidebars = {} |
|
144 | 144 | |
|
145 | 145 | # Additional templates that should be rendered to pages, maps page names to |
|
146 | 146 | # template names. |
|
147 | 147 | #html_additional_pages = {} |
|
148 | 148 | |
|
149 | 149 | # If false, no module index is generated. |
|
150 | 150 | #html_domain_indices = True |
|
151 | 151 | |
|
152 | 152 | # If false, no index is generated. |
|
153 | 153 | #html_use_index = True |
|
154 | 154 | |
|
155 | 155 | # If true, the index is split into individual pages for each letter. |
|
156 | 156 | #html_split_index = False |
|
157 | 157 | |
|
158 | 158 | # If true, links to the reST sources are added to the pages. |
|
159 | 159 | #html_show_sourcelink = True |
|
160 | 160 | |
|
161 | 161 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. |
|
162 | 162 | #html_show_sphinx = True |
|
163 | 163 | |
|
164 | 164 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. |
|
165 | 165 | #html_show_copyright = True |
|
166 | 166 | |
|
167 | 167 | # If true, an OpenSearch description file will be output, and all pages will |
|
168 | 168 | # contain a <link> tag referring to it. The value of this option must be the |
|
169 | 169 | # base URL from which the finished HTML is served. |
|
170 | 170 | #html_use_opensearch = '' |
|
171 | 171 | |
|
172 | 172 | # This is the file name suffix for HTML files (e.g. ".xhtml"). |
|
173 | 173 | #html_file_suffix = None |
|
174 | 174 | |
|
175 | 175 | # Output file base name for HTML help builder. |
|
176 | 176 | htmlhelp_basename = 'Kallithea-docs' |
|
177 | 177 | |
|
178 | 178 | |
|
179 | 179 | # -- Options for LaTeX output -------------------------------------------------- |
|
180 | 180 | |
|
181 | 181 | # The paper size ('letter' or 'a4'). |
|
182 | 182 | #latex_paper_size = 'letter' |
|
183 | 183 | |
|
184 | 184 | # The font size ('10pt', '11pt' or '12pt'). |
|
185 | 185 | #latex_font_size = '10pt' |
|
186 | 186 | |
|
187 | 187 | # Grouping the document tree into LaTeX files. List of tuples |
|
188 | 188 | # (source start file, target name, title, author, documentclass [howto/manual]). |
|
189 | 189 | latex_documents = [ |
|
190 | 190 | ('index', 'Kallithea.tex', 'Kallithea Documentation', |
|
191 | 191 | 'Kallithea Developers', 'manual'), |
|
192 | 192 | ] |
|
193 | 193 | |
|
194 | 194 | # The name of an image file (relative to this directory) to place at the top of |
|
195 | 195 | # the title page. |
|
196 | 196 | #latex_logo = None |
|
197 | 197 | |
|
198 | 198 | # For "manual" documents, if this is true, then toplevel headings are parts, |
|
199 | 199 | # not chapters. |
|
200 | 200 | #latex_use_parts = False |
|
201 | 201 | |
|
202 | 202 | # If true, show page references after internal links. |
|
203 | 203 | #latex_show_pagerefs = False |
|
204 | 204 | |
|
205 | 205 | # If true, show URL addresses after external links. |
|
206 | 206 | #latex_show_urls = False |
|
207 | 207 | |
|
208 | 208 | # Additional stuff for the LaTeX preamble. |
|
209 | 209 | #latex_preamble = '' |
|
210 | 210 | |
|
211 | 211 | # Documents to append as an appendix to all manuals. |
|
212 | 212 | #latex_appendices = [] |
|
213 | 213 | |
|
214 | 214 | # If false, no module index is generated. |
|
215 | 215 | #latex_domain_indices = True |
|
216 | 216 | |
|
217 | 217 | |
|
218 | 218 | # -- Options for manual page output -------------------------------------------- |
|
219 | 219 | |
|
220 | 220 | # One entry per manual page. List of tuples |
|
221 | 221 | # (source start file, name, description, authors, manual section). |
|
222 | 222 | man_pages = [ |
|
223 | 223 | ('index', 'kallithea', 'Kallithea Documentation', |
|
224 | 224 | ['Kallithea Developers'], 1) |
|
225 | 225 | ] |
|
226 | 226 | |
|
227 | 227 | |
|
228 | 228 | # Example configuration for intersphinx: refer to the Python standard library. |
|
229 | 229 | intersphinx_mapping = {'http://docs.python.org/': None} |
@@ -1,649 +1,649 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | kallithea.lib.base |
|
17 | 17 | ~~~~~~~~~~~~~~~~~~ |
|
18 | 18 | |
|
19 | 19 | The base Controller API |
|
20 | 20 | Provides the BaseController class for subclassing. And usage in different |
|
21 | 21 | controllers |
|
22 | 22 | |
|
23 | 23 | This file was forked by the Kallithea project in July 2014. |
|
24 | 24 | Original author and date, and relevant copyright and licensing information is below: |
|
25 | 25 | :created_on: Oct 06, 2010 |
|
26 | 26 | :author: marcink |
|
27 | 27 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
28 | 28 | :license: GPLv3, see LICENSE.md for more details. |
|
29 | 29 | """ |
|
30 | 30 | |
|
31 | 31 | import base64 |
|
32 | 32 | import datetime |
|
33 | 33 | import logging |
|
34 | 34 | import traceback |
|
35 | 35 | import warnings |
|
36 | 36 | |
|
37 | 37 | import decorator |
|
38 | 38 | import paste.auth.basic |
|
39 | 39 | import paste.httpexceptions |
|
40 | 40 | import paste.httpheaders |
|
41 | 41 | import webob.exc |
|
42 | 42 | from tg import TGController, config, render_template, request, response, session |
|
43 | 43 | from tg import tmpl_context as c |
|
44 | 44 | from tg.i18n import ugettext as _ |
|
45 | 45 | |
|
46 | from kallithea import BACKENDS, __version__ | |
|
46 | import kallithea | |
|
47 | 47 | from kallithea.config.routing import url |
|
48 | 48 | from kallithea.lib import auth_modules, ext_json |
|
49 | 49 | from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
50 | 50 | from kallithea.lib.exceptions import UserCreationError |
|
51 | 51 | from kallithea.lib.utils import get_repo_slug, is_valid_repo |
|
52 | 52 | from kallithea.lib.utils2 import AttributeDict, asbool, ascii_bytes, safe_int, safe_str, set_hook_environment |
|
53 | 53 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError |
|
54 | 54 | from kallithea.model import meta |
|
55 | 55 | from kallithea.model.db import PullRequest, Repository, Setting, User |
|
56 | 56 | from kallithea.model.scm import ScmModel |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def render(template_path): |
|
63 | 63 | return render_template({'url': url}, 'mako', template_path) |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | def _filter_proxy(ip): |
|
67 | 67 | """ |
|
68 | 68 | HEADERS can have multiple ips inside the left-most being the original |
|
69 | 69 | client, and each successive proxy that passed the request adding the IP |
|
70 | 70 | address where it received the request from. |
|
71 | 71 | |
|
72 | 72 | :param ip: |
|
73 | 73 | """ |
|
74 | 74 | if ',' in ip: |
|
75 | 75 | _ips = ip.split(',') |
|
76 | 76 | _first_ip = _ips[0].strip() |
|
77 | 77 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
78 | 78 | return _first_ip |
|
79 | 79 | return ip |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | def _get_ip_addr(environ): |
|
83 | 83 | proxy_key = 'HTTP_X_REAL_IP' |
|
84 | 84 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
85 | 85 | def_key = 'REMOTE_ADDR' |
|
86 | 86 | |
|
87 | 87 | ip = environ.get(proxy_key) |
|
88 | 88 | if ip: |
|
89 | 89 | return _filter_proxy(ip) |
|
90 | 90 | |
|
91 | 91 | ip = environ.get(proxy_key2) |
|
92 | 92 | if ip: |
|
93 | 93 | return _filter_proxy(ip) |
|
94 | 94 | |
|
95 | 95 | ip = environ.get(def_key, '0.0.0.0') |
|
96 | 96 | return _filter_proxy(ip) |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | def get_path_info(environ): |
|
100 | 100 | """Return PATH_INFO from environ ... using tg.original_request if available. |
|
101 | 101 | |
|
102 | 102 | In Python 3 WSGI, PATH_INFO is a unicode str, but kind of contains encoded |
|
103 | 103 | bytes. The code points are guaranteed to only use the lower 8 bit bits, and |
|
104 | 104 | encoding the string with the 1:1 encoding latin1 will give the |
|
105 | 105 | corresponding byte string ... which then can be decoded to proper unicode. |
|
106 | 106 | """ |
|
107 | 107 | org_req = environ.get('tg.original_request') |
|
108 | 108 | if org_req is not None: |
|
109 | 109 | environ = org_req.environ |
|
110 | 110 | return safe_str(environ['PATH_INFO'].encode('latin1')) |
|
111 | 111 | |
|
112 | 112 | |
|
113 | 113 | def log_in_user(user, remember, is_external_auth, ip_addr): |
|
114 | 114 | """ |
|
115 | 115 | Log a `User` in and update session and cookies. If `remember` is True, |
|
116 | 116 | the session cookie is set to expire in a year; otherwise, it expires at |
|
117 | 117 | the end of the browser session. |
|
118 | 118 | |
|
119 | 119 | Returns populated `AuthUser` object. |
|
120 | 120 | """ |
|
121 | 121 | # It should not be possible to explicitly log in as the default user. |
|
122 | 122 | assert not user.is_default_user, user |
|
123 | 123 | |
|
124 | 124 | auth_user = AuthUser.make(dbuser=user, is_external_auth=is_external_auth, ip_addr=ip_addr) |
|
125 | 125 | if auth_user is None: |
|
126 | 126 | return None |
|
127 | 127 | |
|
128 | 128 | user.update_lastlogin() |
|
129 | 129 | meta.Session().commit() |
|
130 | 130 | |
|
131 | 131 | # Start new session to prevent session fixation attacks. |
|
132 | 132 | session.invalidate() |
|
133 | 133 | session['authuser'] = cookie = auth_user.to_cookie() |
|
134 | 134 | |
|
135 | 135 | # If they want to be remembered, update the cookie. |
|
136 | 136 | # NOTE: Assumes that beaker defaults to browser session cookie. |
|
137 | 137 | if remember: |
|
138 | 138 | t = datetime.datetime.now() + datetime.timedelta(days=365) |
|
139 | 139 | session._set_cookie_expires(t) |
|
140 | 140 | |
|
141 | 141 | session.save() |
|
142 | 142 | |
|
143 | 143 | log.info('user %s is now authenticated and stored in ' |
|
144 | 144 | 'session, session attrs %s', user.username, cookie) |
|
145 | 145 | |
|
146 | 146 | # dumps session attrs back to cookie |
|
147 | 147 | session._update_cookie_out() |
|
148 | 148 | |
|
149 | 149 | return auth_user |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | class BasicAuth(paste.auth.basic.AuthBasicAuthenticator): |
|
153 | 153 | |
|
154 | 154 | def __init__(self, realm, authfunc, auth_http_code=None): |
|
155 | 155 | self.realm = realm |
|
156 | 156 | self.authfunc = authfunc |
|
157 | 157 | self._rc_auth_http_code = auth_http_code |
|
158 | 158 | |
|
159 | 159 | def build_authentication(self, environ): |
|
160 | 160 | head = paste.httpheaders.WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
161 | 161 | # Consume the whole body before sending a response |
|
162 | 162 | try: |
|
163 | 163 | request_body_size = int(environ.get('CONTENT_LENGTH', 0)) |
|
164 | 164 | except (ValueError): |
|
165 | 165 | request_body_size = 0 |
|
166 | 166 | environ['wsgi.input'].read(request_body_size) |
|
167 | 167 | if self._rc_auth_http_code and self._rc_auth_http_code == '403': |
|
168 | 168 | # return 403 if alternative http return code is specified in |
|
169 | 169 | # Kallithea config |
|
170 | 170 | return paste.httpexceptions.HTTPForbidden(headers=head) |
|
171 | 171 | return paste.httpexceptions.HTTPUnauthorized(headers=head) |
|
172 | 172 | |
|
173 | 173 | def authenticate(self, environ): |
|
174 | 174 | authorization = paste.httpheaders.AUTHORIZATION(environ) |
|
175 | 175 | if not authorization: |
|
176 | 176 | return self.build_authentication(environ) |
|
177 | 177 | (authmeth, auth) = authorization.split(' ', 1) |
|
178 | 178 | if 'basic' != authmeth.lower(): |
|
179 | 179 | return self.build_authentication(environ) |
|
180 | 180 | auth = safe_str(base64.b64decode(auth.strip())) |
|
181 | 181 | _parts = auth.split(':', 1) |
|
182 | 182 | if len(_parts) == 2: |
|
183 | 183 | username, password = _parts |
|
184 | 184 | if self.authfunc(username, password, environ) is not None: |
|
185 | 185 | return username |
|
186 | 186 | return self.build_authentication(environ) |
|
187 | 187 | |
|
188 | 188 | __call__ = authenticate |
|
189 | 189 | |
|
190 | 190 | |
|
191 | 191 | class BaseVCSController(object): |
|
192 | 192 | """Base controller for handling Mercurial/Git protocol requests |
|
193 | 193 | (coming from a VCS client, and not a browser). |
|
194 | 194 | """ |
|
195 | 195 | |
|
196 | 196 | scm_alias = None # 'hg' / 'git' |
|
197 | 197 | |
|
198 | 198 | def __init__(self, application, config): |
|
199 | 199 | self.application = application |
|
200 | 200 | self.config = config |
|
201 | 201 | # base path of repo locations |
|
202 | 202 | self.basepath = self.config['base_path'] |
|
203 | 203 | # authenticate this VCS request using the authentication modules |
|
204 | 204 | self.authenticate = BasicAuth('', auth_modules.authenticate, |
|
205 | 205 | config.get('auth_ret_code')) |
|
206 | 206 | |
|
207 | 207 | @classmethod |
|
208 | 208 | def parse_request(cls, environ): |
|
209 | 209 | """If request is parsed as a request for this VCS, return a namespace with the parsed request. |
|
210 | 210 | If the request is unknown, return None. |
|
211 | 211 | """ |
|
212 | 212 | raise NotImplementedError() |
|
213 | 213 | |
|
214 | 214 | def _authorize(self, environ, action, repo_name, ip_addr): |
|
215 | 215 | """Authenticate and authorize user. |
|
216 | 216 | |
|
217 | 217 | Since we're dealing with a VCS client and not a browser, we only |
|
218 | 218 | support HTTP basic authentication, either directly via raw header |
|
219 | 219 | inspection, or by using container authentication to delegate the |
|
220 | 220 | authentication to the web server. |
|
221 | 221 | |
|
222 | 222 | Returns (user, None) on successful authentication and authorization. |
|
223 | 223 | Returns (None, wsgi_app) to send the wsgi_app response to the client. |
|
224 | 224 | """ |
|
225 | 225 | # Use anonymous access if allowed for action on repo. |
|
226 | 226 | default_user = User.get_default_user() |
|
227 | 227 | default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr) |
|
228 | 228 | if default_authuser is None: |
|
229 | 229 | log.debug('No anonymous access at all') # move on to proper user auth |
|
230 | 230 | else: |
|
231 | 231 | if self._check_permission(action, default_authuser, repo_name): |
|
232 | 232 | return default_authuser, None |
|
233 | 233 | log.debug('Not authorized to access this repository as anonymous user') |
|
234 | 234 | |
|
235 | 235 | username = None |
|
236 | 236 | #============================================================== |
|
237 | 237 | # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE |
|
238 | 238 | # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS |
|
239 | 239 | #============================================================== |
|
240 | 240 | |
|
241 | 241 | # try to auth based on environ, container auth methods |
|
242 | 242 | log.debug('Running PRE-AUTH for container based authentication') |
|
243 | 243 | pre_auth = auth_modules.authenticate('', '', environ) |
|
244 | 244 | if pre_auth is not None and pre_auth.get('username'): |
|
245 | 245 | username = pre_auth['username'] |
|
246 | 246 | log.debug('PRE-AUTH got %s as username', username) |
|
247 | 247 | |
|
248 | 248 | # If not authenticated by the container, running basic auth |
|
249 | 249 | if not username: |
|
250 | 250 | self.authenticate.realm = self.config['realm'] |
|
251 | 251 | result = self.authenticate(environ) |
|
252 | 252 | if isinstance(result, str): |
|
253 | 253 | paste.httpheaders.AUTH_TYPE.update(environ, 'basic') |
|
254 | 254 | paste.httpheaders.REMOTE_USER.update(environ, result) |
|
255 | 255 | username = result |
|
256 | 256 | else: |
|
257 | 257 | return None, result.wsgi_application |
|
258 | 258 | |
|
259 | 259 | #============================================================== |
|
260 | 260 | # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME |
|
261 | 261 | #============================================================== |
|
262 | 262 | try: |
|
263 | 263 | user = User.get_by_username_or_email(username) |
|
264 | 264 | except Exception: |
|
265 | 265 | log.error(traceback.format_exc()) |
|
266 | 266 | return None, webob.exc.HTTPInternalServerError() |
|
267 | 267 | |
|
268 | 268 | authuser = AuthUser.make(dbuser=user, ip_addr=ip_addr) |
|
269 | 269 | if authuser is None: |
|
270 | 270 | return None, webob.exc.HTTPForbidden() |
|
271 | 271 | if not self._check_permission(action, authuser, repo_name): |
|
272 | 272 | return None, webob.exc.HTTPForbidden() |
|
273 | 273 | |
|
274 | 274 | return user, None |
|
275 | 275 | |
|
276 | 276 | def _handle_request(self, environ, start_response): |
|
277 | 277 | raise NotImplementedError() |
|
278 | 278 | |
|
279 | 279 | def _check_permission(self, action, authuser, repo_name): |
|
280 | 280 | """ |
|
281 | 281 | :param action: 'push' or 'pull' |
|
282 | 282 | :param user: `AuthUser` instance |
|
283 | 283 | :param repo_name: repository name |
|
284 | 284 | """ |
|
285 | 285 | if action == 'push': |
|
286 | 286 | if not HasPermissionAnyMiddleware('repository.write', |
|
287 | 287 | 'repository.admin')(authuser, |
|
288 | 288 | repo_name): |
|
289 | 289 | return False |
|
290 | 290 | |
|
291 | 291 | elif action == 'pull': |
|
292 | 292 | #any other action need at least read permission |
|
293 | 293 | if not HasPermissionAnyMiddleware('repository.read', |
|
294 | 294 | 'repository.write', |
|
295 | 295 | 'repository.admin')(authuser, |
|
296 | 296 | repo_name): |
|
297 | 297 | return False |
|
298 | 298 | |
|
299 | 299 | else: |
|
300 | 300 | assert False, action |
|
301 | 301 | |
|
302 | 302 | return True |
|
303 | 303 | |
|
304 | 304 | def _get_ip_addr(self, environ): |
|
305 | 305 | return _get_ip_addr(environ) |
|
306 | 306 | |
|
307 | 307 | def __call__(self, environ, start_response): |
|
308 | 308 | try: |
|
309 | 309 | # try parsing a request for this VCS - if it fails, call the wrapped app |
|
310 | 310 | parsed_request = self.parse_request(environ) |
|
311 | 311 | if parsed_request is None: |
|
312 | 312 | return self.application(environ, start_response) |
|
313 | 313 | |
|
314 | 314 | # skip passing error to error controller |
|
315 | 315 | environ['pylons.status_code_redirect'] = True |
|
316 | 316 | |
|
317 | 317 | # quick check if repo exists... |
|
318 | 318 | if not is_valid_repo(parsed_request.repo_name, self.basepath, self.scm_alias): |
|
319 | 319 | raise webob.exc.HTTPNotFound() |
|
320 | 320 | |
|
321 | 321 | if parsed_request.action is None: |
|
322 | 322 | # Note: the client doesn't get the helpful error message |
|
323 | 323 | raise webob.exc.HTTPBadRequest('Unable to detect pull/push action for %r! Are you using a nonstandard command or client?' % parsed_request.repo_name) |
|
324 | 324 | |
|
325 | 325 | #====================================================================== |
|
326 | 326 | # CHECK PERMISSIONS |
|
327 | 327 | #====================================================================== |
|
328 | 328 | ip_addr = self._get_ip_addr(environ) |
|
329 | 329 | user, response_app = self._authorize(environ, parsed_request.action, parsed_request.repo_name, ip_addr) |
|
330 | 330 | if response_app is not None: |
|
331 | 331 | return response_app(environ, start_response) |
|
332 | 332 | |
|
333 | 333 | #====================================================================== |
|
334 | 334 | # REQUEST HANDLING |
|
335 | 335 | #====================================================================== |
|
336 | 336 | set_hook_environment(user.username, ip_addr, |
|
337 | 337 | parsed_request.repo_name, self.scm_alias, parsed_request.action) |
|
338 | 338 | |
|
339 | 339 | try: |
|
340 | 340 | log.info('%s action on %s repo "%s" by "%s" from %s', |
|
341 | 341 | parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr) |
|
342 | 342 | app = self._make_app(parsed_request) |
|
343 | 343 | return app(environ, start_response) |
|
344 | 344 | except Exception: |
|
345 | 345 | log.error(traceback.format_exc()) |
|
346 | 346 | raise webob.exc.HTTPInternalServerError() |
|
347 | 347 | |
|
348 | 348 | except webob.exc.HTTPException as e: |
|
349 | 349 | return e(environ, start_response) |
|
350 | 350 | |
|
351 | 351 | |
|
352 | 352 | class BaseController(TGController): |
|
353 | 353 | |
|
354 | 354 | def _before(self, *args, **kwargs): |
|
355 | 355 | """ |
|
356 | 356 | _before is called before controller methods and after __call__ |
|
357 | 357 | """ |
|
358 | 358 | if request.needs_csrf_check: |
|
359 | 359 | # CSRF protection: Whenever a request has ambient authority (whether |
|
360 | 360 | # through a session cookie or its origin IP address), it must include |
|
361 | 361 | # the correct token, unless the HTTP method is GET or HEAD (and thus |
|
362 | 362 | # guaranteed to be side effect free. In practice, the only situation |
|
363 | 363 | # where we allow side effects without ambient authority is when the |
|
364 | 364 | # authority comes from an API key; and that is handled above. |
|
365 | 365 | from kallithea.lib import helpers as h |
|
366 | 366 | token = request.POST.get(h.session_csrf_secret_name) |
|
367 | 367 | if not token or token != h.session_csrf_secret_token(): |
|
368 | 368 | log.error('CSRF check failed') |
|
369 | 369 | raise webob.exc.HTTPForbidden() |
|
370 | 370 | |
|
371 | c.kallithea_version = __version__ | |
|
371 | c.kallithea_version = kallithea.__version__ | |
|
372 | 372 | rc_config = Setting.get_app_settings() |
|
373 | 373 | |
|
374 | 374 | # Visual options |
|
375 | 375 | c.visual = AttributeDict({}) |
|
376 | 376 | |
|
377 | 377 | ## DB stored |
|
378 | 378 | c.visual.show_public_icon = asbool(rc_config.get('show_public_icon')) |
|
379 | 379 | c.visual.show_private_icon = asbool(rc_config.get('show_private_icon')) |
|
380 | 380 | c.visual.stylify_metalabels = asbool(rc_config.get('stylify_metalabels')) |
|
381 | 381 | c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100)) |
|
382 | 382 | c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100)) |
|
383 | 383 | c.visual.repository_fields = asbool(rc_config.get('repository_fields')) |
|
384 | 384 | c.visual.show_version = asbool(rc_config.get('show_version')) |
|
385 | 385 | c.visual.use_gravatar = asbool(rc_config.get('use_gravatar')) |
|
386 | 386 | c.visual.gravatar_url = rc_config.get('gravatar_url') |
|
387 | 387 | |
|
388 | 388 | c.ga_code = rc_config.get('ga_code') |
|
389 | 389 | # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code |
|
390 | 390 | if c.ga_code and '<' not in c.ga_code: |
|
391 | 391 | c.ga_code = '''<script type="text/javascript"> |
|
392 | 392 | var _gaq = _gaq || []; |
|
393 | 393 | _gaq.push(['_setAccount', '%s']); |
|
394 | 394 | _gaq.push(['_trackPageview']); |
|
395 | 395 | |
|
396 | 396 | (function() { |
|
397 | 397 | var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; |
|
398 | 398 | ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; |
|
399 | 399 | var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); |
|
400 | 400 | })(); |
|
401 | 401 | </script>''' % c.ga_code |
|
402 | 402 | c.site_name = rc_config.get('title') |
|
403 | 403 | c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI |
|
404 | 404 | c.clone_ssh_tmpl = rc_config.get('clone_ssh_tmpl') or Repository.DEFAULT_CLONE_SSH |
|
405 | 405 | |
|
406 | 406 | ## INI stored |
|
407 | 407 | c.visual.allow_repo_location_change = asbool(config.get('allow_repo_location_change', True)) |
|
408 | 408 | c.visual.allow_custom_hooks_settings = asbool(config.get('allow_custom_hooks_settings', True)) |
|
409 | 409 | c.ssh_enabled = asbool(config.get('ssh_enabled', False)) |
|
410 | 410 | |
|
411 | 411 | c.instance_id = config.get('instance_id') |
|
412 | 412 | c.issues_url = config.get('bugtracker', url('issues_url')) |
|
413 | 413 | # END CONFIG VARS |
|
414 | 414 | |
|
415 | 415 | c.repo_name = get_repo_slug(request) # can be empty |
|
416 | c.backends = list(BACKENDS) | |
|
416 | c.backends = list(kallithea.BACKENDS) | |
|
417 | 417 | |
|
418 | 418 | self.cut_off_limit = safe_int(config.get('cut_off_limit')) |
|
419 | 419 | |
|
420 | 420 | c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count() |
|
421 | 421 | |
|
422 | 422 | self.scm_model = ScmModel() |
|
423 | 423 | |
|
    @staticmethod
    def _determine_auth_user(session_authuser, ip_addr):
        """
        Create an `AuthUser` object given the API key/bearer token
        (if any) and the value of the authuser session cookie.
        Returns None if no valid user is found (like not active or no access for IP).

        Authentication sources are tried in order: session cookie, then
        container auth plugin, then the default (anonymous) user.
        """

        # Authenticate by session cookie
        # In ancient login sessions, 'authuser' may not be a dict.
        # In that case, the user will have to log in again.
        # v0.3 and earlier included an 'is_authenticated' key; if present,
        # this must be True.
        if isinstance(session_authuser, dict) and session_authuser.get('is_authenticated', True):
            return AuthUser.from_cookie(session_authuser, ip_addr=ip_addr)

        # Authenticate by auth_container plugin (if enabled)
        if any(
            plugin.is_container_auth
            for plugin in auth_modules.get_auth_plugins()
        ):
            try:
                # Empty username/password: container auth plugins presumably
                # take the identity from request.environ - TODO confirm
                user_info = auth_modules.authenticate('', '', request.environ)
            except UserCreationError as e:
                from kallithea.lib import helpers as h
                h.flash(e, 'error', logf=log.error)
            else:
                if user_info is not None:
                    username = user_info['username']
                    user = User.get_by_username(username, case_insensitive=True)
                    # Container auth established the session, so mark it external.
                    return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr)

        # User is default user (if active) or anonymous
        default_user = User.get_default_user()
        authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
        if authuser is None: # fall back to anonymous
            authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make?
        return authuser
|
462 | 462 | |
|
    @staticmethod
    def _basic_security_checks():
        """Perform basic security/sanity checks before processing the request.

        Raises HTTPMethodNotAllowed or HTTPBadRequest on violation; a CSRF
        token leaked in the URL expires all form tokens as a side effect.
        """

        # Only allow the following HTTP request methods.
        if request.method not in ['GET', 'HEAD', 'POST']:
            raise webob.exc.HTTPMethodNotAllowed()

        # Also verify the _method override - no longer allowed.
        if request.params.get('_method') is None:
            pass # no override, no problem
        else:
            raise webob.exc.HTTPMethodNotAllowed()

        # Make sure CSRF token never appears in the URL. If so, invalidate it.
        # (URLs may end up in logs/referrers, so a token there is compromised.)
        from kallithea.lib import helpers as h
        if h.session_csrf_secret_name in request.GET:
            log.error('CSRF key leak detected')
            session.pop(h.session_csrf_secret_name, None)
            session.save()
            h.flash(_('CSRF token leak has been detected - all form tokens have been expired'),
                    category='error')

        # WebOb already ignores request payload parameters for anything other
        # than POST/PUT, but double-check since other Kallithea code relies on
        # this assumption.
        if request.method not in ['POST', 'PUT'] and request.POST:
            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
            raise webob.exc.HTTPBadRequest()
|
492 | 492 | |
|
    def __call__(self, environ, context):
        """Authenticate the request and dispatch to the controller action.

        Determines the effective user either from an API key / bearer token
        or from the session cookie, stores it (plus IP and CSRF-check flag)
        on the request, and delegates to the framework. Any HTTPException is
        returned as the response instead of propagating.
        """
        try:
            ip_addr = _get_ip_addr(environ)
            self._basic_security_checks()

            api_key = request.GET.get('api_key')
            try:
                # Request.authorization may raise ValueError on invalid input
                type, params = request.authorization
            except (ValueError, TypeError):
                pass
            else:
                if type.lower() == 'bearer':
                    api_key = params # bearer token is an api key too

            if api_key is None:
                # No API key: authenticate via session cookie / container auth.
                authuser = self._determine_auth_user(
                    session.get('authuser'),
                    ip_addr=ip_addr,
                )
                needs_csrf_check = request.method not in ['GET', 'HEAD']

            else:
                dbuser = User.get_by_api_key(api_key)
                if dbuser is None:
                    log.info('No db user found for authentication with API key ****%s from %s',
                             api_key[-4:], ip_addr)
                # Note: dbuser may be None here; AuthUser.make is expected to
                # return None in that case, triggering the 403 below.
                authuser = AuthUser.make(dbuser=dbuser, is_external_auth=True, ip_addr=ip_addr)
                needs_csrf_check = False # API key provides CSRF protection

            if authuser is None:
                log.info('No valid user found')
                raise webob.exc.HTTPForbidden()

            # set globals for auth user
            request.authuser = authuser
            request.ip_addr = ip_addr
            request.needs_csrf_check = needs_csrf_check

            log.info('IP: %s User: %s Request: %s',
                     request.ip_addr, request.authuser,
                     get_path_info(environ),
                     )
            return super(BaseController, self).__call__(environ, context)
        except webob.exc.HTTPException as e:
            return e
|
539 | 539 | |
|
540 | 540 | |
|
class BaseRepoController(BaseController):
    """
    Base class for controllers responsible for loading all needed data for
    repository loaded items are

        c.db_repo_scm_instance: instance of scm repository
        c.db_repo: instance of db
        c.repository_followers: number of followers
        c.repository_forks: number of forks
        c.repository_following: whether the current user is following the current repo
    """

    def _before(self, *args, **kwargs):
        # Resolve c.repo_name (set by BaseController._before) to a database
        # repository and populate the repository-related template globals.
        super(BaseRepoController, self)._before(*args, **kwargs)
        if c.repo_name: # extracted from request by base-base BaseController._before
            _dbr = Repository.get_by_repo_name(c.repo_name)
            if not _dbr:
                # Unknown repository name - leave c.db_repo unset and let the
                # controller action handle it.
                return

            log.debug('Found repository in database %s with state `%s`',
                      _dbr, _dbr.repo_state)
            route = getattr(request.environ.get('routes.route'), 'name', '')

            # allow to delete repos that are somehow damaged in filesystem
            if route in ['delete_repo']:
                return

            if _dbr.repo_state in [Repository.STATE_PENDING]:
                # Repository creation still in progress: redirect to the
                # "creating" page (unless we are already on it).
                if route in ['repo_creating_home']:
                    return
                check_url = url('repo_creating_home', repo_name=c.repo_name)
                raise webob.exc.HTTPFound(location=check_url)

            dbr = c.db_repo = _dbr
            c.db_repo_scm_instance = c.db_repo.scm_instance
            if c.db_repo_scm_instance is None:
                # Database row exists but the on-disk repository is gone/broken.
                log.error('%s this repository is present in database but it '
                          'cannot be created as an scm instance', c.repo_name)
                from kallithea.lib import helpers as h
                h.flash(_('Repository not found in the filesystem'),
                        category='error')
                raise webob.exc.HTTPNotFound()

            # some globals counter for menu
            c.repository_followers = self.scm_model.get_followers(dbr)
            c.repository_forks = self.scm_model.get_forks(dbr)
            c.repository_pull_requests = self.scm_model.get_pull_requests(dbr)
            c.repository_following = self.scm_model.is_following_repo(
                c.repo_name, request.authuser.user_id)

    @staticmethod
    def _get_ref_rev(repo, ref_type, ref_name, returnempty=False):
        """
        Safe way to get changeset. If error occurs show error.

        Resolves (ref_type, ref_name) to a revision via the scm instance;
        on failure flashes an error message and raises an HTTP error
        (404 for missing changesets, 400 for other repository errors).
        """
        from kallithea.lib import helpers as h
        try:
            return repo.scm_instance.get_ref_revision(ref_type, ref_name)
        except EmptyRepositoryError as e:
            if returnempty:
                return repo.scm_instance.EMPTY_CHANGESET
            h.flash(_('There are no changesets yet'), category='error')
            raise webob.exc.HTTPNotFound()
        except ChangesetDoesNotExistError as e:
            h.flash(_('Changeset for %s %s not found in %s') %
                    (ref_type, ref_name, repo.repo_name),
                    category='error')
            raise webob.exc.HTTPNotFound()
        except RepositoryError as e:
            log.error(traceback.format_exc())
            h.flash(e, category='error')
            raise webob.exc.HTTPBadRequest()
|
613 | 613 | |
|
614 | 614 | |
|
@decorator.decorator
def jsonify(func, *args, **kwargs):
    """Action decorator that formats output for JSON

    Given a function that will return content, this decorator will turn
    the result into JSON, with a content-type of 'application/json' and
    output it. Returns the serialized result as ASCII bytes.
    """
    response.headers['Content-Type'] = 'application/json; charset=utf-8'
    data = func(*args, **kwargs)
    if isinstance(data, (list, tuple)):
        # A JSON list response is syntactically valid JavaScript and can be
        # loaded and executed as JavaScript by a malicious third-party site
        # using <script>, which can lead to cross-site data leaks.
        # JSON responses should therefore be scalars or objects (i.e. Python
        # dicts), because a JSON object is a syntax error if interpreted as JS.
        msg = "JSON responses with Array envelopes are susceptible to " \
              "cross-site data leak attacks, see " \
              "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
        warnings.warn(msg, Warning, 2)
        log.warning(msg)
    log.debug("Returning JSON wrapped action output")
    return ascii_bytes(ext_json.dumps(data))
|
638 | 638 | |
|
@decorator.decorator
def IfSshEnabled(func, *args, **kwargs):
    """Decorator for functions that can only be called if SSH access is enabled.

    If SSH access is disabled in the configuration file, a warning is flashed
    and HTTPNotFound is raised instead of invoking the wrapped function.
    """
    if c.ssh_enabled:
        return func(*args, **kwargs)
    from kallithea.lib import helpers as h
    h.flash(_("SSH access is disabled."), category='warning')
    raise webob.exc.HTTPNotFound()
@@ -1,413 +1,406 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.hooks |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Hooks run by Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Aug 6, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import sys |
|
30 | 30 | import time |
|
31 | 31 | |
|
32 | 32 | import mercurial.scmutil |
|
33 | 33 | |
|
34 | import kallithea | |
|
34 | 35 | from kallithea.lib import helpers as h |
|
35 | 36 | from kallithea.lib.exceptions import UserCreationError |
|
36 | 37 | from kallithea.lib.utils import action_logger, make_ui |
|
37 | 38 | from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str |
|
38 | 39 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
39 | 40 | from kallithea.model.db import Repository, User |
|
40 | 41 | |
|
41 | 42 | |
|
def _get_scm_size(alias, root_path):
    """Return formatted sizes (scm metadata, working files, total) under root_path.

    *alias* (e.g. '.hg') identifies paths belonging to the scm metadata
    directory; everything else counts as checkout/working files.
    """
    if not alias.startswith('.'):
        alias += '.'

    size_scm = 0
    size_root = 0
    for path, dirs, files in os.walk(root_path):
        inside_scm = alias in path
        for f in files:
            try:
                file_size = os.path.getsize(os.path.join(path, f))
            except OSError:
                # unreadable/vanished file - skip it
                continue
            if inside_scm:
                size_scm += file_size
            else:
                size_root += file_size

    return (h.format_byte_size(size_scm),
            h.format_byte_size(size_root),
            h.format_byte_size(size_root + size_scm))
|
66 | 67 | |
|
67 | 68 | |
|
def repo_size(ui, repo, hooktype=None, **kwargs):
    """Show size of Mercurial repository.

    Called as Mercurial hook changegroup.repo_size after push.
    """
    hg_size, checkout_size, total_size = _get_scm_size('.hg', safe_str(repo.root))
    tip = repo[len(repo) - 1]
    message = ('Repository size .hg: %s Checkout: %s Total: %s\n'
               'Last revision is now r%s:%s\n') % (
        hg_size, checkout_size, total_size, tip.rev(), ascii_str(tip.hex())[:12]
    )
    ui.status(safe_bytes(message))
|
82 | 83 | |
|
83 | 84 | |
|
def log_pull_action(ui, repo, **kwargs):
    """Logs user last pull action

    Called as Mercurial hook outgoing.pull_logger or from Kallithea before invoking Git.

    Does *not* use the action from the hook environment but is always 'pull'.
    """
    ex = get_hook_environment()

    # Always log as 'pull', regardless of what the hook environment says.
    user = User.get_by_username(ex.username)
    action_logger(user, 'pull', ex.repository, ex.ip, commit=True)

    # extension hook call
    callback = getattr(kallithea.EXTENSIONS, 'PULL_HOOK', None)
    if callable(callback):
        kw = {}
        kw.update(ex)
        callback(**kw)
|
103 | 103 | |
|
104 | 104 | |
|
def log_push_action(ui, repo, node, node_last, **kwargs):
    """
    Register that changes have been added to the repo - log the action *and* invalidate caches.
    Note: This hook is not only logging, but also the side effect invalidating
    caches! The function should perhaps be renamed.

    Called as Mercurial hook changegroup.kallithea_log_push_action .

    The pushed changesets is given by the revset 'node:node_last'.
    """
    revset = b'%s:%s' % (node, node_last)
    pushed_revs = [
        ascii_str(repo[rev].hex())
        for rev in mercurial.scmutil.revrange(repo, [revset])
    ]
    process_pushed_raw_ids(pushed_revs)
|
117 | 117 | |
|
118 | 118 | |
|
def process_pushed_raw_ids(revs):
    """
    Register that changes have been added to the repo - log the action *and* invalidate caches.

    Called from Mercurial changegroup.kallithea_log_push_action calling hook log_push_action,
    or from the Git post-receive hook calling handle_git_post_receive ...
    or from scm _handle_push.
    """
    ex = get_hook_environment()

    # Record the push (action plus the list of revision ids) in the journal.
    action_logger(ex.username, '%s:%s' % (ex.action, ','.join(revs)),
                  ex.repository, ex.ip, commit=True)

    from kallithea.model.scm import ScmModel
    ScmModel().mark_for_invalidation(ex.repository)

    # extension hook call
    callback = getattr(kallithea.EXTENSIONS, 'PUSH_HOOK', None)
    if callable(callback):
        kw = {'pushed_revs': revs}
        kw.update(ex)
        callback(**kw)
|
142 | 141 | |
|
143 | 142 | |
|
def log_create_repository(repository_dict, created_by, **kwargs):
    """
    Post create repository Hook.

    :param repository_dict: dict dump of repository object
    :param created_by: username who created repository

    available keys of repository_dict:

     'repo_type',
     'description',
     'private',
     'created_on',
     'enable_downloads',
     'repo_id',
     'owner_id',
     'enable_statistics',
     'clone_uri',
     'fork_id',
     'group_id',
     'repo_name'

    """
    callback = getattr(kallithea.EXTENSIONS, 'CREATE_REPO_HOOK', None)
    if not callable(callback):
        return
    kw = {}
    kw.update(repository_dict)
    kw.update({'created_by': created_by})
    kw.update(kwargs)
    callback(**kw)
|
175 | 173 | |
|
176 | 174 | |
|
def check_allowed_create_user(user_dict, created_by, **kwargs):
    """Run the PRE_CREATE_USER_HOOK extension hook (if any).

    Raises UserCreationError if the hook vetoes the user creation.
    """
    callback = getattr(kallithea.EXTENSIONS, 'PRE_CREATE_USER_HOOK', None)
    if not callable(callback):
        return
    allowed, reason = callback(created_by=created_by, **user_dict)
    if not allowed:
        raise UserCreationError(reason)
|
185 | 182 | |
|
186 | 183 | |
|
def log_create_user(user_dict, created_by, **kwargs):
    """
    Post create user Hook.

    :param user_dict: dict dump of user object

    available keys for user_dict:

     'username',
     'full_name_or_username',
     'full_contact',
     'user_id',
     'name',
     'firstname',
     'short_contact',
     'admin',
     'lastname',
     'ip_addresses',
     'ldap_dn',
     'email',
     'api_key',
     'last_login',
     'full_name',
     'active',
     'password',
     'emails',

    """
    callback = getattr(kallithea.EXTENSIONS, 'CREATE_USER_HOOK', None)
    if not callable(callback):
        return
    callback(created_by=created_by, **user_dict)
|
219 | 215 | |
|
220 | 216 | |
|
def log_create_pullrequest(pullrequest_dict, created_by, **kwargs):
    """
    Post create pull request hook.

    :param pullrequest_dict: dict dump of pull request object

    Returns the extension callback's result, or 0 if no hook is installed.
    """
    callback = getattr(kallithea.EXTENSIONS, 'CREATE_PULLREQUEST_HOOK', None)
    if not callable(callback):
        return 0
    return callback(created_by=created_by, **pullrequest_dict)
|
233 | 228 | |
|
def log_delete_repository(repository_dict, deleted_by, **kwargs):
    """
    Post delete repository Hook.

    :param repository_dict: dict dump of repository object
    :param deleted_by: username who deleted the repository

    available keys of repository_dict:

     'repo_type',
     'description',
     'private',
     'created_on',
     'enable_downloads',
     'repo_id',
     'owner_id',
     'enable_statistics',
     'clone_uri',
     'fork_id',
     'group_id',
     'repo_name'

    """
    callback = getattr(kallithea.EXTENSIONS, 'DELETE_REPO_HOOK', None)
    if not callable(callback):
        return
    kw = {}
    kw.update(repository_dict)
    kw.update({'deleted_by': deleted_by,
               'deleted_on': time.time()})
    kw.update(kwargs)
    callback(**kw)
|
266 | 260 | |
|
267 | 261 | |
|
def log_delete_user(user_dict, deleted_by, **kwargs):
    """
    Post delete user Hook.

    :param user_dict: dict dump of user object

    available keys for user_dict:

     'username',
     'full_name_or_username',
     'full_contact',
     'user_id',
     'name',
     'firstname',
     'short_contact',
     'admin',
     'lastname',
     'ip_addresses',
     'ldap_dn',
     'email',
     'api_key',
     'last_login',
     'full_name',
     'active',
     'password',
     'emails',

    """
    callback = getattr(kallithea.EXTENSIONS, 'DELETE_USER_HOOK', None)
    if not callable(callback):
        return
    callback(deleted_by=deleted_by, **user_dict)
|
300 | 293 | |
|
301 | 294 | |
|
def _hook_environment(repo_path):
    """
    Create a light-weight environment for stand-alone scripts and return an UI and the
    db repository.

    Git hooks are executed as subprocess of Git while Kallithea is waiting, and
    they thus need enough info to be able to create an app environment and
    connect to the database.

    :param repo_path: filesystem path of the repository being operated on
    :raises OSError: if the repository is not registered in the database
    """
    import paste.deploy

    import kallithea.config.application

    # The hook environment (set up by the parent Kallithea process) carries
    # the path of the .ini file to bootstrap the app from.
    extras = get_hook_environment()

    path_to_ini_file = extras['config']
    config = paste.deploy.appconfig('config:' + path_to_ini_file)
    #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
    kallithea.config.application.make_app(config.global_conf, **config.local_conf)

    # fix if it's not a bare repo
    if repo_path.endswith(os.sep + '.git'):
        repo_path = repo_path[:-5]

    repo = Repository.get_by_full_path(repo_path)
    if not repo:
        raise OSError('Repository %s not found in database' % repo_path)

    baseui = make_ui()
    return baseui, repo
|
332 | 325 | |
|
333 | 326 | |
|
def handle_git_pre_receive(repo_path, git_stdin_lines):
    """Called from Git pre-receive hook.
    The returned value is used as hook exit code and must be 0.

    Currently an unused no-op placeholder. TODO: remove?
    """
    return 0
|
340 | 333 | |
|
341 | 334 | |
|
def handle_git_post_receive(repo_path, git_stdin_lines):
    """Called from Git post-receive hook.

    Parses the 'old_rev new_rev ref' lines Git passes on stdin, computes the
    list of pushed revisions (or pseudo ids like 'tag=>name' and
    'delete_branch=>name'), and registers the push via process_pushed_raw_ids.
    The returned value is used as hook exit code and must be 0.
    """
    try:
        baseui, repo = _hook_environment(repo_path)
    except HookEnvironmentError as e:
        # Push was not initiated through Kallithea (e.g. direct filesystem
        # access) - nothing to log, but don't fail the push.
        # Note: fixed misspelling "post-recieve" in the message below.
        sys.stderr.write("Skipping Kallithea Git post-receive hook %r.\nGit was apparently not invoked by Kallithea: %s\n" % (sys.argv[0], e))
        return 0

    # the post push hook should never use the cached instance
    scm_repo = repo.scm_instance_no_cache()

    # Collect ref updates for branches and tags only.
    rev_data = []
    for l in git_stdin_lines:
        old_rev, new_rev, ref = l.strip().split(' ')
        _ref_data = ref.split('/')
        if _ref_data[1] in ['tags', 'heads']:
            rev_data.append({'old_rev': old_rev,
                             'new_rev': new_rev,
                             'ref': ref,
                             'type': _ref_data[1],
                             'name': '/'.join(_ref_data[2:])})

    git_revs = []
    for push_ref in rev_data:
        _type = push_ref['type']
        if _type == 'heads':
            if push_ref['old_rev'] == EmptyChangeset().raw_id:
                # New branch (old rev is the null id).
                # update the symbolic ref if we push new repo
                if scm_repo.is_empty():
                    scm_repo._repo.refs.set_symbolic_ref(
                        b'HEAD',
                        b'refs/heads/%s' % safe_bytes(push_ref['name']))

                # build exclude list without the ref
                cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*']
                stdout = scm_repo.run_git_command(cmd)
                ref = push_ref['ref']
                heads = [head for head in stdout.splitlines() if head != ref]
                # now list the git revs while excluding from the list
                cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H']
                cmd.append('--not')
                cmd.extend(heads) # empty list is ok
                stdout = scm_repo.run_git_command(cmd)
                git_revs += stdout.splitlines()

            elif push_ref['new_rev'] == EmptyChangeset().raw_id:
                # delete branch case
                git_revs += ['delete_branch=>%s' % push_ref['name']]
            else:
                # Regular branch update: log exactly the old..new range.
                cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
                       '--reverse', '--pretty=format:%H']
                stdout = scm_repo.run_git_command(cmd)
                git_revs += stdout.splitlines()

        elif _type == 'tags':
            git_revs += ['tag=>%s' % push_ref['name']]

    process_pushed_raw_ids(git_revs)

    return 0
|
404 | 397 | |
|
405 | 398 | |
|
# Almost exactly like Mercurial contrib/hg-ssh:
def rejectpush(ui, **kwargs):
    """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos.
    Return value 1 will make the hook fail and reject the push.
    """
    repository = get_hook_environment().repository
    ui.warn(safe_bytes("Push access to %r denied\n" % repository))
    return 1
@@ -1,519 +1,519 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.utils2 |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Some simple helper functions. |
|
19 | 19 | Note: all these functions should be independent of Kallithea classes, i.e. |
|
20 | 20 | models, controllers, etc. to prevent import cycles. |
|
21 | 21 | |
|
22 | 22 | This file was forked by the Kallithea project in July 2014. |
|
23 | 23 | Original author and date, and relevant copyright and licensing information is below: |
|
24 | 24 | :created_on: Jan 5, 2011 |
|
25 | 25 | :author: marcink |
|
26 | 26 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
27 | 27 | :license: GPLv3, see LICENSE.md for more details. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | 30 | import binascii |
|
31 | 31 | import datetime |
|
32 | 32 | import json |
|
33 | 33 | import os |
|
34 | 34 | import re |
|
35 | 35 | import time |
|
36 | 36 | import urllib.parse |
|
37 | 37 | |
|
38 | 38 | import urlobject |
|
39 | 39 | from tg.i18n import ugettext as _ |
|
40 | 40 | from tg.i18n import ungettext |
|
41 | 41 | from tg.support.converters import asbool, aslist |
|
42 | 42 | from webhelpers2.text import collapse, remove_formatting, strip_tags |
|
43 | 43 | |
|
44 | import kallithea | |
|
44 | 45 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export |
|
45 | 46 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
46 | 47 | |
|
47 | 48 | |
|
48 | 49 | try: |
|
49 | 50 | import pwd |
|
50 | 51 | except ImportError: |
|
51 | 52 | pass |
|
52 | 53 | |
|
53 | 54 | |
|
54 | 55 | # mute pyflakes "imported but unused" |
|
55 | 56 | assert asbool |
|
56 | 57 | assert aslist |
|
57 | 58 | assert ascii_bytes |
|
58 | 59 | assert ascii_str |
|
59 | 60 | assert safe_bytes |
|
60 | 61 | assert safe_str |
|
61 | 62 | assert LazyProperty |
|
62 | 63 | |
|
63 | 64 | |
|
64 | 65 | def convert_line_endings(line, mode): |
|
65 | 66 | """ |
|
66 | 67 | Converts a given line "line end" according to given mode |
|
67 | 68 | |
|
68 | 69 | Available modes are:: |
|
69 | 70 | 0 - Unix |
|
70 | 71 | 1 - Mac |
|
71 | 72 | 2 - DOS |
|
72 | 73 | |
|
73 | 74 | :param line: given line to convert |
|
74 | 75 | :param mode: mode to convert to |
|
75 | 76 | :rtype: str |
|
76 | 77 | :return: converted line according to mode |
|
77 | 78 | """ |
|
78 | 79 | if mode == 0: |
|
79 | 80 | line = line.replace('\r\n', '\n') |
|
80 | 81 | line = line.replace('\r', '\n') |
|
81 | 82 | elif mode == 1: |
|
82 | 83 | line = line.replace('\r\n', '\r') |
|
83 | 84 | line = line.replace('\n', '\r') |
|
84 | 85 | elif mode == 2: |
|
85 | 86 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) |
|
86 | 87 | return line |
|
87 | 88 | |
|
88 | 89 | |
|
89 | 90 | def detect_mode(line, default): |
|
90 | 91 | """ |
|
91 | 92 | Detects line break for given line, if line break couldn't be found |
|
92 | 93 | given default value is returned |
|
93 | 94 | |
|
94 | 95 | :param line: str line |
|
95 | 96 | :param default: default |
|
96 | 97 | :rtype: int |
|
97 | 98 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS |
|
98 | 99 | """ |
|
99 | 100 | if line.endswith('\r\n'): |
|
100 | 101 | return 2 |
|
101 | 102 | elif line.endswith('\n'): |
|
102 | 103 | return 0 |
|
103 | 104 | elif line.endswith('\r'): |
|
104 | 105 | return 1 |
|
105 | 106 | else: |
|
106 | 107 | return default |
|
107 | 108 | |
|
108 | 109 | |
|
109 | 110 | def generate_api_key(): |
|
110 | 111 | """ |
|
111 | 112 | Generates a random (presumably unique) API key. |
|
112 | 113 | |
|
113 | 114 | This value is used in URLs and "Bearer" HTTP Authorization headers, |
|
114 | 115 | which in practice means it should only contain URL-safe characters |
|
115 | 116 | (RFC 3986): |
|
116 | 117 | |
|
117 | 118 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" |
|
118 | 119 | """ |
|
119 | 120 | # Hexadecimal certainly qualifies as URL-safe. |
|
120 | 121 | return ascii_str(binascii.hexlify(os.urandom(20))) |
|
121 | 122 | |
|
122 | 123 | |
|
123 | 124 | def safe_int(val, default=None): |
|
124 | 125 | """ |
|
125 | 126 | Returns int() of val if val is not convertable to int use default |
|
126 | 127 | instead |
|
127 | 128 | |
|
128 | 129 | :param val: |
|
129 | 130 | :param default: |
|
130 | 131 | """ |
|
131 | 132 | try: |
|
132 | 133 | val = int(val) |
|
133 | 134 | except (ValueError, TypeError): |
|
134 | 135 | val = default |
|
135 | 136 | return val |
|
136 | 137 | |
|
137 | 138 | |
|
138 | 139 | def remove_suffix(s, suffix): |
|
139 | 140 | if s.endswith(suffix): |
|
140 | 141 | s = s[:-1 * len(suffix)] |
|
141 | 142 | return s |
|
142 | 143 | |
|
143 | 144 | |
|
144 | 145 | def remove_prefix(s, prefix): |
|
145 | 146 | if s.startswith(prefix): |
|
146 | 147 | s = s[len(prefix):] |
|
147 | 148 | return s |
|
148 | 149 | |
|
149 | 150 | |
|
150 | 151 | def age(prevdate, show_short_version=False, now=None): |
|
151 | 152 | """ |
|
152 | 153 | turns a datetime into an age string. |
|
153 | 154 | If show_short_version is True, then it will generate a not so accurate but shorter string, |
|
154 | 155 | example: 2days ago, instead of 2 days and 23 hours ago. |
|
155 | 156 | |
|
156 | 157 | :param prevdate: datetime object |
|
157 | 158 | :param show_short_version: if it should approximate the date and return a shorter string |
|
158 | 159 | :rtype: str |
|
159 | 160 | :returns: str words describing age |
|
160 | 161 | """ |
|
161 | 162 | now = now or datetime.datetime.now() |
|
162 | 163 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
163 | 164 | deltas = {} |
|
164 | 165 | future = False |
|
165 | 166 | |
|
166 | 167 | if prevdate > now: |
|
167 | 168 | now, prevdate = prevdate, now |
|
168 | 169 | future = True |
|
169 | 170 | if future: |
|
170 | 171 | prevdate = prevdate.replace(microsecond=0) |
|
171 | 172 | # Get date parts deltas |
|
172 | 173 | from dateutil import relativedelta |
|
173 | 174 | for part in order: |
|
174 | 175 | d = relativedelta.relativedelta(now, prevdate) |
|
175 | 176 | deltas[part] = getattr(d, part + 's') |
|
176 | 177 | |
|
177 | 178 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
178 | 179 | # not 1 hour, -59 minutes and -59 seconds) |
|
179 | 180 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours |
|
180 | 181 | part = order[num] |
|
181 | 182 | carry_part = order[num - 1] |
|
182 | 183 | |
|
183 | 184 | if deltas[part] < 0: |
|
184 | 185 | deltas[part] += length |
|
185 | 186 | deltas[carry_part] -= 1 |
|
186 | 187 | |
|
187 | 188 | # Same thing for days except that the increment depends on the (variable) |
|
188 | 189 | # number of days in the month |
|
189 | 190 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
190 | 191 | if deltas['day'] < 0: |
|
191 | 192 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and |
|
192 | 193 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) |
|
193 | 194 | ): |
|
194 | 195 | deltas['day'] += 29 |
|
195 | 196 | else: |
|
196 | 197 | deltas['day'] += month_lengths[prevdate.month - 1] |
|
197 | 198 | |
|
198 | 199 | deltas['month'] -= 1 |
|
199 | 200 | |
|
200 | 201 | if deltas['month'] < 0: |
|
201 | 202 | deltas['month'] += 12 |
|
202 | 203 | deltas['year'] -= 1 |
|
203 | 204 | |
|
204 | 205 | # In short version, we want nicer handling of ages of more than a year |
|
205 | 206 | if show_short_version: |
|
206 | 207 | if deltas['year'] == 1: |
|
207 | 208 | # ages between 1 and 2 years: show as months |
|
208 | 209 | deltas['month'] += 12 |
|
209 | 210 | deltas['year'] = 0 |
|
210 | 211 | if deltas['year'] >= 2: |
|
211 | 212 | # ages 2+ years: round |
|
212 | 213 | if deltas['month'] > 6: |
|
213 | 214 | deltas['year'] += 1 |
|
214 | 215 | deltas['month'] = 0 |
|
215 | 216 | |
|
216 | 217 | # Format the result |
|
217 | 218 | fmt_funcs = { |
|
218 | 219 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, |
|
219 | 220 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, |
|
220 | 221 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, |
|
221 | 222 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, |
|
222 | 223 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, |
|
223 | 224 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, |
|
224 | 225 | } |
|
225 | 226 | |
|
226 | 227 | for i, part in enumerate(order): |
|
227 | 228 | value = deltas[part] |
|
228 | 229 | if value == 0: |
|
229 | 230 | continue |
|
230 | 231 | |
|
231 | 232 | if i < 5: |
|
232 | 233 | sub_part = order[i + 1] |
|
233 | 234 | sub_value = deltas[sub_part] |
|
234 | 235 | else: |
|
235 | 236 | sub_value = 0 |
|
236 | 237 | |
|
237 | 238 | if sub_value == 0 or show_short_version: |
|
238 | 239 | if future: |
|
239 | 240 | return _('in %s') % fmt_funcs[part](value) |
|
240 | 241 | else: |
|
241 | 242 | return _('%s ago') % fmt_funcs[part](value) |
|
242 | 243 | if future: |
|
243 | 244 | return _('in %s and %s') % (fmt_funcs[part](value), |
|
244 | 245 | fmt_funcs[sub_part](sub_value)) |
|
245 | 246 | else: |
|
246 | 247 | return _('%s and %s ago') % (fmt_funcs[part](value), |
|
247 | 248 | fmt_funcs[sub_part](sub_value)) |
|
248 | 249 | |
|
249 | 250 | return _('just now') |
|
250 | 251 | |
|
251 | 252 | |
|
252 | 253 | def uri_filter(uri): |
|
253 | 254 | """ |
|
254 | 255 | Removes user:password from given url string |
|
255 | 256 | |
|
256 | 257 | :param uri: |
|
257 | 258 | :rtype: str |
|
258 | 259 | :returns: filtered list of strings |
|
259 | 260 | """ |
|
260 | 261 | if not uri: |
|
261 | 262 | return [] |
|
262 | 263 | |
|
263 | 264 | proto = '' |
|
264 | 265 | |
|
265 | 266 | for pat in ('https://', 'http://', 'git://'): |
|
266 | 267 | if uri.startswith(pat): |
|
267 | 268 | uri = uri[len(pat):] |
|
268 | 269 | proto = pat |
|
269 | 270 | break |
|
270 | 271 | |
|
271 | 272 | # remove passwords and username |
|
272 | 273 | uri = uri[uri.find('@') + 1:] |
|
273 | 274 | |
|
274 | 275 | # get the port |
|
275 | 276 | cred_pos = uri.find(':') |
|
276 | 277 | if cred_pos == -1: |
|
277 | 278 | host, port = uri, None |
|
278 | 279 | else: |
|
279 | 280 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
280 | 281 | |
|
281 | 282 | return [_f for _f in [proto, host, port] if _f] |
|
282 | 283 | |
|
283 | 284 | |
|
284 | 285 | def credentials_filter(uri): |
|
285 | 286 | """ |
|
286 | 287 | Returns a url with removed credentials |
|
287 | 288 | |
|
288 | 289 | :param uri: |
|
289 | 290 | """ |
|
290 | 291 | |
|
291 | 292 | uri = uri_filter(uri) |
|
292 | 293 | # check if we have port |
|
293 | 294 | if len(uri) > 2 and uri[2]: |
|
294 | 295 | uri[2] = ':' + uri[2] |
|
295 | 296 | |
|
296 | 297 | return ''.join(uri) |
|
297 | 298 | |
|
298 | 299 | |
|
299 | 300 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): |
|
300 | 301 | parsed_url = urlobject.URLObject(prefix_url) |
|
301 | 302 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) |
|
302 | 303 | try: |
|
303 | 304 | system_user = pwd.getpwuid(os.getuid()).pw_name |
|
304 | 305 | except NameError: # TODO: support all systems - especially Windows |
|
305 | 306 | system_user = 'kallithea' # hardcoded default value ... |
|
306 | 307 | args = { |
|
307 | 308 | 'scheme': parsed_url.scheme, |
|
308 | 309 | 'user': urllib.parse.quote(username or ''), |
|
309 | 310 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") |
|
310 | 311 | 'prefix': prefix, # undocumented, empty or starting with / |
|
311 | 312 | 'repo': repo_name, |
|
312 | 313 | 'repoid': str(repo_id), |
|
313 | 314 | 'system_user': system_user, |
|
314 | 315 | 'hostname': parsed_url.hostname, |
|
315 | 316 | } |
|
316 | 317 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) |
|
317 | 318 | |
|
318 | 319 | # remove leading @ sign if it's present. Case of empty user |
|
319 | 320 | url_obj = urlobject.URLObject(url) |
|
320 | 321 | if not url_obj.username: |
|
321 | 322 | url_obj = url_obj.with_username(None) |
|
322 | 323 | |
|
323 | 324 | return str(url_obj) |
|
324 | 325 | |
|
325 | 326 | |
|
326 | 327 | def get_changeset_safe(repo, rev): |
|
327 | 328 | """ |
|
328 | 329 | Safe version of get_changeset if this changeset doesn't exists for a |
|
329 | 330 | repo it returns a Dummy one instead |
|
330 | 331 | |
|
331 | 332 | :param repo: |
|
332 | 333 | :param rev: |
|
333 | 334 | """ |
|
334 | 335 | from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset |
|
335 | 336 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
336 | 337 | if not isinstance(repo, BaseRepository): |
|
337 | 338 | raise Exception('You must pass an Repository ' |
|
338 | 339 | 'object as first argument got %s' % type(repo)) |
|
339 | 340 | |
|
340 | 341 | try: |
|
341 | 342 | cs = repo.get_changeset(rev) |
|
342 | 343 | except (RepositoryError, LookupError): |
|
343 | 344 | cs = EmptyChangeset(requested_revision=rev) |
|
344 | 345 | return cs |
|
345 | 346 | |
|
346 | 347 | |
|
347 | 348 | def datetime_to_time(dt): |
|
348 | 349 | if dt: |
|
349 | 350 | return time.mktime(dt.timetuple()) |
|
350 | 351 | |
|
351 | 352 | |
|
352 | 353 | def time_to_datetime(tm): |
|
353 | 354 | if tm: |
|
354 | 355 | if isinstance(tm, str): |
|
355 | 356 | try: |
|
356 | 357 | tm = float(tm) |
|
357 | 358 | except ValueError: |
|
358 | 359 | return |
|
359 | 360 | return datetime.datetime.fromtimestamp(tm) |
|
360 | 361 | |
|
361 | 362 | |
|
362 | 363 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() |
|
363 | 364 | # Check char before @ - it must not look like we are in an email addresses. |
|
364 | 365 | # Matching is greedy so we don't have to look beyond the end. |
|
365 | 366 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') |
|
366 | 367 | |
|
367 | 368 | |
|
368 | 369 | def extract_mentioned_usernames(text): |
|
369 | 370 | r""" |
|
370 | 371 | Returns list of (possible) usernames @mentioned in given text. |
|
371 | 372 | |
|
372 | 373 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') |
|
373 | 374 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] |
|
374 | 375 | """ |
|
375 | 376 | return MENTIONS_REGEX.findall(text) |
|
376 | 377 | |
|
377 | 378 | |
|
378 | 379 | def extract_mentioned_users(text): |
|
379 | 380 | """ Returns set of actual database Users @mentioned in given text. """ |
|
380 | 381 | from kallithea.model.db import User |
|
381 | 382 | result = set() |
|
382 | 383 | for name in extract_mentioned_usernames(text): |
|
383 | 384 | user = User.get_by_username(name, case_insensitive=True) |
|
384 | 385 | if user is not None and not user.is_default_user: |
|
385 | 386 | result.add(user) |
|
386 | 387 | return result |
|
387 | 388 | |
|
388 | 389 | |
|
389 | 390 | class AttributeDict(dict): |
|
390 | 391 | def __getattr__(self, attr): |
|
391 | 392 | return self.get(attr, None) |
|
392 | 393 | __setattr__ = dict.__setitem__ |
|
393 | 394 | __delattr__ = dict.__delitem__ |
|
394 | 395 | |
|
395 | 396 | |
|
396 | 397 | def obfuscate_url_pw(engine): |
|
397 | 398 | from sqlalchemy.engine import url as sa_url |
|
398 | 399 | from sqlalchemy.exc import ArgumentError |
|
399 | 400 | try: |
|
400 | 401 | _url = sa_url.make_url(engine or '') |
|
401 | 402 | except ArgumentError: |
|
402 | 403 | return engine |
|
403 | 404 | if _url.password: |
|
404 | 405 | _url.password = 'XXXXX' |
|
405 | 406 | return str(_url) |
|
406 | 407 | |
|
407 | 408 | |
|
408 | 409 | class HookEnvironmentError(Exception): pass |
|
409 | 410 | |
|
410 | 411 | |
|
411 | 412 | def get_hook_environment(): |
|
412 | 413 | """ |
|
413 | 414 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment |
|
414 | 415 | variable. |
|
415 | 416 | |
|
416 | 417 | Called early in Git out-of-process hooks to get .ini config path so the |
|
417 | 418 | basic environment can be configured properly. Also used in all hooks to get |
|
418 | 419 | information about the action that triggered it. |
|
419 | 420 | """ |
|
420 | 421 | |
|
421 | 422 | try: |
|
422 | 423 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] |
|
423 | 424 | except KeyError: |
|
424 | 425 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") |
|
425 | 426 | |
|
426 | 427 | extras = json.loads(kallithea_extras) |
|
427 | 428 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: |
|
428 | 429 | try: |
|
429 | 430 | extras[k] |
|
430 | 431 | except KeyError: |
|
431 | 432 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) |
|
432 | 433 | |
|
433 | 434 | return AttributeDict(extras) |
|
434 | 435 | |
|
435 | 436 | |
|
436 | 437 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): |
|
437 | 438 | """Prepare global context for running hooks by serializing data in the |
|
438 | 439 | global KALLITHEA_EXTRAS environment variable. |
|
439 | 440 | |
|
440 | 441 | Most importantly, this allow Git hooks to do proper logging and updating of |
|
441 | 442 | caches after pushes. |
|
442 | 443 | |
|
443 | 444 | Must always be called before anything with hooks are invoked. |
|
444 | 445 | """ |
|
445 | from kallithea import CONFIG | |
|
446 | 446 | extras = { |
|
447 | 447 | 'ip': ip_addr, # used in log_push/pull_action action_logger |
|
448 | 448 | 'username': username, |
|
449 | 449 | 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger |
|
450 | 450 | 'repository': repo_name, |
|
451 | 451 | 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids |
|
452 | 'config': CONFIG['__file__'], # used by git hook to read config | |
|
452 | 'config': kallithea.CONFIG['__file__'], # used by git hook to read config | |
|
453 | 453 | } |
|
454 | 454 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) |
|
455 | 455 | |
|
456 | 456 | |
|
457 | 457 | def get_current_authuser(): |
|
458 | 458 | """ |
|
459 | 459 | Gets kallithea user from threadlocal tmpl_context variable if it's |
|
460 | 460 | defined, else returns None. |
|
461 | 461 | """ |
|
462 | 462 | from tg import tmpl_context |
|
463 | 463 | try: |
|
464 | 464 | return getattr(tmpl_context, 'authuser', None) |
|
465 | 465 | except TypeError: # No object (name: context) has been registered for this thread |
|
466 | 466 | return None |
|
467 | 467 | |
|
468 | 468 | |
|
469 | 469 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): |
|
470 | 470 | return _cleanstringsub('_', s).rstrip('_') |
|
471 | 471 | |
|
472 | 472 | |
|
473 | 473 | def recursive_replace(str_, replace=' '): |
|
474 | 474 | """ |
|
475 | 475 | Recursive replace of given sign to just one instance |
|
476 | 476 | |
|
477 | 477 | :param str_: given string |
|
478 | 478 | :param replace: char to find and replace multiple instances |
|
479 | 479 | |
|
480 | 480 | Examples:: |
|
481 | 481 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
482 | 482 | 'Mighty-Mighty-Bo-sstones' |
|
483 | 483 | """ |
|
484 | 484 | |
|
485 | 485 | if str_.find(replace * 2) == -1: |
|
486 | 486 | return str_ |
|
487 | 487 | else: |
|
488 | 488 | str_ = str_.replace(replace * 2, replace) |
|
489 | 489 | return recursive_replace(str_, replace) |
|
490 | 490 | |
|
491 | 491 | |
|
492 | 492 | def repo_name_slug(value): |
|
493 | 493 | """ |
|
494 | 494 | Return slug of name of repository |
|
495 | 495 | This function is called on each creation/modification |
|
496 | 496 | of repository to prevent bad names in repo |
|
497 | 497 | """ |
|
498 | 498 | |
|
499 | 499 | slug = remove_formatting(value) |
|
500 | 500 | slug = strip_tags(slug) |
|
501 | 501 | |
|
502 | 502 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
503 | 503 | slug = slug.replace(c, '-') |
|
504 | 504 | slug = recursive_replace(slug, '-') |
|
505 | 505 | slug = collapse(slug, '-') |
|
506 | 506 | return slug |
|
507 | 507 | |
|
508 | 508 | |
|
509 | 509 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
510 | 510 | while True: |
|
511 | 511 | ok = input(prompt) |
|
512 | 512 | if ok in ('y', 'ye', 'yes'): |
|
513 | 513 | return True |
|
514 | 514 | if ok in ('n', 'no', 'nop', 'nope'): |
|
515 | 515 | return False |
|
516 | 516 | retries = retries - 1 |
|
517 | 517 | if retries < 0: |
|
518 | 518 | raise IOError |
|
519 | 519 | print(complaint) |
@@ -1,564 +1,564 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | these are form validation classes |
|
16 | 16 | http://formencode.org/module-formencode.validators.html |
|
17 | 17 | for list of all available validators |
|
18 | 18 | |
|
19 | 19 | we can create our own validators |
|
20 | 20 | |
|
21 | 21 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
22 | 22 | pre_validators [] These validators will be applied before the schema |
|
23 | 23 | chained_validators [] These validators will be applied after the schema |
|
24 | 24 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
25 | 25 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
26 | 26 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. |
|
27 | 27 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | <name> = formencode.validators.<name of validator> |
|
31 | 31 | <name> must equal form name |
|
32 | 32 | list=[1,2,3,4,5] |
|
33 | 33 | for SELECT use formencode.All(OneOf(list), Int()) |
|
34 | 34 | |
|
35 | 35 | """ |
|
36 | 36 | import logging |
|
37 | 37 | |
|
38 | 38 | import formencode |
|
39 | 39 | from formencode import All |
|
40 | 40 | from tg.i18n import ugettext as _ |
|
41 | 41 | |
|
42 | from kallithea import BACKENDS | |
|
42 | import kallithea | |
|
43 | 43 | from kallithea.model import validators as v |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | def LoginForm(): |
|
50 | 50 | class _LoginForm(formencode.Schema): |
|
51 | 51 | allow_extra_fields = True |
|
52 | 52 | filter_extra_fields = True |
|
53 | 53 | username = v.UnicodeString( |
|
54 | 54 | strip=True, |
|
55 | 55 | min=1, |
|
56 | 56 | not_empty=True, |
|
57 | 57 | messages={ |
|
58 | 58 | 'empty': _('Please enter a login'), |
|
59 | 59 | 'tooShort': _('Enter a value %(min)i characters long or more')} |
|
60 | 60 | ) |
|
61 | 61 | |
|
62 | 62 | password = v.UnicodeString( |
|
63 | 63 | strip=False, |
|
64 | 64 | min=3, |
|
65 | 65 | not_empty=True, |
|
66 | 66 | messages={ |
|
67 | 67 | 'empty': _('Please enter a password'), |
|
68 | 68 | 'tooShort': _('Enter %(min)i characters or more')} |
|
69 | 69 | ) |
|
70 | 70 | |
|
71 | 71 | remember = v.StringBoolean(if_missing=False) |
|
72 | 72 | |
|
73 | 73 | chained_validators = [v.ValidAuth()] |
|
74 | 74 | return _LoginForm |
|
75 | 75 | |
|
76 | 76 | |
|
77 | 77 | def PasswordChangeForm(username): |
|
78 | 78 | class _PasswordChangeForm(formencode.Schema): |
|
79 | 79 | allow_extra_fields = True |
|
80 | 80 | filter_extra_fields = True |
|
81 | 81 | |
|
82 | 82 | current_password = v.ValidOldPassword(username)(not_empty=True) |
|
83 | 83 | new_password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) |
|
84 | 84 | new_password_confirmation = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) |
|
85 | 85 | |
|
86 | 86 | chained_validators = [v.ValidPasswordsMatch('new_password', |
|
87 | 87 | 'new_password_confirmation')] |
|
88 | 88 | return _PasswordChangeForm |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | def UserForm(edit=False, old_data=None): |
|
92 | 92 | old_data = old_data or {} |
|
93 | 93 | |
|
94 | 94 | class _UserForm(formencode.Schema): |
|
95 | 95 | allow_extra_fields = True |
|
96 | 96 | filter_extra_fields = True |
|
97 | 97 | username = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
98 | 98 | v.ValidUsername(edit, old_data)) |
|
99 | 99 | if edit: |
|
100 | 100 | new_password = All( |
|
101 | 101 | v.ValidPassword(), |
|
102 | 102 | v.UnicodeString(strip=False, min=6, not_empty=False) |
|
103 | 103 | ) |
|
104 | 104 | password_confirmation = All( |
|
105 | 105 | v.ValidPassword(), |
|
106 | 106 | v.UnicodeString(strip=False, min=6, not_empty=False), |
|
107 | 107 | ) |
|
108 | 108 | admin = v.StringBoolean(if_missing=False) |
|
109 | 109 | chained_validators = [v.ValidPasswordsMatch('new_password', |
|
110 | 110 | 'password_confirmation')] |
|
111 | 111 | else: |
|
112 | 112 | password = All( |
|
113 | 113 | v.ValidPassword(), |
|
114 | 114 | v.UnicodeString(strip=False, min=6, not_empty=True) |
|
115 | 115 | ) |
|
116 | 116 | password_confirmation = All( |
|
117 | 117 | v.ValidPassword(), |
|
118 | 118 | v.UnicodeString(strip=False, min=6, not_empty=False) |
|
119 | 119 | ) |
|
120 | 120 | chained_validators = [v.ValidPasswordsMatch('password', |
|
121 | 121 | 'password_confirmation')] |
|
122 | 122 | |
|
123 | 123 | active = v.StringBoolean(if_missing=False) |
|
124 | 124 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
125 | 125 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
126 | 126 | email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data)) |
|
127 | 127 | extern_name = v.UnicodeString(strip=True, if_missing=None) |
|
128 | 128 | extern_type = v.UnicodeString(strip=True, if_missing=None) |
|
129 | 129 | return _UserForm |
|
130 | 130 | |
|
131 | 131 | |
|
132 | 132 | def UserGroupForm(edit=False, old_data=None, available_members=None): |
|
133 | 133 | old_data = old_data or {} |
|
134 | 134 | available_members = available_members or [] |
|
135 | 135 | |
|
136 | 136 | class _UserGroupForm(formencode.Schema): |
|
137 | 137 | allow_extra_fields = True |
|
138 | 138 | filter_extra_fields = True |
|
139 | 139 | |
|
140 | 140 | users_group_name = All( |
|
141 | 141 | v.UnicodeString(strip=True, min=1, not_empty=True), |
|
142 | 142 | v.ValidUserGroup(edit, old_data) |
|
143 | 143 | ) |
|
144 | 144 | user_group_description = v.UnicodeString(strip=True, min=1, |
|
145 | 145 | not_empty=False) |
|
146 | 146 | |
|
147 | 147 | users_group_active = v.StringBoolean(if_missing=False) |
|
148 | 148 | |
|
149 | 149 | if edit: |
|
150 | 150 | users_group_members = v.OneOf( |
|
151 | 151 | available_members, hideList=False, testValueList=True, |
|
152 | 152 | if_missing=None, not_empty=False |
|
153 | 153 | ) |
|
154 | 154 | |
|
155 | 155 | return _UserGroupForm |
|
156 | 156 | |
|
157 | 157 | |
|
158 | 158 | def RepoGroupForm(edit=False, old_data=None, repo_groups=None, |
|
159 | 159 | can_create_in_root=False): |
|
160 | 160 | old_data = old_data or {} |
|
161 | 161 | repo_groups = repo_groups or [] |
|
162 | 162 | repo_group_ids = [rg[0] for rg in repo_groups] |
|
163 | 163 | |
|
164 | 164 | class _RepoGroupForm(formencode.Schema): |
|
165 | 165 | allow_extra_fields = True |
|
166 | 166 | filter_extra_fields = False |
|
167 | 167 | |
|
168 | 168 | group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
169 | 169 | v.SlugifyName(), |
|
170 | 170 | v.ValidRegex(msg=_('Name must not contain only digits'))(r'(?!^\d+$)^.+$')) |
|
171 | 171 | group_description = v.UnicodeString(strip=True, min=1, |
|
172 | 172 | not_empty=False) |
|
173 | 173 | group_copy_permissions = v.StringBoolean(if_missing=False) |
|
174 | 174 | |
|
175 | 175 | if edit: |
|
176 | 176 | # FIXME: do a special check that we cannot move a group to one of |
|
177 | 177 | # its children |
|
178 | 178 | pass |
|
179 | 179 | |
|
180 | 180 | parent_group_id = All(v.CanCreateGroup(can_create_in_root), |
|
181 | 181 | v.OneOf(repo_group_ids, hideList=False, |
|
182 | 182 | testValueList=True, |
|
183 | 183 | if_missing=None, not_empty=True), |
|
184 | 184 | v.Int(min=-1, not_empty=True)) |
|
185 | 185 | chained_validators = [v.ValidRepoGroup(edit, old_data)] |
|
186 | 186 | |
|
187 | 187 | return _RepoGroupForm |
|
188 | 188 | |
|
189 | 189 | |
|
190 | 190 | def RegisterForm(edit=False, old_data=None): |
|
191 | 191 | class _RegisterForm(formencode.Schema): |
|
192 | 192 | allow_extra_fields = True |
|
193 | 193 | filter_extra_fields = True |
|
194 | 194 | username = All( |
|
195 | 195 | v.ValidUsername(edit, old_data), |
|
196 | 196 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
197 | 197 | ) |
|
198 | 198 | password = All( |
|
199 | 199 | v.ValidPassword(), |
|
200 | 200 | v.UnicodeString(strip=False, min=6, not_empty=True) |
|
201 | 201 | ) |
|
202 | 202 | password_confirmation = All( |
|
203 | 203 | v.ValidPassword(), |
|
204 | 204 | v.UnicodeString(strip=False, min=6, not_empty=True) |
|
205 | 205 | ) |
|
206 | 206 | active = v.StringBoolean(if_missing=False) |
|
207 | 207 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
208 | 208 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
209 | 209 | email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data)) |
|
210 | 210 | |
|
211 | 211 | chained_validators = [v.ValidPasswordsMatch('password', |
|
212 | 212 | 'password_confirmation')] |
|
213 | 213 | |
|
214 | 214 | return _RegisterForm |
|
215 | 215 | |
|
216 | 216 | |
|
217 | 217 | def PasswordResetRequestForm(): |
|
218 | 218 | class _PasswordResetRequestForm(formencode.Schema): |
|
219 | 219 | allow_extra_fields = True |
|
220 | 220 | filter_extra_fields = True |
|
221 | 221 | email = v.Email(not_empty=True) |
|
222 | 222 | return _PasswordResetRequestForm |
|
223 | 223 | |
|
224 | 224 | |
|
def PasswordResetConfirmationForm():
    """Return a schema class validating the "confirm password reset" form.

    The email/timestamp/token triple authenticates the reset request; the
    two password fields carry the new password and must match.
    """
    class _PasswordResetConfirmationForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        email = v.UnicodeString(strip=True, not_empty=True)
        timestamp = v.Number(strip=True, not_empty=True)
        token = v.UnicodeString(strip=True, not_empty=True)
        password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6))
        password_confirm = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6))

        # cross-field check: both password fields must agree
        chained_validators = [v.ValidPasswordsMatch('password',
                                                    'password_confirm')]
    return _PasswordResetConfirmationForm
|
239 | 239 | |
|
240 | 240 | |
|
def RepoForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS,
             repo_groups=None, landing_revs=None):
    """Return a schema class validating the repository create/edit form.

    :param edit: True when editing an existing repository
    :param old_data: dict with the repository's current values
    :param supported_backends: allowed values for ``repo_type``
    :param repo_groups: list of (id, ...) choices for the parent group
    :param landing_revs: allowed values for ``repo_landing_rev``
    """
    old_data = old_data or {}
    repo_groups = repo_groups or []
    landing_revs = landing_revs or []
    repo_group_ids = [rg[0] for rg in repo_groups]

    class _RepoForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
                        v.SlugifyName())
        # -1 means "no group" (top level)
        repo_group = All(v.CanWriteGroup(old_data),
                         v.OneOf(repo_group_ids, hideList=True),
                         v.Int(min=-1, not_empty=True))
        repo_type = v.OneOf(supported_backends, required=False,
                            if_missing=old_data.get('repo_type'))
        repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
        repo_private = v.StringBoolean(if_missing=False)
        repo_landing_rev = v.OneOf(landing_revs, hideList=True)
        repo_copy_permissions = v.StringBoolean(if_missing=False)
        clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))

        repo_enable_statistics = v.StringBoolean(if_missing=False)
        repo_enable_downloads = v.StringBoolean(if_missing=False)

        if edit:
            # the owner field is only present when editing an existing repo
            owner = All(v.UnicodeString(not_empty=True), v.ValidRepoUser())
            # Not a real field - just for reference for validation:
            # clone_uri_hidden = v.UnicodeString(if_missing='')

        chained_validators = [v.ValidCloneUri(),
                              v.ValidRepoName(edit, old_data)]
    return _RepoForm
|
275 | 275 | |
|
276 | 276 | |
|
def RepoPermsForm():
    """Return a schema class validating repository permission updates.

    All actual checking happens in the chained ``ValidPerms`` validator,
    which interprets the dynamically-named permission fields.
    """
    class _RepoPermsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        chained_validators = [v.ValidPerms(type_='repo')]
    return _RepoPermsForm
|
283 | 283 | |
|
284 | 284 | |
|
def RepoGroupPermsForm(valid_recursive_choices):
    """Return a schema class validating repository group permission updates.

    :param valid_recursive_choices: allowed values for the ``recursive``
        field (how the change is propagated to children)
    """
    class _RepoGroupPermsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        recursive = v.OneOf(valid_recursive_choices)
        chained_validators = [v.ValidPerms(type_='repo_group')]
    return _RepoGroupPermsForm
|
292 | 292 | |
|
293 | 293 | |
|
def UserGroupPermsForm():
    """Return a schema class validating user group permission updates;
    checking is delegated to the chained ``ValidPerms`` validator."""
    class _UserPermsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        chained_validators = [v.ValidPerms(type_='user_group')]
    return _UserPermsForm
|
300 | 300 | |
|
301 | 301 | |
|
def RepoFieldForm():
    """Return a schema class validating the "add extra repository field"
    form (key, value, type and display metadata)."""
    class _RepoFieldForm(formencode.Schema):
        filter_extra_fields = True
        allow_extra_fields = True

        new_field_key = All(v.FieldKey(),
                            v.UnicodeString(strip=True, min=3, not_empty=True))
        new_field_value = v.UnicodeString(not_empty=False, if_missing='')
        # storage type of the field value
        new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
                                 if_missing='str')
        new_field_label = v.UnicodeString(not_empty=False)
        new_field_desc = v.UnicodeString(not_empty=False)

    return _RepoFieldForm
|
316 | 316 | |
|
317 | 317 | |
|
def RepoForkForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS,
                 repo_groups=None, landing_revs=None):
    """Return a schema class validating the "fork repository" form.

    :param edit: True when editing an existing fork
    :param old_data: dict with the fork's current values
    :param supported_backends: allowed values for ``repo_type``
    :param repo_groups: list of (id, ...) choices for the parent group
    :param landing_revs: allowed values for ``landing_rev``
    """
    old_data = old_data or {}
    repo_groups = repo_groups or []
    landing_revs = landing_revs or []
    repo_group_ids = [rg[0] for rg in repo_groups]

    class _RepoForkForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
                        v.SlugifyName())
        # -1 means "no group" (top level)
        repo_group = All(v.CanWriteGroup(),
                         v.OneOf(repo_group_ids, hideList=True),
                         v.Int(min=-1, not_empty=True))
        repo_type = All(v.ValidForkType(old_data), v.OneOf(supported_backends))
        description = v.UnicodeString(strip=True, min=1, not_empty=True)
        private = v.StringBoolean(if_missing=False)
        copy_permissions = v.StringBoolean(if_missing=False)
        update_after_clone = v.StringBoolean(if_missing=False)
        fork_parent_id = v.UnicodeString()
        landing_rev = v.OneOf(landing_revs, hideList=True)

        # Declared last for consistency with RepoForm; the original listed
        # this between two fields, which was merely confusing (class
        # attribute order has no effect on the schema).
        chained_validators = [v.ValidForkName(edit, old_data)]

    return _RepoForkForm
|
343 | 343 | |
|
344 | 344 | |
|
def ApplicationSettingsForm():
    """Return a schema class validating the global application settings
    form (site title, auth realm, analytics and captcha keys)."""
    class _ApplicationSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        title = v.UnicodeString(strip=True, not_empty=False)
        realm = v.UnicodeString(strip=True, min=1, not_empty=True)
        ga_code = v.UnicodeString(strip=True, min=1, not_empty=False)
        captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
        captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)

    return _ApplicationSettingsForm
|
356 | 356 | |
|
357 | 357 | |
|
def ApplicationVisualisationForm():
    """Return a schema class validating the "visualisation" settings form
    (icons, dashboards, gravatar and clone-URL templates)."""
    class _ApplicationVisualisationForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        show_public_icon = v.StringBoolean(if_missing=False)
        show_private_icon = v.StringBoolean(if_missing=False)
        stylify_metalabels = v.StringBoolean(if_missing=False)

        repository_fields = v.StringBoolean(if_missing=False)
        lightweight_journal = v.StringBoolean(if_missing=False)
        dashboard_items = v.Int(min=5, not_empty=True)
        admin_grid_items = v.Int(min=5, not_empty=True)
        show_version = v.StringBoolean(if_missing=False)
        use_gravatar = v.StringBoolean(if_missing=False)
        gravatar_url = v.UnicodeString(min=3)
        clone_uri_tmpl = v.UnicodeString(min=3)
        clone_ssh_tmpl = v.UnicodeString()

    return _ApplicationVisualisationForm
|
377 | 377 | |
|
378 | 378 | |
|
def ApplicationUiSettingsForm():
    """Return a schema class validating the VCS/UI settings form
    (repository root path, hooks and Mercurial extensions)."""
    class _ApplicationUiSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        paths_root_path = All(
            v.ValidPath(),
            v.UnicodeString(strip=True, min=1, not_empty=True)
        )
        hooks_changegroup_update = v.StringBoolean(if_missing=False)
        hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)

        extensions_largefiles = v.StringBoolean(if_missing=False)
        extensions_hggit = v.StringBoolean(if_missing=False)

    return _ApplicationUiSettingsForm
|
394 | 394 | |
|
395 | 395 | |
|
def DefaultPermissionsForm(repo_perms_choices, group_perms_choices,
                           user_group_perms_choices, create_choices,
                           user_group_create_choices, fork_choices,
                           register_choices, extern_activate_choices):
    """Return a schema class validating the global default-permissions form.

    Each ``*_choices`` parameter supplies the allowed values for the
    corresponding ``v.OneOf`` field below.
    """
    class _DefaultPermissionsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        overwrite_default_repo = v.StringBoolean(if_missing=False)
        overwrite_default_group = v.StringBoolean(if_missing=False)
        overwrite_default_user_group = v.StringBoolean(if_missing=False)
        anonymous = v.StringBoolean(if_missing=False)
        default_repo_perm = v.OneOf(repo_perms_choices)
        default_group_perm = v.OneOf(group_perms_choices)
        default_user_group_perm = v.OneOf(user_group_perms_choices)

        default_repo_create = v.OneOf(create_choices)
        default_user_group_create = v.OneOf(user_group_create_choices)
        default_fork = v.OneOf(fork_choices)

        default_register = v.OneOf(register_choices)
        default_extern_activate = v.OneOf(extern_activate_choices)
    return _DefaultPermissionsForm
|
418 | 418 | |
|
419 | 419 | |
|
def CustomDefaultPermissionsForm():
    """Return a schema class validating per-user overrides of the global
    create/fork permission defaults."""
    class _CustomDefaultPermissionsForm(formencode.Schema):
        filter_extra_fields = True
        allow_extra_fields = True

        create_repo_perm = v.StringBoolean(if_missing=False)
        create_user_group_perm = v.StringBoolean(if_missing=False)
        #create_repo_group_perm Impl. later

        fork_repo_perm = v.StringBoolean(if_missing=False)

    return _CustomDefaultPermissionsForm
|
432 | 432 | |
|
433 | 433 | |
|
def DefaultsForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS):
    """Return a schema class validating the default-repository-settings form.

    ``edit`` and ``old_data`` are accepted for signature parity with the
    other form factories but are currently unused here.
    """
    class _DefaultsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        default_repo_type = v.OneOf(supported_backends)
        default_repo_private = v.StringBoolean(if_missing=False)
        default_repo_enable_statistics = v.StringBoolean(if_missing=False)
        default_repo_enable_downloads = v.StringBoolean(if_missing=False)

    return _DefaultsForm
|
444 | 444 | |
|
445 | 445 | |
|
def AuthSettingsForm(current_active_modules):
    """Return a schema class for the authentication settings page.

    The schema starts with the ``auth_plugins`` list field and, at
    instantiation time, dynamically adds one field per setting exposed by
    each currently active auth plugin.

    :param current_active_modules: list of active auth plugin module names
        whose settings become form fields
    """
    class _AuthSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        auth_plugins = All(v.ValidAuthPlugins(),
                           v.UniqueListFromString()(not_empty=True))

        def __init__(self, *args, **kwargs):
            # The auth plugins tell us what form validators they use
            if current_active_modules:
                import kallithea.lib.auth_modules
                from kallithea.lib.auth_modules import LazyFormencode
                for module in current_active_modules:
                    plugin = kallithea.lib.auth_modules.loadplugin(module)
                    plugin_name = plugin.name
                    for sv in plugin.plugin_settings():
                        newk = "auth_%s_%s" % (plugin_name, sv["name"])
                        # can be a LazyFormencode object from plugin settings
                        validator = sv["validator"]
                        if isinstance(validator, LazyFormencode):
                            validator = validator()
                        # init all lazy validators from formencode.All
                        if isinstance(validator, All):
                            # BUGFIX: use a distinct loop variable. The
                            # original reused the name `validator`, so the
                            # assignment after the loop targeted the last
                            # inner validator instead of the All instance,
                            # and add_field() registered the wrong object.
                            init_validators = []
                            for inner in validator.validators:
                                if isinstance(inner, LazyFormencode):
                                    inner = inner()
                                init_validators.append(inner)
                            validator.validators = init_validators

                        self.add_field(newk, validator)
            formencode.Schema.__init__(self, *args, **kwargs)

    return _AuthSettingsForm
|
480 | 480 | |
|
481 | 481 | |
|
def LdapSettingsForm(tls_reqcert_choices, search_scope_choices,
                     tls_kind_choices):
    """Return a schema class validating the LDAP authentication settings
    form.

    :param tls_reqcert_choices: allowed values for ``ldap_tls_reqcert``
    :param search_scope_choices: allowed values for ``ldap_search_scope``
    :param tls_kind_choices: allowed values for ``ldap_tls_kind``
    """
    class _LdapSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        #pre_validators = [LdapLibValidator]
        ldap_active = v.StringBoolean(if_missing=False)
        ldap_host = v.UnicodeString(strip=True,)
        ldap_port = v.Number(strip=True,)
        ldap_tls_kind = v.OneOf(tls_kind_choices)
        ldap_tls_reqcert = v.OneOf(tls_reqcert_choices)
        ldap_dn_user = v.UnicodeString(strip=True,)
        ldap_dn_pass = v.UnicodeString(strip=True,)
        ldap_base_dn = v.UnicodeString(strip=True,)
        ldap_filter = v.UnicodeString(strip=True,)
        ldap_search_scope = v.OneOf(search_scope_choices)
        ldap_attr_login = v.AttrLoginValidator()(not_empty=True)
        ldap_attr_firstname = v.UnicodeString(strip=True,)
        ldap_attr_lastname = v.UnicodeString(strip=True,)
        ldap_attr_email = v.UnicodeString(strip=True,)

    return _LdapSettingsForm
|
504 | 504 | |
|
505 | 505 | |
|
def UserExtraEmailForm():
    """Return a schema class validating an additional email address for a
    user; the address must be system-wide unique."""
    class _UserExtraEmailForm(formencode.Schema):
        email = All(v.UniqSystemEmail(), v.Email(not_empty=True))
    return _UserExtraEmailForm
|
510 | 510 | |
|
511 | 511 | |
|
def UserExtraIpForm():
    """Return a schema class validating an extra allowed IP (or network)
    for a user's IP whitelist."""
    class _UserExtraIpForm(formencode.Schema):
        ip = v.ValidIp()(not_empty=True)
    return _UserExtraIpForm
|
516 | 516 | |
|
517 | 517 | |
|
def PullRequestForm(repo_id):
    """Return a schema class validating the "create pull request" form.

    ``repo_id`` is accepted for signature symmetry but not used by the
    schema itself.

    NOTE(review): ``required=True`` does not look like a presence check in
    formencode (``not_empty`` is) — confirm whether it has any effect here.
    """
    class _PullRequestForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        org_repo = v.UnicodeString(strip=True, required=True)
        org_ref = v.UnicodeString(strip=True, required=True)
        other_repo = v.UnicodeString(strip=True, required=True)
        other_ref = v.UnicodeString(strip=True, required=True)

        pullrequest_title = v.UnicodeString(strip=True, required=True)
        pullrequest_desc = v.UnicodeString(strip=True, required=False)

    return _PullRequestForm
|
532 | 532 | |
|
533 | 533 | |
|
def PullRequestPostForm():
    """Return a schema class validating the "update pull request" form
    (title, description, reviewer sets, optional new revision and owner)."""
    class _PullRequestPostForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        pullrequest_title = v.UnicodeString(strip=True, required=True)
        pullrequest_desc = v.UnicodeString(strip=True, required=False)
        org_review_members = v.Set()
        review_members = v.Set()
        # revision to update the PR to; None means "no update"
        updaterev = v.UnicodeString(strip=True, required=False, if_missing=None)
        owner = All(v.UnicodeString(strip=True, required=True),
                    v.ValidRepoUser())

    return _PullRequestPostForm
|
548 | 548 | |
|
549 | 549 | |
|
def GistForm(lifetime_options):
    """Return a schema class validating the "create gist" form.

    :param lifetime_options: allowed values for the ``lifetime`` field
    """
    class _GistForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        filename = All(v.BasePath()(),
                       v.UnicodeString(strip=True, required=False))
        description = v.UnicodeString(required=False, if_missing='')
        lifetime = v.OneOf(lifetime_options)
        mimetype = v.UnicodeString(required=False, if_missing=None)
        content = v.UnicodeString(required=True, not_empty=True)
        # public/private are submit-button style flags, not booleans
        public = v.UnicodeString(required=False, if_missing='')
        private = v.UnicodeString(required=False, if_missing='')

    return _GistForm
@@ -1,762 +1,761 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.model.scm |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Scm model for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 9, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | import posixpath |
|
31 | 31 | import re |
|
32 | 32 | import sys |
|
33 | 33 | import traceback |
|
34 | 34 | |
|
35 | 35 | import pkg_resources |
|
36 | 36 | from tg.i18n import ugettext as _ |
|
37 | 37 | |
|
38 | 38 | import kallithea |
|
39 | from kallithea import BACKENDS | |
|
40 | 39 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel |
|
41 | 40 | from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError |
|
42 | 41 | from kallithea.lib.hooks import process_pushed_raw_ids |
|
43 | 42 | from kallithea.lib.utils import action_logger, get_filesystem_repos, make_ui |
|
44 | 43 | from kallithea.lib.utils2 import safe_bytes, set_hook_environment |
|
45 | 44 | from kallithea.lib.vcs import get_backend |
|
46 | 45 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
47 | 46 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
48 | 47 | from kallithea.lib.vcs.nodes import FileNode |
|
49 | 48 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
50 | 49 | from kallithea.model.db import PullRequest, RepoGroup, Repository, Session, Ui, User, UserFollowing, UserLog |
|
51 | 50 | |
|
52 | 51 | |
|
53 | 52 | log = logging.getLogger(__name__) |
|
54 | 53 | |
|
55 | 54 | |
|
class UserTemp(object):
    """Minimal user placeholder carrying just a ``user_id``; used where
    only the id is known (e.g. action logging)."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return f"<{self.__class__.__name__}('id:{self.user_id}')>"
|
62 | 61 | |
|
63 | 62 | |
|
class RepoTemp(object):
    """Minimal repository placeholder carrying just a ``repo_id``; used
    where only the id is known (e.g. action logging)."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return f"<{self.__class__.__name__}('id:{self.repo_id}')>"
|
70 | 69 | |
|
71 | 70 | |
|
72 | 71 | class _PermCheckIterator(object): |
|
73 | 72 | def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None): |
|
74 | 73 | """ |
|
75 | 74 | Creates iterator from given list of objects, additionally |
|
76 | 75 | checking permission for them from perm_set var |
|
77 | 76 | |
|
78 | 77 | :param obj_list: list of db objects |
|
79 | 78 | :param obj_attr: attribute of object to pass into perm_checker |
|
80 | 79 | :param perm_set: list of permissions to check |
|
81 | 80 | :param perm_checker: callable to check permissions against |
|
82 | 81 | """ |
|
83 | 82 | self.obj_list = obj_list |
|
84 | 83 | self.obj_attr = obj_attr |
|
85 | 84 | self.perm_set = perm_set |
|
86 | 85 | self.perm_checker = perm_checker |
|
87 | 86 | self.extra_kwargs = extra_kwargs or {} |
|
88 | 87 | |
|
89 | 88 | def __len__(self): |
|
90 | 89 | return len(self.obj_list) |
|
91 | 90 | |
|
92 | 91 | def __repr__(self): |
|
93 | 92 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
94 | 93 | |
|
95 | 94 | def __iter__(self): |
|
96 | 95 | for db_obj in self.obj_list: |
|
97 | 96 | # check permission at this level |
|
98 | 97 | name = getattr(db_obj, self.obj_attr, None) |
|
99 | 98 | if not self.perm_checker(*self.perm_set)( |
|
100 | 99 | name, self.__class__.__name__, **self.extra_kwargs): |
|
101 | 100 | continue |
|
102 | 101 | |
|
103 | 102 | yield db_obj |
|
104 | 103 | |
|
105 | 104 | |
|
class RepoList(_PermCheckIterator):
    """Iterator over repositories the current user can access at the given
    permission level (checked via ``repo_name``)."""

    def __init__(self, db_repo_list, perm_level, extra_kwargs=None):
        super().__init__(obj_list=db_repo_list,
                         obj_attr='repo_name', perm_set=[perm_level],
                         perm_checker=HasRepoPermissionLevel,
                         extra_kwargs=extra_kwargs)
|
113 | 112 | |
|
114 | 113 | |
|
class RepoGroupList(_PermCheckIterator):
    """Iterator over repository groups the current user can access at the
    given permission level (checked via ``group_name``)."""

    def __init__(self, db_repo_group_list, perm_level, extra_kwargs=None):
        super().__init__(obj_list=db_repo_group_list,
                         obj_attr='group_name', perm_set=[perm_level],
                         perm_checker=HasRepoGroupPermissionLevel,
                         extra_kwargs=extra_kwargs)
|
122 | 121 | |
|
123 | 122 | |
|
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups the current user can access at the given
    permission level (checked via ``users_group_name``)."""

    def __init__(self, db_user_group_list, perm_level, extra_kwargs=None):
        super().__init__(obj_list=db_user_group_list,
                         obj_attr='users_group_name', perm_set=[perm_level],
                         perm_checker=HasUserGroupPermissionLevel,
                         extra_kwargs=extra_kwargs)
|
131 | 130 | |
|
132 | 131 | |
|
133 | 132 | class ScmModel(object): |
|
134 | 133 | """ |
|
135 | 134 | Generic Scm Model |
|
136 | 135 | """ |
|
137 | 136 | |
|
138 | 137 | def __get_repo(self, instance): |
|
139 | 138 | cls = Repository |
|
140 | 139 | if isinstance(instance, cls): |
|
141 | 140 | return instance |
|
142 | 141 | elif isinstance(instance, int): |
|
143 | 142 | return cls.get(instance) |
|
144 | 143 | elif isinstance(instance, str): |
|
145 | 144 | if instance.isdigit(): |
|
146 | 145 | return cls.get(int(instance)) |
|
147 | 146 | return cls.get_by_repo_name(instance) |
|
148 | 147 | raise Exception('given object must be int, basestr or Instance' |
|
149 | 148 | ' of %s got %s' % (type(cls), type(instance))) |
|
150 | 149 | |
|
151 | 150 | @LazyProperty |
|
152 | 151 | def repos_path(self): |
|
153 | 152 | """ |
|
154 | 153 | Gets the repositories root path from database |
|
155 | 154 | """ |
|
156 | 155 | |
|
157 | 156 | q = Ui.query().filter(Ui.ui_key == '/').one() |
|
158 | 157 | |
|
159 | 158 | return q.ui_value |
|
160 | 159 | |
|
161 | 160 | def repo_scan(self, repos_path=None): |
|
162 | 161 | """ |
|
163 | 162 | Listing of repositories in given path. This path should not be a |
|
164 | 163 | repository itself. Return a dictionary of repository objects mapping to |
|
165 | 164 | vcs instances. |
|
166 | 165 | |
|
167 | 166 | :param repos_path: path to directory containing repositories |
|
168 | 167 | """ |
|
169 | 168 | |
|
170 | 169 | if repos_path is None: |
|
171 | 170 | repos_path = self.repos_path |
|
172 | 171 | |
|
173 | 172 | log.info('scanning for repositories in %s', repos_path) |
|
174 | 173 | |
|
175 | 174 | baseui = make_ui() |
|
176 | 175 | repos = {} |
|
177 | 176 | |
|
178 | 177 | for name, path in get_filesystem_repos(repos_path): |
|
179 | 178 | # name need to be decomposed and put back together using the / |
|
180 | 179 | # since this is internal storage separator for kallithea |
|
181 | 180 | name = Repository.normalize_repo_name(name) |
|
182 | 181 | |
|
183 | 182 | try: |
|
184 | 183 | if name in repos: |
|
185 | 184 | raise RepositoryError('Duplicate repository name %s ' |
|
186 | 185 | 'found in %s' % (name, path)) |
|
187 | 186 | else: |
|
188 | 187 | |
|
189 | 188 | klass = get_backend(path[0]) |
|
190 | 189 | |
|
191 | if path[0] == 'hg' and path[0] in BACKENDS: | |
|
190 | if path[0] == 'hg' and path[0] in kallithea.BACKENDS: | |
|
192 | 191 | repos[name] = klass(path[1], baseui=baseui) |
|
193 | 192 | |
|
194 | if path[0] == 'git' and path[0] in BACKENDS: | |
|
193 | if path[0] == 'git' and path[0] in kallithea.BACKENDS: | |
|
195 | 194 | repos[name] = klass(path[1]) |
|
196 | 195 | except OSError: |
|
197 | 196 | continue |
|
198 | 197 | log.debug('found %s paths with repositories', len(repos)) |
|
199 | 198 | return repos |
|
200 | 199 | |
|
    def get_repos(self, repos):
        """Return the repos from ``repos`` the user has read access to.

        :param repos: iterable of repository db objects
        """
        return RepoList(repos, perm_level='read')
|
204 | 203 | |
|
    def get_repo_groups(self, groups=None):
        """Return the repo groups the user has read access to.
        If no groups are specified, use top level groups.

        :param groups: iterable of RepoGroup db objects, or None
        """
        if groups is None:
            # '== None' is the SQLAlchemy idiom for SQL "IS NULL";
            # do not replace it with 'is None'
            groups = RepoGroup.query() \
                .filter(RepoGroup.parent_group_id == None).all()
        return RepoGroupList(groups, perm_level='read')
|
213 | 212 | |
|
214 | 213 | def mark_for_invalidation(self, repo_name): |
|
215 | 214 | """ |
|
216 | 215 | Mark caches of this repo invalid in the database. |
|
217 | 216 | |
|
218 | 217 | :param repo_name: the repo for which caches should be marked invalid |
|
219 | 218 | """ |
|
220 | 219 | log.debug("Marking %s as invalidated and update cache", repo_name) |
|
221 | 220 | repo = Repository.get_by_repo_name(repo_name) |
|
222 | 221 | if repo is not None: |
|
223 | 222 | repo.set_invalidate() |
|
224 | 223 | repo.update_changeset_cache() |
|
225 | 224 | |
|
    def toggle_following_repo(self, follow_repo_id, user_id):
        """Toggle whether user ``user_id`` follows repository
        ``follow_repo_id``.

        If a following row exists it is deleted and the action logged;
        otherwise a new row is created. Failures are logged and re-raised;
        the caller is responsible for committing the session.
        """
        f = UserFollowing.query() \
            .filter(UserFollowing.follows_repository_id == follow_repo_id) \
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                Session().delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repository_id = follow_repo_id
            Session().add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except Exception:
            log.error(traceback.format_exc())
            raise
|
255 | 254 | |
|
    def toggle_following_user(self, follow_user_id, user_id):
        """Toggle whether user ``user_id`` follows user ``follow_user_id``.

        Deletes the following row if present, otherwise creates one.
        Unlike toggle_following_repo, no action log entry is written.
        """
        f = UserFollowing.query() \
            .filter(UserFollowing.follows_user_id == follow_user_id) \
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                Session().delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            Session().add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise
|
277 | 276 | |
|
278 | 277 | def is_following_repo(self, repo_name, user_id): |
|
279 | 278 | r = Repository.query() \ |
|
280 | 279 | .filter(Repository.repo_name == repo_name).scalar() |
|
281 | 280 | |
|
282 | 281 | f = UserFollowing.query() \ |
|
283 | 282 | .filter(UserFollowing.follows_repository == r) \ |
|
284 | 283 | .filter(UserFollowing.user_id == user_id).scalar() |
|
285 | 284 | |
|
286 | 285 | return f is not None |
|
287 | 286 | |
|
288 | 287 | def is_following_user(self, username, user_id): |
|
289 | 288 | u = User.get_by_username(username) |
|
290 | 289 | |
|
291 | 290 | f = UserFollowing.query() \ |
|
292 | 291 | .filter(UserFollowing.follows_user == u) \ |
|
293 | 292 | .filter(UserFollowing.user_id == user_id).scalar() |
|
294 | 293 | |
|
295 | 294 | return f is not None |
|
296 | 295 | |
|
    def get_followers(self, repo):
        """Return the number of users following ``repo``.

        :param repo: repository db object, id or name
        """
        repo = Repository.guess_instance(repo)

        return UserFollowing.query() \
            .filter(UserFollowing.follows_repository == repo).count()
|
302 | 301 | |
|
    def get_forks(self, repo):
        """Return the number of forks of ``repo``.

        :param repo: repository db object, id or name
        """
        repo = Repository.guess_instance(repo)
        return Repository.query() \
            .filter(Repository.fork == repo).count()
|
307 | 306 | |
|
    def get_pull_requests(self, repo):
        """Return the number of open pull requests targeting ``repo``.

        :param repo: repository db object, id or name
        """
        repo = Repository.guess_instance(repo)
        return PullRequest.query() \
            .filter(PullRequest.other_repo == repo) \
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
|
313 | 312 | |
|
    def mark_as_fork(self, repo, fork, user):
        """Mark ``repo`` as a fork of ``fork`` (or detach it when ``fork``
        is falsy) and return the updated repo db object.

        ``user`` is accepted but currently unused.

        NOTE(review): self-fork raises a plain Exception while a backend
        type mismatch raises RepositoryError — confirm whether this
        asymmetry is intended before unifying.
        """
        repo = self.__get_repo(repo)
        fork = self.__get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError("Cannot set repository as fork of repository with other type")

        repo.fork = fork
        return repo
|
325 | 324 | |
|
    def _handle_push(self, repo, username, ip_addr, action, repo_name, revisions):
        """
        Handle that the repository has changed.
        Adds an action log entry with the new revisions, and the head revision
        cache and in-memory caches are invalidated/updated.

        :param repo: vcs repo instance; only its ``alias`` (backend name)
            is read here
        :param username: username who pushes
        :param ip_addr: IP address the push originated from
        :param action: push/push_local/push_remote
        :param repo_name: name of repo
        :param revisions: list of revisions that we pushed
        """
        # the hook environment must be prepared before processing the revisions
        set_hook_environment(username, ip_addr, repo_name, repo_alias=repo.alias, action=action)
        process_pushed_raw_ids(revisions)  # also calls mark_for_invalidation
|
339 | 338 | |
|
def _get_IMC_module(self, scm_type):
    """Return the InMemoryChangeset class for the given backend alias.

    :param scm_type: backend alias, either ``'hg'`` or ``'git'``
    :raises Exception: for any other alias
    """
    # imports are local so only the requested backend gets loaded
    if scm_type == 'hg':
        from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
        return MercurialInMemoryChangeset
    elif scm_type == 'git':
        from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
        return GitInMemoryChangeset
    else:
        raise Exception('Invalid scm_type, must be one of hg,git got %s'
                        % (scm_type,))
|
356 | 355 | |
|
def pull_changes(self, repo, username, ip_addr, clone_uri=None):
    """Pull into *repo* from *clone_uri*.

    When *clone_uri* is not given, fall back to the repository's stored
    clone URL or, failing that, to its fork origin's path on disk.

    :raises Exception: when no source URL can be determined
    """
    dbrepo = self.__get_repo(repo)
    if clone_uri is None:
        clone_uri = dbrepo.clone_uri or dbrepo.fork and dbrepo.fork.repo_full_path
    if not clone_uri:
        raise Exception("This repository doesn't have a clone uri")

    scm = dbrepo.scm_instance
    name = dbrepo.repo_name
    try:
        if scm.alias == 'git':
            scm.fetch(clone_uri)
            # git doesn't really have something like post-fetch action
            # we fake that now.
            # TODO: extract fetched revisions ... somehow ...
            self._handle_push(scm,
                              username=username,
                              ip_addr=ip_addr,
                              action='push_remote',
                              repo_name=name,
                              revisions=[])
        else:
            set_hook_environment(username, ip_addr, dbrepo.repo_name,
                                 scm.alias, action='push_remote')
            scm.pull(clone_uri)
    except Exception:
        log.error(traceback.format_exc())
        raise
|
388 | 387 | |
|
def commit_change(self, repo, repo_name, cs, user, ip_addr, author, message,
                  content, f_path):
    """Commit a change to a single file.

    :param repo: a db_repo.scm_instance
    :param repo_name: repository name, used for cache invalidation and logging
    :param cs: parent changeset the edit is based on
    :param user: committer (Kallithea User object or user_id)
    :param author: author string for the new commit
    :param message: commit message
    :param content: new file content
    :param f_path: path of the edited file
    :raises IMCCommitError: when the in-memory commit fails
    :returns: the new tip changeset
    """
    committer = User.guess_instance(user)
    imc_class = self._get_IMC_module(repo.alias)
    imc = imc_class(repo)
    node = FileNode(f_path, content, mode=cs.get_file_mode(f_path))
    imc.change(node)
    try:
        tip = imc.commit(message=message, author=author,
                         parents=[cs], branch=cs.branch)
    except Exception as e:
        log.error(traceback.format_exc())
        # clear caches - we also want a fresh object if commit fails
        self.mark_for_invalidation(repo_name)
        raise IMCCommitError(str(e))
    self._handle_push(repo,
                      username=committer.username,
                      ip_addr=ip_addr,
                      action='push_local',
                      repo_name=repo_name,
                      revisions=[tip.raw_id])
    return tip
|
415 | 414 | |
|
def _sanitize_path(self, f_path):
    """Normalize *f_path* and reject paths that could escape the repo root.

    Absolute paths, paths starting with ``.`` and paths containing
    ``../`` are rejected; anything else is normalized with
    ``posixpath.normpath``. Empty paths pass through unchanged.

    :param f_path: candidate repository-relative path
    :raises NonRelativePathError: for a rejected path
    :returns: the normalized relative path
    """
    if f_path.startswith(('/', '.')) or '../' in f_path:
        # fixed grammar of the user-visible message ("an" -> "a")
        raise NonRelativePathError('%s is not a relative path' % f_path)
    if f_path:
        f_path = posixpath.normpath(f_path)
    return f_path
|
422 | 421 | |
|
def get_nodes(self, repo_name, revision, root_path='/', flat=True):
    """Recursively walk *root_path* at *revision* and collect all paths.

    :param repo_name: name of repository
    :param revision: revision for which to list nodes
    :param root_path: root path to list
    :param flat: if True return plain path lists, otherwise lists of
        dicts with ``name`` and ``type`` keys
    :returns: tuple ``(dirs, files)``
    """
    files = []
    dirs = []
    try:
        _repo = self.__get_repo(repo_name)
        changeset = _repo.scm_instance.get_changeset(revision)
        root_path = root_path.lstrip('/')
        for _topnode, dirnodes, filenodes in changeset.walk(root_path):
            files.extend(f.path if flat else {"name": f.path,
                                              "type": "file"}
                         for f in filenodes)
            dirs.extend(d.path if flat else {"name": d.path,
                                             "type": "dir"}
                        for d in dirnodes)
    except RepositoryError:
        log.debug(traceback.format_exc())
        raise

    return dirs, files
|
451 | 450 | |
|
def create_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Commits specified nodes to repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; when empty this is an initial commit
    :param author: author of commit, can be different than committer only for git
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        # read content with the original key before sanitizing the path
        content = nodes[f_path]['content']
        f_path = self._sanitize_path(f_path)
        # file-like objects are drained into memory; str/bytes pass through
        if not isinstance(content, (str, bytes)):
            content = content.read()
        processed_nodes.append((f_path, content))

    committer = user.full_contact
    if not author:
        author = committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for path, content in processed_nodes:
        imc.add(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          ip_addr=ip_addr,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    else:
        self.mark_for_invalidation(repo.repo_name)
    return tip
|
514 | 513 | |
|
def update_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Commits updates of specified nodes to repo.

    *nodes* maps each old filename to a dict with keys ``filename`` (new
    name, may differ from the old one), ``content`` and ``op`` (one of
    ``'add'``, ``'del'``, ``'mod'``).

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param parent_cs: parent changeset; when empty this is an initial commit
    :param author: author of commit, can be different than committer only for git
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    committer = user.full_contact
    if not author:
        author = committer

    imc_class = self._get_IMC_module(scm_instance.alias)
    imc = imc_class(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # stage all requested operations
    for _filename, data in nodes.items():
        # new filename, can be renamed from the old one
        filename = self._sanitize_path(data['filename'])
        old_filename = self._sanitize_path(_filename)
        content = data['content']

        filenode = FileNode(old_filename, content=content)
        op = data['op']
        if op == 'add':
            imc.add(filenode)
        elif op == 'del':
            imc.remove(filenode)
        elif op == 'mod':
            if filename != old_filename:
                # TODO: handle renames, needs vcs lib changes
                imc.remove(filenode)
                imc.add(FileNode(filename, content=content))
            else:
                imc.change(filenode)

    # commit changes
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          ip_addr=ip_addr,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    else:
        self.mark_for_invalidation(repo.repo_name)
    # returned for consistency with create_nodes/delete_nodes; existing
    # callers that ignore the (previously None) return value are unaffected
    return tip
|
576 | 575 | |
|
def delete_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Deletes specified nodes from repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; when empty this is an initial commit
    :param author: author of commit, can be different than committer only for git
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset after deletion
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        # content can be empty but for compatibility it allows same dicts
        # structure as add_nodes
        # BUGFIX: look up the dict entry with the ORIGINAL key before
        # sanitizing - sanitizing may normalize the path, and the old code
        # then raised KeyError when looking up the normalized key
        content = nodes[f_path].get('content')
        f_path = self._sanitize_path(f_path)
        processed_nodes.append((f_path, content))

    committer = user.full_contact
    if not author:
        author = committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # stage removal of all nodes
    for path, content in processed_nodes:
        imc.remove(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          ip_addr=ip_addr,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    else:
        self.mark_for_invalidation(repo.repo_name)
    return tip
|
639 | 638 | |
|
def get_unread_journal(self):
    """Return the total number of journal (UserLog) entries."""
    journal = UserLog.query()
    return journal.count()
|
642 | 641 | |
|
def get_repo_landing_revs(self, repo=None):
    """
    Generates select options with tags, branches and bookmarks (for hg only)
    grouped by type.

    :param repo: repository (instance, id or name); when None only the
        'rev:tip' choice is returned
    :returns: tuple ``(choices, grouped_options)``
    """
    hist_l = [('rev:tip', _('latest tip'))]
    choices = ['rev:tip']
    if repo is None:
        return choices, hist_l

    repo = self.__get_repo(repo)
    repo = repo.scm_instance

    # iterate keys only - the ref target values are not needed here
    branches_group = ([('branch:%s' % k, k) for k in repo.branches],
                      _("Branches"))
    hist_l.append(branches_group)
    choices.extend(x[0] for x in branches_group[0])

    if repo.alias == 'hg':
        # bookmarks exist only for Mercurial repositories
        bookmarks_group = ([('book:%s' % k, k) for k in repo.bookmarks],
                           _("Bookmarks"))
        hist_l.append(bookmarks_group)
        choices.extend(x[0] for x in bookmarks_group[0])

    tags_group = ([('tag:%s' % k, k) for k in repo.tags],
                  _("Tags"))
    hist_l.append(tags_group)
    choices.extend(x[0] for x in tags_group[0])

    return choices, hist_l
|
678 | 677 | |
|
def _get_git_hook_interpreter(self):
    """Return a suitable interpreter for Git hooks.

    Return a suitable string to be written in the POSIX #! shebang line for
    Git hook scripts so they invoke Kallithea code with the right Python
    interpreter and in the right environment.
    """
    # Note: sys.executable might not point at a usable Python interpreter.
    # For example, when using uwsgi, it will point at the uwsgi program
    # itself.
    # FIXME This may not work on Windows and may need a shell wrapper script.
    configured = kallithea.CONFIG.get('git_hook_interpreter')
    return configured or sys.executable or '/usr/bin/env python3'
|
692 | 691 | |
|
def install_git_hooks(self, repo, force=False):
    """
    Creates a kallithea hook inside a git repository

    :param repo: Instance of VCS repo
    :param force: Overwrite existing non-Kallithea hooks
    """
    hooks_path = os.path.join(repo.path, 'hooks')
    if not repo.bare:
        hooks_path = os.path.join(repo.path, '.git', 'hooks')
    if not os.path.isdir(hooks_path):
        os.makedirs(hooks_path)

    # both hook scripts share the same shebang line
    shebang = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
    templates = {}
    for h_type, tmpl_file in [('pre', 'pre_receive_tmpl.py'),
                              ('post', 'post_receive_tmpl.py')]:
        templates[h_type] = shebang + pkg_resources.resource_string(
            'kallithea', os.path.join('config', tmpl_file)
        )

    for h_type in ('pre', 'post'):
        tmpl = templates[h_type]
        hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
        other_hook = False
        log.debug('Installing git hook in repo %s', repo)
        if os.path.exists(hook_file):
            # let's take a look at this hook, maybe it's kallithea ?
            log.debug('hook exists, checking if it is from kallithea')
            with open(hook_file, 'rb') as f:
                data = f.read()
                matches = re.search(br'^KALLITHEA_HOOK_VER\s*=\s*(.*)$',
                                    data, flags=re.MULTILINE)
                if matches:
                    ver = matches.groups()[0]
                    log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %r', ver)
                else:
                    log.debug('Found non-Kallithea hook at %s', hook_file)
                    other_hook = True

        if other_hook and not force:
            log.warning('skipping overwriting hook file %s', hook_file)
            continue
        log.debug('writing %s hook file !', h_type)
        try:
            with open(hook_file, 'wb') as f:
                f.write(tmpl.replace(b'_TMPL_',
                                     safe_bytes(kallithea.__version__)))
            os.chmod(hook_file, 0o755)
        except IOError as e:
            log.error('error writing hook %s: %s', hook_file, e)
|
745 | 744 | |
|
def AvailableRepoGroupChoices(repo_group_perm_level, extras=()):
    """Return group_id,string tuples with choices for all the repo groups where
    the user has the necessary permissions.

    Top level is -1.
    """
    groups = RepoGroup.query().all()
    is_admin = HasPermissionAny('hg.admin')('available repo groups')
    if is_admin:
        # admins may target any group, including the top level (None)
        groups.append(None)
    else:
        groups = list(RepoGroupList(groups, perm_level=repo_group_perm_level))
        if HasPermissionAny('hg.create.repository')('available repo groups'):
            groups.append(None)
    # make sure the requested extra groups are always present
    for extra in extras:
        if not any(rg == extra for rg in groups):
            groups.append(extra)
    return RepoGroup.groups_choices(groups=groups)
General Comments 0
You need to be logged in to leave comments.
Login now